[4/4] Dissect expand

Michael Matz <matz@suse.de>
Wed Aug 26 17:04:00 GMT 2009


Hi,

and this finally splits expand_expr_real_1 into expand_expr_real_1 and 
expand_expr_real_2.  The latter takes an exploded expression representing 
one of the simple unary or binary expressions.  The former retains all 
other cases, handles all expansions whose result is to be ignored (hence 
expand_expr_real_2 doesn't need to care about that), and calls the latter 
for codes it doesn't handle itself.
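
The "exploded expression" is simply a struct separate_ops (passed as a 
sepops pointer) carrying the pieces that used to be re-derived from the 
tree.  As a rough sketch of the hand-off (the field names are taken from 
the patch below, but the exact forwarding code in _1 isn't visible in 
these hunks, so treat this as an assumption):

  struct separate_ops ops;
  ops.code = TREE_CODE (exp);	/* e.g. PLUS_EXPR */
  ops.type = TREE_TYPE (exp);
  ops.op0 = treeop0;
  ops.op1 = treeop1;
  ops.op2 = treeop2;
  ops.location = EXPR_LOCATION (exp);
  /* Hand the exploded expression to the new worker.  */
  return expand_expr_real_2 (&ops, target, tmode, modifier);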

The patch is extremely noisy, but it was produced by simply copying 
expand_expr_real_1 into expand_expr_real_2 with an empty switch (code) and 
then moving all unary/binary cases from _1 to _2.  Afterwards, unused 
variables/macros/labels and the like were removed from both functions.

There is only one functional change: in the PAREN_EXPR/CASE_CONVERT case, 
set_mem_attributes used to be called with 'exp'.  That isn't necessary; we 
can use the outer type directly, since the underlying attributes are set 
by the recursive expand_expr call and then overwritten by the ones from 
the type (like alias set and alignment).
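
In other words, the PAREN_EXPR/CASE_CONVERT hunk now effectively does 
this (the old line is paraphrased from the pre-patch code; the new one 
is verbatim from the patch below):

  old:  set_mem_attributes (result, exp, 0);
  new:  set_mem_attributes (result, type, 0);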

Regstrapped on x86_64-linux, all languages including Ada.  No 
regressions, but two bugs new to me crept in since the last revision I 
checked: abi_check fails (probably due to r151106), and so does 
gfortran.dg/fmt_with_extra.f, probably due to r151112 (which reverted a 
change, but not the testcase).  At least I also see both in recent 
gcc-testresults@ posts.

This will finally allow me to make use of expand_expr_real_2 in 
cfgexpand, avoiding the need to build temporary trees for simple 
assignments.  Slowly but surely ...
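
For illustration only (a hypothetical sketch, not part of this patch), 
the cfgexpand side could then fill a separate_ops directly from a GIMPLE 
assignment instead of building a GENERIC tree first:

  gimple stmt = ...;	/* a GIMPLE_ASSIGN with a unary/binary RHS */
  struct separate_ops ops;
  ops.code = gimple_assign_rhs_code (stmt);
  ops.type = TREE_TYPE (gimple_assign_lhs (stmt));
  ops.op0 = gimple_assign_rhs1 (stmt);
  ops.op1 = gimple_assign_rhs2 (stmt);	/* NULL_TREE for unary codes */
  ops.op2 = NULL_TREE;
  ops.location = gimple_location (stmt);
  temp = expand_expr_real_2 (&ops, target, TYPE_MODE (ops.type),
			     EXPAND_NORMAL);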

Okay for trunk?


Ciao,
Michael.
-- 
	* expr.c (expand_expr_real_2): New function taking exploded
	unary or binary expression, split out from ...
	(expand_expr_real_1): ... here.  Move all unary/binary
	switch parts to the above function, in particular these codes:
	PAREN_EXPR, NOP_EXPR, CONVERT_EXPR, POINTER_PLUS_EXPR, PLUS_EXPR,
	MINUS_EXPR, MULT_EXPR, TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR,
	ROUND_DIV_EXPR, EXACT_DIV_EXPR, RDIV_EXPR, TRUNC_MOD_EXPR,
	FLOOR_MOD_EXPR, CEIL_MOD_EXPR, ROUND_MOD_EXPR, FIXED_CONVERT_EXPR,
	FIX_TRUNC_EXPR, FLOAT_EXPR, NEGATE_EXPR, ABS_EXPR, MAX_EXPR, MIN_EXPR,
	BIT_NOT_EXPR, TRUTH_AND_EXPR, BIT_AND_EXPR, TRUTH_OR_EXPR,
	BIT_IOR_EXPR, TRUTH_XOR_EXPR, BIT_XOR_EXPR, LROTATE_EXPR, RROTATE_EXPR,
	LSHIFT_EXPR, RSHIFT_EXPR, LT_EXPR, LE_EXPR, GT_EXPR, GE_EXPR, EQ_EXPR,
	NE_EXPR, UNORDERED_EXPR, ORDERED_EXPR, UNLT_EXPR, UNLE_EXPR, UNGT_EXPR,
	UNGE_EXPR, UNEQ_EXPR, LTGT_EXPR, TRUTH_NOT_EXPR, COMPLEX_EXPR,
	WIDEN_SUM_EXPR, REDUC_MAX_EXPR, REDUC_MIN_EXPR, REDUC_PLUS_EXPR,
	VEC_EXTRACT_EVEN_EXPR, VEC_EXTRACT_ODD_EXPR, VEC_INTERLEAVE_HIGH_EXPR,
	VEC_INTERLEAVE_LOW_EXPR, VEC_LSHIFT_EXPR, VEC_RSHIFT_EXPR,
	VEC_UNPACK_HI_EXPR, VEC_UNPACK_LO_EXPR, VEC_UNPACK_FLOAT_HI_EXPR,
	VEC_UNPACK_FLOAT_LO_EXPR, VEC_WIDEN_MULT_HI_EXPR,
	VEC_WIDEN_MULT_LO_EXPR, VEC_PACK_TRUNC_EXPR, VEC_PACK_SAT_EXPR,
	VEC_PACK_FIX_TRUNC_EXPR.
	(<case PAREN_EXPR>): Call set_mem_attributes() with type, not the
	full expression.

Index: gcc/expr.c
===================================================================
--- gcc.orig/expr.c	2009-08-25 08:33:17.000000000 +0200
+++ gcc/expr.c	2009-08-26 06:21:33.000000000 +0200
@@ -7204,23 +7204,22 @@ expand_expr_real (tree exp, rtx target,
 }
 
 static rtx
-expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
-		    enum expand_modifier modifier, rtx *alt_rtl)
+expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
+		    enum expand_modifier modifier)
 {
-  rtx op0, op1, op2, temp, decl_rtl;
+  rtx op0, op1, op2, temp;
   tree type;
   int unsignedp;
   enum machine_mode mode;
-  enum tree_code code = TREE_CODE (exp);
+  enum tree_code code = ops->code;
   optab this_optab;
   rtx subtarget, original_target;
   int ignore;
-  tree context, subexp0, subexp1;
+  tree subexp0, subexp1;
   bool reduce_bit_field;
   gimple subexp0_def, subexp1_def;
   tree top0, top1;
-  location_t loc = EXPR_LOCATION (exp);
-  struct separate_ops ops;
+  location_t loc = ops->location;
   tree treeop0, treeop1, treeop2;
 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
 				 ? reduce_to_bit_field_precision ((expr), \
@@ -7228,87 +7227,34 @@ expand_expr_real_1 (tree exp, rtx target
 								  type)	  \
 				 : (expr))
 
-  type = TREE_TYPE (exp);
+  type = ops->type;
   mode = TYPE_MODE (type);
   unsignedp = TYPE_UNSIGNED (type);
 
-  treeop0 = treeop1 = treeop2 = NULL_TREE;
-  if (!VL_EXP_CLASS_P (exp))
-    switch (TREE_CODE_LENGTH (code))
-      {
-	default:
-	case 3: treeop2 = TREE_OPERAND (exp, 2);
-	case 2: treeop1 = TREE_OPERAND (exp, 1);
-	case 1: treeop0 = TREE_OPERAND (exp, 0);
-	case 0: break;
-      }
-  ops.code = code;
-  ops.type = type;
-  ops.op0 = treeop0;
-  ops.op1 = treeop1;
-  ops.op2 = treeop2;
-  ops.location = loc;
+  treeop0 = ops->op0;
+  treeop1 = ops->op1;
+  treeop2 = ops->op2;
+
+  /* We should be called only on simple (binary or unary) expressions,
+     exactly those that are valid in gimple expressions that aren't
+     GIMPLE_SINGLE_RHS (or invalid).  */
+  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
+	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS);
 
   ignore = (target == const0_rtx
 	    || ((CONVERT_EXPR_CODE_P (code)
 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
 		&& TREE_CODE (type) == VOID_TYPE));
 
+  /* We should be called only if we need the result.  */
+  gcc_assert (!ignore);
+
   /* An operation in what may be a bit-field type needs the
      result to be reduced to the precision of the bit-field type,
      which is narrower than that of the type's mode.  */
-  reduce_bit_field = (!ignore
-		      && TREE_CODE (type) == INTEGER_TYPE
+  reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
 
-  /* If we are going to ignore this result, we need only do something
-     if there is a side-effect somewhere in the expression.  If there
-     is, short-circuit the most common cases here.  Note that we must
-     not call expand_expr with anything but const0_rtx in case this
-     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
-
-  if (ignore)
-    {
-      if (! TREE_SIDE_EFFECTS (exp))
-	return const0_rtx;
-
-      /* Ensure we reference a volatile object even if value is ignored, but
-	 don't do this if all we are doing is taking its address.  */
-      if (TREE_THIS_VOLATILE (exp)
-	  && TREE_CODE (exp) != FUNCTION_DECL
-	  && mode != VOIDmode && mode != BLKmode
-	  && modifier != EXPAND_CONST_ADDRESS)
-	{
-	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
-	  if (MEM_P (temp))
-	    temp = copy_to_reg (temp);
-	  return const0_rtx;
-	}
-
-      if (TREE_CODE_CLASS (code) == tcc_unary
-	  || code == COMPONENT_REF || code == INDIRECT_REF)
-	return expand_expr (treeop0, const0_rtx, VOIDmode,
-			    modifier);
-
-      else if (TREE_CODE_CLASS (code) == tcc_binary
-	       || TREE_CODE_CLASS (code) == tcc_comparison
-	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
-	{
-	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
-	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
-	  return const0_rtx;
-	}
-      else if (code == BIT_FIELD_REF)
-	{
-	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
-	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
-	  expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
-	  return const0_rtx;
-	}
-
-      target = 0;
-    }
-
   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
     target = 0;
 
@@ -7318,1833 +7264,2109 @@ expand_expr_real_1 (tree exp, rtx target
 
   switch (code)
     {
-    case LABEL_DECL:
-      {
-	tree function = decl_function_context (exp);
-
-	temp = label_rtx (exp);
-	temp = gen_rtx_LABEL_REF (Pmode, temp);
+    case PAREN_EXPR:
+    CASE_CONVERT:
+      if (treeop0 == error_mark_node)
+	return const0_rtx;
 
-	if (function != current_function_decl
-	    && function != 0)
-	  LABEL_REF_NONLOCAL_P (temp) = 1;
+      if (TREE_CODE (type) == UNION_TYPE)
+	{
+	  tree valtype = TREE_TYPE (treeop0);
 
-	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
-	return temp;
-      }
+	  /* If both input and output are BLKmode, this conversion isn't doing
+	     anything except possibly changing memory attribute.  */
+	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
+	    {
+	      rtx result = expand_expr (treeop0, target, tmode,
+					modifier);
 
-    case SSA_NAME:
-      /* ??? ivopts calls expander, without any preparation from
-         out-of-ssa.  So fake instructions as if this was an access to the
-	 base variable.  This unnecessarily allocates a pseudo, see how we can
-	 reuse it, if partition base vars have it set already.  */
-      if (!currently_expanding_to_rtl)
-	return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, NULL);
-      {
-	gimple g = get_gimple_for_ssa_name (exp);
-	if (g)
-	  return expand_expr_real_1 (gimple_assign_rhs_to_tree (g), target,
-				     tmode, modifier, NULL);
-      }
-      decl_rtl = get_rtx_for_ssa_name (exp);
-      exp = SSA_NAME_VAR (exp);
-      goto expand_decl_rtl;
+	      result = copy_rtx (result);
+	      set_mem_attributes (result, type, 0);
+	      return result;
+	    }
 
-    case PARM_DECL:
-    case VAR_DECL:
-      /* If a static var's type was incomplete when the decl was written,
-	 but the type is complete now, lay out the decl now.  */
-      if (DECL_SIZE (exp) == 0
-	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
-	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
-	layout_decl (exp, 0);
+	  if (target == 0)
+	    {
+	      if (TYPE_MODE (type) != BLKmode)
+		target = gen_reg_rtx (TYPE_MODE (type));
+	      else
+		target = assign_temp (type, 0, 1, 1);
+	    }
 
-      /* TLS emulation hook - replace __thread vars with
-	 *__emutls_get_address (&_emutls.var).  */
-      if (! targetm.have_tls
-	  && TREE_CODE (exp) == VAR_DECL
-	  && DECL_THREAD_LOCAL_P (exp))
-	{
-	  exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
-	  return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
-	}
+	  if (MEM_P (target))
+	    /* Store data into beginning of memory target.  */
+	    store_expr (treeop0,
+			adjust_address (target, TYPE_MODE (valtype), 0),
+			modifier == EXPAND_STACK_PARM,
+			false);
 
-      /* ... fall through ...  */
+	  else
+	    {
+	      gcc_assert (REG_P (target));
 
-    case FUNCTION_DECL:
-    case RESULT_DECL:
-      decl_rtl = DECL_RTL (exp);
-    expand_decl_rtl:
-      gcc_assert (decl_rtl);
-      decl_rtl = copy_rtx (decl_rtl);
+	      /* Store this field into a union of the proper type.  */
+	      store_field (target,
+			   MIN ((int_size_in_bytes (TREE_TYPE
+						    (treeop0))
+				 * BITS_PER_UNIT),
+				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
+			   0, TYPE_MODE (valtype), treeop0,
+			   type, 0, false);
+	    }
 
-      /* Ensure variable marked as used even if it doesn't go through
-	 a parser.  If it hasn't be used yet, write out an external
-	 definition.  */
-      if (! TREE_USED (exp))
-	{
-	  assemble_external (exp);
-	  TREE_USED (exp) = 1;
+	  /* Return the entire union.  */
+	  return target;
 	}
 
-      /* Show we haven't gotten RTL for this yet.  */
-      temp = 0;
-
-      /* Variables inherited from containing functions should have
-	 been lowered by this point.  */
-      context = decl_function_context (exp);
-      gcc_assert (!context
-		  || context == current_function_decl
-		  || TREE_STATIC (exp)
-		  /* ??? C++ creates functions that are not TREE_STATIC.  */
-		  || TREE_CODE (exp) == FUNCTION_DECL);
-
-      /* This is the case of an array whose size is to be determined
-	 from its initializer, while the initializer is still being parsed.
-	 See expand_decl.  */
-
-      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
-	temp = validize_mem (decl_rtl);
+      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
+	{
+	  op0 = expand_expr (treeop0, target, VOIDmode,
+			     modifier);
 
-      /* If DECL_RTL is memory, we are in the normal case and the
-	 address is not valid, get the address into a register.  */
+	  /* If the signedness of the conversion differs and OP0 is
+	     a promoted SUBREG, clear that indication since we now
+	     have to do the proper extension.  */
+	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
+	      && GET_CODE (op0) == SUBREG)
+	    SUBREG_PROMOTED_VAR_P (op0) = 0;
 
-      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
-	{
-	  if (alt_rtl)
-	    *alt_rtl = decl_rtl;
-	  decl_rtl = use_anchored_address (decl_rtl);
-	  if (modifier != EXPAND_CONST_ADDRESS
-	      && modifier != EXPAND_SUM
-	      && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
-	    temp = replace_equiv_address (decl_rtl,
-					  copy_rtx (XEXP (decl_rtl, 0)));
+	  return REDUCE_BIT_FIELD (op0);
 	}
 
-      /* If we got something, return it.  But first, set the alignment
-	 if the address is a register.  */
-      if (temp != 0)
+      op0 = expand_expr (treeop0, NULL_RTX, mode,
+			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
+      if (GET_MODE (op0) == mode)
+	;
+
+      /* If OP0 is a constant, just convert it into the proper mode.  */
+      else if (CONSTANT_P (op0))
 	{
-	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
-	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
+	  tree inner_type = TREE_TYPE (treeop0);
+	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
 
-	  return temp;
+	  if (modifier == EXPAND_INITIALIZER)
+	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
+				       subreg_lowpart_offset (mode,
+							      inner_mode));
+	  else
+	    op0 = convert_modes (mode, inner_mode, op0,
+				 TYPE_UNSIGNED (inner_type));
 	}
 
-      /* If the mode of DECL_RTL does not match that of the decl, it
-	 must be a promoted value.  We return a SUBREG of the wanted mode,
-	 but mark it so that we know that it was already extended.  */
+      else if (modifier == EXPAND_INITIALIZER)
+	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
 
-      if (REG_P (decl_rtl)
-	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
+      else if (target == 0)
+	op0 = convert_to_mode (mode, op0,
+			       TYPE_UNSIGNED (TREE_TYPE
+					      (treeop0)));
+      else
 	{
-	  enum machine_mode pmode;
+	  convert_move (target, op0,
+			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
+	  op0 = target;
+	}
 
-	  /* Get the signedness used for this variable.  Ensure we get the
-	     same mode we got when the variable was declared.  */
-	  pmode = promote_decl_mode (exp, &unsignedp);
-	  gcc_assert (GET_MODE (decl_rtl) == pmode);
-
-	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
-	  SUBREG_PROMOTED_VAR_P (temp) = 1;
-	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
-	  return temp;
-	}
-
-      return decl_rtl;
+      return REDUCE_BIT_FIELD (op0);
 
-    case INTEGER_CST:
-      temp = immed_double_const (TREE_INT_CST_LOW (exp),
-				 TREE_INT_CST_HIGH (exp), mode);
+    case POINTER_PLUS_EXPR: 
+      /* Even though the sizetype mode and the pointer's mode can be different
+         expand is able to handle this correctly and get the correct result out 
+         of the PLUS_EXPR code.  */
+      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
+         if sizetype precision is smaller than pointer precision.  */
+      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
+	treeop1 = fold_convert_loc (loc, type,
+				    fold_convert_loc (loc, ssizetype,
+						      treeop1));
+    case PLUS_EXPR:
 
-      return temp;
+      /* Check if this is a case for multiplication and addition.  */
+      if ((TREE_CODE (type) == INTEGER_TYPE
+	   || TREE_CODE (type) == FIXED_POINT_TYPE)
+	  && (subexp0_def = get_def_for_expr (treeop0,
+					      MULT_EXPR)))
+	{
+	  tree subsubexp0, subsubexp1;
+	  gimple subsubexp0_def, subsubexp1_def;
+	  enum tree_code this_code;
 
-    case VECTOR_CST:
-      {
-	tree tmp = NULL_TREE;
-	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
-	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
-	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
-	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
-	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
-	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
-	  return const_vector_from_tree (exp);
-	if (GET_MODE_CLASS (mode) == MODE_INT)
-	  {
-	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
-	    if (type_for_mode)
-	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
-	  }
-	if (!tmp)
-	  tmp = build_constructor_from_list (type,
-					     TREE_VECTOR_CST_ELTS (exp));
-	return expand_expr (tmp, ignore ? const0_rtx : target,
-			    tmode, modifier);
-      }
+	  this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
+						       : FIXED_CONVERT_EXPR;
+	  subsubexp0 = gimple_assign_rhs1 (subexp0_def);
+	  subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
+	  subsubexp1 = gimple_assign_rhs2 (subexp0_def);
+	  subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
+	  if (subsubexp0_def && subsubexp1_def
+	      && (top0 = gimple_assign_rhs1 (subsubexp0_def))
+	      && (top1 = gimple_assign_rhs1 (subsubexp1_def))
+	      && (TYPE_PRECISION (TREE_TYPE (top0))
+		  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
+	      && (TYPE_PRECISION (TREE_TYPE (top0))
+		  == TYPE_PRECISION (TREE_TYPE (top1)))
+	      && (TYPE_UNSIGNED (TREE_TYPE (top0))
+		  == TYPE_UNSIGNED (TREE_TYPE (top1))))
+	    {
+	      tree op0type = TREE_TYPE (top0);
+	      enum machine_mode innermode = TYPE_MODE (op0type);
+	      bool zextend_p = TYPE_UNSIGNED (op0type);
+	      bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
+	      if (sat_p == 0)
+		this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
+	      else
+		this_optab = zextend_p ? usmadd_widen_optab
+				       : ssmadd_widen_optab;
+	      if (mode == GET_MODE_2XWIDER_MODE (innermode)
+		  && (optab_handler (this_optab, mode)->insn_code
+		      != CODE_FOR_nothing))
+		{
+		  expand_operands (top0, top1, NULL_RTX, &op0, &op1,
+				   EXPAND_NORMAL);
+		  op2 = expand_expr (treeop1, subtarget,
+				     VOIDmode, EXPAND_NORMAL);
+		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
+					    target, unsignedp);
+		  gcc_assert (temp);
+		  return REDUCE_BIT_FIELD (temp);
+		}
+	    }
+	}
 
-    case CONST_DECL:
-      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
+      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
+	 something else, make sure we add the register to the constant and
+	 then to the other thing.  This case can occur during strength
+	 reduction and doing it this way will produce better code if the
+	 frame pointer or argument pointer is eliminated.
 
-    case REAL_CST:
-      /* If optimized, generate immediate CONST_DOUBLE
-	 which will be turned into memory by reload if necessary.
+	 fold-const.c will ensure that the constant is always in the inner
+	 PLUS_EXPR, so the only case we need to do anything about is if
+	 sp, ap, or fp is our second argument, in which case we must swap
+	 the innermost first argument and our second argument.  */
 
-	 We used to force a register so that loop.c could see it.  But
-	 this does not allow gen_* patterns to perform optimizations with
-	 the constants.  It also produces two insns in cases like "x = 1.0;".
-	 On most machines, floating-point constants are not permitted in
-	 many insns, so we'd end up copying it to a register in any case.
+      if (TREE_CODE (treeop0) == PLUS_EXPR
+	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
+	  && TREE_CODE (treeop1) == VAR_DECL
+	  && (DECL_RTL (treeop1) == frame_pointer_rtx
+	      || DECL_RTL (treeop1) == stack_pointer_rtx
+	      || DECL_RTL (treeop1) == arg_pointer_rtx))
+	{
+	  tree t = treeop1;
 
-	 Now, we do the copying in expand_binop, if appropriate.  */
-      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
-					   TYPE_MODE (TREE_TYPE (exp)));
+	  treeop1 = TREE_OPERAND (treeop0, 0);
+	  TREE_OPERAND (treeop0, 0) = t;
+	}
 
-    case FIXED_CST:
-      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
-					   TYPE_MODE (TREE_TYPE (exp)));
+      /* If the result is to be ptr_mode and we are adding an integer to
+	 something, we might be forming a constant.  So try to use
+	 plus_constant.  If it produces a sum and we can't accept it,
+	 use force_operand.  This allows P = &ARR[const] to generate
+	 efficient code on machines where a SYMBOL_REF is not a valid
+	 address.
 
-    case COMPLEX_CST:
-      /* Handle evaluating a complex constant in a CONCAT target.  */
-      if (original_target && GET_CODE (original_target) == CONCAT)
+	 If this is an EXPAND_SUM call, always return the sum.  */
+      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
+	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
 	{
-	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
-	  rtx rtarg, itarg;
-
-	  rtarg = XEXP (original_target, 0);
-	  itarg = XEXP (original_target, 1);
+	  if (modifier == EXPAND_STACK_PARM)
+	    target = 0;
+	  if (TREE_CODE (treeop0) == INTEGER_CST
+	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+	      && TREE_CONSTANT (treeop1))
+	    {
+	      rtx constant_part;
 
-	  /* Move the real and imaginary parts separately.  */
-	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
-	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
+	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
+				 EXPAND_SUM);
+	      /* Use immed_double_const to ensure that the constant is
+		 truncated according to the mode of OP1, then sign extended
+		 to a HOST_WIDE_INT.  Using the constant directly can result
+		 in non-canonical RTL in a 64x32 cross compile.  */
+	      constant_part
+		= immed_double_const (TREE_INT_CST_LOW (treeop0),
+				      (HOST_WIDE_INT) 0,
+				      TYPE_MODE (TREE_TYPE (treeop1)));
+	      op1 = plus_constant (op1, INTVAL (constant_part));
+	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
+		op1 = force_operand (op1, target);
+	      return REDUCE_BIT_FIELD (op1);
+	    }
 
-	  if (op0 != rtarg)
-	    emit_move_insn (rtarg, op0);
-	  if (op1 != itarg)
-	    emit_move_insn (itarg, op1);
+	  else if (TREE_CODE (treeop1) == INTEGER_CST
+		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
+		   && TREE_CONSTANT (treeop0))
+	    {
+	      rtx constant_part;
 
-	  return original_target;
+	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
+				 (modifier == EXPAND_INITIALIZER
+				 ? EXPAND_INITIALIZER : EXPAND_SUM));
+	      if (! CONSTANT_P (op0))
+		{
+		  op1 = expand_expr (treeop1, NULL_RTX,
+				     VOIDmode, modifier);
+		  /* Return a PLUS if modifier says it's OK.  */
+		  if (modifier == EXPAND_SUM
+		      || modifier == EXPAND_INITIALIZER)
+		    return simplify_gen_binary (PLUS, mode, op0, op1);
+		  goto binop2;
+		}
+	      /* Use immed_double_const to ensure that the constant is
+		 truncated according to the mode of OP1, then sign extended
+		 to a HOST_WIDE_INT.  Using the constant directly can result
+		 in non-canonical RTL in a 64x32 cross compile.  */
+	      constant_part
+		= immed_double_const (TREE_INT_CST_LOW (treeop1),
+				      (HOST_WIDE_INT) 0,
+				      TYPE_MODE (TREE_TYPE (treeop0)));
+	      op0 = plus_constant (op0, INTVAL (constant_part));
+	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
+		op0 = force_operand (op0, target);
+	      return REDUCE_BIT_FIELD (op0);
+	    }
 	}
 
-      /* ... fall through ...  */
+      /* No sense saving up arithmetic to be done
+	 if it's all in the wrong mode to form part of an address.
+	 And force_operand won't know whether to sign-extend or
+	 zero-extend.  */
+      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
+	  || mode != ptr_mode)
+	{
+	  expand_operands (treeop0, treeop1,
+			   subtarget, &op0, &op1, EXPAND_NORMAL);
+	  if (op0 == const0_rtx)
+	    return op1;
+	  if (op1 == const0_rtx)
+	    return op0;
+	  goto binop2;
+	}
 
-    case STRING_CST:
-      temp = expand_expr_constant (exp, 1, modifier);
+      expand_operands (treeop0, treeop1,
+		       subtarget, &op0, &op1, modifier);
+      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
 
-      /* temp contains a constant address.
-	 On RISC machines where a constant address isn't valid,
-	 make some insns to get that address into a register.  */
-      if (modifier != EXPAND_CONST_ADDRESS
-	  && modifier != EXPAND_INITIALIZER
-	  && modifier != EXPAND_SUM
-	  && ! memory_address_p (mode, XEXP (temp, 0)))
-	return replace_equiv_address (temp,
-				      copy_rtx (XEXP (temp, 0)));
-      return temp;
+    case MINUS_EXPR:
+      /* Check if this is a case for multiplication and subtraction.  */
+      if ((TREE_CODE (type) == INTEGER_TYPE
+	   || TREE_CODE (type) == FIXED_POINT_TYPE)
+	  && (subexp1_def = get_def_for_expr (treeop1,
+					      MULT_EXPR)))
+	{
+	  tree subsubexp0, subsubexp1;
+	  gimple subsubexp0_def, subsubexp1_def;
+	  enum tree_code this_code;
 
-    case SAVE_EXPR:
-      {
-	tree val = treeop0;
-	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
-
-	if (!SAVE_EXPR_RESOLVED_P (exp))
-	  {
-	    /* We can indeed still hit this case, typically via builtin
-	       expanders calling save_expr immediately before expanding
-	       something.  Assume this means that we only have to deal
-	       with non-BLKmode values.  */
-	    gcc_assert (GET_MODE (ret) != BLKmode);
+	  this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
+						       : FIXED_CONVERT_EXPR;
+	  subsubexp0 = gimple_assign_rhs1 (subexp1_def);
+	  subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
+	  subsubexp1 = gimple_assign_rhs2 (subexp1_def);
+	  subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
+	  if (subsubexp0_def && subsubexp1_def
+	      && (top0 = gimple_assign_rhs1 (subsubexp0_def))
+	      && (top1 = gimple_assign_rhs1 (subsubexp1_def))
+	      && (TYPE_PRECISION (TREE_TYPE (top0))
+		  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
+	      && (TYPE_PRECISION (TREE_TYPE (top0))
+		  == TYPE_PRECISION (TREE_TYPE (top1)))
+	      && (TYPE_UNSIGNED (TREE_TYPE (top0))
+		  == TYPE_UNSIGNED (TREE_TYPE (top1))))
+	    {
+	      tree op0type = TREE_TYPE (top0);
+	      enum machine_mode innermode = TYPE_MODE (op0type);
+	      bool zextend_p = TYPE_UNSIGNED (op0type);
+	      bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
+	      if (sat_p == 0)
+		this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
+	      else
+		this_optab = zextend_p ? usmsub_widen_optab
+				       : ssmsub_widen_optab;
+	      if (mode == GET_MODE_2XWIDER_MODE (innermode)
+		  && (optab_handler (this_optab, mode)->insn_code
+		      != CODE_FOR_nothing))
+		{
+		  expand_operands (top0, top1, NULL_RTX, &op0, &op1,
+				   EXPAND_NORMAL);
+		  op2 = expand_expr (treeop0, subtarget,
+				     VOIDmode, EXPAND_NORMAL);
+		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
+					    target, unsignedp);
+		  gcc_assert (temp);
+		  return REDUCE_BIT_FIELD (temp);
+		}
+	    }
+	}
 
-	    val = build_decl (EXPR_LOCATION (exp),
-			      VAR_DECL, NULL, TREE_TYPE (exp));
-	    DECL_ARTIFICIAL (val) = 1;
-	    DECL_IGNORED_P (val) = 1;
-	    treeop0 = val;
-	    TREE_OPERAND (exp, 0) = treeop0;
-	    SAVE_EXPR_RESOLVED_P (exp) = 1;
+      /* For initializers, we are allowed to return a MINUS of two
+	 symbolic constants.  Here we handle all cases when both operands
+	 are constant.  */
+      /* Handle difference of two symbolic constants,
+	 for the sake of an initializer.  */
+      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
+	  && really_constant_p (treeop0)
+	  && really_constant_p (treeop1))
+	{
+	  expand_operands (treeop0, treeop1,
+			   NULL_RTX, &op0, &op1, modifier);
 
-	    if (!CONSTANT_P (ret))
-	      ret = copy_to_reg (ret);
-	    SET_DECL_RTL (val, ret);
-	  }
+	  /* If the last operand is a CONST_INT, use plus_constant of
+	     the negated constant.  Else make the MINUS.  */
+	  if (CONST_INT_P (op1))
+	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
+	  else
+	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
+	}
 
-        return ret;
-      }
+      /* No sense saving up arithmetic to be done
+	 if it's all in the wrong mode to form part of an address.
+	 And force_operand won't know whether to sign-extend or
+	 zero-extend.  */
+      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
+	  || mode != ptr_mode)
+	goto binop;
 
-    case GOTO_EXPR:
-      if (TREE_CODE (treeop0) == LABEL_DECL)
-	expand_goto (treeop0);
-      else
-	expand_computed_goto (treeop0);
-      return const0_rtx;
+      expand_operands (treeop0, treeop1,
+		       subtarget, &op0, &op1, modifier);
 
-    case CONSTRUCTOR:
-      /* If we don't need the result, just ensure we evaluate any
-	 subexpressions.  */
-      if (ignore)
+      /* Convert A - const to A + (-const).  */
+      if (CONST_INT_P (op1))
 	{
-	  unsigned HOST_WIDE_INT idx;
-	  tree value;
-
-	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
-	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
-
-	  return const0_rtx;
+	  op1 = negate_rtx (mode, op1);
+	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
 	}
 
-      return expand_constructor (exp, target, modifier, false);
-
-    case MISALIGNED_INDIRECT_REF:
-    case ALIGN_INDIRECT_REF:
-    case INDIRECT_REF:
-      {
-	tree exp1 = treeop0;
+      goto binop2;
 
-	if (modifier != EXPAND_WRITE)
-	  {
-	    tree t;
+    case MULT_EXPR:
+      /* If this is a fixed-point operation, then we cannot use the code
+	 below because "expand_mult" doesn't support sat/no-sat fixed-point
+         multiplications.   */
+      if (ALL_FIXED_POINT_MODE_P (mode))
+	goto binop;
 
-	    t = fold_read_from_constant_string (exp);
-	    if (t)
-	      return expand_expr (t, target, tmode, modifier);
-	  }
+      /* If first operand is constant, swap them.
+	 Thus the following special case checks need only
+	 check the second operand.  */
+      if (TREE_CODE (treeop0) == INTEGER_CST)
+	{
+	  tree t1 = treeop0;
+	  treeop0 = treeop1;
+	  treeop1 = t1;
+	}
 
-	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
-	op0 = memory_address (mode, op0);
+      /* Attempt to return something suitable for generating an
+	 indexed address, for machines that support that.  */
 
-	if (code == ALIGN_INDIRECT_REF)
-	  {
-	    int align = TYPE_ALIGN_UNIT (type);
-	    op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
-	    op0 = memory_address (mode, op0);
-	  }
+      if (modifier == EXPAND_SUM && mode == ptr_mode
+	  && host_integerp (treeop1, 0))
+	{
+	  tree exp1 = treeop1;
 
-	temp = gen_rtx_MEM (mode, op0);
+	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
+			     EXPAND_SUM);
 
-	set_mem_attributes (temp, exp, 0);
+	  if (!REG_P (op0))
+	    op0 = force_operand (op0, NULL_RTX);
+	  if (!REG_P (op0))
+	    op0 = copy_to_mode_reg (mode, op0);
 
-	/* Resolve the misalignment now, so that we don't have to remember
-	   to resolve it later.  Of course, this only works for reads.  */
-	if (code == MISALIGNED_INDIRECT_REF)
-	  {
-	    int icode;
-	    rtx reg, insn;
+	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
+			       gen_int_mode (tree_low_cst (exp1, 0),
+					     TYPE_MODE (TREE_TYPE (exp1)))));
+	}
 
-	    gcc_assert (modifier == EXPAND_NORMAL
-			|| modifier == EXPAND_STACK_PARM);
+      if (modifier == EXPAND_STACK_PARM)
+	target = 0;
 
-	    /* The vectorizer should have already checked the mode.  */
-	    icode = optab_handler (movmisalign_optab, mode)->insn_code;
-	    gcc_assert (icode != CODE_FOR_nothing);
+      /* Check for multiplying things that have been extended
+	 from a narrower type.  If this machine supports multiplying
+	 in that narrower type with a result in the desired type,
+	 do it that way, and avoid the explicit type-conversion.  */
 
-	    /* We've already validated the memory, and we're creating a
-	       new pseudo destination.  The predicates really can't fail.  */
-	    reg = gen_reg_rtx (mode);
+      subexp0 = treeop0;
+      subexp1 = treeop1;
+      subexp0_def = get_def_for_expr (subexp0, NOP_EXPR);
+      subexp1_def = get_def_for_expr (subexp1, NOP_EXPR);
+      top0 = top1 = NULL_TREE;
 
-	    /* Nor can the insn generator.  */
-	    insn = GEN_FCN (icode) (reg, temp);
-	    emit_insn (insn);
+      /* First, check if we have a multiplication of one signed and one
+	 unsigned operand.  */
+      if (subexp0_def
+	  && (top0 = gimple_assign_rhs1 (subexp0_def))
+	  && subexp1_def
+	  && (top1 = gimple_assign_rhs1 (subexp1_def))
+	  && TREE_CODE (type) == INTEGER_TYPE
+	  && (TYPE_PRECISION (TREE_TYPE (top0))
+	      < TYPE_PRECISION (TREE_TYPE (subexp0)))
+	  && (TYPE_PRECISION (TREE_TYPE (top0))
+	      == TYPE_PRECISION (TREE_TYPE (top1)))
+	  && (TYPE_UNSIGNED (TREE_TYPE (top0))
+	      != TYPE_UNSIGNED (TREE_TYPE (top1))))
+	{
+	  enum machine_mode innermode
+	    = TYPE_MODE (TREE_TYPE (top0));
+	  this_optab = usmul_widen_optab;
+	  if (mode == GET_MODE_WIDER_MODE (innermode))
+	    {
+	      if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
+		{
+		  if (TYPE_UNSIGNED (TREE_TYPE (top0)))
+		    expand_operands (top0, top1, NULL_RTX, &op0, &op1,
+				     EXPAND_NORMAL);
+		  else
+		    expand_operands (top0, top1, NULL_RTX, &op1, &op0,
+				     EXPAND_NORMAL);
 
-	    return reg;
-	  }
+		  goto binop3;
+		}
+	    }
+	}
+      /* Check for a multiplication with matching signedness.  If
+	 valid, TOP0 and TOP1 were set in the previous if
+	 condition.  */
+      else if (top0
+	  && TREE_CODE (type) == INTEGER_TYPE
+	  && (TYPE_PRECISION (TREE_TYPE (top0))
+	      < TYPE_PRECISION (TREE_TYPE (subexp0)))
+	  && ((TREE_CODE (subexp1) == INTEGER_CST
+	       && int_fits_type_p (subexp1, TREE_TYPE (top0))
+	       /* Don't use a widening multiply if a shift will do.  */
+	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (subexp1)))
+		    > HOST_BITS_PER_WIDE_INT)
+		   || exact_log2 (TREE_INT_CST_LOW (subexp1)) < 0))
+	      ||
+	      (top1
+	       && (TYPE_PRECISION (TREE_TYPE (top1))
+		   == TYPE_PRECISION (TREE_TYPE (top0))
+	       /* If both operands are extended, they must either both
+		  be zero-extended or both be sign-extended.  */
+	       && (TYPE_UNSIGNED (TREE_TYPE (top1))
+		   == TYPE_UNSIGNED (TREE_TYPE (top0)))))))
+	{
+	  tree op0type = TREE_TYPE (top0);
+	  enum machine_mode innermode = TYPE_MODE (op0type);
+	  bool zextend_p = TYPE_UNSIGNED (op0type);
+	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
+	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
 
-	return temp;
-      }
+	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
+	    {
+	      if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
+		{
+		  if (TREE_CODE (subexp1) == INTEGER_CST)
+		    expand_operands (top0, subexp1, NULL_RTX, &op0, &op1,
+				     EXPAND_NORMAL);
+		  else
+		    expand_operands (top0, top1, NULL_RTX, &op0, &op1,
+				     EXPAND_NORMAL);
+		  goto binop3;
+		}
+	      else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
+		       && innermode == word_mode)
+		{
+		  rtx htem, hipart;
+		  op0 = expand_normal (top0);
+		  if (TREE_CODE (subexp1) == INTEGER_CST)
+		    op1 = convert_modes (innermode, mode,
+					 expand_normal (subexp1), unsignedp);
+		  else
+		    op1 = expand_normal (top1);
+		  temp = expand_binop (mode, other_optab, op0, op1, target,
+				       unsignedp, OPTAB_LIB_WIDEN);
+		  hipart = gen_highpart (innermode, temp);
+		  htem = expand_mult_highpart_adjust (innermode, hipart,
+						      op0, op1, hipart,
+						      zextend_p);
+		  if (htem != hipart)
+		    emit_move_insn (hipart, htem);
+		  return REDUCE_BIT_FIELD (temp);
+		}
+	    }
+	}
+      expand_operands (subexp0, subexp1, subtarget, &op0, &op1, EXPAND_NORMAL);
+      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
 
-    case TARGET_MEM_REF:
-      {
-	struct mem_address addr;
+    case TRUNC_DIV_EXPR:
+    case FLOOR_DIV_EXPR:
+    case CEIL_DIV_EXPR:
+    case ROUND_DIV_EXPR:
+    case EXACT_DIV_EXPR:
+      /* If this is a fixed-point operation, then we cannot use the code
+	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
+         divisions.   */
+      if (ALL_FIXED_POINT_MODE_P (mode))
+	goto binop;
 
-	get_address_description (exp, &addr);
-	op0 = addr_for_mem_ref (&addr, true);
-	op0 = memory_address (mode, op0);
-	temp = gen_rtx_MEM (mode, op0);
-	set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
-      }
-      return temp;
+      if (modifier == EXPAND_STACK_PARM)
+	target = 0;
+      /* Possible optimization: compute the dividend with EXPAND_SUM
+	 then if the divisor is constant can optimize the case
+	 where some terms of the dividend have coeffs divisible by it.  */
+      expand_operands (treeop0, treeop1,
+		       subtarget, &op0, &op1, EXPAND_NORMAL);
+      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
 
-    case ARRAY_REF:
+    case RDIV_EXPR:
+      goto binop;
 
-      {
-	tree array = treeop0;
-	tree index = treeop1;
+    case TRUNC_MOD_EXPR:
+    case FLOOR_MOD_EXPR:
+    case CEIL_MOD_EXPR:
+    case ROUND_MOD_EXPR:
+      if (modifier == EXPAND_STACK_PARM)
+	target = 0;
+      expand_operands (treeop0, treeop1,
+		       subtarget, &op0, &op1, EXPAND_NORMAL);
+      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
 
-	/* Fold an expression like: "foo"[2].
-	   This is not done in fold so it won't happen inside &.
-	   Don't fold if this is for wide characters since it's too
-	   difficult to do correctly and this is a very rare case.  */
+    case FIXED_CONVERT_EXPR:
+      op0 = expand_normal (treeop0);
+      if (target == 0 || modifier == EXPAND_STACK_PARM)
+	target = gen_reg_rtx (mode);
 
-	if (modifier != EXPAND_CONST_ADDRESS
-	    && modifier != EXPAND_INITIALIZER
-	    && modifier != EXPAND_MEMORY)
-	  {
-	    tree t = fold_read_from_constant_string (exp);
+      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
+	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
+          || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
+	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
+      else
+	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
+      return target;
 
-	    if (t)
-	      return expand_expr (t, target, tmode, modifier);
-	  }
+    case FIX_TRUNC_EXPR:
+      op0 = expand_normal (treeop0);
+      if (target == 0 || modifier == EXPAND_STACK_PARM)
+	target = gen_reg_rtx (mode);
+      expand_fix (target, op0, unsignedp);
+      return target;
 
-	/* If this is a constant index into a constant array,
-	   just get the value from the array.  Handle both the cases when
-	   we have an explicit constructor and when our operand is a variable
-	   that was declared const.  */
+    case FLOAT_EXPR:
+      op0 = expand_normal (treeop0);
+      if (target == 0 || modifier == EXPAND_STACK_PARM)
+	target = gen_reg_rtx (mode);
+      /* expand_float can't figure out what to do if FROM has VOIDmode.
+	 So give it the correct mode.  With -O, cse will optimize this.  */
+      if (GET_MODE (op0) == VOIDmode)
+	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
+				op0);
+      expand_float (target, op0,
+		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
+      return target;
 
-	if (modifier != EXPAND_CONST_ADDRESS
-	    && modifier != EXPAND_INITIALIZER
-	    && modifier != EXPAND_MEMORY
-	    && TREE_CODE (array) == CONSTRUCTOR
-	    && ! TREE_SIDE_EFFECTS (array)
-	    && TREE_CODE (index) == INTEGER_CST)
-	  {
-	    unsigned HOST_WIDE_INT ix;
-	    tree field, value;
+    case NEGATE_EXPR:
+      op0 = expand_expr (treeop0, subtarget,
+			 VOIDmode, EXPAND_NORMAL);
+      if (modifier == EXPAND_STACK_PARM)
+	target = 0;
+      temp = expand_unop (mode,
+      			  optab_for_tree_code (NEGATE_EXPR, type,
+					       optab_default),
+			  op0, target, 0);
+      gcc_assert (temp);
+      return REDUCE_BIT_FIELD (temp);
 
-	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
-				      field, value)
-	      if (tree_int_cst_equal (field, index))
-		{
-		  if (!TREE_SIDE_EFFECTS (value))
-		    return expand_expr (fold (value), target, tmode, modifier);
-		  break;
-		}
-	  }
+    case ABS_EXPR:
+      op0 = expand_expr (treeop0, subtarget,
+			 VOIDmode, EXPAND_NORMAL);
+      if (modifier == EXPAND_STACK_PARM)
+	target = 0;
 
-	else if (optimize >= 1
-		 && modifier != EXPAND_CONST_ADDRESS
-		 && modifier != EXPAND_INITIALIZER
-		 && modifier != EXPAND_MEMORY
-		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
-		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
-		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
-		 && targetm.binds_local_p (array))
-	  {
-	    if (TREE_CODE (index) == INTEGER_CST)
-	      {
-		tree init = DECL_INITIAL (array);
+      /* ABS_EXPR is not valid for complex arguments.  */
+      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
+		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
 
-		if (TREE_CODE (init) == CONSTRUCTOR)
-		  {
-		    unsigned HOST_WIDE_INT ix;
-		    tree field, value;
+      /* Unsigned abs is simply the operand.  Testing here means we don't
+	 risk generating incorrect code below.  */
+      if (TYPE_UNSIGNED (type))
+	return op0;
 
-		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
-					      field, value)
-		      if (tree_int_cst_equal (field, index))
-			{
-			  if (TREE_SIDE_EFFECTS (value))
-			    break;
+      return expand_abs (mode, op0, target, unsignedp,
+			 safe_from_p (target, treeop0, 1));
 
-			  if (TREE_CODE (value) == CONSTRUCTOR)
-			    {
-			      /* If VALUE is a CONSTRUCTOR, this
-				 optimization is only useful if
-				 this doesn't store the CONSTRUCTOR
-				 into memory.  If it does, it is more
-				 efficient to just load the data from
-				 the array directly.  */
-			      rtx ret = expand_constructor (value, target,
-							    modifier, true);
-			      if (ret == NULL_RTX)
-				break;
-			    }
+    case MAX_EXPR:
+    case MIN_EXPR:
+      target = original_target;
+      if (target == 0
+	  || modifier == EXPAND_STACK_PARM
+	  || (MEM_P (target) && MEM_VOLATILE_P (target))
+	  || GET_MODE (target) != mode
+	  || (REG_P (target)
+	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
+	target = gen_reg_rtx (mode);
+      expand_operands (treeop0, treeop1,
+		       target, &op0, &op1, EXPAND_NORMAL);
 
-			  return expand_expr (fold (value), target, tmode,
-					      modifier);
-			}
-		  }
-		else if(TREE_CODE (init) == STRING_CST)
-		  {
-		    tree index1 = index;
-		    tree low_bound = array_ref_low_bound (exp);
-		    index1 = fold_convert_loc (loc, sizetype,
-					       treeop1);
+      /* First try to do it with a special MIN or MAX instruction.
+	 If that does not win, use a conditional jump to select the proper
+	 value.  */
+      this_optab = optab_for_tree_code (code, type, optab_default);
+      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
+			   OPTAB_WIDEN);
+      if (temp != 0)
+	return temp;
 
-		    /* Optimize the special-case of a zero lower bound.
+      /* At this point, a MEM target is no longer useful; we will get better
+	 code without it.  */
 
-		       We convert the low_bound to sizetype to avoid some problems
-		       with constant folding.  (E.g. suppose the lower bound is 1,
-		       and its mode is QI.  Without the conversion,l (ARRAY
-		       +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
-		       +INDEX), which becomes (ARRAY+255+INDEX).  Opps!)  */
+      if (! REG_P (target))
+	target = gen_reg_rtx (mode);
 
-		    if (! integer_zerop (low_bound))
-		      index1 = size_diffop_loc (loc, index1,
-					    fold_convert_loc (loc, sizetype,
-							      low_bound));
+      /* If op1 was placed in target, swap op0 and op1.  */
+      if (target != op0 && target == op1)
+	{
+	  temp = op0;
+	  op0 = op1;
+	  op1 = temp;
+	}
 
-		    if (0 > compare_tree_int (index1,
-					      TREE_STRING_LENGTH (init)))
-		      {
-			tree type = TREE_TYPE (TREE_TYPE (init));
-			enum machine_mode mode = TYPE_MODE (type);
-
-			if (GET_MODE_CLASS (mode) == MODE_INT
-			    && GET_MODE_SIZE (mode) == 1)
-			  return gen_int_mode (TREE_STRING_POINTER (init)
-					       [TREE_INT_CST_LOW (index1)],
-					       mode);
-		      }
-		  }
-	      }
-	  }
-      }
-      goto normal_inner_ref;
-
-    case COMPONENT_REF:
-      /* If the operand is a CONSTRUCTOR, we can just extract the
-	 appropriate field if it is present.  */
-      if (TREE_CODE (treeop0) == CONSTRUCTOR)
-	{
-	  unsigned HOST_WIDE_INT idx;
-	  tree field, value;
-
-	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
-				    idx, field, value)
-	    if (field == treeop1
-		/* We can normally use the value of the field in the
-		   CONSTRUCTOR.  However, if this is a bitfield in
-		   an integral mode that we can fit in a HOST_WIDE_INT,
-		   we must mask only the number of bits in the bitfield,
-		   since this is done implicitly by the constructor.  If
-		   the bitfield does not meet either of those conditions,
-		   we can't do this optimization.  */
-		&& (! DECL_BIT_FIELD (field)
-		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
-			&& (GET_MODE_BITSIZE (DECL_MODE (field))
-			    <= HOST_BITS_PER_WIDE_INT))))
-	      {
-		if (DECL_BIT_FIELD (field)
-		    && modifier == EXPAND_STACK_PARM)
-		  target = 0;
-		op0 = expand_expr (value, target, tmode, modifier);
-		if (DECL_BIT_FIELD (field))
-		  {
-		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
-		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
-
-		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
-		      {
-			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
-			op0 = expand_and (imode, op0, op1, target);
-		      }
-		    else
-		      {
-			tree count
-			  = build_int_cst (NULL_TREE,
-					   GET_MODE_BITSIZE (imode) - bitsize);
-
-			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
-					    target, 0);
-			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
-					    target, 0);
-		      }
-		  }
-
-		return op0;
-	      }
-	}
-      goto normal_inner_ref;
+      /* We generate better code and avoid problems with op1 mentioning
+	 target by forcing op1 into a pseudo if it isn't a constant.  */
+      if (! CONSTANT_P (op1))
+	op1 = force_reg (mode, op1);
 
-    case BIT_FIELD_REF:
-    case ARRAY_RANGE_REF:
-    normal_inner_ref:
       {
-	enum machine_mode mode1, mode2;
-	HOST_WIDE_INT bitsize, bitpos;
-	tree offset;
-	int volatilep = 0, must_force_mem;
-	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
-					&mode1, &unsignedp, &volatilep, true);
-	rtx orig_op0, memloc;
-
-	/* If we got back the original object, something is wrong.  Perhaps
-	   we are evaluating an expression too early.  In any event, don't
-	   infinitely recurse.  */
-	gcc_assert (tem != exp);
-
-	/* If TEM's type is a union of variable size, pass TARGET to the inner
-	   computation, since it will need a temporary and TARGET is known
-	   to have to do.  This occurs in unchecked conversion in Ada.  */
-	orig_op0 = op0
-	  = expand_expr (tem,
-			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
-			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
-			      != INTEGER_CST)
-			  && modifier != EXPAND_STACK_PARM
-			  ? target : NULL_RTX),
-			 VOIDmode,
-			 (modifier == EXPAND_INITIALIZER
-			  || modifier == EXPAND_CONST_ADDRESS
-			  || modifier == EXPAND_STACK_PARM)
-			 ? modifier : EXPAND_NORMAL);
-
-	mode2
-	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
+	enum rtx_code comparison_code;
+	rtx cmpop1 = op1;
 
-	/* If we have either an offset, a BLKmode result, or a reference
-	   outside the underlying object, we must force it to memory.
-	   Such a case can occur in Ada if we have unchecked conversion
-	   of an expression from a scalar type to an aggregate type or
-	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
-	   passed a partially uninitialized object or a view-conversion
-	   to a larger size.  */
-	must_force_mem = (offset
-			  || mode1 == BLKmode
-			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
+	if (code == MAX_EXPR)
+	  comparison_code = unsignedp ? GEU : GE;
+	else
+	  comparison_code = unsignedp ? LEU : LE;
 
-	/* Handle CONCAT first.  */
-	if (GET_CODE (op0) == CONCAT && !must_force_mem)
+	/* Canonicalize to comparisons against 0.  */
+	if (op1 == const1_rtx)
 	  {
-	    if (bitpos == 0
-		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
-	      return op0;
-	    if (bitpos == 0
-		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
-		&& bitsize)
-	      {
-		op0 = XEXP (op0, 0);
-		mode2 = GET_MODE (op0);
-	      }
-	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
-		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
-		     && bitpos
-		     && bitsize)
-	      {
-		op0 = XEXP (op0, 1);
-		bitpos = 0;
-		mode2 = GET_MODE (op0);
-	      }
-	    else
-	      /* Otherwise force into memory.  */
-	      must_force_mem = 1;
+	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
+	       or (a != 0 ? a : 1) for unsigned.
+	       For MIN we are safe converting (a <= 1 ? a : 1)
+	       into (a <= 0 ? a : 1)  */
+	    cmpop1 = const0_rtx;
+	    if (code == MAX_EXPR)
+	      comparison_code = unsignedp ? NE : GT;
 	  }
-
-	/* If this is a constant, put it in a register if it is a legitimate
-	   constant and we don't need a memory reference.  */
-	if (CONSTANT_P (op0)
-	    && mode2 != BLKmode
-	    && LEGITIMATE_CONSTANT_P (op0)
-	    && !must_force_mem)
-	  op0 = force_reg (mode2, op0);
-
-	/* Otherwise, if this is a constant, try to force it to the constant
-	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
-	   is a legitimate constant.  */
-	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
-	  op0 = validize_mem (memloc);
-
-	/* Otherwise, if this is a constant or the object is not in memory
-	   and need be, put it there.  */
-	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
+	if (op1 == constm1_rtx && !unsignedp)
 	  {
-	    tree nt = build_qualified_type (TREE_TYPE (tem),
-					    (TYPE_QUALS (TREE_TYPE (tem))
-					     | TYPE_QUAL_CONST));
-	    memloc = assign_temp (nt, 1, 1, 1);
-	    emit_move_insn (memloc, op0);
-	    op0 = memloc;
+	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
+	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
+	    cmpop1 = const0_rtx;
+	    if (code == MIN_EXPR)
+	      comparison_code = LT;
 	  }
-
-	if (offset)
+#ifdef HAVE_conditional_move
+	/* Use a conditional move if possible.  */
+	if (can_conditionally_move_p (mode))
 	  {
-	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
-					  EXPAND_SUM);
+	    rtx insn;
 
-	    gcc_assert (MEM_P (op0));
+	    /* ??? Same problem as in expmed.c: emit_conditional_move
+	       forces a stack adjustment via compare_from_rtx, and we
+	       lose the stack adjustment if the sequence we are about
+	       to create is discarded.  */
+	    do_pending_stack_adjust ();
 
-#ifdef POINTERS_EXTEND_UNSIGNED
-	    if (GET_MODE (offset_rtx) != Pmode)
-	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
-#else
-	    if (GET_MODE (offset_rtx) != ptr_mode)
-	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
-#endif
+	    start_sequence ();
 
-	    if (GET_MODE (op0) == BLKmode
-		/* A constant address in OP0 can have VOIDmode, we must
-		   not try to call force_reg in that case.  */
-		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
-		&& bitsize != 0
-		&& (bitpos % bitsize) == 0
-		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
-		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
+	    /* Try to emit the conditional move.  */
+	    insn = emit_conditional_move (target, comparison_code,
+					  op0, cmpop1, mode,
+					  op0, op1, mode,
+					  unsignedp);
+
+	    /* If we could do the conditional move, emit the sequence,
+	       and return.  */
+	    if (insn)
 	      {
-		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
-		bitpos = 0;
+		rtx seq = get_insns ();
+		end_sequence ();
+		emit_insn (seq);
+		return target;
 	      }
 
-	    op0 = offset_address (op0, offset_rtx,
-				  highest_pow2_factor (offset));
+	    /* Otherwise discard the sequence and fall back to code with
+	       branches.  */
+	    end_sequence ();
 	  }
+#endif
+	if (target != op0)
+	  emit_move_insn (target, op0);
 
-	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
-	   record its alignment as BIGGEST_ALIGNMENT.  */
-	if (MEM_P (op0) && bitpos == 0 && offset != 0
-	    && is_aligning_offset (offset, tem))
-	  set_mem_align (op0, BIGGEST_ALIGNMENT);
+	temp = gen_label_rtx ();
+	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
+				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
+      }
+      emit_move_insn (target, op1);
+      emit_label (temp);
+      return target;
 
-	/* Don't forget about volatility even if this is a bitfield.  */
-	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
-	  {
-	    if (op0 == orig_op0)
-	      op0 = copy_rtx (op0);
+    case BIT_NOT_EXPR:
+      op0 = expand_expr (treeop0, subtarget,
+			 VOIDmode, EXPAND_NORMAL);
+      if (modifier == EXPAND_STACK_PARM)
+	target = 0;
+      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
+      gcc_assert (temp);
+      return temp;
 
-	    MEM_VOLATILE_P (op0) = 1;
-	  }
+      /* ??? Can optimize bitwise operations with one arg constant.
+	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
+	 and (a bitwise1 b) bitwise2 b (etc)
+	 but that is probably not worth while.  */
 
-	/* In cases where an aligned union has an unaligned object
-	   as a field, we might be extracting a BLKmode value from
-	   an integer-mode (e.g., SImode) object.  Handle this case
-	   by doing the extract into an object as wide as the field
-	   (which we know to be the width of a basic mode), then
-	   storing into memory, and changing the mode to BLKmode.  */
-	if (mode1 == VOIDmode
-	    || REG_P (op0) || GET_CODE (op0) == SUBREG
-	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
-		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
-		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
-		&& modifier != EXPAND_CONST_ADDRESS
-		&& modifier != EXPAND_INITIALIZER)
-	    /* If the field isn't aligned enough to fetch as a memref,
-	       fetch it as a bit field.  */
-	    || (mode1 != BLKmode
-		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
-		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
-		      || (MEM_P (op0)
-			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
-			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
-		     && ((modifier == EXPAND_CONST_ADDRESS
-			  || modifier == EXPAND_INITIALIZER)
-			 ? STRICT_ALIGNMENT
-			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
-		    || (bitpos % BITS_PER_UNIT != 0)))
-	    /* If the type and the field are a constant size and the
-	       size of the type isn't the same size as the bitfield,
-	       we must use bitfield operations.  */
-	    || (bitsize >= 0
-		&& TYPE_SIZE (TREE_TYPE (exp))
-		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
-		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
-					  bitsize)))
-	  {
-	    enum machine_mode ext_mode = mode;
+      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
+	 boolean values when we want in all cases to compute both of them.  In
+	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
+	 as actual zero-or-1 values and then bitwise anding.  In cases where
+	 there cannot be any side effects, better code would be made by
+	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
+	 how to recognize those cases.  */
 
-	    if (ext_mode == BLKmode
-		&& ! (target != 0 && MEM_P (op0)
-		      && MEM_P (target)
-		      && bitpos % BITS_PER_UNIT == 0))
-	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
+    case TRUTH_AND_EXPR:
+      code = BIT_AND_EXPR;
+    case BIT_AND_EXPR:
+      goto binop;
 
-	    if (ext_mode == BLKmode)
-	      {
-		if (target == 0)
-		  target = assign_temp (type, 0, 1, 1);
+    case TRUTH_OR_EXPR:
+      code = BIT_IOR_EXPR;
+    case BIT_IOR_EXPR:
+      goto binop;
 
-		if (bitsize == 0)
-		  return target;
+    case TRUTH_XOR_EXPR:
+      code = BIT_XOR_EXPR;
+    case BIT_XOR_EXPR:
+      goto binop;
 
-		/* In this case, BITPOS must start at a byte boundary and
-		   TARGET, if specified, must be a MEM.  */
-		gcc_assert (MEM_P (op0)
-			    && (!target || MEM_P (target))
-			    && !(bitpos % BITS_PER_UNIT));
+    case LROTATE_EXPR:
+    case RROTATE_EXPR:
+      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
+		  || (GET_MODE_PRECISION (TYPE_MODE (type))
+		      == TYPE_PRECISION (type)));
+      /* fall through */
 
-		emit_block_move (target,
-				 adjust_address (op0, VOIDmode,
-						 bitpos / BITS_PER_UNIT),
-				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
-					  / BITS_PER_UNIT),
-				 (modifier == EXPAND_STACK_PARM
-				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
+    case LSHIFT_EXPR:
+    case RSHIFT_EXPR:
+      /* If this is a fixed-point operation, then we cannot use the code
+	 below because "expand_shift" doesn't support sat/no-sat fixed-point
+         shifts.   */
+      if (ALL_FIXED_POINT_MODE_P (mode))
+	goto binop;
 
-		return target;
-	      }
+      if (! safe_from_p (subtarget, treeop1, 1))
+	subtarget = 0;
+      if (modifier == EXPAND_STACK_PARM)
+	target = 0;
+      op0 = expand_expr (treeop0, subtarget,
+			 VOIDmode, EXPAND_NORMAL);
+      temp = expand_shift (code, mode, op0, treeop1, target,
+			   unsignedp);
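+      /* A left shift can carry bits above the precision of a bit-field
+	 type that is narrower than its mode, e.g. a 24-bit value held
+	 in SImode shifted left by 8, so reduce the LSHIFT result; a
+	 right shift cannot widen the value.  */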
+      if (code == LSHIFT_EXPR)
+	temp = REDUCE_BIT_FIELD (temp);
+      return temp;
 
-	    op0 = validize_mem (op0);
+      /* Could determine the answer when only additive constants differ.  Also,
+	 the addition of one can be handled by changing the condition.  */
+    case LT_EXPR:
+    case LE_EXPR:
+    case GT_EXPR:
+    case GE_EXPR:
+    case EQ_EXPR:
+    case NE_EXPR:
+    case UNORDERED_EXPR:
+    case ORDERED_EXPR:
+    case UNLT_EXPR:
+    case UNLE_EXPR:
+    case UNGT_EXPR:
+    case UNGE_EXPR:
+    case UNEQ_EXPR:
+    case LTGT_EXPR:
+      temp = do_store_flag (ops,
+			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
+			    tmode != VOIDmode ? tmode : mode);
+      if (temp)
+	return temp;
 
-	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
-	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
+      /* Use a compare and a jump for BLKmode comparisons, or for function
+	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
 
-	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
-				     (modifier == EXPAND_STACK_PARM
-				      ? NULL_RTX : target),
-				     ext_mode, ext_mode);
+      if ((target == 0
+	   || modifier == EXPAND_STACK_PARM
+	   || ! safe_from_p (target, treeop0, 1)
+	   || ! safe_from_p (target, treeop1, 1)
+	   /* Make sure we don't have a hard reg (such as the function's
+	      return value) live across basic blocks, if not optimizing.  */
+	   || (!optimize && REG_P (target)
+	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
+	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
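+      /* Otherwise emit the generic sequence
+	   target = 0;  if (!(op0 <code> op1)) goto L;  target = 1;  L:
+	 for cases without a usable store-flag instruction.  */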
 
-	    /* If the result is a record type and BITSIZE is narrower than
-	       the mode of OP0, an integral mode, and this is a big endian
-	       machine, we must put the field into the high-order bits.  */
-	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
-		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
-		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
-	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
-				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
-					    - bitsize),
-				  op0, 1);
+      emit_move_insn (target, const0_rtx);
 
-	    /* If the result type is BLKmode, store the data into a temporary
-	       of the appropriate type, but with the mode corresponding to the
-	       mode for the data we have (op0's mode).  It's tempting to make
-	       this a constant type, since we know it's only being stored once,
-	       but that can cause problems if we are taking the address of this
-	       COMPONENT_REF because the MEM of any reference via that address
-	       will have flags corresponding to the type, which will not
-	       necessarily be constant.  */
-	    if (mode == BLKmode)
-	      {
-		HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
-		rtx new_rtx;
+      op1 = gen_label_rtx ();
+      jumpifnot_1 (code, treeop0, treeop1, op1);
 
-		/* If the reference doesn't use the alias set of its type,
-		   we cannot create the temporary using that type.  */
-		if (component_uses_parent_alias_set (exp))
-		  {
-		    new_rtx = assign_stack_local (ext_mode, size, 0);
-		    set_mem_alias_set (new_rtx, get_alias_set (exp));
-		  }
-		else
-		  new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
+      emit_move_insn (target, const1_rtx);
 
-		emit_move_insn (new_rtx, op0);
-		op0 = copy_rtx (new_rtx);
-		PUT_MODE (op0, BLKmode);
-		set_mem_attributes (op0, exp, 1);
-	      }
+      emit_label (op1);
+      return target;
 
-	    return op0;
-	  }
+    case TRUTH_NOT_EXPR:
+      if (modifier == EXPAND_STACK_PARM)
+	target = 0;
+      op0 = expand_expr (treeop0, target,
+			 VOIDmode, EXPAND_NORMAL);
+      /* The parser is careful to generate TRUTH_NOT_EXPR
+	 only with operands that are always zero or one.  */
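+      /* So !OP0 is computed simply as OP0 ^ 1, with no comparison
+	 against zero needed.  */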
+      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
+			   target, 1, OPTAB_LIB_WIDEN);
+      gcc_assert (temp);
+      return temp;
 
-	/* If the result is BLKmode, use that to access the object
-	   now as well.  */
-	if (mode == BLKmode)
-	  mode1 = BLKmode;
+    case COMPLEX_EXPR:
+      /* Get the rtx code of the operands.  */
+      op0 = expand_normal (treeop0);
+      op1 = expand_normal (treeop1);
 
-	/* Get a reference to just this component.  */
-	if (modifier == EXPAND_CONST_ADDRESS
-	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
-	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
-	else
-	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
+      if (!target)
+	target = gen_reg_rtx (TYPE_MODE (type));
 
-	if (op0 == orig_op0)
-	  op0 = copy_rtx (op0);
+      /* Move the real (op0) and imaginary (op1) parts to their location.  */
+      write_complex_part (target, op0, false);
+      write_complex_part (target, op1, true);
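+      /* E.g. for a complex-double-mode target this amounts to two
+	 DFmode moves into the real and imaginary halves of TARGET.  */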
 
-	set_mem_attributes (op0, exp, 0);
-	if (REG_P (XEXP (op0, 0)))
-	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
+      return target;
 
-	MEM_VOLATILE_P (op0) |= volatilep;
-	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
-	    || modifier == EXPAND_CONST_ADDRESS
-	    || modifier == EXPAND_INITIALIZER)
-	  return op0;
-	else if (target == 0)
-	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
+    case WIDEN_SUM_EXPR:
+      {
+        tree oprnd0 = treeop0;
+        tree oprnd1 = treeop1;
 
-	convert_move (target, op0, unsignedp);
-	return target;
+        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
+        target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
+                                            target, unsignedp);
+        return target;
       }
 
-    case OBJ_TYPE_REF:
-      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
-
-    case CALL_EXPR:
-      /* All valid uses of __builtin_va_arg_pack () are removed during
-	 inlining.  */
-      if (CALL_EXPR_VA_ARG_PACK (exp))
-	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
+    case REDUC_MAX_EXPR:
+    case REDUC_MIN_EXPR:
+    case REDUC_PLUS_EXPR:
       {
-	tree fndecl = get_callee_fndecl (exp), attr;
-
-	if (fndecl
-	    && (attr = lookup_attribute ("error",
-					 DECL_ATTRIBUTES (fndecl))) != NULL)
-	  error ("%Kcall to %qs declared with attribute error: %s",
-		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
-		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
-	if (fndecl
-	    && (attr = lookup_attribute ("warning",
-					 DECL_ATTRIBUTES (fndecl))) != NULL)
-	  warning_at (tree_nonartificial_location (exp),
-		      0, "%Kcall to %qs declared with attribute warning: %s",
-		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
-		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
+        op0 = expand_normal (treeop0);
+        this_optab = optab_for_tree_code (code, type, optab_default);
+        temp = expand_unop (mode, this_optab, op0, target, unsignedp);
+        gcc_assert (temp);
+        return temp;
+      }
 
-	/* Check for a built-in function.  */
-	if (fndecl && DECL_BUILT_IN (fndecl))
-	  {
-	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
-	    return expand_builtin (exp, target, subtarget, tmode, ignore);
-	  }
+    case VEC_EXTRACT_EVEN_EXPR:
+    case VEC_EXTRACT_ODD_EXPR:
+      {
+        expand_operands (treeop0,  treeop1,
+                         NULL_RTX, &op0, &op1, EXPAND_NORMAL);
+        this_optab = optab_for_tree_code (code, type, optab_default);
+        temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
+                             OPTAB_WIDEN);
+        gcc_assert (temp);
+        return temp;
       }
-      return expand_call (exp, target, ignore);
 
-    case PAREN_EXPR:
-    CASE_CONVERT:
-      if (treeop0 == error_mark_node)
-	return const0_rtx;
+    case VEC_INTERLEAVE_HIGH_EXPR:
+    case VEC_INTERLEAVE_LOW_EXPR:
+      {
+        expand_operands (treeop0,  treeop1,
+                         NULL_RTX, &op0, &op1, EXPAND_NORMAL);
+        this_optab = optab_for_tree_code (code, type, optab_default);
+        temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
+                             OPTAB_WIDEN);
+        gcc_assert (temp);
+        return temp;
+      }
 
-      if (TREE_CODE (type) == UNION_TYPE)
-	{
-	  tree valtype = TREE_TYPE (treeop0);
+    case VEC_LSHIFT_EXPR:
+    case VEC_RSHIFT_EXPR:
+      {
+	target = expand_vec_shift_expr (ops, target);
+	return target;
+      }
 
-	  /* If both input and output are BLKmode, this conversion isn't doing
-	     anything except possibly changing memory attribute.  */
-	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
-	    {
-	      rtx result = expand_expr (treeop0, target, tmode,
-					modifier);
+    case VEC_UNPACK_HI_EXPR:
+    case VEC_UNPACK_LO_EXPR:
+      {
+	op0 = expand_normal (treeop0);
+	this_optab = optab_for_tree_code (code, type, optab_default);
+	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
+					  target, unsignedp);
+	gcc_assert (temp);
+	return temp;
+      }
 
-	      result = copy_rtx (result);
-	      set_mem_attributes (result, exp, 0);
-	      return result;
-	    }
+    case VEC_UNPACK_FLOAT_HI_EXPR:
+    case VEC_UNPACK_FLOAT_LO_EXPR:
+      {
+	op0 = expand_normal (treeop0);
+	/* The signedness is determined from the input operand.  */
+	this_optab = optab_for_tree_code (code,
+					  TREE_TYPE (treeop0),
+					  optab_default);
+	temp = expand_widen_pattern_expr
+	  (ops, op0, NULL_RTX, NULL_RTX,
+	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
 
-	  if (target == 0)
-	    {
-	      if (TYPE_MODE (type) != BLKmode)
-		target = gen_reg_rtx (TYPE_MODE (type));
-	      else
-		target = assign_temp (type, 0, 1, 1);
-	    }
+	gcc_assert (temp);
+	return temp;
+      }
 
-	  if (MEM_P (target))
-	    /* Store data into beginning of memory target.  */
-	    store_expr (treeop0,
-			adjust_address (target, TYPE_MODE (valtype), 0),
-			modifier == EXPAND_STACK_PARM,
-			false);
+    case VEC_WIDEN_MULT_HI_EXPR:
+    case VEC_WIDEN_MULT_LO_EXPR:
+      {
+	tree oprnd0 = treeop0;
+	tree oprnd1 = treeop1;
 
-	  else
-	    {
-	      gcc_assert (REG_P (target));
+	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
+	target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
+					    target, unsignedp);
+	gcc_assert (target);
+	return target;
+      }
 
-	      /* Store this field into a union of the proper type.  */
-	      store_field (target,
-			   MIN ((int_size_in_bytes (TREE_TYPE
-						    (treeop0))
-				 * BITS_PER_UNIT),
-				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
-			   0, TYPE_MODE (valtype), treeop0,
-			   type, 0, false);
-	    }
+    case VEC_PACK_TRUNC_EXPR:
+    case VEC_PACK_SAT_EXPR:
+    case VEC_PACK_FIX_TRUNC_EXPR:
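+      /* The pack optabs are indexed by the mode of the wide input
+	 vectors, so expand in the operand's mode rather than in the
+	 narrower result mode.  */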
+      mode = TYPE_MODE (TREE_TYPE (treeop0));
+      goto binop;
 
-	  /* Return the entire union.  */
-	  return target;
-	}
+    default:
+      gcc_unreachable ();
+    }
 
-      if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
-	{
-	  op0 = expand_expr (treeop0, target, VOIDmode,
-			     modifier);
+  /* Here to do an ordinary binary operator.  */
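+  /* binop expands both operands first; binop2 is entered with OP0 and
+     OP1 already expanded; binop3 is entered with THIS_OPTAB also
+     chosen, as by the widening-multiply fallbacks.  */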
+ binop:
+  expand_operands (treeop0, treeop1,
+		   subtarget, &op0, &op1, EXPAND_NORMAL);
+ binop2:
+  this_optab = optab_for_tree_code (code, type, optab_default);
+ binop3:
+  if (modifier == EXPAND_STACK_PARM)
+    target = 0;
+  temp = expand_binop (mode, this_optab, op0, op1, target,
+		       unsignedp, OPTAB_LIB_WIDEN);
+  gcc_assert (temp);
+  return REDUCE_BIT_FIELD (temp);
+}
+#undef REDUCE_BIT_FIELD
 
-	  /* If the signedness of the conversion differs and OP0 is
-	     a promoted SUBREG, clear that indication since we now
-	     have to do the proper extension.  */
-	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
-	      && GET_CODE (op0) == SUBREG)
-	    SUBREG_PROMOTED_VAR_P (op0) = 0;
+static rtx
+expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
+		    enum expand_modifier modifier, rtx *alt_rtl)
+{
+  rtx op0, op1, temp, decl_rtl;
+  tree type;
+  int unsignedp;
+  enum machine_mode mode;
+  enum tree_code code = TREE_CODE (exp);
+  optab this_optab;
+  rtx subtarget, original_target;
+  int ignore;
+  tree context;
+  bool reduce_bit_field;
+  location_t loc = EXPR_LOCATION (exp);
+  struct separate_ops ops;
+  tree treeop0, treeop1, treeop2;
 
-	  return REDUCE_BIT_FIELD (op0);
-	}
+  type = TREE_TYPE (exp);
+  mode = TYPE_MODE (type);
+  unsignedp = TYPE_UNSIGNED (type);
 
-      op0 = expand_expr (treeop0, NULL_RTX, mode,
-			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
-      if (GET_MODE (op0) == mode)
-	;
+  treeop0 = treeop1 = treeop2 = NULL_TREE;
+  if (!VL_EXP_CLASS_P (exp))
+    switch (TREE_CODE_LENGTH (code))
+      {
+	default:
+	case 3: treeop2 = TREE_OPERAND (exp, 2);
+	case 2: treeop1 = TREE_OPERAND (exp, 1);
+	case 1: treeop0 = TREE_OPERAND (exp, 0);
+	case 0: break;
+      }
+  ops.code = code;
+  ops.type = type;
+  ops.op0 = treeop0;
+  ops.op1 = treeop1;
+  ops.op2 = treeop2;
+  ops.location = loc;
 
-      /* If OP0 is a constant, just convert it into the proper mode.  */
-      else if (CONSTANT_P (op0))
-	{
-	  tree inner_type = TREE_TYPE (treeop0);
-	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
+  ignore = (target == const0_rtx
+	    || ((CONVERT_EXPR_CODE_P (code)
+		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
+		&& TREE_CODE (type) == VOID_TYPE));
 
-	  if (modifier == EXPAND_INITIALIZER)
-	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
-				       subreg_lowpart_offset (mode,
-							      inner_mode));
-	  else
-	    op0=  convert_modes (mode, inner_mode, op0,
-				 TYPE_UNSIGNED (inner_type));
-	}
+  /* An operation in what may be a bit-field type needs the
+     result to be reduced to the precision of the bit-field type,
+     which is narrower than that of the type's mode.  */
+  reduce_bit_field = (!ignore
+		      && TREE_CODE (type) == INTEGER_TYPE
+		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
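+  /* E.g. an INTEGER_TYPE with TYPE_PRECISION 3 whose mode is QImode
+     (precision 8) sets this flag.  */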
 
-      else if (modifier == EXPAND_INITIALIZER)
-	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
+  /* If we are going to ignore this result, we need only do something
+     if there is a side-effect somewhere in the expression.  If there
+     is, short-circuit the most common cases here.  Note that we must
+     not call expand_expr with anything but const0_rtx in case this
+     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
 
-      else if (target == 0)
-	op0 = convert_to_mode (mode, op0,
-			       TYPE_UNSIGNED (TREE_TYPE
-					      (treeop0)));
-      else
+  if (ignore)
+    {
+      if (! TREE_SIDE_EFFECTS (exp))
+	return const0_rtx;
+
+      /* Ensure we reference a volatile object even if its value is ignored,
+	 but don't do this if all we are doing is taking its address.  */
+      if (TREE_THIS_VOLATILE (exp)
+	  && TREE_CODE (exp) != FUNCTION_DECL
+	  && mode != VOIDmode && mode != BLKmode
+	  && modifier != EXPAND_CONST_ADDRESS)
 	{
-	  convert_move (target, op0,
-			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
-	  op0 = target;
+	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
+	  if (MEM_P (temp))
+	    temp = copy_to_reg (temp);
+	  return const0_rtx;
 	}
 
-      return REDUCE_BIT_FIELD (op0);
-
-    case VIEW_CONVERT_EXPR:
-      op0 = NULL_RTX;
+      if (TREE_CODE_CLASS (code) == tcc_unary
+	  || code == COMPONENT_REF || code == INDIRECT_REF)
+	return expand_expr (treeop0, const0_rtx, VOIDmode,
+			    modifier);
 
-      /* If we are converting to BLKmode, try to avoid an intermediate
-	 temporary by fetching an inner memory reference.  */
-      if (mode == BLKmode
-	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
-	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
-	  && handled_component_p (treeop0))
-      {
-	enum machine_mode mode1;
-	HOST_WIDE_INT bitsize, bitpos;
-	tree offset;
-	int unsignedp;
-	int volatilep = 0;
-	tree tem
-	  = get_inner_reference (treeop0, &bitsize, &bitpos,
-				 &offset, &mode1, &unsignedp, &volatilep,
-				 true);
-	rtx orig_op0;
+      else if (TREE_CODE_CLASS (code) == tcc_binary
+	       || TREE_CODE_CLASS (code) == tcc_comparison
+	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
+	{
+	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
+	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
+	  return const0_rtx;
+	}
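+      /* E.g. an unused addition whose operands have side effects
+	 expands each operand with const0_rtx as the target, emitting
+	 only the side effects.  */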
+      else if (code == BIT_FIELD_REF)
+	{
+	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
+	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
+	  expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
+	  return const0_rtx;
+	}
 
-	/* ??? We should work harder and deal with non-zero offsets.  */
-	if (!offset
-	    && (bitpos % BITS_PER_UNIT) == 0
-	    && bitsize >= 0
-	    && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
-	  {
-	    /* See the normal_inner_ref case for the rationale.  */
-	    orig_op0
-	      = expand_expr (tem,
-			     (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
-			      && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
-				  != INTEGER_CST)
-			      && modifier != EXPAND_STACK_PARM
-			      ? target : NULL_RTX),
-			     VOIDmode,
-			     (modifier == EXPAND_INITIALIZER
-			      || modifier == EXPAND_CONST_ADDRESS
-			      || modifier == EXPAND_STACK_PARM)
-			     ? modifier : EXPAND_NORMAL);
+      target = 0;
+    }
 
-	    if (MEM_P (orig_op0))
-	      {
-		op0 = orig_op0;
+  if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
+    target = 0;
 
-		/* Get a reference to just this component.  */
-		if (modifier == EXPAND_CONST_ADDRESS
-		    || modifier == EXPAND_SUM
-		    || modifier == EXPAND_INITIALIZER)
-		  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
-		else
-		  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
+  /* Use subtarget as the target for operand 0 of a binary operation.  */
+  subtarget = get_subtarget (target);
+  original_target = target;
 
-		if (op0 == orig_op0)
-		  op0 = copy_rtx (op0);
+  switch (code)
+    {
+    case LABEL_DECL:
+      {
+	tree function = decl_function_context (exp);
 
-		set_mem_attributes (op0, treeop0, 0);
-		if (REG_P (XEXP (op0, 0)))
-		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
+	temp = label_rtx (exp);
+	temp = gen_rtx_LABEL_REF (Pmode, temp);
 
-		MEM_VOLATILE_P (op0) |= volatilep;
-	      }
-	  }
+	if (function != current_function_decl
+	    && function != 0)
+	  LABEL_REF_NONLOCAL_P (temp) = 1;
+
+	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
+	return temp;
       }
 
-      if (!op0)
-	op0 = expand_expr (treeop0,
-			   NULL_RTX, VOIDmode, modifier);
+    case SSA_NAME:
+      /* ??? ivopts calls the expander without any preparation from
+	 out-of-ssa.  So fake instructions as if this were an access to
+	 the base variable.  This unnecessarily allocates a pseudo; see
+	 how we can reuse it if partition base vars have it set already.  */
+      if (!currently_expanding_to_rtl)
+	return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, NULL);
+      {
+	gimple g = get_gimple_for_ssa_name (exp);
+	if (g)
+	  return expand_expr_real_1 (gimple_assign_rhs_to_tree (g), target,
+				     tmode, modifier, NULL);
+      }
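+      /* Otherwise fall back to the RTL already assigned to the name's
+	 partition and handle it like the base variable's DECL_RTL.  */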
+      decl_rtl = get_rtx_for_ssa_name (exp);
+      exp = SSA_NAME_VAR (exp);
+      goto expand_decl_rtl;
 
-      /* If the input and output modes are both the same, we are done.  */
-      if (mode == GET_MODE (op0))
-	;
-      /* If neither mode is BLKmode, and both modes are the same size
-	 then we can use gen_lowpart.  */
-      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
-	       && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
-	       && !COMPLEX_MODE_P (GET_MODE (op0)))
+    case PARM_DECL:
+    case VAR_DECL:
+      /* If a static var's type was incomplete when the decl was written,
+	 but the type is complete now, lay out the decl now.  */
+      if (DECL_SIZE (exp) == 0
+	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
+	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
+	layout_decl (exp, 0);
+
+      /* TLS emulation hook - replace __thread vars with
+	 *__emutls_get_address (&_emutls.var).  */
+      if (! targetm.have_tls
+	  && TREE_CODE (exp) == VAR_DECL
+	  && DECL_THREAD_LOCAL_P (exp))
 	{
-	  if (GET_CODE (op0) == SUBREG)
-	    op0 = force_reg (GET_MODE (op0), op0);
-	  op0 = gen_lowpart (mode, op0);
+	  exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
+	  return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
 	}
-      /* If both modes are integral, then we can convert from one to the
-	 other.  */
-      else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
-	op0 = convert_modes (mode, GET_MODE (op0), op0, 
-			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
-      /* As a last resort, spill op0 to memory, and reload it in a
-	 different mode.  */
-      else if (!MEM_P (op0))
-	{
-	  /* If the operand is not a MEM, force it into memory.  Since we
-	     are going to be changing the mode of the MEM, don't call
-	     force_const_mem for constants because we don't allow pool
-	     constants to change mode.  */
-	  tree inner_type = TREE_TYPE (treeop0);
 
-	  gcc_assert (!TREE_ADDRESSABLE (exp));
+      /* ... fall through ...  */
 
-	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
-	    target
-	      = assign_stack_temp_for_type
-		(TYPE_MODE (inner_type),
-		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
+    case FUNCTION_DECL:
+    case RESULT_DECL:
+      decl_rtl = DECL_RTL (exp);
+    expand_decl_rtl:
+      gcc_assert (decl_rtl);
+      decl_rtl = copy_rtx (decl_rtl);
 
-	  emit_move_insn (target, op0);
-	  op0 = target;
+      /* Ensure the variable is marked as used even if it doesn't go
+	 through a parser.  If it hasn't been used yet, write out an
+	 external definition.  */
+      if (! TREE_USED (exp))
+	{
+	  assemble_external (exp);
+	  TREE_USED (exp) = 1;
 	}
 
-      /* At this point, OP0 is in the correct mode.  If the output type is
-	 such that the operand is known to be aligned, indicate that it is.
-	 Otherwise, we need only be concerned about alignment for non-BLKmode
-	 results.  */
-      if (MEM_P (op0))
-	{
-	  op0 = copy_rtx (op0);
+      /* Show we haven't gotten RTL for this yet.  */
+      temp = 0;
 
-	  if (TYPE_ALIGN_OK (type))
-	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
-	  else if (STRICT_ALIGNMENT
-		   && mode != BLKmode
-		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
-	    {
-	      tree inner_type = TREE_TYPE (treeop0);
-	      HOST_WIDE_INT temp_size
-		= MAX (int_size_in_bytes (inner_type),
-		       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
-	      rtx new_rtx
-		= assign_stack_temp_for_type (mode, temp_size, 0, type);
-	      rtx new_with_op0_mode
-		= adjust_address (new_rtx, GET_MODE (op0), 0);
+      /* Variables inherited from containing functions should have
+	 been lowered by this point.  */
+      context = decl_function_context (exp);
+      gcc_assert (!context
+		  || context == current_function_decl
+		  || TREE_STATIC (exp)
+		  /* ??? C++ creates functions that are not TREE_STATIC.  */
+		  || TREE_CODE (exp) == FUNCTION_DECL);
 
-	      gcc_assert (!TREE_ADDRESSABLE (exp));
+      /* This is the case of an array whose size is to be determined
+	 from its initializer, while the initializer is still being parsed.
+	 See expand_decl.  */
 
-	      if (GET_MODE (op0) == BLKmode)
-		emit_block_move (new_with_op0_mode, op0,
-				 GEN_INT (GET_MODE_SIZE (mode)),
-				 (modifier == EXPAND_STACK_PARM
-				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
-	      else
-		emit_move_insn (new_with_op0_mode, op0);
+      if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
+	temp = validize_mem (decl_rtl);
 
-	      op0 = new_rtx;
-	    }
+      /* If DECL_RTL is memory, we are in the normal case and the
+	 address is not valid, get the address into a register.  */
 
-	  op0 = adjust_address (op0, mode, 0);
+      else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
+	{
+	  if (alt_rtl)
+	    *alt_rtl = decl_rtl;
+	  decl_rtl = use_anchored_address (decl_rtl);
+	  if (modifier != EXPAND_CONST_ADDRESS
+	      && modifier != EXPAND_SUM
+	      && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
+	    temp = replace_equiv_address (decl_rtl,
+					  copy_rtx (XEXP (decl_rtl, 0)));
 	}
 
-      return op0;
+      /* If we got something, return it.  But first, set the alignment
+	 if the address is a register.  */
+      if (temp != 0)
+	{
+	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
+	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
 
-    case POINTER_PLUS_EXPR: 
-      /* Even though the sizetype mode and the pointer's mode can be different
-         expand is able to handle this correctly and get the correct result out 
-         of the PLUS_EXPR code.  */
-      /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
-         if sizetype precision is smaller than pointer precision.  */
-      if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
-	treeop1 = fold_convert_loc (loc, type,
-				    fold_convert_loc (loc, ssizetype,
-						      treeop1));
-    case PLUS_EXPR:
+	  return temp;
+	}
 
-      /* Check if this is a case for multiplication and addition.  */
-      if ((TREE_CODE (type) == INTEGER_TYPE
-	   || TREE_CODE (type) == FIXED_POINT_TYPE)
-	  && (subexp0_def = get_def_for_expr (treeop0,
-					      MULT_EXPR)))
+      /* If the mode of DECL_RTL does not match that of the decl, it
+	 must be a promoted value.  We return a SUBREG of the wanted mode,
+	 but mark it so that we know that it was already extended.  */
+
+      if (REG_P (decl_rtl)
+	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
 	{
-	  tree subsubexp0, subsubexp1;
-	  gimple subsubexp0_def, subsubexp1_def;
-	  enum tree_code this_code;
+	  enum machine_mode pmode;
 
-	  this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
-						       : FIXED_CONVERT_EXPR;
-	  subsubexp0 = gimple_assign_rhs1 (subexp0_def);
-	  subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
-	  subsubexp1 = gimple_assign_rhs2 (subexp0_def);
-	  subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
-	  if (subsubexp0_def && subsubexp1_def
-	      && (top0 = gimple_assign_rhs1 (subsubexp0_def))
-	      && (top1 = gimple_assign_rhs1 (subsubexp1_def))
-	      && (TYPE_PRECISION (TREE_TYPE (top0))
-		  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
-	      && (TYPE_PRECISION (TREE_TYPE (top0))
-		  == TYPE_PRECISION (TREE_TYPE (top1)))
-	      && (TYPE_UNSIGNED (TREE_TYPE (top0))
-		  == TYPE_UNSIGNED (TREE_TYPE (top1))))
-	    {
-	      tree op0type = TREE_TYPE (top0);
-	      enum machine_mode innermode = TYPE_MODE (op0type);
-	      bool zextend_p = TYPE_UNSIGNED (op0type);
-	      bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
-	      if (sat_p == 0)
-		this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
-	      else
-		this_optab = zextend_p ? usmadd_widen_optab
-				       : ssmadd_widen_optab;
-	      if (mode == GET_MODE_2XWIDER_MODE (innermode)
-		  && (optab_handler (this_optab, mode)->insn_code
-		      != CODE_FOR_nothing))
-		{
-		  expand_operands (top0, top1, NULL_RTX, &op0, &op1,
-				   EXPAND_NORMAL);
-		  op2 = expand_expr (treeop1, subtarget,
-				     VOIDmode, EXPAND_NORMAL);
-		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
-					    target, unsignedp);
-		  gcc_assert (temp);
-		  return REDUCE_BIT_FIELD (temp);
-		}
-	    }
+	  /* Get the signedness used for this variable.  Ensure we get the
+	     same mode we got when the variable was declared.  */
+	  pmode = promote_decl_mode (exp, &unsignedp);
+	  gcc_assert (GET_MODE (decl_rtl) == pmode);
+
+	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
+	  SUBREG_PROMOTED_VAR_P (temp) = 1;
+	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
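+
+	  /* E.g. on a target whose PROMOTE_MODE widens HImode locals
+	     to SImode, DECL_RTL is an SImode pseudo and the value goes
+	     back as (subreg:HI (reg:SI ...)) marked promoted, so
+	     redundant extensions can be omitted later.  */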
+	  return temp;
 	}
 
-      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
-	 something else, make sure we add the register to the constant and
-	 then to the other thing.  This case can occur during strength
-	 reduction and doing it this way will produce better code if the
-	 frame pointer or argument pointer is eliminated.
+      return decl_rtl;
 
-	 fold-const.c will ensure that the constant is always in the inner
-	 PLUS_EXPR, so the only case we need to do anything about is if
-	 sp, ap, or fp is our second argument, in which case we must swap
-	 the innermost first argument and our second argument.  */
+    case INTEGER_CST:
+      temp = immed_double_const (TREE_INT_CST_LOW (exp),
+				 TREE_INT_CST_HIGH (exp), mode);
 
-      if (TREE_CODE (treeop0) == PLUS_EXPR
-	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
-	  && TREE_CODE (treeop1) == VAR_DECL
-	  && (DECL_RTL (treeop1) == frame_pointer_rtx
-	      || DECL_RTL (treeop1) == stack_pointer_rtx
-	      || DECL_RTL (treeop1) == arg_pointer_rtx))
-	{
-	  tree t = treeop1;
+      return temp;
 
-	  treeop1 = TREE_OPERAND (treeop0, 0);
-	  TREE_OPERAND (treeop0, 0) = t;
-	}
+    case VECTOR_CST:
+      {
+	tree tmp = NULL_TREE;
+	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
+	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
+	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
+	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
+	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
+	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
+	  return const_vector_from_tree (exp);
+	if (GET_MODE_CLASS (mode) == MODE_INT)
+	  {
+	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
+	    if (type_for_mode)
+	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
+	  }
+	if (!tmp)
+	  tmp = build_constructor_from_list (type,
+					     TREE_VECTOR_CST_ELTS (exp));
+	return expand_expr (tmp, ignore ? const0_rtx : target,
+			    tmode, modifier);
+      }
 
-      /* If the result is to be ptr_mode and we are adding an integer to
-	 something, we might be forming a constant.  So try to use
-	 plus_constant.  If it produces a sum and we can't accept it,
-	 use force_operand.  This allows P = &ARR[const] to generate
-	 efficient code on machines where a SYMBOL_REF is not a valid
-	 address.
+    case CONST_DECL:
+      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
 
-	 If this is an EXPAND_SUM call, always return the sum.  */
-      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
-	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
-	{
-	  if (modifier == EXPAND_STACK_PARM)
-	    target = 0;
-	  if (TREE_CODE (treeop0) == INTEGER_CST
-	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
-	      && TREE_CONSTANT (treeop1))
-	    {
-	      rtx constant_part;
+    case REAL_CST:
+      /* If optimized, generate immediate CONST_DOUBLE
+	 which will be turned into memory by reload if necessary.
 
-	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
-				 EXPAND_SUM);
-	      /* Use immed_double_const to ensure that the constant is
-		 truncated according to the mode of OP1, then sign extended
-		 to a HOST_WIDE_INT.  Using the constant directly can result
-		 in non-canonical RTL in a 64x32 cross compile.  */
-	      constant_part
-		= immed_double_const (TREE_INT_CST_LOW (treeop0),
-				      (HOST_WIDE_INT) 0,
-				      TYPE_MODE (TREE_TYPE (treeop1)));
-	      op1 = plus_constant (op1, INTVAL (constant_part));
-	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
-		op1 = force_operand (op1, target);
-	      return REDUCE_BIT_FIELD (op1);
-	    }
+	 We used to force a register so that loop.c could see it.  But
+	 this does not allow gen_* patterns to perform optimizations with
+	 the constants.  It also produces two insns in cases like "x = 1.0;".
+	 On most machines, floating-point constants are not permitted in
+	 many insns, so we'd end up copying it to a register in any case.
 
-	  else if (TREE_CODE (treeop1) == INTEGER_CST
-		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
-		   && TREE_CONSTANT (treeop0))
-	    {
-	      rtx constant_part;
+	 Now, we do the copying in expand_binop, if appropriate.  */
+      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
+					   TYPE_MODE (TREE_TYPE (exp)));
 
-	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
-				 (modifier == EXPAND_INITIALIZER
-				 ? EXPAND_INITIALIZER : EXPAND_SUM));
-	      if (! CONSTANT_P (op0))
-		{
-		  op1 = expand_expr (treeop1, NULL_RTX,
-				     VOIDmode, modifier);
-		  /* Return a PLUS if modifier says it's OK.  */
-		  if (modifier == EXPAND_SUM
-		      || modifier == EXPAND_INITIALIZER)
-		    return simplify_gen_binary (PLUS, mode, op0, op1);
-		  goto binop2;
-		}
-	      /* Use immed_double_const to ensure that the constant is
-		 truncated according to the mode of OP1, then sign extended
-		 to a HOST_WIDE_INT.  Using the constant directly can result
-		 in non-canonical RTL in a 64x32 cross compile.  */
-	      constant_part
-		= immed_double_const (TREE_INT_CST_LOW (treeop1),
-				      (HOST_WIDE_INT) 0,
-				      TYPE_MODE (TREE_TYPE (treeop0)));
-	      op0 = plus_constant (op0, INTVAL (constant_part));
-	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
-		op0 = force_operand (op0, target);
-	      return REDUCE_BIT_FIELD (op0);
-	    }
-	}
+    case FIXED_CST:
+      return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
+					   TYPE_MODE (TREE_TYPE (exp)));
 
-      /* No sense saving up arithmetic to be done
-	 if it's all in the wrong mode to form part of an address.
-	 And force_operand won't know whether to sign-extend or
-	 zero-extend.  */
-      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
-	  || mode != ptr_mode)
+    case COMPLEX_CST:
+      /* Handle evaluating a complex constant in a CONCAT target.  */
+      if (original_target && GET_CODE (original_target) == CONCAT)
 	{
-	  expand_operands (treeop0, treeop1,
-			   subtarget, &op0, &op1, EXPAND_NORMAL);
-	  if (op0 == const0_rtx)
-	    return op1;
-	  if (op1 == const0_rtx)
-	    return op0;
-	  goto binop2;
+	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
+	  rtx rtarg, itarg;
+
+	  rtarg = XEXP (original_target, 0);
+	  itarg = XEXP (original_target, 1);
+
+	  /* Move the real and imaginary parts separately.  */
+	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
+	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
+
+	  if (op0 != rtarg)
+	    emit_move_insn (rtarg, op0);
+	  if (op1 != itarg)
+	    emit_move_insn (itarg, op1);
+
+	  return original_target;
 	}
 
-      expand_operands (treeop0, treeop1,
-		       subtarget, &op0, &op1, modifier);
-      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
+      /* ... fall through ...  */
 
-    case MINUS_EXPR:
-      /* Check if this is a case for multiplication and subtraction.  */
-      if ((TREE_CODE (type) == INTEGER_TYPE
-	   || TREE_CODE (type) == FIXED_POINT_TYPE)
-	  && (subexp1_def = get_def_for_expr (treeop1,
-					      MULT_EXPR)))
-	{
-	  tree subsubexp0, subsubexp1;
-	  gimple subsubexp0_def, subsubexp1_def;
-	  enum tree_code this_code;
+    case STRING_CST:
+      temp = expand_expr_constant (exp, 1, modifier);
 
-	  this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
-						       : FIXED_CONVERT_EXPR;
-	  subsubexp0 = gimple_assign_rhs1 (subexp1_def);
-	  subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
-	  subsubexp1 = gimple_assign_rhs2 (subexp1_def);
-	  subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
-	  if (subsubexp0_def && subsubexp1_def
-	      && (top0 = gimple_assign_rhs1 (subsubexp0_def))
-	      && (top1 = gimple_assign_rhs1 (subsubexp1_def))
-	      && (TYPE_PRECISION (TREE_TYPE (top0))
-		  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
-	      && (TYPE_PRECISION (TREE_TYPE (top0))
-		  == TYPE_PRECISION (TREE_TYPE (top1)))
-	      && (TYPE_UNSIGNED (TREE_TYPE (top0))
-		  == TYPE_UNSIGNED (TREE_TYPE (top1))))
-	    {
-	      tree op0type = TREE_TYPE (top0);
-	      enum machine_mode innermode = TYPE_MODE (op0type);
-	      bool zextend_p = TYPE_UNSIGNED (op0type);
-	      bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
-	      if (sat_p == 0)
-		this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
-	      else
-		this_optab = zextend_p ? usmsub_widen_optab
-				       : ssmsub_widen_optab;
-	      if (mode == GET_MODE_2XWIDER_MODE (innermode)
-		  && (optab_handler (this_optab, mode)->insn_code
-		      != CODE_FOR_nothing))
-		{
-		  expand_operands (top0, top1, NULL_RTX, &op0, &op1,
-				   EXPAND_NORMAL);
-		  op2 = expand_expr (treeop0, subtarget,
-				     VOIDmode, EXPAND_NORMAL);
-		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
-					    target, unsignedp);
-		  gcc_assert (temp);
-		  return REDUCE_BIT_FIELD (temp);
-		}
-	    }
-	}
+      /* temp contains a constant address.
+	 On RISC machines where a constant address isn't valid,
+	 make some insns to get that address into a register.  */
+      if (modifier != EXPAND_CONST_ADDRESS
+	  && modifier != EXPAND_INITIALIZER
+	  && modifier != EXPAND_SUM
+	  && ! memory_address_p (mode, XEXP (temp, 0)))
+	return replace_equiv_address (temp,
+				      copy_rtx (XEXP (temp, 0)));
+      return temp;
 
-      /* For initializers, we are allowed to return a MINUS of two
-	 symbolic constants.  Here we handle all cases when both operands
-	 are constant.  */
-      /* Handle difference of two symbolic constants,
-	 for the sake of an initializer.  */
-      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
-	  && really_constant_p (treeop0)
-	  && really_constant_p (treeop1))
-	{
-	  expand_operands (treeop0, treeop1,
-			   NULL_RTX, &op0, &op1, modifier);
+    case SAVE_EXPR:
+      {
+	tree val = treeop0;
+	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
 
-	  /* If the last operand is a CONST_INT, use plus_constant of
-	     the negated constant.  Else make the MINUS.  */
-	  if (CONST_INT_P (op1))
-	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
-	  else
-	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
-	}
+	if (!SAVE_EXPR_RESOLVED_P (exp))
+	  {
+	    /* We can indeed still hit this case, typically via builtin
+	       expanders calling save_expr immediately before expanding
+	       something.  Assume this means that we only have to deal
+	       with non-BLKmode values.  */
+	    gcc_assert (GET_MODE (ret) != BLKmode);
 
-      /* No sense saving up arithmetic to be done
-	 if it's all in the wrong mode to form part of an address.
-	 And force_operand won't know whether to sign-extend or
-	 zero-extend.  */
-      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
-	  || mode != ptr_mode)
-	goto binop;
+	    val = build_decl (EXPR_LOCATION (exp),
+			      VAR_DECL, NULL, TREE_TYPE (exp));
+	    DECL_ARTIFICIAL (val) = 1;
+	    DECL_IGNORED_P (val) = 1;
+	    treeop0 = val;
+	    TREE_OPERAND (exp, 0) = treeop0;
+	    SAVE_EXPR_RESOLVED_P (exp) = 1;
 
-      expand_operands (treeop0, treeop1,
-		       subtarget, &op0, &op1, modifier);
+	    if (!CONSTANT_P (ret))
+	      ret = copy_to_reg (ret);
+	    SET_DECL_RTL (val, ret);
+	  }
 
-      /* Convert A - const to A + (-const).  */
-      if (CONST_INT_P (op1))
+        return ret;
+      }
+
+    case GOTO_EXPR:
+      if (TREE_CODE (treeop0) == LABEL_DECL)
+	expand_goto (treeop0);
+      else
+	expand_computed_goto (treeop0);
+      return const0_rtx;
+
+    case CONSTRUCTOR:
+      /* If we don't need the result, just ensure we evaluate any
+	 subexpressions.  */
+      if (ignore)
 	{
-	  op1 = negate_rtx (mode, op1);
-	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
+	  unsigned HOST_WIDE_INT idx;
+	  tree value;
+
+	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
+	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
+
+	  return const0_rtx;
 	}
 
-      goto binop2;
+      return expand_constructor (exp, target, modifier, false);
 
-    case MULT_EXPR:
-      /* If this is a fixed-point operation, then we cannot use the code
-	 below because "expand_mult" doesn't support sat/no-sat fixed-point
-         multiplications.   */
-      if (ALL_FIXED_POINT_MODE_P (mode))
-	goto binop;
+    case MISALIGNED_INDIRECT_REF:
+    case ALIGN_INDIRECT_REF:
+    case INDIRECT_REF:
+      {
+	tree exp1 = treeop0;
 
-      /* If first operand is constant, swap them.
-	 Thus the following special case checks need only
-	 check the second operand.  */
-      if (TREE_CODE (treeop0) == INTEGER_CST)
-	{
-	  tree t1 = treeop0;
-	  treeop0 = treeop1;
-	  treeop1 = t1;
-	}
+	if (modifier != EXPAND_WRITE)
+	  {
+	    tree t;
 
-      /* Attempt to return something suitable for generating an
-	 indexed address, for machines that support that.  */
+	    t = fold_read_from_constant_string (exp);
+	    if (t)
+	      return expand_expr (t, target, tmode, modifier);
+	  }
 
-      if (modifier == EXPAND_SUM && mode == ptr_mode
-	  && host_integerp (treeop1, 0))
-	{
-	  tree exp1 = treeop1;
+	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
+	op0 = memory_address (mode, op0);
 
-	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
-			     EXPAND_SUM);
+	if (code == ALIGN_INDIRECT_REF)
+	  {
+	    int align = TYPE_ALIGN_UNIT (type);
+	    op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
+	    op0 = memory_address (mode, op0);
+	  }
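+	/* E.g. with a 16-byte-aligned type this ANDs the address with
+	   -16, rounding it down to the start of the enclosing aligned
+	   slot.  */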
 
-	  if (!REG_P (op0))
-	    op0 = force_operand (op0, NULL_RTX);
-	  if (!REG_P (op0))
-	    op0 = copy_to_mode_reg (mode, op0);
+	temp = gen_rtx_MEM (mode, op0);
 
-	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
-			       gen_int_mode (tree_low_cst (exp1, 0),
-					     TYPE_MODE (TREE_TYPE (exp1)))));
-	}
+	set_mem_attributes (temp, exp, 0);
 
-      if (modifier == EXPAND_STACK_PARM)
-	target = 0;
+	/* Resolve the misalignment now, so that we don't have to remember
+	   to resolve it later.  Of course, this only works for reads.  */
+	if (code == MISALIGNED_INDIRECT_REF)
+	  {
+	    int icode;
+	    rtx reg, insn;
 
-      /* Check for multiplying things that have been extended
-	 from a narrower type.  If this machine supports multiplying
-	 in that narrower type with a result in the desired type,
-	 do it that way, and avoid the explicit type-conversion.  */
+	    gcc_assert (modifier == EXPAND_NORMAL
+			|| modifier == EXPAND_STACK_PARM);
 
-      subexp0 = treeop0;
-      subexp1 = treeop1;
-      subexp0_def = get_def_for_expr (subexp0, NOP_EXPR);
-      subexp1_def = get_def_for_expr (subexp1, NOP_EXPR);
-      top0 = top1 = NULL_TREE;
+	    /* The vectorizer should have already checked the mode.  */
+	    icode = optab_handler (movmisalign_optab, mode)->insn_code;
+	    gcc_assert (icode != CODE_FOR_nothing);
 
-      /* First, check if we have a multiplication of one signed and one
-	 unsigned operand.  */
-      if (subexp0_def
-	  && (top0 = gimple_assign_rhs1 (subexp0_def))
-	  && subexp1_def
-	  && (top1 = gimple_assign_rhs1 (subexp1_def))
-	  && TREE_CODE (type) == INTEGER_TYPE
-	  && (TYPE_PRECISION (TREE_TYPE (top0))
-	      < TYPE_PRECISION (TREE_TYPE (subexp0)))
-	  && (TYPE_PRECISION (TREE_TYPE (top0))
-	      == TYPE_PRECISION (TREE_TYPE (top1)))
-	  && (TYPE_UNSIGNED (TREE_TYPE (top0))
-	      != TYPE_UNSIGNED (TREE_TYPE (top1))))
-	{
-	  enum machine_mode innermode
-	    = TYPE_MODE (TREE_TYPE (top0));
-	  this_optab = usmul_widen_optab;
-	  if (mode == GET_MODE_WIDER_MODE (innermode))
-	    {
-	      if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
-		{
-		  if (TYPE_UNSIGNED (TREE_TYPE (top0)))
-		    expand_operands (top0, top1, NULL_RTX, &op0, &op1,
-				     EXPAND_NORMAL);
-		  else
-		    expand_operands (top0, top1, NULL_RTX, &op1, &op0,
-				     EXPAND_NORMAL);
+	    /* We've already validated the memory, and we're creating a
+	       new pseudo destination.  The predicates really can't fail.  */
+	    reg = gen_reg_rtx (mode);
 
-		  goto binop3;
-		}
-	    }
-	}
-      /* Check for a multiplication with matching signedness.  If
-	 valid, TOP0 and TOP1 were set in the previous if
-	 condition.  */
-      else if (top0
-	  && TREE_CODE (type) == INTEGER_TYPE
-	  && (TYPE_PRECISION (TREE_TYPE (top0))
-	      < TYPE_PRECISION (TREE_TYPE (subexp0)))
-	  && ((TREE_CODE (subexp1) == INTEGER_CST
-	       && int_fits_type_p (subexp1, TREE_TYPE (top0))
-	       /* Don't use a widening multiply if a shift will do.  */
-	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (subexp1)))
-		    > HOST_BITS_PER_WIDE_INT)
-		   || exact_log2 (TREE_INT_CST_LOW (subexp1)) < 0))
-	      ||
-	      (top1
-	       && (TYPE_PRECISION (TREE_TYPE (top1))
-		   == TYPE_PRECISION (TREE_TYPE (top0))
-	       /* If both operands are extended, they must either both
-		  be zero-extended or both be sign-extended.  */
-	       && (TYPE_UNSIGNED (TREE_TYPE (top1))
-		   == TYPE_UNSIGNED (TREE_TYPE (top0)))))))
-	{
-	  tree op0type = TREE_TYPE (top0);
-	  enum machine_mode innermode = TYPE_MODE (op0type);
-	  bool zextend_p = TYPE_UNSIGNED (op0type);
-	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
-	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
+	    /* Nor can the insn generator.  */
+	    insn = GEN_FCN (icode) (reg, temp);
+	    emit_insn (insn);
 
-	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
-	    {
-	      if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
-		{
-		  if (TREE_CODE (subexp1) == INTEGER_CST)
-		    expand_operands (top0, subexp1, NULL_RTX, &op0, &op1,
-				     EXPAND_NORMAL);
-		  else
-		    expand_operands (top0, top1, NULL_RTX, &op0, &op1,
-				     EXPAND_NORMAL);
-		  goto binop3;
-		}
-	      else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
-		       && innermode == word_mode)
+	    return reg;
+	  }
+
+	return temp;
+      }
+
+    case TARGET_MEM_REF:
+      {
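+	/* A TARGET_MEM_REF already encodes the address in the target's
+	   preferred form, roughly SYMBOL + BASE + INDEX * STEP + OFFSET,
+	   so it only needs legitimizing and memory attributes here.  */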
+	struct mem_address addr;
+
+	get_address_description (exp, &addr);
+	op0 = addr_for_mem_ref (&addr, true);
+	op0 = memory_address (mode, op0);
+	temp = gen_rtx_MEM (mode, op0);
+	set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
+      }
+      return temp;
+
+    case ARRAY_REF:
+
+      {
+	tree array = treeop0;
+	tree index = treeop1;
+
+	/* Fold an expression like: "foo"[2].
+	   This is not done in fold so it won't happen inside &.
+	   Don't fold if this is for wide characters since it's too
+	   difficult to do correctly and this is a very rare case.  */
+
+	if (modifier != EXPAND_CONST_ADDRESS
+	    && modifier != EXPAND_INITIALIZER
+	    && modifier != EXPAND_MEMORY)
+	  {
+	    tree t = fold_read_from_constant_string (exp);
+
+	    if (t)
+	      return expand_expr (t, target, tmode, modifier);
+	  }
+
+	/* If this is a constant index into a constant array,
+	   just get the value from the array.  Handle both the case where
+	   we have an explicit constructor and the case where our operand
+	   is a variable that was declared const.  */
+
+	if (modifier != EXPAND_CONST_ADDRESS
+	    && modifier != EXPAND_INITIALIZER
+	    && modifier != EXPAND_MEMORY
+	    && TREE_CODE (array) == CONSTRUCTOR
+	    && ! TREE_SIDE_EFFECTS (array)
+	    && TREE_CODE (index) == INTEGER_CST)
+	  {
+	    unsigned HOST_WIDE_INT ix;
+	    tree field, value;
+
+	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
+				      field, value)
+	      if (tree_int_cst_equal (field, index))
 		{
-		  rtx htem, hipart;
-		  op0 = expand_normal (top0);
-		  if (TREE_CODE (subexp1) == INTEGER_CST)
-		    op1 = convert_modes (innermode, mode,
-					 expand_normal (subexp1), unsignedp);
-		  else
-		    op1 = expand_normal (top1);
-		  temp = expand_binop (mode, other_optab, op0, op1, target,
-				       unsignedp, OPTAB_LIB_WIDEN);
-		  hipart = gen_highpart (innermode, temp);
-		  htem = expand_mult_highpart_adjust (innermode, hipart,
-						      op0, op1, hipart,
-						      zextend_p);
-		  if (htem != hipart)
-		    emit_move_insn (hipart, htem);
-		  return REDUCE_BIT_FIELD (temp);
+		  if (!TREE_SIDE_EFFECTS (value))
+		    return expand_expr (fold (value), target, tmode, modifier);
+		  break;
 		}
-	    }
-	}
-      expand_operands (subexp0, subexp1, subtarget, &op0, &op1, EXPAND_NORMAL);
-      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
+	  }
 
-    case TRUNC_DIV_EXPR:
-    case FLOOR_DIV_EXPR:
-    case CEIL_DIV_EXPR:
-    case ROUND_DIV_EXPR:
-    case EXACT_DIV_EXPR:
-      /* If this is a fixed-point operation, then we cannot use the code
-	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
-         divisions.   */
-      if (ALL_FIXED_POINT_MODE_P (mode))
-	goto binop;
+	else if (optimize >= 1
+		 && modifier != EXPAND_CONST_ADDRESS
+		 && modifier != EXPAND_INITIALIZER
+		 && modifier != EXPAND_MEMORY
+		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
+		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
+		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
+		 && targetm.binds_local_p (array))
+	  {
+	    if (TREE_CODE (index) == INTEGER_CST)
+	      {
+		tree init = DECL_INITIAL (array);
 
-      if (modifier == EXPAND_STACK_PARM)
-	target = 0;
-      /* Possible optimization: compute the dividend with EXPAND_SUM
-	 then if the divisor is constant can optimize the case
-	 where some terms of the dividend have coeffs divisible by it.  */
-      expand_operands (treeop0, treeop1,
-		       subtarget, &op0, &op1, EXPAND_NORMAL);
-      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
+		if (TREE_CODE (init) == CONSTRUCTOR)
+		  {
+		    unsigned HOST_WIDE_INT ix;
+		    tree field, value;
 
-    case RDIV_EXPR:
-      goto binop;
+		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
+					      field, value)
+		      if (tree_int_cst_equal (field, index))
+			{
+			  if (TREE_SIDE_EFFECTS (value))
+			    break;
 
-    case TRUNC_MOD_EXPR:
-    case FLOOR_MOD_EXPR:
-    case CEIL_MOD_EXPR:
-    case ROUND_MOD_EXPR:
-      if (modifier == EXPAND_STACK_PARM)
-	target = 0;
-      expand_operands (treeop0, treeop1,
-		       subtarget, &op0, &op1, EXPAND_NORMAL);
-      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
+			  if (TREE_CODE (value) == CONSTRUCTOR)
+			    {
+			      /* If VALUE is a CONSTRUCTOR, this
+				 optimization is only useful if
+				 this doesn't store the CONSTRUCTOR
+				 into memory.  If it does, it is more
+				 efficient to just load the data from
+				 the array directly.  */
+			      rtx ret = expand_constructor (value, target,
+							    modifier, true);
+			      if (ret == NULL_RTX)
+				break;
+			    }
 
-    case FIXED_CONVERT_EXPR:
-      op0 = expand_normal (treeop0);
-      if (target == 0 || modifier == EXPAND_STACK_PARM)
-	target = gen_reg_rtx (mode);
+			  return expand_expr (fold (value), target, tmode,
+					      modifier);
+			}
+		  }
+		else if (TREE_CODE (init) == STRING_CST)
+		  {
+		    tree index1 = index;
+		    tree low_bound = array_ref_low_bound (exp);
+		    index1 = fold_convert_loc (loc, sizetype,
+					       treeop1);
 
-      if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
-	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
-          || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
-	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
-      else
-	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
-      return target;
+		    /* Optimize the special case of a zero lower bound.
 
-    case FIX_TRUNC_EXPR:
-      op0 = expand_normal (treeop0);
-      if (target == 0 || modifier == EXPAND_STACK_PARM)
-	target = gen_reg_rtx (mode);
-      expand_fix (target, op0, unsignedp);
-      return target;
+		       We convert low_bound to sizetype to avoid problems
+		       with constant folding.  (E.g. suppose the lower bound is 1
+		       and its mode is QI.  Without the conversion, (ARRAY
+		       + (INDEX - (unsigned char) 1)) becomes ((ARRAY
+		       + (-(unsigned char) 1)) + INDEX), which becomes
+		       (ARRAY + 255 + INDEX).  Oops!)  */
 
-    case FLOAT_EXPR:
-      op0 = expand_normal (treeop0);
-      if (target == 0 || modifier == EXPAND_STACK_PARM)
-	target = gen_reg_rtx (mode);
-      /* expand_float can't figure out what to do if FROM has VOIDmode.
-	 So give it the correct mode.  With -O, cse will optimize this.  */
-      if (GET_MODE (op0) == VOIDmode)
-	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
-				op0);
-      expand_float (target, op0,
-		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
-      return target;
+		    if (! integer_zerop (low_bound))
+		      index1 = size_diffop_loc (loc, index1,
+					    fold_convert_loc (loc, sizetype,
+							      low_bound));
 
-    case NEGATE_EXPR:
-      op0 = expand_expr (treeop0, subtarget,
-			 VOIDmode, EXPAND_NORMAL);
-      if (modifier == EXPAND_STACK_PARM)
-	target = 0;
-      temp = expand_unop (mode,
-      			  optab_for_tree_code (NEGATE_EXPR, type,
-					       optab_default),
-			  op0, target, 0);
-      gcc_assert (temp);
-      return REDUCE_BIT_FIELD (temp);
+		    if (0 > compare_tree_int (index1,
+					      TREE_STRING_LENGTH (init)))
+		      {
+			tree type = TREE_TYPE (TREE_TYPE (init));
+			enum machine_mode mode = TYPE_MODE (type);
 
-    case ABS_EXPR:
-      op0 = expand_expr (treeop0, subtarget,
-			 VOIDmode, EXPAND_NORMAL);
-      if (modifier == EXPAND_STACK_PARM)
-	target = 0;
+			if (GET_MODE_CLASS (mode) == MODE_INT
+			    && GET_MODE_SIZE (mode) == 1)
+			  return gen_int_mode (TREE_STRING_POINTER (init)
+					       [TREE_INT_CST_LOW (index1)],
+					       mode);
+		      }
+		  }
+	      }
+	  }
+      }
+      goto normal_inner_ref;
+
+    case COMPONENT_REF:
+      /* If the operand is a CONSTRUCTOR, we can just extract the
+	 appropriate field if it is present.  */
+      if (TREE_CODE (treeop0) == CONSTRUCTOR)
+	{
+	  unsigned HOST_WIDE_INT idx;
+	  tree field, value;
+
+	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
+				    idx, field, value)
+	    if (field == treeop1
+		/* We can normally use the value of the field in the
+		   CONSTRUCTOR.  However, if this is a bitfield in
+		   an integral mode that we can fit in a HOST_WIDE_INT,
+		   we must mask only the number of bits in the bitfield,
+		   since this is done implicitly by the constructor.  If
+		   the bitfield does not meet either of those conditions,
+		   we can't do this optimization.  */
+		&& (! DECL_BIT_FIELD (field)
+		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
+			&& (GET_MODE_BITSIZE (DECL_MODE (field))
+			    <= HOST_BITS_PER_WIDE_INT))))
+	      {
+		if (DECL_BIT_FIELD (field)
+		    && modifier == EXPAND_STACK_PARM)
+		  target = 0;
+		op0 = expand_expr (value, target, tmode, modifier);
+		if (DECL_BIT_FIELD (field))
+		  {
+		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
+		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
+
+		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
+		      {
+			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
+			op0 = expand_and (imode, op0, op1, target);
+		      }
+		    else
+		      {
+			tree count
+			  = build_int_cst (NULL_TREE,
+					   GET_MODE_BITSIZE (imode) - bitsize);
+
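+			/* E.g. for a signed 5-bit field in SImode,
+			   COUNT is 27: shift left then arithmetically
+			   right by 27 bits, sign-extending bit 4 into
+			   the upper bits.  */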
+			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
+					    target, 0);
+			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
+					    target, 0);
+		      }
+		  }
+
+		return op0;
+	      }
+	}
+      goto normal_inner_ref;
+
+    case BIT_FIELD_REF:
+    case ARRAY_RANGE_REF:
+    normal_inner_ref:
+      {
+	enum machine_mode mode1, mode2;
+	HOST_WIDE_INT bitsize, bitpos;
+	tree offset;
+	int volatilep = 0, must_force_mem;
+	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
+					&mode1, &unsignedp, &volatilep, true);
+	rtx orig_op0, memloc;
+
+	/* If we got back the original object, something is wrong.  Perhaps
+	   we are evaluating an expression too early.  In any event, don't
+	   infinitely recurse.  */
+	gcc_assert (tem != exp);
+
+	/* If TEM's type is a union of variable size, pass TARGET to the inner
+	   computation, since it will need a temporary and TARGET is known
+	   to suffice.  This occurs in unchecked conversion in Ada.  */
+	orig_op0 = op0
+	  = expand_expr (tem,
+			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
+			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
+			      != INTEGER_CST)
+			  && modifier != EXPAND_STACK_PARM
+			  ? target : NULL_RTX),
+			 VOIDmode,
+			 (modifier == EXPAND_INITIALIZER
+			  || modifier == EXPAND_CONST_ADDRESS
+			  || modifier == EXPAND_STACK_PARM)
+			 ? modifier : EXPAND_NORMAL);
+
+	mode2
+	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
+
+	/* If we have either an offset, a BLKmode result, or a reference
+	   outside the underlying object, we must force it to memory.
+	   Such a case can occur in Ada if we have unchecked conversion
+	   of an expression from a scalar type to an aggregate type or
+	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
+	   passed a partially uninitialized object or a view-conversion
+	   to a larger size.  */
+	must_force_mem = (offset
+			  || mode1 == BLKmode
+			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
+
+	/* Handle CONCAT first.  */
+	if (GET_CODE (op0) == CONCAT && !must_force_mem)
+	  {
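+	    /* A CONCAT holds the two parts of a complex value in separate
+	       rtx'en; if the reference covers exactly the whole value or
+	       exactly one part, we can use it directly.  */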
+	    if (bitpos == 0
+		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
+	      return op0;
+	    if (bitpos == 0
+		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
+		&& bitsize)
+	      {
+		op0 = XEXP (op0, 0);
+		mode2 = GET_MODE (op0);
+	      }
+	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
+		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
+		     && bitpos
+		     && bitsize)
+	      {
+		op0 = XEXP (op0, 1);
+		bitpos = 0;
+		mode2 = GET_MODE (op0);
+	      }
+	    else
+	      /* Otherwise force into memory.  */
+	      must_force_mem = 1;
+	  }
+
+	/* If this is a constant, put it in a register if it is a legitimate
+	   constant and we don't need a memory reference.  */
+	if (CONSTANT_P (op0)
+	    && mode2 != BLKmode
+	    && LEGITIMATE_CONSTANT_P (op0)
+	    && !must_force_mem)
+	  op0 = force_reg (mode2, op0);
+
+	/* Otherwise, if this is a constant, try to force it to the constant
+	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
+	   is a legitimate constant.  */
+	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
+	  op0 = validize_mem (memloc);
+
+	/* Otherwise, if this is a constant, or if the object is not in
+	   memory but must be, put it there.  */
+	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
+	  {
+	    tree nt = build_qualified_type (TREE_TYPE (tem),
+					    (TYPE_QUALS (TREE_TYPE (tem))
+					     | TYPE_QUAL_CONST));
+	    memloc = assign_temp (nt, 1, 1, 1);
+	    emit_move_insn (memloc, op0);
+	    op0 = memloc;
+	  }
+
+	if (offset)
+	  {
+	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
+					  EXPAND_SUM);
+
+	    gcc_assert (MEM_P (op0));
+
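+	    /* Bring the offset into the mode used for address arithmetic
+	       on this target: Pmode if pointers are extended, ptr_mode
+	       otherwise.  */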
+#ifdef POINTERS_EXTEND_UNSIGNED
+	    if (GET_MODE (offset_rtx) != Pmode)
+	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
+#else
+	    if (GET_MODE (offset_rtx) != ptr_mode)
+	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
+#endif
+
+	    if (GET_MODE (op0) == BLKmode
+		/* A constant address in OP0 can have VOIDmode; we must
+		   not try to call force_reg in that case.  */
+		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
+		&& bitsize != 0
+		&& (bitpos % bitsize) == 0
+		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
+		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
+	      {
+		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
+		bitpos = 0;
+	      }
+
+	    op0 = offset_address (op0, offset_rtx,
+				  highest_pow2_factor (offset));
+	  }
+
+	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
+	   record its alignment as BIGGEST_ALIGNMENT.  */
+	if (MEM_P (op0) && bitpos == 0 && offset != 0
+	    && is_aligning_offset (offset, tem))
+	  set_mem_align (op0, BIGGEST_ALIGNMENT);
+
+	/* Don't forget about volatility even if this is a bitfield.  */
+	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
+	  {
+	    if (op0 == orig_op0)
+	      op0 = copy_rtx (op0);
+
+	    MEM_VOLATILE_P (op0) = 1;
+	  }
+
+	/* In cases where an aligned union has an unaligned object
+	   as a field, we might be extracting a BLKmode value from
+	   an integer-mode (e.g., SImode) object.  Handle this case
+	   by doing the extract into an object as wide as the field
+	   (which we know to be the width of a basic mode), then
+	   storing into memory, and changing the mode to BLKmode.  */
+	if (mode1 == VOIDmode
+	    || REG_P (op0) || GET_CODE (op0) == SUBREG
+	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
+		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
+		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
+		&& modifier != EXPAND_CONST_ADDRESS
+		&& modifier != EXPAND_INITIALIZER)
+	    /* If the field isn't aligned enough to fetch as a memref,
+	       fetch it as a bit field.  */
+	    || (mode1 != BLKmode
+		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
+		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
+		      || (MEM_P (op0)
+			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
+			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
+		     && ((modifier == EXPAND_CONST_ADDRESS
+			  || modifier == EXPAND_INITIALIZER)
+			 ? STRICT_ALIGNMENT
+			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
+		    || (bitpos % BITS_PER_UNIT != 0)))
+	    /* If the type and the field are a constant size and the
+	       size of the type isn't the same size as the bitfield,
+	       we must use bitfield operations.  */
+	    || (bitsize >= 0
+		&& TYPE_SIZE (TREE_TYPE (exp))
+		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
+		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
+					  bitsize)))
+	  {
+	    enum machine_mode ext_mode = mode;
+
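+	    /* If the field is BLKmode but cannot simply be copied as a
+	       block, find an integer mode wide enough to extract it as a
+	       bit field.  */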
+	    if (ext_mode == BLKmode
+		&& ! (target != 0 && MEM_P (op0)
+		      && MEM_P (target)
+		      && bitpos % BITS_PER_UNIT == 0))
+	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
+
+	    if (ext_mode == BLKmode)
+	      {
+		if (target == 0)
+		  target = assign_temp (type, 0, 1, 1);
+
+		if (bitsize == 0)
+		  return target;
+
+		/* In this case, BITPOS must start at a byte boundary and
+		   TARGET, if specified, must be a MEM.  */
+		gcc_assert (MEM_P (op0)
+			    && (!target || MEM_P (target))
+			    && !(bitpos % BITS_PER_UNIT));
+
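+		/* Copy the field as a block, rounding its size in bits up
+		   to whole bytes.  */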
+		emit_block_move (target,
+				 adjust_address (op0, VOIDmode,
+						 bitpos / BITS_PER_UNIT),
+				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
+					  / BITS_PER_UNIT),
+				 (modifier == EXPAND_STACK_PARM
+				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
+
+		return target;
+	      }
+
+	    op0 = validize_mem (op0);
+
+	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
+	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
+
+	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
+				     (modifier == EXPAND_STACK_PARM
+				      ? NULL_RTX : target),
+				     ext_mode, ext_mode);
+
+	    /* If the result is a record type and BITSIZE is narrower than
+	       the mode of OP0, an integral mode, and this is a big endian
+	       machine, we must put the field into the high-order bits.  */
+	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
+		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
+		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
+	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
+				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
+					    - bitsize),
+				  op0, 1);
+
+	    /* If the result type is BLKmode, store the data into a temporary
+	       of the appropriate type, but with the mode corresponding to the
+	       mode for the data we have (op0's mode).  It's tempting to make
+	       this a constant type, since we know it's only being stored once,
+	       but that can cause problems if we are taking the address of this
+	       COMPONENT_REF because the MEM of any reference via that address
+	       will have flags corresponding to the type, which will not
+	       necessarily be constant.  */
+	    if (mode == BLKmode)
+	      {
+		HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
+		rtx new_rtx;
+
+		/* If the reference doesn't use the alias set of its type,
+		   we cannot create the temporary using that type.  */
+		if (component_uses_parent_alias_set (exp))
+		  {
+		    new_rtx = assign_stack_local (ext_mode, size, 0);
+		    set_mem_alias_set (new_rtx, get_alias_set (exp));
+		  }
+		else
+		  new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
 
-      /* ABS_EXPR is not valid for complex arguments.  */
-      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
-		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
+		emit_move_insn (new_rtx, op0);
+		op0 = copy_rtx (new_rtx);
+		PUT_MODE (op0, BLKmode);
+		set_mem_attributes (op0, exp, 1);
+	      }
 
-      /* Unsigned abs is simply the operand.  Testing here means we don't
-	 risk generating incorrect code below.  */
-      if (TYPE_UNSIGNED (type))
-	return op0;
+	    return op0;
+	  }
 
-      return expand_abs (mode, op0, target, unsignedp,
-			 safe_from_p (target, treeop0, 1));
+	/* If the result is BLKmode, use that to access the object
+	   now as well.  */
+	if (mode == BLKmode)
+	  mode1 = BLKmode;
 
-    case MAX_EXPR:
-    case MIN_EXPR:
-      target = original_target;
-      if (target == 0
-	  || modifier == EXPAND_STACK_PARM
-	  || (MEM_P (target) && MEM_VOLATILE_P (target))
-	  || GET_MODE (target) != mode
-	  || (REG_P (target)
-	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
-	target = gen_reg_rtx (mode);
-      expand_operands (treeop0, treeop1,
-		       target, &op0, &op1, EXPAND_NORMAL);
+	/* Get a reference to just this component.  */
+	if (modifier == EXPAND_CONST_ADDRESS
+	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
+	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
+	else
+	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
 
-      /* First try to do it with a special MIN or MAX instruction.
-	 If that does not win, use a conditional jump to select the proper
-	 value.  */
-      this_optab = optab_for_tree_code (code, type, optab_default);
-      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
-			   OPTAB_WIDEN);
-      if (temp != 0)
-	return temp;
+	if (op0 == orig_op0)
+	  op0 = copy_rtx (op0);
 
-      /* At this point, a MEM target is no longer useful; we will get better
-	 code without it.  */
+	set_mem_attributes (op0, exp, 0);
+	if (REG_P (XEXP (op0, 0)))
+	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
 
-      if (! REG_P (target))
-	target = gen_reg_rtx (mode);
+	MEM_VOLATILE_P (op0) |= volatilep;
+	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
+	    || modifier == EXPAND_CONST_ADDRESS
+	    || modifier == EXPAND_INITIALIZER)
+	  return op0;
+	else if (target == 0)
+	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
 
-      /* If op1 was placed in target, swap op0 and op1.  */
-      if (target != op0 && target == op1)
-	{
-	  temp = op0;
-	  op0 = op1;
-	  op1 = temp;
-	}
+	convert_move (target, op0, unsignedp);
+	return target;
+      }
 
-      /* We generate better code and avoid problems with op1 mentioning
-	 target by forcing op1 into a pseudo if it isn't a constant.  */
-      if (! CONSTANT_P (op1))
-	op1 = force_reg (mode, op1);
+    case OBJ_TYPE_REF:
+      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
 
+    case CALL_EXPR:
+      /* All valid uses of __builtin_va_arg_pack () are removed during
+	 inlining.  */
+      if (CALL_EXPR_VA_ARG_PACK (exp))
+	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
       {
-	enum rtx_code comparison_code;
-	rtx cmpop1 = op1;
+	tree fndecl = get_callee_fndecl (exp), attr;
 
-	if (code == MAX_EXPR)
-	  comparison_code = unsignedp ? GEU : GE;
-	else
-	  comparison_code = unsignedp ? LEU : LE;
+	if (fndecl
+	    && (attr = lookup_attribute ("error",
+					 DECL_ATTRIBUTES (fndecl))) != NULL)
+	  error ("%Kcall to %qs declared with attribute error: %s",
+		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
+		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
+	if (fndecl
+	    && (attr = lookup_attribute ("warning",
+					 DECL_ATTRIBUTES (fndecl))) != NULL)
+	  warning_at (tree_nonartificial_location (exp),
+		      0, "%Kcall to %qs declared with attribute warning: %s",
+		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
+		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
 
-	/* Canonicalize to comparisons against 0.  */
-	if (op1 == const1_rtx)
-	  {
-	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
-	       or (a != 0 ? a : 1) for unsigned.
-	       For MIN we are safe converting (a <= 1 ? a : 1)
-	       into (a <= 0 ? a : 1)  */
-	    cmpop1 = const0_rtx;
-	    if (code == MAX_EXPR)
-	      comparison_code = unsignedp ? NE : GT;
-	  }
-	if (op1 == constm1_rtx && !unsignedp)
+	/* Check for a built-in function.  */
+	if (fndecl && DECL_BUILT_IN (fndecl))
 	  {
-	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
-	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
-	    cmpop1 = const0_rtx;
-	    if (code == MIN_EXPR)
-	      comparison_code = LT;
+	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
+	    return expand_builtin (exp, target, subtarget, tmode, ignore);
 	  }
-#ifdef HAVE_conditional_move
-	/* Use a conditional move if possible.  */
-	if (can_conditionally_move_p (mode))
-	  {
-	    rtx insn;
+      }
+      return expand_call (exp, target, ignore);
 
-	    /* ??? Same problem as in expmed.c: emit_conditional_move
-	       forces a stack adjustment via compare_from_rtx, and we
-	       lose the stack adjustment if the sequence we are about
-	       to create is discarded.  */
-	    do_pending_stack_adjust ();
+    case VIEW_CONVERT_EXPR:
+      op0 = NULL_RTX;
 
-	    start_sequence ();
+      /* If we are converting to BLKmode, try to avoid an intermediate
+	 temporary by fetching an inner memory reference.  */
+      if (mode == BLKmode
+	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
+	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
+	  && handled_component_p (treeop0))
+      {
+	enum machine_mode mode1;
+	HOST_WIDE_INT bitsize, bitpos;
+	tree offset;
+	int unsignedp;
+	int volatilep = 0;
+	tree tem
+	  = get_inner_reference (treeop0, &bitsize, &bitpos,
+				 &offset, &mode1, &unsignedp, &volatilep,
+				 true);
+	rtx orig_op0;
 
-	    /* Try to emit the conditional move.  */
-	    insn = emit_conditional_move (target, comparison_code,
-					  op0, cmpop1, mode,
-					  op0, op1, mode,
-					  unsignedp);
+	/* ??? We should work harder and deal with non-zero offsets.  */
+	if (!offset
+	    && (bitpos % BITS_PER_UNIT) == 0
+	    && bitsize >= 0
+	    && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
+	  {
+	    /* See the normal_inner_ref case for the rationale.  */
+	    orig_op0
+	      = expand_expr (tem,
+			     (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
+			      && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
+				  != INTEGER_CST)
+			      && modifier != EXPAND_STACK_PARM
+			      ? target : NULL_RTX),
+			     VOIDmode,
+			     (modifier == EXPAND_INITIALIZER
+			      || modifier == EXPAND_CONST_ADDRESS
+			      || modifier == EXPAND_STACK_PARM)
+			     ? modifier : EXPAND_NORMAL);
 
-	    /* If we could do the conditional move, emit the sequence,
-	       and return.  */
-	    if (insn)
+	    if (MEM_P (orig_op0))
 	      {
-		rtx seq = get_insns ();
-		end_sequence ();
-		emit_insn (seq);
-		return target;
-	      }
+		op0 = orig_op0;
 
-	    /* Otherwise discard the sequence and fall back to code with
-	       branches.  */
-	    end_sequence ();
-	  }
-#endif
-	if (target != op0)
-	  emit_move_insn (target, op0);
+		/* Get a reference to just this component.  */
+		if (modifier == EXPAND_CONST_ADDRESS
+		    || modifier == EXPAND_SUM
+		    || modifier == EXPAND_INITIALIZER)
+		  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
+		else
+		  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
 
-	temp = gen_label_rtx ();
-	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
-				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
+		if (op0 == orig_op0)
+		  op0 = copy_rtx (op0);
+
+		set_mem_attributes (op0, treeop0, 0);
+		if (REG_P (XEXP (op0, 0)))
+		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
+
+		MEM_VOLATILE_P (op0) |= volatilep;
+	      }
+	  }
       }
-      emit_move_insn (target, op1);
-      emit_label (temp);
-      return target;
 
-    case BIT_NOT_EXPR:
-      op0 = expand_expr (treeop0, subtarget,
-			 VOIDmode, EXPAND_NORMAL);
-      if (modifier == EXPAND_STACK_PARM)
-	target = 0;
-      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
-      gcc_assert (temp);
-      return temp;
+      if (!op0)
+	op0 = expand_expr (treeop0,
+			   NULL_RTX, VOIDmode, modifier);
+
+      /* If the input and output modes are both the same, we are done.  */
+      if (mode == GET_MODE (op0))
+	;
+      /* If neither mode is BLKmode, and both modes are the same size
+	 then we can use gen_lowpart.  */
+      else if (mode != BLKmode && GET_MODE (op0) != BLKmode
+	       && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
+	       && !COMPLEX_MODE_P (GET_MODE (op0)))
+	{
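+	  /* Avoid creating a nested SUBREG: load a SUBREG operand into
+	     a fresh register before taking the lowpart.  */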
+	  if (GET_CODE (op0) == SUBREG)
+	    op0 = force_reg (GET_MODE (op0), op0);
+	  op0 = gen_lowpart (mode, op0);
+	}
+      /* If both modes are integral, then we can convert from one to the
+	 other.  */
+      else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
+	op0 = convert_modes (mode, GET_MODE (op0), op0, 
+			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
+      /* As a last resort, spill op0 to memory, and reload it in a
+	 different mode.  */
+      else if (!MEM_P (op0))
+	{
+	  /* If the operand is not a MEM, force it into memory.  Since we
+	     are going to be changing the mode of the MEM, don't call
+	     force_const_mem for constants because we don't allow pool
+	     constants to change mode.  */
+	  tree inner_type = TREE_TYPE (treeop0);
+
+	  gcc_assert (!TREE_ADDRESSABLE (exp));
 
-      /* ??? Can optimize bitwise operations with one arg constant.
-	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
-	 and (a bitwise1 b) bitwise2 b (etc)
-	 but that is probably not worth while.  */
+	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
+	    target
+	      = assign_stack_temp_for_type
+		(TYPE_MODE (inner_type),
+		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
 
-      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
-	 boolean values when we want in all cases to compute both of them.  In
-	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
-	 as actual zero-or-1 values and then bitwise anding.  In cases where
-	 there cannot be any side effects, better code would be made by
-	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
-	 how to recognize those cases.  */
+	  emit_move_insn (target, op0);
+	  op0 = target;
+	}
 
-    case TRUTH_AND_EXPR:
-      code = BIT_AND_EXPR;
-    case BIT_AND_EXPR:
-      goto binop;
+      /* At this point, OP0 is in the correct mode.  If the output type is
+	 such that the operand is known to be aligned, indicate that it is.
+	 Otherwise, we need only be concerned about alignment for non-BLKmode
+	 results.  */
+      if (MEM_P (op0))
+	{
+	  op0 = copy_rtx (op0);
 
-    case TRUTH_OR_EXPR:
-      code = BIT_IOR_EXPR;
-    case BIT_IOR_EXPR:
-      goto binop;
+	  if (TYPE_ALIGN_OK (type))
+	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
+	  else if (STRICT_ALIGNMENT
+		   && mode != BLKmode
+		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
+	    {
+	      tree inner_type = TREE_TYPE (treeop0);
+	      HOST_WIDE_INT temp_size
+		= MAX (int_size_in_bytes (inner_type),
+		       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
+	      rtx new_rtx
+		= assign_stack_temp_for_type (mode, temp_size, 0, type);
+	      rtx new_with_op0_mode
+		= adjust_address (new_rtx, GET_MODE (op0), 0);
 
-    case TRUTH_XOR_EXPR:
-      code = BIT_XOR_EXPR;
-    case BIT_XOR_EXPR:
-      goto binop;
+	      gcc_assert (!TREE_ADDRESSABLE (exp));
 
-    case LROTATE_EXPR:
-    case RROTATE_EXPR:
-      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
-		  || (GET_MODE_PRECISION (TYPE_MODE (type))
-		      == TYPE_PRECISION (type)));
-      /* fall through */
+	      if (GET_MODE (op0) == BLKmode)
+		emit_block_move (new_with_op0_mode, op0,
+				 GEN_INT (GET_MODE_SIZE (mode)),
+				 (modifier == EXPAND_STACK_PARM
+				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
+	      else
+		emit_move_insn (new_with_op0_mode, op0);
 
-    case LSHIFT_EXPR:
-    case RSHIFT_EXPR:
-      /* If this is a fixed-point operation, then we cannot use the code
-	 below because "expand_shift" doesn't support sat/no-sat fixed-point
-         shifts.   */
-      if (ALL_FIXED_POINT_MODE_P (mode))
-	goto binop;
+	      op0 = new_rtx;
+	    }
 
-      if (! safe_from_p (subtarget, treeop1, 1))
-	subtarget = 0;
-      if (modifier == EXPAND_STACK_PARM)
-	target = 0;
-      op0 = expand_expr (treeop0, subtarget,
-			 VOIDmode, EXPAND_NORMAL);
-      temp = expand_shift (code, mode, op0, treeop1, target,
-			   unsignedp);
-      if (code == LSHIFT_EXPR)
-	temp = REDUCE_BIT_FIELD (temp);
-      return temp;
+	  op0 = adjust_address (op0, mode, 0);
+	}
 
-      /* Could determine the answer when only additive constants differ.  Also,
-	 the addition of one can be handled by changing the condition.  */
-    case LT_EXPR:
-    case LE_EXPR:
-    case GT_EXPR:
-    case GE_EXPR:
-    case EQ_EXPR:
-    case NE_EXPR:
-    case UNORDERED_EXPR:
-    case ORDERED_EXPR:
-    case UNLT_EXPR:
-    case UNLE_EXPR:
-    case UNGT_EXPR:
-    case UNGE_EXPR:
-    case UNEQ_EXPR:
-    case LTGT_EXPR:
-      temp = do_store_flag (&ops,
-			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
-			    tmode != VOIDmode ? tmode : mode);
-      if (temp)
-	return temp;
+      return op0;
 
       /* Use a compare and a jump for BLKmode comparisons, or for function
	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
@@ -9176,18 +9398,6 @@ expand_expr_real_1 (tree exp, rtx target
       emit_label (op1);
       return ignore ? const0_rtx : target;
 
-    case TRUTH_NOT_EXPR:
-      if (modifier == EXPAND_STACK_PARM)
-	target = 0;
-      op0 = expand_expr (treeop0, target,
-			 VOIDmode, EXPAND_NORMAL);
-      /* The parser is careful to generate TRUTH_NOT_EXPR
-	 only with operands that are always zero or one.  */
-      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
-			   target, 1, OPTAB_LIB_WIDEN);
-      gcc_assert (temp);
-      return temp;
-
     case STATEMENT_LIST:
       {
 	tree_stmt_iterator iter;
@@ -9305,20 +9515,6 @@ expand_expr_real_1 (tree exp, rtx target
     case ADDR_EXPR:
       return expand_expr_addr_expr (exp, target, tmode, modifier);
 
-    case COMPLEX_EXPR:
-      /* Get the rtx code of the operands.  */
-      op0 = expand_normal (treeop0);
-      op1 = expand_normal (treeop1);
-
-      if (!target)
-	target = gen_reg_rtx (TYPE_MODE (type));
-
-      /* Move the real (op0) and imaginary (op1) parts to their location.  */
-      write_complex_part (target, op0, false);
-      write_complex_part (target, op1, true);
-
-      return target;
-
     case REALPART_EXPR:
       op0 = expand_normal (treeop0);
       return read_complex_part (op0, false);
@@ -9415,105 +9611,6 @@ expand_expr_real_1 (tree exp, rtx target
 	return target;
       }
 
-    case WIDEN_SUM_EXPR:
-      {
-        tree oprnd0 = treeop0;
-        tree oprnd1 = treeop1;
-
-        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
-        target = expand_widen_pattern_expr (&ops, op0, NULL_RTX, op1,
-                                            target, unsignedp);
-        return target;
-      }
-
-    case REDUC_MAX_EXPR:
-    case REDUC_MIN_EXPR:
-    case REDUC_PLUS_EXPR:
-      {
-        op0 = expand_normal (treeop0);
-        this_optab = optab_for_tree_code (code, type, optab_default);
-        temp = expand_unop (mode, this_optab, op0, target, unsignedp);
-        gcc_assert (temp);
-        return temp;
-      }
-
-    case VEC_EXTRACT_EVEN_EXPR:
-    case VEC_EXTRACT_ODD_EXPR:
-      {
-        expand_operands (treeop0,  treeop1,
-                         NULL_RTX, &op0, &op1, EXPAND_NORMAL);
-        this_optab = optab_for_tree_code (code, type, optab_default);
-        temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
-                             OPTAB_WIDEN);
-        gcc_assert (temp);
-        return temp;
-      }
-
-    case VEC_INTERLEAVE_HIGH_EXPR:
-    case VEC_INTERLEAVE_LOW_EXPR:
-      {
-        expand_operands (treeop0,  treeop1,
-                         NULL_RTX, &op0, &op1, EXPAND_NORMAL);
-        this_optab = optab_for_tree_code (code, type, optab_default);
-        temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
-                             OPTAB_WIDEN);
-        gcc_assert (temp);
-        return temp;
-      }
-
-    case VEC_LSHIFT_EXPR:
-    case VEC_RSHIFT_EXPR:
-      {
-	target = expand_vec_shift_expr (&ops, target);
-	return target;
-      }
-
-    case VEC_UNPACK_HI_EXPR:
-    case VEC_UNPACK_LO_EXPR:
-      {
-	op0 = expand_normal (treeop0);
-	this_optab = optab_for_tree_code (code, type, optab_default);
-	temp = expand_widen_pattern_expr (&ops, op0, NULL_RTX, NULL_RTX,
-					  target, unsignedp);
-	gcc_assert (temp);
-	return temp;
-      }
-
-    case VEC_UNPACK_FLOAT_HI_EXPR:
-    case VEC_UNPACK_FLOAT_LO_EXPR:
-      {
-	op0 = expand_normal (treeop0);
-	/* The signedness is determined from input operand.  */
-	this_optab = optab_for_tree_code (code,
-					  TREE_TYPE (treeop0),
-					  optab_default);
-	temp = expand_widen_pattern_expr
-	  (&ops, op0, NULL_RTX, NULL_RTX,
-	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
-
-	gcc_assert (temp);
-	return temp;
-      }
-
-    case VEC_WIDEN_MULT_HI_EXPR:
-    case VEC_WIDEN_MULT_LO_EXPR:
-      {
-	tree oprnd0 = treeop0;
-	tree oprnd1 = treeop1;
-
-	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
-	target = expand_widen_pattern_expr (&ops, op0, op1, NULL_RTX,
-					    target, unsignedp);
-	gcc_assert (target);
-	return target;
-      }
-
-    case VEC_PACK_TRUNC_EXPR:
-    case VEC_PACK_SAT_EXPR:
-    case VEC_PACK_FIX_TRUNC_EXPR:
-      mode = TYPE_MODE (TREE_TYPE (treeop0));
-      goto binop;
-
     case COMPOUND_LITERAL_EXPR:
       {
 	/* Initialize the anonymous variable declared in the compound
@@ -9536,24 +9633,9 @@ expand_expr_real_1 (tree exp, rtx target
       }
 
     default:
-      gcc_unreachable ();
+      return expand_expr_real_2 (&ops, target, tmode, modifier);
     }
-
-  /* Here to do an ordinary binary operator.  */
- binop:
-  expand_operands (treeop0, treeop1,
-		   subtarget, &op0, &op1, EXPAND_NORMAL);
- binop2:
-  this_optab = optab_for_tree_code (code, type, optab_default);
- binop3:
-  if (modifier == EXPAND_STACK_PARM)
-    target = 0;
-  temp = expand_binop (mode, this_optab, op0, op1, target,
-		       unsignedp, OPTAB_LIB_WIDEN);
-  gcc_assert (temp);
-  return REDUCE_BIT_FIELD (temp);
 }
-#undef REDUCE_BIT_FIELD
 
 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
    signedness of TYPE), possibly returning the result in TARGET.  */


