This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.



Re: VTA merge - ssa to rtl


ssa-to-rtl (24K) - convert debug bind stmts to debug insns
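
The core of the patch is expand_debug_expr, which builds an rtl expression for
the value bound by a debug stmt without emitting any real insns; whatever it
cannot represent comes back as NULL and is then emitted as an unknown
location.  The FLOOR/CEIL/ROUND division and modulus cases lean on small
adjustment helpers (floor_sdiv_adjust and friends below).  The following
stand-alone C program is not part of the patch and uses made-up inputs; it
just sanity-checks the arithmetic those helpers encode on top of C's
truncating division:

/* Illustration only, not part of the patch: the adjustments that turn
   truncating division into floor/ceil/round-to-nearest results, as
   encoded by floor_sdiv_adjust, ceil_sdiv_adjust and round_sdiv_adjust
   in the diff below.  op1 is the divisor, mod the truncated remainder.  */
#include <stdio.h>
#include <stdlib.h>

static int floor_adj (int op1, int mod)
{ /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return mod != 0 && op1 / mod < 0 ? -1 : 0;
}

static int ceil_adj (int op1, int mod)
{ /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return mod != 0 && op1 / mod > 0 ? 1 : 0;
}

static int round_adj (int op1, int mod)
{ /* (abs (mod) >= abs (op1) - abs (mod) ? (op1 / mod > 0 ? 1 : -1) : 0) */
  if (mod == 0 || abs (mod) < abs (op1) - abs (mod))
    return 0;
  return op1 / mod > 0 ? 1 : -1;
}

int main (void)
{
  int op0 = -7, op1 = 2;
  int div = op0 / op1, mod = op0 % op1;   /* truncating: -3 remainder -1 */

  printf ("floor: %d\n", div + floor_adj (op1, mod));   /* -4 */
  printf ("ceil:  %d\n", div + ceil_adj (op1, mod));    /* -3 */
  printf ("round: %d\n", div + round_adj (op1, mod));   /* -4, ties away from 0 */
  return 0;
}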

Index: gcc/cfgexpand.c
===================================================================
--- gcc/cfgexpand.c.orig	2009-06-05 05:07:49.000000000 -0300
+++ gcc/cfgexpand.c	2009-06-05 05:07:56.000000000 -0300
@@ -1675,7 +1675,8 @@ maybe_dump_rtl_for_gimple_stmt (gimple s
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "\n;; ");
-      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0,
+			 TDF_SLIM | (dump_flags & TDF_LINENO));
       fprintf (dump_file, "\n");
 
       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
@@ -1988,6 +1989,768 @@ expand_gimple_tailcall (basic_block bb, 
   return bb;
 }
 
+/* Return the difference between the floor and the truncated result of
+   a signed division by OP1 with remainder MOD.  */
+static rtx
+floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+{
+  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
+  return gen_rtx_IF_THEN_ELSE
+    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
+     gen_rtx_IF_THEN_ELSE
+     (mode, gen_rtx_LT (BImode,
+			gen_rtx_DIV (mode, op1, mod),
+			const0_rtx),
+      constm1_rtx, const0_rtx),
+     const0_rtx);
+}
+
+/* Return the difference between the ceil and the truncated result of
+   a signed division by OP1 with remainder MOD.  */
+static rtx
+ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+{
+  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
+  return gen_rtx_IF_THEN_ELSE
+    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
+     gen_rtx_IF_THEN_ELSE
+     (mode, gen_rtx_GT (BImode,
+			gen_rtx_DIV (mode, op1, mod),
+			const0_rtx),
+      const1_rtx, const0_rtx),
+     const0_rtx);
+}
+
+/* Return the difference between the ceil and the truncated result of
+   an unsigned division by OP1 with remainder MOD.  */
+static rtx
+ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
+{
+  /* (mod != 0 ? 1 : 0) */
+  return gen_rtx_IF_THEN_ELSE
+    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
+     const1_rtx, const0_rtx);
+}
+
+/* Return the difference between the rounded and the truncated result
+   of a signed division by OP1 with remainder MOD.  Halfway cases are
+   rounded away from zero, rather than to the nearest even number.  */
+static rtx
+round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+{
+  /* (abs (mod) >= abs (op1) - abs (mod)
+      ? (op1 / mod > 0 ? 1 : -1)
+      : 0) */
+  return gen_rtx_IF_THEN_ELSE
+    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
+		       gen_rtx_MINUS (mode,
+				      gen_rtx_ABS (mode, op1),
+				      gen_rtx_ABS (mode, mod))),
+     gen_rtx_IF_THEN_ELSE
+     (mode, gen_rtx_GT (BImode,
+			gen_rtx_DIV (mode, op1, mod),
+			const0_rtx),
+      const1_rtx, constm1_rtx),
+     const0_rtx);
+}
+
+/* Return the difference between the rounded and the truncated result
+   of an unsigned division by OP1 with remainder MOD.  Halfway cases
+   are rounded away from zero, rather than to the nearest even
+   number.  */
+static rtx
+round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
+{
+  /* (mod >= op1 - mod ? 1 : 0) */
+  return gen_rtx_IF_THEN_ELSE
+    (mode, gen_rtx_GE (BImode, mod,
+		       gen_rtx_MINUS (mode, op1, mod)),
+     const1_rtx, const0_rtx);
+}
+
+/* for_each_rtx function that adjusts VAR_LOCATION placeholders for
+   debug string constants.  */
+
+static int
+adjust_debug_string_constant (rtx *x, void *nothing ATTRIBUTE_UNUSED)
+{
+  rtx rtstr, cst;
+  tree exp;
+
+  if (GET_CODE (*x) != VAR_LOCATION)
+    return 0;
+
+  exp = PAT_VAR_LOCATION_DECL (*x);
+  rtstr = PAT_VAR_LOCATION_LOC (*x);
+
+  gcc_assert (TREE_CODE (exp) == STRING_CST && GET_CODE (rtstr) == CONST_STRING
+	      && TREE_STRING_POINTER (exp) == XSTR (rtstr, 0));
+
+  cst = lookup_constant_def (exp);
+  if (cst && MEM_P (cst))
+    *x = XEXP (cst, 0);
+  else
+    *x = rtstr;
+
+  return -1;
+}
+
+/* Set to true when a debug string constant is expanded as a
+   VAR_LOCATION placeholder rather than as regular rtl.  */
+
+static bool debug_string_constants_p = false;
+
+/* Adjust any debug string constants expanded as VAR_LOCATION
+   placeholders (see how STRING_CST is handled in expand_debug_expr)
+   to regular rtx or debug-only CONST_STRINGs.  */
+
+static void
+adjust_debug_string_constants (void)
+{
+  rtx insn;
+
+  if (!debug_string_constants_p)
+    return;
+
+  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+    if (DEBUG_INSN_P (insn))
+      for_each_rtx (&INSN_VAR_LOCATION_LOC (insn),
+		    adjust_debug_string_constant, NULL);
+
+  debug_string_constants_p = false;
+}
+
+/* Wrap modeless constants in CONST:MODE.  */
+rtx
+wrap_constant (enum machine_mode mode, rtx x)
+{
+  if (GET_CODE (x) != CONST_INT && GET_CODE (x) != CONST_FIXED
+      && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
+    return x;
+  gcc_assert (mode != VOIDmode);
+  return gen_rtx_CONST (mode, x);
+}
+
+/* Remove CONST wrapper added by wrap_constant().  */
+rtx
+unwrap_constant (rtx x)
+{
+  rtx orig = x;
+
+  if (GET_CODE (x) != CONST)
+    return x;
+
+  x = XEXP (x, 0);
+  if (GET_CODE (x) != CONST_INT && GET_CODE (x) != CONST_FIXED
+      && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
+    return orig;
+
+  return x;
+}
+
+/* Return an RTX equivalent to the value of the tree expression
+   EXP.  */
+
+static rtx
+expand_debug_expr (tree exp)
+{
+  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
+  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
+
+  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
+    {
+    case tcc_expression:
+      switch (TREE_CODE (exp))
+	{
+	case COND_EXPR:
+	  goto ternary;
+
+	case TRUTH_ANDIF_EXPR:
+	case TRUTH_ORIF_EXPR:
+	case TRUTH_AND_EXPR:
+	case TRUTH_OR_EXPR:
+	case TRUTH_XOR_EXPR:
+	  goto binary;
+
+	case TRUTH_NOT_EXPR:
+	  goto unary;
+
+	default:
+	  break;
+	}
+      break;
+
+    ternary:
+      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
+      if (!op2)
+	return NULL_RTX;
+      /* Fall through.  */
+
+    binary:
+    case tcc_binary:
+    case tcc_comparison:
+      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
+      if (!op1)
+	return NULL_RTX;
+      /* Fall through.  */
+
+    unary:
+    case tcc_unary:
+      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
+      if (!op0)
+	return NULL_RTX;
+      break;
+
+    case tcc_type:
+    case tcc_statement:
+      gcc_unreachable ();
+
+    case tcc_constant:
+    case tcc_exceptional:
+    case tcc_declaration:
+    case tcc_reference:
+    case tcc_vl_exp:
+      break;
+    }
+
+  switch (TREE_CODE (exp))
+    {
+    case STRING_CST:
+      if (!lookup_constant_def (exp))
+	{
+	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
+	  op0 = gen_rtx_VAR_LOCATION (Pmode, exp, op0, 0);
+	  op0 = gen_rtx_MEM (mode, op0);
+	  debug_string_constants_p = true;
+	  return op0;
+	}
+      /* Fall through...  */
+
+    case INTEGER_CST:
+    case REAL_CST:
+    case FIXED_CST:
+      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
+      return op0;
+
+    case COMPLEX_CST:
+      gcc_assert (COMPLEX_MODE_P (mode));
+      op0 = expand_debug_expr (TREE_REALPART (exp));
+      op0 = wrap_constant (GET_MODE_INNER (mode), op0);
+      op1 = expand_debug_expr (TREE_IMAGPART (exp));
+      op1 = wrap_constant (GET_MODE_INNER (mode), op1);
+      return gen_rtx_CONCAT (mode, op0, op1);
+
+    case VAR_DECL:
+    case PARM_DECL:
+    case FUNCTION_DECL:
+    case LABEL_DECL:
+    case CONST_DECL:
+    case RESULT_DECL:
+      /* This decl was optimized away.  */
+      if (!DECL_RTL_SET_P (exp))
+	return NULL;
+
+      op0 = DECL_RTL (exp);
+
+      if (GET_MODE (op0) == BLKmode)
+	{
+	  gcc_assert (MEM_P (op0));
+	  op0 = adjust_address_nv (op0, mode, 0);
+	  return op0;
+	}
+
+      /* Fall through.  */
+
+    case PAREN_EXPR:
+    case NOP_EXPR:
+    case CONVERT_EXPR:
+      {
+	enum machine_mode inner_mode = GET_MODE (op0);
+
+	if (mode == inner_mode)
+	  return op0;
+
+	if (inner_mode == VOIDmode)
+	  {
+	    inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+	    if (mode == inner_mode)
+	      return op0;
+	  }
+
+	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
+	  {
+	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
+	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
+	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
+	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
+	    else
+	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
+	  }
+	else if (FLOAT_MODE_P (mode))
+	  {
+	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
+	    else
+	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
+	  }
+	else if (FLOAT_MODE_P (inner_mode))
+	  {
+	    if (unsignedp)
+	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
+	    else
+	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
+	  }
+	else if (CONSTANT_P (op0)
+		 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
+	  op0 = simplify_gen_subreg (mode, op0, inner_mode,
+				     subreg_lowpart_offset (mode,
+							    inner_mode));
+	else if (unsignedp)
+	  op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+	else
+	  op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+
+	return op0;
+      }
+
+    case INDIRECT_REF:
+    case ALIGN_INDIRECT_REF:
+    case MISALIGNED_INDIRECT_REF:
+      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
+      if (!op0)
+	return NULL;
+
+      gcc_assert (GET_MODE (op0) == Pmode
+		  || GET_CODE (op0) == CONST_INT
+		  || GET_CODE (op0) == CONST_DOUBLE);
+
+      if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
+	{
+	  int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
+	  op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
+	}
+
+      op0 = gen_rtx_MEM (mode, op0);
+
+      set_mem_attributes (op0, exp, 0);
+
+      return op0;
+
+    case TARGET_MEM_REF:
+      if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
+	return NULL;
+
+      op0 = expand_debug_expr
+	(tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)),
+			    exp));
+      if (!op0)
+	return NULL;
+
+      gcc_assert (GET_MODE (op0) == Pmode
+		  || GET_CODE (op0) == CONST_INT
+		  || GET_CODE (op0) == CONST_DOUBLE);
+
+      op0 = gen_rtx_MEM (mode, op0);
+
+      set_mem_attributes (op0, exp, 0);
+
+      return op0;
+
+    case ARRAY_REF:
+    case ARRAY_RANGE_REF:
+    case COMPONENT_REF:
+    case BIT_FIELD_REF:
+    case REALPART_EXPR:
+    case IMAGPART_EXPR:
+    case VIEW_CONVERT_EXPR:
+      {
+	enum machine_mode mode1;
+	HOST_WIDE_INT bitsize, bitpos;
+	tree offset;
+	int volatilep = 0;
+	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
+					&mode1, &unsignedp, &volatilep, false);
+	rtx orig_op0;
+
+	orig_op0 = op0 = expand_debug_expr (tem);
+
+	if (!op0)
+	  return NULL;
+
+	if (offset)
+	  {
+	    gcc_assert (MEM_P (op0));
+
+	    op1 = expand_debug_expr (offset);
+	    if (!op1)
+	      return NULL;
+
+	    op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, XEXP (op0, 0), op1));
+	  }
+
+	if (MEM_P (op0))
+	  {
+	    if (bitpos >= BITS_PER_UNIT)
+	      {
+		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
+		bitpos %= BITS_PER_UNIT;
+	      }
+	    else if (bitpos < 0)
+	      {
+		int units = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
+		op0 = adjust_address_nv (op0, mode1, units);
+		bitpos += units * BITS_PER_UNIT;
+	      }
+	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
+	      op0 = adjust_address_nv (op0, mode, 0);
+	    else if (GET_MODE (op0) != mode1)
+	      op0 = adjust_address_nv (op0, mode1, 0);
+	    else
+	      op0 = copy_rtx (op0);
+	    if (op0 == orig_op0)
+	      op0 = shallow_copy_rtx (op0);
+	    set_mem_attributes (op0, exp, 0);
+	  }
+
+	if (bitpos == 0 && mode == GET_MODE (op0))
+	  return op0;
+
+	if ((bitpos % BITS_PER_UNIT) == 0
+	    && bitsize == GET_MODE_BITSIZE (mode1))
+	  return simplify_gen_subreg (mode, op0,
+				      GET_MODE (op0) != VOIDmode
+				      ? GET_MODE (op0) : mode1,
+				      bitpos / BITS_PER_UNIT);
+
+	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
+				     && TYPE_UNSIGNED (TREE_TYPE (exp))
+				     ? SIGN_EXTRACT
+				     : ZERO_EXTRACT, mode,
+				     GET_MODE (op0) != VOIDmode
+				     ? GET_MODE (op0) : mode1,
+				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
+      }
+
+    case EXC_PTR_EXPR:
+      /* ??? Do not call get_exception_pointer(), we don't want to gen
+	 it if it hasn't been created yet.  */
+      return get_exception_pointer ();
+
+    case FILTER_EXPR:
+      /* Likewise get_exception_filter().  */
+      return get_exception_filter ();
+
+    case ABS_EXPR:
+      return gen_rtx_ABS (mode, op0);
+
+    case NEGATE_EXPR:
+      return gen_rtx_NEG (mode, op0);
+
+    case BIT_NOT_EXPR:
+      return gen_rtx_NOT (mode, op0);
+
+    case FLOAT_EXPR:
+      if (unsignedp)
+	return gen_rtx_UNSIGNED_FLOAT (mode, op0);
+      else
+	return gen_rtx_FLOAT (mode, op0);
+
+    case FIX_TRUNC_EXPR:
+      if (unsignedp)
+	return gen_rtx_UNSIGNED_FIX (mode, op0);
+      else
+	return gen_rtx_FIX (mode, op0);
+
+    case POINTER_PLUS_EXPR:
+    case PLUS_EXPR:
+      return gen_rtx_PLUS (mode, op0, op1);
+
+    case MINUS_EXPR:
+      return gen_rtx_MINUS (mode, op0, op1);
+
+    case MULT_EXPR:
+      return gen_rtx_MULT (mode, op0, op1);
+
+    case RDIV_EXPR:
+    case TRUNC_DIV_EXPR:
+    case EXACT_DIV_EXPR:
+      if (unsignedp)
+	return gen_rtx_UDIV (mode, op0, op1);
+      else
+	return gen_rtx_DIV (mode, op0, op1);
+
+    case TRUNC_MOD_EXPR:
+      if (unsignedp)
+	return gen_rtx_UMOD (mode, op0, op1);
+      else
+	return gen_rtx_MOD (mode, op0, op1);
+
+    case FLOOR_DIV_EXPR:
+      if (unsignedp)
+	return gen_rtx_UDIV (mode, op0, op1);
+      else
+	{
+	  rtx div = gen_rtx_DIV (mode, op0, op1);
+	  rtx mod = gen_rtx_MOD (mode, op0, op1);
+	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
+	  return gen_rtx_PLUS (mode, div, adj);
+	}
+
+    case FLOOR_MOD_EXPR:
+      if (unsignedp)
+	return gen_rtx_UMOD (mode, op0, op1);
+      else
+	{
+	  rtx mod = gen_rtx_MOD (mode, op0, op1);
+	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
+	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+	  return gen_rtx_PLUS (mode, mod, adj);
+	}
+
+    case CEIL_DIV_EXPR:
+      if (unsignedp)
+	{
+	  rtx div = gen_rtx_UDIV (mode, op0, op1);
+	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
+	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
+	  return gen_rtx_PLUS (mode, div, adj);
+	}
+      else
+	{
+	  rtx div = gen_rtx_DIV (mode, op0, op1);
+	  rtx mod = gen_rtx_MOD (mode, op0, op1);
+	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
+	  return gen_rtx_PLUS (mode, div, adj);
+	}
+
+    case CEIL_MOD_EXPR:
+      if (unsignedp)
+	{
+	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
+	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
+	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+	  return gen_rtx_PLUS (mode, mod, adj);
+	}
+      else
+	{
+	  rtx mod = gen_rtx_MOD (mode, op0, op1);
+	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
+	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+	  return gen_rtx_PLUS (mode, mod, adj);
+	}
+
+    case ROUND_DIV_EXPR:
+      if (unsignedp)
+	{
+	  rtx div = gen_rtx_UDIV (mode, op0, op1);
+	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
+	  rtx adj = round_udiv_adjust (mode, mod, op1);
+	  return gen_rtx_PLUS (mode, div, adj);
+	}
+      else
+	{
+	  rtx div = gen_rtx_DIV (mode, op0, op1);
+	  rtx mod = gen_rtx_MOD (mode, op0, op1);
+	  rtx adj = round_sdiv_adjust (mode, mod, op1);
+	  return gen_rtx_PLUS (mode, div, adj);
+	}
+
+    case ROUND_MOD_EXPR:
+      if (unsignedp)
+	{
+	  rtx mod = gen_rtx_UMOD (mode, op0, op1);
+	  rtx adj = round_udiv_adjust (mode, mod, op1);
+	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+	  return gen_rtx_PLUS (mode, mod, adj);
+	}
+      else
+	{
+	  rtx mod = gen_rtx_MOD (mode, op0, op1);
+	  rtx adj = round_sdiv_adjust (mode, mod, op1);
+	  adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
+	  return gen_rtx_PLUS (mode, mod, adj);
+	}
+
+    case LSHIFT_EXPR:
+      return gen_rtx_ASHIFT (mode, op0, op1);
+
+    case RSHIFT_EXPR:
+      if (unsignedp)
+	return gen_rtx_LSHIFTRT (mode, op0, op1);
+      else
+	return gen_rtx_ASHIFTRT (mode, op0, op1);
+
+    case LROTATE_EXPR:
+      return gen_rtx_ROTATE (mode, op0, op1);
+
+    case RROTATE_EXPR:
+      return gen_rtx_ROTATERT (mode, op0, op1);
+
+    case MIN_EXPR:
+      if (unsignedp)
+	return gen_rtx_UMIN (mode, op0, op1);
+      else
+	return gen_rtx_SMIN (mode, op0, op1);
+
+    case MAX_EXPR:
+      if (unsignedp)
+	return gen_rtx_UMAX (mode, op0, op1);
+      else
+	return gen_rtx_SMAX (mode, op0, op1);
+
+    case BIT_AND_EXPR:
+    case TRUTH_AND_EXPR:
+      return gen_rtx_AND (mode, op0, op1);
+
+    case BIT_IOR_EXPR:
+    case TRUTH_OR_EXPR:
+      return gen_rtx_IOR (mode, op0, op1);
+
+    case BIT_XOR_EXPR:
+    case TRUTH_XOR_EXPR:
+      return gen_rtx_XOR (mode, op0, op1);
+
+    case TRUTH_ANDIF_EXPR:
+      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
+
+    case TRUTH_ORIF_EXPR:
+      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
+
+    case TRUTH_NOT_EXPR:
+      return gen_rtx_EQ (mode, op0, const0_rtx);
+
+    case LT_EXPR:
+      if (unsignedp)
+	return gen_rtx_LTU (mode, op0, op1);
+      else
+	return gen_rtx_LT (mode, op0, op1);
+
+    case LE_EXPR:
+      if (unsignedp)
+	return gen_rtx_LEU (mode, op0, op1);
+      else
+	return gen_rtx_LE (mode, op0, op1);
+
+    case GT_EXPR:
+      if (unsignedp)
+	return gen_rtx_GTU (mode, op0, op1);
+      else
+	return gen_rtx_GT (mode, op0, op1);
+
+    case GE_EXPR:
+      if (unsignedp)
+	return gen_rtx_GEU (mode, op0, op1);
+      else
+	return gen_rtx_GE (mode, op0, op1);
+
+    case EQ_EXPR:
+      return gen_rtx_EQ (mode, op0, op1);
+
+    case NE_EXPR:
+      return gen_rtx_NE (mode, op0, op1);
+
+    case UNORDERED_EXPR:
+      return gen_rtx_UNORDERED (mode, op0, op1);
+
+    case ORDERED_EXPR:
+      return gen_rtx_ORDERED (mode, op0, op1);
+
+    case UNLT_EXPR:
+      return gen_rtx_UNLT (mode, op0, op1);
+
+    case UNLE_EXPR:
+      return gen_rtx_UNLE (mode, op0, op1);
+
+    case UNGT_EXPR:
+      return gen_rtx_UNGT (mode, op0, op1);
+
+    case UNGE_EXPR:
+      return gen_rtx_UNGE (mode, op0, op1);
+
+    case UNEQ_EXPR:
+      return gen_rtx_UNEQ (mode, op0, op1);
+
+    case LTGT_EXPR:
+      return gen_rtx_LTGT (mode, op0, op1);
+
+    case COND_EXPR:
+      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
+
+    case COMPLEX_EXPR:
+      gcc_assert (COMPLEX_MODE_P (mode));
+      if (GET_MODE (op0) == VOIDmode)
+	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
+      if (GET_MODE (op1) == VOIDmode)
+	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
+      return gen_rtx_CONCAT (mode, op0, op1);
+
+    case ADDR_EXPR:
+      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
+      if (!op0 || !MEM_P (op0))
+	return NULL;
+
+      return XEXP (op0, 0);
+
+    case VECTOR_CST:
+      exp = build_constructor_from_list (TREE_TYPE (exp),
+					 TREE_VECTOR_CST_ELTS (exp));
+      /* Fall through.  */
+
+    case CONSTRUCTOR:
+      if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
+	{
+	  unsigned i;
+	  tree val;
+
+	  op0 = gen_rtx_CONCATN
+	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
+
+	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
+	    {
+	      op1 = expand_debug_expr (val);
+	      if (!op1)
+		return NULL;
+	      XVECEXP (op0, 0, i) = op1;
+	    }
+
+	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
+	    {
+	      op1 = expand_debug_expr
+		(fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
+
+	      if (!op1)
+		return NULL;
+
+	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
+		XVECEXP (op0, 0, i) = op1;
+	    }
+
+	  return op0;
+	}
+      else
+	goto flag_unsupported;
+
+    case CALL_EXPR:
+      /* ??? Maybe handle some builtins?  */
+      return NULL;
+
+      /* SSA names of optimized-away variables can survive un-SSA.  */
+    case SSA_NAME:
+    case ERROR_MARK:
+      return NULL;
+
+
+    default:
+    flag_unsupported:
+#if 0
+      return NULL;
+#endif
+      debug_tree (exp);
+      gcc_unreachable ();
+    }
+}
+
 /* Expand basic block BB from GIMPLE trees to RTL.  */
 
 static basic_block
@@ -2075,9 +2838,10 @@ expand_gimple_basic_block (basic_block b
 
   for (; !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      gimple stmt = gsi_stmt (gsi);
       basic_block new_bb;
 
+      stmt = gsi_stmt (gsi);
+
       /* Expand this statement, then evaluate the resulting RTL and
 	 fixup the CFG accordingly.  */
       if (gimple_code (stmt) == GIMPLE_COND)
@@ -2086,6 +2850,69 @@ expand_gimple_basic_block (basic_block b
 	  if (new_bb)
 	    return new_bb;
 	}
+      else if (IS_DEBUG_STMT (stmt))
+	{
+	  location_t sloc = get_curr_insn_source_location ();
+	  tree sblock = get_curr_insn_block ();
+	  gimple_stmt_iterator nsi = gsi;
+
+	  for (;;)
+	    {
+	      tree var = VAR_DEBUG_VALUE_VAR (stmt);
+	      tree value = VAR_DEBUG_VALUE_VALUE (stmt);
+	      rtx val;
+	      enum machine_mode mode;
+
+	      last = get_last_insn ();
+
+	      if (value == VAR_DEBUG_VALUE_NOVALUE)
+		val = NULL_RTX;
+	      else
+		val = expand_debug_expr (value);
+
+	      set_curr_insn_source_location (gimple_location (stmt));
+	      set_curr_insn_block (gimple_block (stmt));
+
+	      if (DECL_P (var))
+		mode = DECL_MODE (var);
+	      else
+		mode = TYPE_MODE (TREE_TYPE (var));
+
+	      if (!val)
+		val = gen_rtx_UNKNOWN_VAR_LOC ();
+	      else
+		gcc_assert (mode == GET_MODE (val)
+			    || GET_CODE (val) == CONST_INT
+			    || GET_CODE (val) == CONST_FIXED
+			    || GET_CODE (val) == CONST_DOUBLE);
+
+	      val = gen_rtx_VAR_LOCATION
+		(mode, var, val, VAR_INIT_STATUS_INITIALIZED);
+
+	      val = emit_debug_insn (val);
+
+	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
+
+	      if (last != PREV_INSN (val))
+		{
+		  debug_gimple_stmt (stmt);
+		  debug_rtx_range (NEXT_INSN (last), get_last_insn ());
+		  internal_error ("debug stmt expanded to too many insns");
+		  gcc_unreachable ();
+		}
+
+	      gsi = nsi;
+	      gsi_next (&nsi);
+	      if (gsi_end_p (nsi))
+		break;
+	      stmt = gsi_stmt (nsi);
+	      if (!IS_DEBUG_STMT (stmt))
+		break;
+	    }
+
+	  set_curr_insn_source_location (sloc);
+	  set_curr_insn_block (sblock);
+	}
       else
 	{
 	  if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
@@ -2561,6 +3388,8 @@ gimple_expand_cfg (void)
   pointer_map_destroy (lab_rtx_for_bb);
   free_histograms ();
 
+  adjust_debug_string_constants ();
+
   construct_exit_block ();
   set_curr_insn_block (DECL_INITIAL (current_function_decl));
   insn_locators_finalize ();
Index: gcc/cselib.h
===================================================================
--- gcc/cselib.h.orig	2009-06-05 05:07:49.000000000 -0300
+++ gcc/cselib.h	2009-06-05 05:07:56.000000000 -0300
@@ -53,7 +53,18 @@ struct GTY(()) elt_list {
   cselib_val *elt;
 };
 
+/* Describe a single set that is part of an insn.  */
+struct cselib_set
+{
+  rtx src;
+  rtx dest;
+  cselib_val *src_elt;
+  cselib_val *dest_addr_elt;
+};
+
 extern void (*cselib_discard_hook) (cselib_val *);
+extern void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
+					int n_sets);
 
 extern cselib_val *cselib_lookup (rtx, enum machine_mode, int);
 extern void cselib_init (bool record_memory);
@@ -64,5 +75,16 @@ extern enum machine_mode cselib_reg_set_
 extern int rtx_equal_for_cselib_p (rtx, rtx);
 extern int references_value_p (const_rtx, int);
 extern rtx cselib_expand_value_rtx (rtx, bitmap, int);
+typedef rtx (*cselib_expand_callback)(rtx, bitmap, int, void *);
+extern rtx cselib_expand_value_rtx_cb (rtx, bitmap, int,
+				       cselib_expand_callback, void*);
 extern rtx cselib_subst_to_values (rtx);
 extern void cselib_invalidate_rtx (rtx);
+
+extern void cselib_reset_table_with_next_value (unsigned int);
+extern unsigned int cselib_get_next_unknown_value (void);
+extern void cselib_preserve_value (cselib_val *);
+extern bool cselib_preserved_value_p (cselib_val *);
+extern void cselib_preserve_only_values (bool);
+
+extern void dump_cselib_table (FILE *);
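
The cselib.h hunk exports a struct cselib_set describing each SET recorded
for an insn, a cselib_record_sets_hook through which a client can observe
those sets, a callback-taking variant of cselib_expand_value_rtx, and entry
points for preserving values across a table reset; presumably these are
consumed by the var-tracking side of the VTA series.  A rough sketch of how
the hook might be hooked up (GCC-internal code, so it only compiles inside
the compiler with the usual coretypes.h/rtl.h/cselib.h includes; the
function names are invented for illustration):

/* Sketch only: a hook consumer that dumps every set cselib records
   for an insn.  The signature matches the cselib_record_sets_hook
   declaration above; show_recorded_sets and install_cselib_hook are
   made-up names.  */

static void
show_recorded_sets (rtx insn, struct cselib_set *sets, int n_sets)
{
  int i;

  debug_rtx (insn);
  for (i = 0; i < n_sets; i++)
    {
      /* dest and src are the SET operands; src_elt is the cselib value
	 of the source, dest_addr_elt the value of the destination
	 address for a MEM destination.  */
      debug_rtx (sets[i].dest);
      debug_rtx (sets[i].src);
    }
}

static void
install_cselib_hook (void)
{
  cselib_record_sets_hook = show_recorded_sets;
}
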
-- 
Alexandre Oliva, freedom fighter    http://FSFLA.org/~lxoliva/
You must be the change you wish to see in the world. -- Gandhi
Be Free! -- http://FSFLA.org/   FSF Latin America board member
Free Software Evangelist      Red Hat Brazil Compiler Engineer
