get_ref_base_and_extent based ctor folding

Richard Guenther rguenther@suse.de
Tue Sep 14 16:43:00 GMT 2010


On Tue, 14 Sep 2010, Jan Hubicka wrote:

> Hi,
> this patch adds fold_ctor_reference that basically takes the output of get_ref_base_and_extent and is able
> to look up the value in the corresponding constructors.  It fixes several defects I know of in the original
> implementations:
>   1) ARRAY folding was incomplete.  It ignored fields with INDEX NULL (it is valid and fortran FE does
>      that often and so does C++ FE for vtables and other special stuff.  Normal array initializers
>      in C and C++ always have indexes).
>      It also ignored RANGE_EXPR indexes.
>   2) Incomplete constructors were not handled.
>      This involve cases like
>      static const char a[6]="t";
>      a[3] is 0.
>      struct a {int a,b;} a={1};
>      a.b is 0.
>      or an incomplete array constructor
>   3) MEM_REF handling ignored low bounds of array. I guess it is just fortran-fu to create a wrong
>      code testcase
>   4) We did not handle BITFIELD_REF and TARGET_MEM_REF.
>   5) C++ vtables might be DECL_EXTERN and still have constructor available. This matters for
>      devirtualization, we want to be able to look inside even when we will not produce constructor
>      in current unit.  However it happens that vtables refer to static construction vtables from
>      other compilation units.  This never caused trouble in the past because of 1) (we missed all
>      the folding), but now we need to check for this case.
>      I am checking for variables having both STATIC and EXTERN flags as I believe it is how
>      C++ FE represent those and I hope normal vars are always either STATIC or EXTERN.
> 
> I think the resulting code is pretty much the same amount of code and it takes advantage of all the logic
> in get_ref_base_and_extent. Also the fold-const string folding can just use this so we can remove
> some code duplication.
> 
> There are few things I am not happy about:
> 
> 1) array refs with variable offset need to be handled specially since we need to call get_value.
>    Perhaps we can move get_ref_base_and_extent to tree-ssa-ccp and make it do so?
>    It is not _that_ critical since my current implementation miss only nested references and
>    they are still handled by iteration.

Worry about that later.

> 2) I think the offset computation in the array ref code and in the mem_ref handling can overflow.  It is taken
>    directly from get_ref_base_and_extent, but perhaps we should compute the bit offset in double_int
>    and only round down for actual folding? (I do not care about static objects large enough
>    so bit offsets does not fit in HOST_WIDE_INT, but we should leave them unfolded rather than
>    returning random numbers)

I do have a patch lying around to convert get_ref_base_and_extent to
use double_ints - I just never got around to update and submit it.

> Bootstrapped/regtested x86_64-linux, seems sane?

I will have a detailed look tomorrow - can you deal with fallout
from your latest patches first?

Thanks,
Richard.

> 
> Honza
> 
> /* { dg-do compile } */
> /* { dg-options "-O -fdump-tree-ccp1" } */
> 
> 
> static const char a[5]="t";
> static const int b[5]={1,2};
> static const struct a {int a : 6; int b : 6;} c = {5,9};
> test()
> {
>   return a[2]+b[1]+b[3]+c.b;
> }
> /* { dg-final { scan-tree-dump "return 11;" "ccp1" } } */
> /* { dg-final { cleanup-tree-dump "ccp1" } } */
> 	* tree-ssa-ccp.c (fold_ctor_reference): New function.
> 	(fold_const_aggregate_ref): Use it.
> 	* fold-const.c (canonicalize_constructor_val): Check that we don't fold
> 	into external static.
> Index: tree-ssa-ccp.c
> ===================================================================
> *** tree-ssa-ccp.c	(revision 164250)
> --- tree-ssa-ccp.c	(working copy)
> *************** get_base_constructor (tree base, tree *o
> *** 1371,1376 ****
> --- 1371,1588 ----
>       }
>   }
>   
> + /* CTOR is value initializing memory, fold reference of TYPE to the memory at
> +    bit OFFSET.  */
> + 
> + static tree
> + fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
> + 		     unsigned HOST_WIDE_INT size)
> + {
> +   tree ret;
> + 
> +   if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
> +       && compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size) >=0)
> +     {
> +       ret = canonicalize_constructor_val (ctor);
> +       if (ret
> +           && !useless_type_conversion_p (type, TREE_TYPE (ret)))
> + 	{
> + 	  /* VIEW_CONVERT_EXPR is valid only for matching operand sizes.
> + 	     For bitfields, the size of memory acccess differ from size of type,
> + 	     thus we need both compare that ctor is greater than memory access
> + 	     and that type sizes match.  */
> + 	  if (!operand_equal_p (TYPE_SIZE (type),
> + 				TYPE_SIZE (TREE_TYPE (ctor)), 0))
> + 	    return NULL_TREE;
> + 	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
> + 	  if (ret)
> + 	    STRIP_NOPS (ret);
> + 	}
> +      return ret;
> +     }
> +   /* For aggregates and offset 0, we can either return
> +      VIEW_CONVERT_EXPR of CTOR
> +      or look inside ctor for possibly better match.
> + 
> +      If possible, choose alternative that has no conversion.  */
> +   if (AGGREGATE_TYPE_P (type)
> +       && useless_type_conversion_p (type, TREE_TYPE (ctor))
> +       && !offset)
> +     goto maybe_whole_ctor;
> +   if (TREE_CODE (ctor) == STRING_CST)
> +     {
> +       if (INTEGRAL_TYPE_P (type)
> + 	  && (TYPE_MODE (type)
> + 	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
> + 	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
> + 	      == MODE_INT)
> + 	  && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
> + 	  && !(offset % BITS_PER_UNIT))
> + 	{
> + 	  offset /= BITS_PER_UNIT;
> + 	  if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
> + 	    return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
> + 				       [offset]));
> + 	  /* Folding
> + 	     const char a[20]="hello";
> + 	     return a[10];
> + 
> + 	     might lead to offset greater than string length.  In this case we
> + 	     know value is either initialized to 0 or out of bounds.  Return 0
> + 	     in both cases.  */
> + 	  return build_zero_cst (type);
> + 	}
> +       goto maybe_whole_ctor;
> +     }
> +   if (TREE_CODE (ctor) == CONSTRUCTOR)
> +     {
> +       unsigned HOST_WIDE_INT cnt;
> +       tree cfield, cval;
> + 
> +       if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
> + 	{
> + 	  double_int low_bound, elt_size;
> + 	  double_int index, max_index;
> + 	  tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
> + 
> + 	  /* Compute low bound and elt size.  */
> + 	  if (domain_type && TYPE_MIN_VALUE (domain_type))
> + 	    {
> + 	      /* Static constructors for variably sized objects makes no sense.  */
> + 	      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) ==
> + 			  INTEGER_CST);
> + 	      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
> + 	    }
> + 	  else
> + 	    low_bound = double_int_zero;
> + 
> + 	  /* Static constructors for variably sized objects makes no sense.  */
> + 	  gcc_assert (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
> + 	  elt_size =
> + 	    tree_to_double_int (TYPE_SIZE_UNIT
> + 				(TREE_TYPE (TREE_TYPE (ctor))));
> + 
> + 	  if (!TYPE_SIZE_UNIT (type)
> + 	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
> + 	      || double_int_cmp (elt_size,
> + 				 tree_to_double_int (TYPE_SIZE_UNIT (type)),
> + 				 0) < 0)
> + 	    goto maybe_whole_ctor;
> + 	  index = double_int_sub (low_bound, double_int_one);
> + 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
> + 				    cval)
> + 	    {
> + 	      unsigned HOST_WIDE_INT elt_offset, max_elt_offset;
> + 
> + 	      /* Array constructor might explicitely set index, or specify range
> + 		 or leave index NULL meaning that it is next index after previous one.  */
> + 	      if (cfield)
> + 		{
> + 		  if (TREE_CODE (cfield) == INTEGER_CST)
> + 		    max_index = index = tree_to_double_int (cfield);
> + 		  else
> + 		    {
> + 		      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
> + 		      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
> + 		      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
> + 		    }
> + 		}
> + 	      else
> + 		max_index = index = double_int_add (index, double_int_one);
> + 
> + 	      /* Temporaries computing the offset can be large, offset in the static storage
> + 		 however must fit in memory, so HOST_WIDE_INT is enough.  */
> + 	      elt_offset =
> + 		double_int_to_uhwi (double_int_mul
> + 				    (double_int_sub (index, low_bound),
> + 				     elt_size));
> + 	      max_elt_offset =
> + 		double_int_to_uhwi (double_int_mul
> + 				    (double_int_add
> + 				     (double_int_sub (max_index, low_bound),
> + 				      double_int_one), elt_size));
> + 	      if (elt_offset <= offset / BITS_PER_UNIT
> + 		  && max_elt_offset >
> + 		  (offset + BITS_PER_UNIT - 1) / BITS_PER_UNIT)
> + 		{
> + 		  unsigned HOST_WIDE_INT
> + 		    inner_offset = ((offset - elt_offset * BITS_PER_UNIT)
> + 				    % (double_int_to_uhwi (elt_size) *
> + 				       BITS_PER_UNIT));
> + 		  ret = fold_ctor_reference (type, cval, inner_offset, size);
> + 		  if (ret)
> + 		    return ret;
> + 		  goto maybe_whole_ctor;
> + 		}
> + 	    }
> + 	  /* When memory is not explicitely mentioned in constructor,
> + 	     it is 0 (or out of range).  */
> + 	  return build_zero_cst (type);
> + 	}
> +       else
> + 	{
> + 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
> + 				    cval)
> + 	    {
> + 	      tree byte_offset = DECL_FIELD_OFFSET (cfield);
> + 	      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
> + 	      tree field_size = DECL_SIZE (cfield);
> + 	      double_int bitoffset;
> + 
> + 	      /* Variable sized objects in static constructors makes no sense.  */
> + 	      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
> + 			  && TREE_CODE (byte_offset) == INTEGER_CST
> + 			  && TREE_CODE (field_size) == INTEGER_CST);
> + 	      bitoffset = double_int_add (tree_to_double_int (field_offset),
> + 					  double_int_mul
> + 					    (tree_to_double_int (byte_offset),
> + 					     uhwi_to_double_int (BITS_PER_UNIT)));
> + 	      if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
> + 		  && double_int_cmp (uhwi_to_double_int (offset),
> + 				     double_int_add
> + 				     (bitoffset,
> + 				      tree_to_double_int (field_size)), 0) < 0)
> + 		{
> + 		  ret = fold_ctor_reference (type, cval,
> + 					     double_int_to_uhwi
> + 					     (double_int_sub
> + 					      (uhwi_to_double_int (offset),
> + 					       bitoffset)), size);
> + 		  if (ret)
> + 		    return ret;
> + 		  goto maybe_whole_ctor;
> + 		}
> + 	    }
> + 	  /* When memory is not explicitely mentioned in constructor, it is 0.  */
> + 	  return build_zero_cst (type);
> + 	}
> +     }
> + 
> + maybe_whole_ctor:
> +   if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (TREE_TYPE (ctor))
> +       && !offset
> +       && compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size) >=0)
> +     {
> +       ret = canonicalize_constructor_val (ctor);
> +       if (ret
> +           && !useless_type_conversion_p (type, TREE_TYPE (ret)))
> + 	{
> + 	  /* VIEW_CONVERT_EXPR is valid only for matching operand sizes.
> + 	     For bitfields, the size of memory acccess differ from size of type,
> + 	     thus we need both compare that ctor is greater than memory access
> + 	     and that type sizes match.  */
> + 	  if (!operand_equal_p (TYPE_SIZE (type),
> + 				TYPE_SIZE (TREE_TYPE (ctor)), 0))
> + 	    return NULL_TREE;
> + 	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
> + 	  if (ret)
> + 	    STRIP_NOPS (ret);
> + 	}
> +      return ret;
> +    }
> +   return NULL_TREE;
> + }
> + 
>   /* Return the tree representing the element referenced by T if T is an
>      ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
>      NULL_TREE otherwise.  */
> *************** get_base_constructor (tree base, tree *o
> *** 1378,1387 ****
>   tree
>   fold_const_aggregate_ref (tree t)
>   {
> !   tree ctor, idx, field;
> !   unsigned HOST_WIDE_INT cnt;
> !   tree cfield, cval;
>     tree tem;
>   
>     if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
>       return get_symbol_constant_value (t);
> --- 1590,1599 ----
>   tree
>   fold_const_aggregate_ref (tree t)
>   {
> !   tree ctor, idx, base;
> !   HOST_WIDE_INT offset, size, max_size;
>     tree tem;
> +   tree ctr_offset;
>   
>     if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
>       return get_symbol_constant_value (t);
> *************** fold_const_aggregate_ref (tree t)
> *** 1393,1491 ****
>     switch (TREE_CODE (t))
>       {
>       case ARRAY_REF:
> !       ctor = get_base_constructor (TREE_OPERAND (t, 0), &idx);
>   
> !       if (idx)
> ! 	return NULL_TREE;
>   
>         if (ctor == error_mark_node)
>   	return build_zero_cst (TREE_TYPE (t));
>   
> !       if (ctor == NULL_TREE
> ! 	  || (TREE_CODE (ctor) != CONSTRUCTOR
> ! 	      && TREE_CODE (ctor) != STRING_CST))
> ! 	return NULL_TREE;
> ! 
> !       /* Get the index.  If we have an SSA_NAME, try to resolve it
> ! 	 with the current lattice value for the SSA_NAME.  */
> !       idx = TREE_OPERAND (t, 1);
> !       switch (TREE_CODE (idx))
>   	{
> ! 	case SSA_NAME:
> ! 	  if ((tem = get_constant_value (idx))
> ! 	      && TREE_CODE (tem) == INTEGER_CST)
> ! 	    idx = tem;
> ! 	  else
>   	    return NULL_TREE;
> ! 	  break;
> ! 
> ! 	case INTEGER_CST:
> ! 	  break;
> ! 
> ! 	default:
> ! 	  return NULL_TREE;
>   	}
> ! 
> !       /* Fold read from constant string.  */
> !       if (TREE_CODE (ctor) == STRING_CST)
> ! 	{
> ! 	  tree low_bound = array_ref_low_bound (t);
> ! 	  double_int low_bound_cst;
> ! 	  double_int index_cst;
> ! 	  double_int length_cst;
> ! 	  bool signed_p = TYPE_UNSIGNED (TREE_TYPE (idx));
> ! 
> ! 	  if (TREE_CODE (idx) != INTEGER_CST
> ! 	      || !INTEGRAL_TYPE_P (TREE_TYPE (t))
> ! 	      || TREE_CODE (low_bound) != INTEGER_CST)
> ! 	    return NULL_TREE;
> ! 	  low_bound_cst = tree_to_double_int (low_bound);
> ! 	  index_cst = tree_to_double_int (idx);
> ! 	  length_cst = uhwi_to_double_int (TREE_STRING_LENGTH (ctor));
> ! 	  index_cst = double_int_sub (index_cst, low_bound_cst);
> ! 	  if ((TYPE_MODE (TREE_TYPE (t))
> ! 	       == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
> ! 	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
> ! 	          == MODE_INT)
> ! 	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
> ! 	      && double_int_cmp (index_cst, length_cst, signed_p) < 0)
> ! 	    return build_int_cst_type (TREE_TYPE (t),
> ! 				       (TREE_STRING_POINTER (ctor)
> ! 					[double_int_to_uhwi (index_cst)]));
> ! 	  return NULL_TREE;
> ! 	}
> ! 
> !       /* Whoo-hoo!  I'll fold ya baby.  Yeah!  */
> !       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
> ! 	if (tree_int_cst_equal (cfield, idx))
> ! 	  return canonicalize_constructor_val (cval);
> !       break;
> ! 
> !     case COMPONENT_REF:
> !       /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
> ! 	 DECL_INITIAL.  If BASE is a nested reference into another
> ! 	 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
> ! 	 the inner reference.  */
> !       ctor = get_base_constructor (TREE_OPERAND (t, 0), &idx);
> ! 
> !       if (idx)
>   	return NULL_TREE;
> ! 
> !       if (ctor == error_mark_node)
> ! 	return build_zero_cst (TREE_TYPE (t));
> ! 
> !       if (ctor == NULL_TREE
> ! 	  || TREE_CODE (ctor) != CONSTRUCTOR)
>   	return NULL_TREE;
>   
> !       field = TREE_OPERAND (t, 1);
> ! 
> !       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
> ! 	if (cfield == field
> ! 	    /* FIXME: Handle bit-fields.  */
> ! 	    && ! DECL_BIT_FIELD (cfield))
> ! 	  return canonicalize_constructor_val (cval);
> !       break;
>   
>       case REALPART_EXPR:
>       case IMAGPART_EXPR:
> --- 1605,1684 ----
>     switch (TREE_CODE (t))
>       {
>       case ARRAY_REF:
> !     case ARRAY_RANGE_REF:
> !       /* Constant indexes are handled well by get_base_constructor.
> ! 	 Only special case variable offsets.
> ! 	 FIXME: This code can't handle nested references with variable indexes
> ! 	 (they will be handled only by iteration of ccp).  Perhaps we can bring
> ! 	 get_ref_base_and_extent here and make it use get_constant_value.  */
> !       if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
> ! 	  && (idx = get_constant_value (TREE_OPERAND (t, 1)))
> ! 	  && host_integerp (idx, 0))
> ! 	{
> ! 	  tree low_bound, unit_size;
>   
> ! 	  /* If the resulting bit-offset is constant, track it.  */
> ! 	  if ((low_bound = array_ref_low_bound (t),
> ! 	       host_integerp (low_bound, 0))
> ! 	      && (unit_size = array_ref_element_size (t),
> ! 		  host_integerp (unit_size, 1)))
> ! 	    {
> ! 	      offset = TREE_INT_CST_LOW (idx);
> ! 	      offset -= TREE_INT_CST_LOW (low_bound);
> ! 	      offset *= TREE_INT_CST_LOW (unit_size);
> ! 	      offset *= BITS_PER_UNIT;
> ! 
> ! 	      base = TREE_OPERAND (t, 0);
> ! 	      ctor = get_base_constructor (base, &ctr_offset);
> ! 	      if (ctr_offset)
> ! 		{
> ! 		  if (!host_integerp (ctr_offset, 1))
> ! 		    return NULL_TREE;
> ! 		  offset += TREE_INT_CST_LOW (ctr_offset) * BITS_PER_UNIT;
> ! 		}
> ! 	      /* Empty constructor.  Always fold to 0. */
> ! 	      if (ctor == error_mark_node)
> ! 		return build_zero_cst (TREE_TYPE (t));
> ! 	      /* Out of bound array access.  Value is undefined, but don't fold. */
> ! 	      if (offset < 0)
> ! 		return NULL_TREE;
> ! 	      /* We can not determine ctor.  */
> ! 	      if (!ctor)
> ! 		return NULL_TREE;
> ! 	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
> ! 					  TREE_INT_CST_LOW (unit_size));
> ! 	    }
> ! 	}
> !       /* Fallthru.  */
> ! 	
> !     case COMPONENT_REF:
> !     case BIT_FIELD_REF:
> !     case TARGET_MEM_REF:
> !     case MEM_REF:
> !       base = get_ref_base_and_extent (t, &offset, &size, &max_size);
> !       ctor = get_base_constructor (base, &ctr_offset);
>   
> +       /* Empty constructor.  Always fold to 0. */
>         if (ctor == error_mark_node)
>   	return build_zero_cst (TREE_TYPE (t));
>   
> !       if (ctr_offset)
>   	{
> ! 	  if (!host_integerp (ctr_offset, 1))
>   	    return NULL_TREE;
> ! 	  offset += TREE_INT_CST_LOW (ctr_offset) * BITS_PER_UNIT;
>   	}
> !       /* Out of bound array access.  Value is undefined, but don't fold. */
> !       if (offset < 0)
>   	return NULL_TREE;
> !       /* We can not determine ctor.  */
> !       if (!ctor)
> ! 	return NULL_TREE;
> !       /* We do not know precise address.  */
> !       if (max_size != size)
>   	return NULL_TREE;
>   
> !       return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
>   
>       case REALPART_EXPR:
>       case IMAGPART_EXPR:
> *************** fold_const_aggregate_ref (tree t)
> *** 1497,1569 ****
>   	break;
>         }
>   
> -     case MEM_REF:
> -       ctor = get_base_constructor (t, &idx);
> - 
> -       if (ctor == error_mark_node)
> - 	return build_zero_cst (TREE_TYPE (t));
> - 
> -       if (ctor && !AGGREGATE_TYPE_P (TREE_TYPE (ctor))
> - 	  && !idx)
> - 	{
> - 	  if (ctor
> - 	      && !useless_type_conversion_p
> - 		    (TREE_TYPE (t), TREE_TYPE (ctor)))
> - 	    ctor = fold_unary (VIEW_CONVERT_EXPR, TREE_TYPE (t), ctor);
> - 	  return ctor;
> - 	}
> - 
> -       if (!idx)
> - 	idx = integer_zero_node;
> - 
> -       if (ctor == NULL_TREE
> - 	  || (TREE_CODE (ctor) != CONSTRUCTOR
> - 	      && TREE_CODE (ctor) != STRING_CST))
> - 	return NULL_TREE;
> - 
> -       /* Fold read from constant string.  */
> -       if (TREE_CODE (ctor) == STRING_CST)
> - 	{
> - 	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
> - 	      && (TYPE_MODE (TREE_TYPE (t))
> - 		  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
> - 	      && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
> - 	          == MODE_INT)
> - 	      && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
> - 	      && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
> - 	    return build_int_cst_type (TREE_TYPE (t),
> - 				       (TREE_STRING_POINTER (ctor)
> - 					[TREE_INT_CST_LOW (idx)]));
> - 	  return NULL_TREE;
> - 	}
> - 
> -       /* ???  Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
> -       if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
> - 	  && (TYPE_MODE (TREE_TYPE (t))
> - 	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
> - 	  && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
> - 	  && integer_zerop
> - 	       (int_const_binop
> - 		  (TRUNC_MOD_EXPR, idx,
> - 		   size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
> - 	{
> - 	  idx = int_const_binop (TRUNC_DIV_EXPR, idx,
> - 				 size_int (GET_MODE_SIZE
> - 					     (TYPE_MODE (TREE_TYPE (t)))), 0);
> - 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
> - 	    if (tree_int_cst_equal (cfield, idx))
> - 	      {
> - 		cval = canonicalize_constructor_val (cval);
> - 		if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
> - 		  return cval;
> - 		else if (CONSTANT_CLASS_P (cval))
> - 		  return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
> - 		else
> - 		  return NULL_TREE;
> - 	      }
> - 	}
> -       break;
> - 
>       default:
>         break;
>       }
> --- 1690,1695 ----
> Index: gimple-fold.c
> ===================================================================
> *** gimple-fold.c	(revision 164250)
> --- gimple-fold.c	(working copy)
> *************** canonicalize_constructor_val (tree cval)
> *** 51,57 ****
>       {
>         tree base = get_base_address (TREE_OPERAND (cval, 0));
>         if (base && TREE_CODE (base) == VAR_DECL)
> ! 	add_referenced_var (base);
>       }
>     return cval;
>   }
> --- 51,67 ----
>       {
>         tree base = get_base_address (TREE_OPERAND (cval, 0));
>         if (base && TREE_CODE (base) == VAR_DECL)
> ! 	{
> ! 	  struct varpool_node *vnode;
> ! 	  /* initializer of external C++ vtables contain references to symbols
> ! 	     from other compilation unit.  We can't refer to those directly.  
> ! 	     Those symbols are all both static and external.  */
> ! 	  if ((TREE_STATIC (base) && DECL_EXTERNAL (base))
> ! 	      && (!(vnode = varpool_get_node (base))
> ! 		  /*|| !vnode->finalized*/))
> ! 	    return NULL_TREE;
> ! 	  add_referenced_var (base);
> ! 	}
>       }
>     return cval;
>   }
> 
> 

-- 
Richard Guenther <rguenther@suse.de>
Novell / SUSE Labs
SUSE LINUX Products GmbH - Nuernberg - AG Nuernberg - HRB 16746 - GF: Markus Rex



More information about the Gcc-patches mailing list