[PATCH][3/n] LTO type merging cleanup

Richard Guenther <rguenther@suse.de>
Tue May 10 16:25:00 GMT 2011


This is the second half of splitting TYPE_CANONICAL handling away
from the rest of the machinery.  The following patch introduces
a simplified, non-SCC-based hashing for TYPE_CANONICAL - it
still hashes things it shouldn't, but the patch is supposed to
be functionally equivalent apart from the pointer-following case.
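
To see why recursing into pointed-to types needs cycle handling at
all, consider a self-referential type (a hypothetical example, not
taken from the patch):

  /* Hashing 'struct node' by recursing through the pointed-to type
     of 'next' would visit 'struct node' again and never terminate;
     that is what the SCC-based machinery had to cope with.  The new
     code instead folds in only the TREE_CODE of the pointee.  */
  struct node
  {
    int val;
    struct node *next;
  };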

Bootstrapped on x86_64-unknown-linux-gnu; testing and a SPEC2k6 LTO
build are in progress.

I plan to commit both pieces together tomorrow if testing
succeeds.

A followup will then remove dead codepaths from the other half
of the machinery, followed by a patch to simplify both hashing
and comparing TYPE_CANONICALs a bit.

Richard.

2011-05-10  Richard Guenther  <rguenther@suse.de>

	* gimple.c (iterative_hash_canonical_type): Split out from
	iterative_hash_gimple_type and friends.  Do not recurse
	to pointed-to types.
	(gimple_canonical_type_hash): Use it, allocate the hash here.
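
To make the scheme concrete, here is a minimal, self-contained
sketch in plain C (not GCC code; all names are made up): the raw
hash of each type is computed once and cached, and a pointer
contributes only the kind of its pointee, so cyclic type graphs
terminate without any SCC walking.

  #include <stdio.h>

  enum kind { K_INT, K_POINTER, K_RECORD };

  struct toy_type
  {
    enum kind kind;
    struct toy_type *inner;   /* pointee, or single field type */
    unsigned cached;          /* 0 means not yet computed */
  };

  /* Toy stand-in for iterative_hash_hashval_t.  */
  static unsigned
  combine (unsigned v, unsigned val)
  {
    return v * 0x9e3779b9u + val;
  }

  static unsigned
  hash_type (struct toy_type *t, unsigned val)
  {
    unsigned v;
    if (t->cached)
      /* Return the cached raw hash, still combined with VAL.  */
      return combine (t->cached, val);
    v = combine (t->kind, 0);
    if (t->kind == K_POINTER)
      /* Do not recurse; fold in only the pointee's kind.  */
      v = combine (t->inner->kind, v);
    else if (t->kind == K_RECORD && t->inner)
      v = combine (hash_type (t->inner, 0), v);
    t->cached = v;
    return combine (v, val);
  }

  int
  main (void)
  {
    /* struct node { struct node *next; }; forms a type cycle.  */
    struct toy_type node = { K_RECORD, 0, 0 };
    struct toy_type ptr = { K_POINTER, &node, 0 };
    node.inner = &ptr;
    printf ("%u\n", hash_type (&node, 0));
    return 0;
  }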

Index: trunk/gcc/gimple.c
===================================================================
*** trunk.orig/gcc/gimple.c	2011-05-10 13:45:13.000000000 +0200
--- trunk/gcc/gimple.c	2011-05-10 17:02:07.000000000 +0200
*************** gimple_type_hash (const void *p)
*** 4344,4353 ****
    return gimple_type_hash_1 (p, GTC_MERGE);
  }
  
  static hashval_t
  gimple_canonical_type_hash (const void *p)
  {
!   return gimple_type_hash_1 (p, GTC_DIAG);
  }
  
  
--- 4344,4491 ----
    return gimple_type_hash_1 (p, GTC_MERGE);
  }
  
+ /* Return a hash value for gimple type TYPE combined with VAL.
+ 
+    The hash value returned is equal for types considered compatible
+    by gimple_canonical_types_compatible_p.  */
+ 
+ static hashval_t
+ iterative_hash_canonical_type (tree type, hashval_t val)
+ {
+   hashval_t v;
+   void **slot;
+   struct tree_int_map *mp, m;
+ 
+   m.base.from = type;
+   if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
+       && *slot)
+     return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
+ 
+   /* Combine a few common features of types so that types are grouped into
+      smaller sets; when searching for existing matching types to merge,
+      only existing types having the same features as the new type will be
+      checked.  */
+   v = iterative_hash_hashval_t (TREE_CODE (type), 0);
+   v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
+   v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
+ 
+   /* Do not hash the type's size as this will cause differences in
+      hash values for the complete vs. the incomplete type variant.  */
+ 
+   /* Incorporate common features of numerical types.  */
+   if (INTEGRAL_TYPE_P (type)
+       || SCALAR_FLOAT_TYPE_P (type)
+       || FIXED_POINT_TYPE_P (type))
+     {
+       v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
+       v = iterative_hash_hashval_t (TYPE_MODE (type), v);
+       v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
+     }
+ 
+   /* For pointer and reference types, fold in information about the type
+      pointed to but do not recurse to the pointed-to type.  */
+   if (POINTER_TYPE_P (type))
+     {
+       v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
+       v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
+     }
+ 
+   /* For integer types hash the type's min/max values and the string flag.  */
+   if (TREE_CODE (type) == INTEGER_TYPE)
+     {
+       /* OMP lowering can introduce error_mark_node in place of
+ 	 random local decls in types.  */
+       if (TYPE_MIN_VALUE (type) != error_mark_node)
+ 	v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
+       if (TYPE_MAX_VALUE (type) != error_mark_node)
+ 	v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
+       v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
+     }
+ 
+   /* For array types hash their domain and the string flag.  */
+   if (TREE_CODE (type) == ARRAY_TYPE
+       && TYPE_DOMAIN (type))
+     {
+       v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
+       v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
+     }
+ 
+   /* Recurse for aggregates with a single element type.  */
+   if (TREE_CODE (type) == ARRAY_TYPE
+       || TREE_CODE (type) == COMPLEX_TYPE
+       || TREE_CODE (type) == VECTOR_TYPE)
+     v = iterative_hash_canonical_type (TREE_TYPE (type), v);
+ 
+   /* Incorporate function return and argument types.  */
+   if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
+     {
+       unsigned na;
+       tree p;
+ 
+       /* For method types also incorporate their parent class.  */
+       if (TREE_CODE (type) == METHOD_TYPE)
+ 	v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
+ 
+       /* For result types allow mismatch in completeness.  */
+       if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
+ 	{
+ 	  v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
+ 	  v = iterative_hash_name
+ 		(TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
+ 	}
+       else
+ 	v = iterative_hash_canonical_type (TREE_TYPE (type), v);
+ 
+       for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
+ 	{
+ 	  /* For argument types allow mismatch in completeness.  */
+ 	  if (RECORD_OR_UNION_TYPE_P (TREE_VALUE (p)))
+ 	    {
+ 	      v = iterative_hash_hashval_t (TREE_CODE (TREE_VALUE (p)), v);
+ 	      v = iterative_hash_name
+ 		    (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_VALUE (p))), v);
+ 	    }
+ 	  else
+ 	    v = iterative_hash_canonical_type (TREE_VALUE (p), v);
+ 	  na++;
+ 	}
+ 
+       v = iterative_hash_hashval_t (na, v);
+     }
+ 
+   if (TREE_CODE (type) == RECORD_TYPE
+       || TREE_CODE (type) == UNION_TYPE
+       || TREE_CODE (type) == QUAL_UNION_TYPE)
+     {
+       unsigned nf;
+       tree f;
+ 
+       for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
+ 	{
+ 	  v = iterative_hash_canonical_type (TREE_TYPE (f), v);
+ 	  nf++;
+ 	}
+ 
+       v = iterative_hash_hashval_t (nf, v);
+     }
+ 
+   /* Cache the just computed hash value.  */
+   mp = ggc_alloc_cleared_tree_int_map ();
+   mp->base.from = type;
+   mp->to = v;
+   *slot = (void *) mp;
+ 
+   return iterative_hash_hashval_t (v, val);
+ }
+ 
  static hashval_t
  gimple_canonical_type_hash (const void *p)
  {
!   if (canonical_type_hash_cache == NULL)
!     canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
! 						 tree_int_map_eq, NULL);
! 
!   return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
  }
  
  

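One consequence of the pointer-following change, illustrated with a
hypothetical example rather than code from the patch: pointers to
distinct record types now contribute identical data to the hash, so
they land in the same bucket and are only told apart by the full
comparison in gimple_canonical_types_compatible_p.

  /* Under the new scheme both pointer types hash alike: the pointee
     is represented only by its tree code (RECORD_TYPE in both
     cases) plus TYPE_REF_CAN_ALIAS_ALL, not by its structure.  */
  struct A { int i; };
  struct B { double d; };
  struct A *pa;
  struct B *pb;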

