
Re: Optimize type streaming


On Fri, 11 Jul 2014, Jan Hubicka wrote:

> > > Hmm, walking everything first and then starting streaming sounds like a
> > > bad idea for locality.  Can't I just re-do the walk, since I know what
> > > the SCC is?  I will read the code more after lunch.
> > 
> > Might be possible with a 2nd SCC stack set up, yes.
> 
> I set up a hash_map to store the hash values anyway.  What about "sorting"
> using walk_tree with a simple callback that pushes the parameter onto a stack
> if it is in the hash map and removes it from the hash map?
> 
> That way I do not need to duplicate the tricky SCC walking, and the overhead
> probably won't be any worse than qsort's.  Seems like a good idea at least
> for a prototype; I will try it out.

Awww, but that walks edges that are not walked in the DFS walk ...
(basically you can end up walking the whole program IL if you are 
unlucky).
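
To make that concrete, here is a rough standalone model of the proposed
callback (plain C++, not GCC code; Node, walk_collect and friends are made-up
names standing in for trees and a walk_tree callback):

#include <unordered_map>
#include <unordered_set>
#include <vector>

struct Node { std::vector<Node *> operands; };

/* Model of the proposed walk_tree callback: push a node onto the output
   order if it is still in the hash map, and drop it from the map.  */
static void
walk_collect (Node *n, std::unordered_map<Node *, unsigned> &scc_hashes,
              std::vector<Node *> &order, std::unordered_set<Node *> &visited)
{
  if (!n || !visited.insert (n).second)
    return;
  auto it = scc_hashes.find (n);
  if (it != scc_hashes.end ())
    {
      order.push_back (n);      /* "push the parameter onto a stack"  */
      scc_hashes.erase (it);    /* "... and remove it from the hash map"  */
    }
  /* Nothing here stops at the SCC boundary: every reachable operand is
     visited, which is how the walk can end up touching the whole IL.  */
  for (Node *op : n->operands)
    walk_collect (op, scc_hashes, order, visited);
}

One could of course prune the recursion once the map goes empty, but in the
worst case the last SCC member is only found at the far end of the reachable
IL.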

Richard.

> honza
> > 
> > Richard.
> > 
> > > Honza
> > > > 
> > > > Richard.
> > > > 
> > > > > Honza
> > > > > > 
> > > > > > Thanks,
> > > > > > Richard.
> > > > > > 
> > > > > > > Honza
> > > > > > > 
> > > > > > > 	* lto-streamer-out.c (hash_tree): Add map argument;
> > > > > > > 	remove special purpose hack for pointer types.
> > > > > > > 	(visit): Update it.
> > > > > > > 	(hash_scc): Add iteration until equivalence classes stabilize.
> > > > > > > Index: lto-streamer-out.c
> > > > > > > ===================================================================
> > > > > > > --- lto-streamer-out.c	(revision 212426)
> > > > > > > +++ lto-streamer-out.c	(working copy)
> > > > > > > @@ -690,13 +712,19 @@ DFS_write_tree_body (struct output_block
> > > > > > >  /* Return a hash value for the tree T.  */
> > > > > > >  
> > > > > > >  static hashval_t
> > > > > > > -hash_tree (struct streamer_tree_cache_d *cache, tree t)
> > > > > > > +hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
> > > > > > >  {
> > > > > > >  #define visit(SIBLING) \
> > > > > > >    do { \
> > > > > > >      unsigned ix; \
> > > > > > > -    if (SIBLING && streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
> > > > > > > +    if (!SIBLING) \
> > > > > > > +      v = iterative_hash_hashval_t (0, v); \
> > > > > > > +    else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
> > > > > > >        v = iterative_hash_hashval_t (streamer_tree_cache_get_hash (cache, ix), v); \
> > > > > > > +    else if (map) \
> > > > > > > +      v = iterative_hash_hashval_t (*map->get (SIBLING), v); \
> > > > > > > +    else \
> > > > > > > +      v = iterative_hash_hashval_t (1, v); \
> > > > > > >    } while (0)
> > > > > > >  
> > > > > > >    /* Hash TS_BASE.  */
> > > > > > > @@ -896,23 +924,7 @@ hash_tree (struct streamer_tree_cache_d
> > > > > > >  
> > > > > > >    if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
> > > > > > >      {
> > > > > > > -      if (POINTER_TYPE_P (t))
> > > > > > > -	{
> > > > > > > -	  /* For pointers factor in the pointed-to type recursively as
> > > > > > > -	     we cannot recurse through only pointers.
> > > > > > > -	     ???  We can generalize this by keeping track of the
> > > > > > > -	     in-SCC edges for each tree (or arbitrarily the first
> > > > > > > -	     such edge) and hashing that in in a second stage
> > > > > > > -	     (instead of the quadratic mixing of the SCC we do now).  */
> > > > > > > -	  hashval_t x;
> > > > > > > -	  unsigned ix;
> > > > > > > -	  if (streamer_tree_cache_lookup (cache, TREE_TYPE (t), &ix))
> > > > > > > -	    x = streamer_tree_cache_get_hash (cache, ix);
> > > > > > > -	  else
> > > > > > > -	    x = hash_tree (cache, TREE_TYPE (t));
> > > > > > > -	  v = iterative_hash_hashval_t (x, v);
> > > > > > > -	}
> > > > > > > -      else if (code != IDENTIFIER_NODE)
> > > > > > > +      if (code != IDENTIFIER_NODE)
> > > > > > >  	visit (TREE_TYPE (t));
> > > > > > >      }
> > > > > > >  
> > > > > > > @@ -1122,28 +1156,78 @@ scc_entry_compare (const void *p1_, cons
> > > > > > >  static hashval_t
> > > > > > >  hash_scc (struct streamer_tree_cache_d *cache, unsigned first, unsigned size)
> > > > > > >  {
> > > > > > > +  unsigned int last_classes = 0, iterations = 0;
> > > > > > > +
> > > > > > >    /* Compute hash values for the SCC members.  */
> > > > > > >    for (unsigned i = 0; i < size; ++i)
> > > > > > > -    sccstack[first+i].hash = hash_tree (cache, sccstack[first+i].t);
> > > > > > > +    sccstack[first+i].hash = hash_tree (cache, NULL, sccstack[first+i].t);
> > > > > > >  
> > > > > > >    if (size == 1)
> > > > > > >      return sccstack[first].hash;
> > > > > > >  
> > > > > > > -  /* Sort the SCC of type, hash pairs so that when we mix in
> > > > > > > -     all members of the SCC the hash value becomes independent on
> > > > > > > -     the order we visited the SCC.  Produce hash of the whole SCC as
> > > > > > > -     combination of hashes of individual elements.  Then combine that hash into
> > > > > > > -     hash of each element, so othewise identically looking elements from two
> > > > > > > -     different SCCs are distinguished.  */
> > > > > > > -  qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
> > > > > > > -
> > > > > > > -  hashval_t scc_hash = sccstack[first].hash;
> > > > > > > -  for (unsigned i = 1; i < size; ++i)
> > > > > > > -    scc_hash = iterative_hash_hashval_t (scc_hash,
> > > > > > > -					 sccstack[first+i].hash);
> > > > > > > -  for (unsigned i = 0; i < size; ++i)
> > > > > > > -    sccstack[first+i].hash = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
> > > > > > > -  return scc_hash;
> > > > > > > +  /* We may want to iterate multiple times across SCC propagating across the internal
> > > > > > > +     edges in order to get different hash values for individual trees.  */
> > > > > > > +  do
> > > > > > > +    {
> > > > > > > +      /* Sort the SCC of type, hash pairs so that when we mix in
> > > > > > > +	 all members of the SCC the hash value becomes independent on
> > > > > > > +	 the order we visited the SCC.  Produce hash of the whole SCC as
> > > > > > > +	 combination of hashes of individual elements.  Then combine that hash into
> > > > > > > +	 hash of each element, so othewise identically looking elements from two
> > > > > > > +	 different SCCs are distinguished.  */
> > > > > > > +      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
> > > > > > > +
> > > > > > > +      unsigned int classes = 1;
> > > > > > > +      hashval_t scc_hash = sccstack[first].hash;
> > > > > > > +      for (unsigned i = 1; i < size; ++i)
> > > > > > > +        {
> > > > > > > +          if (sccstack[first+i-1].hash != sccstack[first+i].hash)
> > > > > > > +	    classes++;
> > > > > > > +          scc_hash = iterative_hash_hashval_t (scc_hash,
> > > > > > > +					       sccstack[first+i].hash);
> > > > > > > +        }
> > > > > > > +
> > > > > > > +      /* If we have no duplicated entries, or we no longer get refinements,
> > > > > > > +	 we are done.  */
> > > > > > > +      if (classes <= last_classes || classes == size || iterations > 16)
> > > > > > > +	{
> > > > > > > +	  for (unsigned i = 1; i < size - 1; ++i)
> > > > > > > +	    {
> > > > > > > +	      hashval_t hash = sccstack[first+i].hash;
> > > > > > > +
> > > > > > > +	      if (hash == sccstack[first+i+1].hash)
> > > > > > > +		for (;i < size && sccstack[first+i].hash == hash; i++)
> > > > > > > +		  debug_tree (sccstack[first+i].t);
> > > > > > > +	      else
> > > > > > > +		i++;
> > > > > > > +	    }
> > > > > > > +	  for (unsigned i = 0; i < size; ++i)
> > > > > > > +	    sccstack[first+i].hash = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
> > > > > > > +	  return scc_hash;
> > > > > > > +	}
> > > > > > > +
> > > > > > > +      /* Otherwise try to propagate hash values across the edges.  */
> > > > > > > +      last_classes = classes;
> > > > > > > +      iterations++;
> > > > > > > +      {
> > > > > > > +	hash_map <tree, hashval_t> map(size*2);
> > > > > > > +	for (unsigned i = 0; i < size; ++i)
> > > > > > > +	  map.put (sccstack[first+i].t, sccstack[first+i].hash);
> > > > > > > +
> > > > > > > +	for (unsigned i = 0; i < size - 1;)
> > > > > > > +	  {
> > > > > > > +	    hashval_t hash = sccstack[first+i].hash;
> > > > > > > +
> > > > > > > +	    /* Rehash only the entries that have duplicate hash values.  */
> > > > > > > +	    if (hash == sccstack[first+i+1].hash)
> > > > > > > +	      for (;i < size && sccstack[first+i].hash == hash; i++)
> > > > > > > +	        sccstack[first+i].hash = hash_tree (cache, &map, sccstack[first+i].t);
> > > > > > > +	    else
> > > > > > > +	      i++;
> > > > > > > +	  }
> > > > > > > +      }
> > > > > > > +    }
> > > > > > > +  while (true);
> > > > > > >  }
> > > > > > >  
> > > > > > >  /* DFS walk EXPR and stream SCCs of tree bodies if they are not
> > > > > > > 
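
For reference, a minimal standalone model of the refinement loop in the
hash_scc hunk above (plain C++, not GCC code; Node, local_hash and mix are
made-up stand-ins for the tree members and iterative_hash_hashval_t): start
from per-member hashes, count how many distinct values the SCC has, and keep
re-hashing with the successors' current hashes mixed in until the number of
classes stops growing.

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

/* Stand-in for an SCC member: a hash of its local fields plus the indices
   of its in-SCC successors.  */
struct Node { uint32_t local_hash; std::vector<int> succ; };

/* Stand-in for iterative_hash_hashval_t.  */
static uint32_t
mix (uint32_t v, uint32_t h)
{
  return (v ^ h) * 0x9e3779b1u + (v << 6);
}

/* Return per-member hashes, refined until the equivalence classes stabilize
   (the patch additionally re-hashes only the colliding entries and folds the
   whole-SCC hash back into each member).  */
static std::vector<uint32_t>
hash_scc_model (const std::vector<Node> &scc)
{
  std::vector<uint32_t> hash (scc.size ());
  for (size_t i = 0; i < scc.size (); ++i)
    hash[i] = scc[i].local_hash;        /* First pass: local fields only.  */

  unsigned last_classes = 0;
  for (unsigned iteration = 0; iteration < 16; ++iteration)
    {
      /* Count the equivalence classes, i.e. distinct hash values.  */
      std::vector<uint32_t> sorted = hash;
      std::sort (sorted.begin (), sorted.end ());
      unsigned classes = 1;
      for (size_t i = 1; i < sorted.size (); ++i)
        if (sorted[i] != sorted[i - 1])
          classes++;

      /* Done if every member is distinguished or no refinement happened.  */
      if (classes == scc.size () || classes <= last_classes)
        break;
      last_classes = classes;

      /* Re-hash, mixing in the current hash of every in-SCC successor;
         this is the role played by the hash_map argument to hash_tree.  */
      std::vector<uint32_t> next (scc.size ());
      for (size_t i = 0; i < scc.size (); ++i)
        {
          uint32_t v = scc[i].local_hash;
          for (int s : scc[i].succ)
            v = mix (v, hash[s]);
          next[i] = v;
        }
      hash = next;
    }
  return hash;
}

int
main ()
{
  /* Three nodes on a cycle; the first two have identical local hashes,
     so only the propagation step can tell them apart.  */
  std::vector<Node> scc = { { 7, { 1 } }, { 7, { 2 } }, { 9, { 0 } } };
  for (uint32_t h : hash_scc_model (scc))
    printf ("%08x\n", (unsigned) h);
}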
-- 
Richard Biener <rguenther@suse.de>
SUSE / SUSE Labs
SUSE LINUX Products GmbH - Nuernberg - AG Nuernberg - HRB 16746
GF: Jeff Hawn, Jennifer Guild, Felix Imendörffer

