This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

[C++ PATCH] lookup iterators come alive


This patch enables the 2D nature of lookup results.

Previously whenever we had to combine overload sets from multiple places, we'd prepend one of them, one function at a time, to the other.

This patch changes that. We can prepend an entire overload set as a single entry to another set. This nesting is only ever 1 deep, and only ever applies to lookup results -- the symbol binding remains a 1 dimensional overload.

Other than the using-declaration case I discussed yesterday, the only place we might meet the same overload as before is in ADL lookup, where we re-search the same namespaces as we searched in the non-ADL lookup. The most common case is we'll see the exact same overload again. We detect this in lookup_maybe_add, by using LOOKUP_SEEN_P on the overload nodes themselves. A rarer case is that between the first lookup and the later one, we added new functions to the symbol binding. We detect this too, and replace the older lookup with the newer one.

As ADL is recursive, we have to unmark and mark any partial lookup result when we discover a recursive lookup.

Hopefully I already captured all the places an overload iterator should be lkp_iterator rather than ovl_iterator. The latter will assert it's not looking at a nested overload now.

nathan
--
Nathan Sidwell
2017-05-26  Nathan Sidwell  <nathan@acm.org>

	* cp-tree.h (OVL_CHAIN): Check looking at OVERLOAD.
	(ovl_iterator): Add allow_inner field.  Adjust ctor.  Make
	unduplicatable.
	(ovl_iterator::maybe_push, ovl_iterator::pop): New.
	(lkp_iterator): Add outer field.  Adjust ctor.
	(lkp_iterator::operator++): New.
	(lookup_mark, lookup_maybe_add): Declare.
	* name-lookup.c (name_lookup): Delete fn_set member.
	(name_lookup::preserve_state, name_lookup::restore_state): Unmark
	and mark lookup.
	(name_lookup::add_value): Use lookup_add directly.
	(name_lookup::add_fns): Use lookup_maybe_add.
	(name_lookup::search_adl): Mark and unmark fns.
	(pushdecl): Adjust.
	* pt.c (check_explicit_specialization): Use lookup_add directly.
	* ptree.c (cxx_print_xnode): Show complete overload structure.
	* tree.c (lookup_mark, lookup_maybe_add): New.

Index: cp/cp-tree.h
===================================================================
--- cp/cp-tree.h	(revision 248506)
+++ cp/cp-tree.h	(working copy)
@@ -657,7 +657,8 @@ typedef struct ptrmem_cst * ptrmem_cst_t
    Other users should use iterators and convenience functions.  */
 #define OVL_FUNCTION(NODE) \
   (((struct tree_overload*)OVERLOAD_CHECK (NODE))->function)
-#define OVL_CHAIN(NODE)      TREE_CHAIN (NODE)
+#define OVL_CHAIN(NODE) \
+  (((struct tree_overload*)OVERLOAD_CHECK (NODE))->common.chain)
 
 /* If set, this was imported in a using declaration.   */
 #define OVL_USING_P(NODE)	TREE_LANG_FLAG_1 (OVERLOAD_CHECK (NODE))
@@ -684,6 +685,9 @@ typedef struct ptrmem_cst * ptrmem_cst_t
 #define OVL_SINGLE_P(NODE) \
   (TREE_CODE (NODE) != OVERLOAD || !OVL_CHAIN (NODE))
 
+/* OVL_HIDDEN_P nodes come first, then OVL_USING_P nodes, then regular
+   fns.  */
+
 struct GTY(()) tree_overload {
   struct tree_common common;
   tree function;
@@ -694,19 +698,19 @@ struct GTY(()) tree_overload {
 class ovl_iterator 
 {
   tree ovl;
+  const bool allow_inner; /* Only used when checking.  */
 
  public:
-  ovl_iterator (tree o)
-  :ovl (o)
-    {}
-
-  ovl_iterator &operator= (const ovl_iterator &from)
+  explicit ovl_iterator (tree o, bool allow = false)
+    : ovl (o), allow_inner (allow)
   {
-    ovl = from.ovl;
-
-    return *this;
   }
 
+ private:
+  /* Do not duplicate.  */
+  ovl_iterator &operator= (const ovl_iterator &);
+  ovl_iterator (const ovl_iterator &);
+
  public:
   operator bool () const
   {
@@ -722,7 +726,7 @@ class ovl_iterator
     tree fn = TREE_CODE (ovl) != OVERLOAD ? ovl : OVL_FUNCTION (ovl);
 
     /* Check this is not an unexpected 2-dimensional overload.  */
-    gcc_checking_assert (TREE_CODE (fn) != OVERLOAD);
+    gcc_checking_assert (allow_inner || TREE_CODE (fn) != OVERLOAD);
 
     return fn;
   }
@@ -748,6 +752,27 @@ class ovl_iterator
     return reveal_node (head, ovl);
   }
 
+ protected:
+  /* If we have a nested overload, point at the inner overload and
+     return the next link on the outer one.  */
+  tree maybe_push ()
+  {
+    tree r = NULL_TREE;
+
+    if (ovl && TREE_CODE (ovl) == OVERLOAD && OVL_NESTED_P (ovl))
+      {
+	r = OVL_CHAIN (ovl);
+	ovl = OVL_FUNCTION (ovl);
+      }
+    return r;
+  }
+  /* Restore an outer nested overload.  */
+  void pop (tree outer)
+  {
+    gcc_checking_assert (!ovl);
+    ovl = outer;
+  }
+
  private:
   /* We make these static functions to avoid the address of the
      iterator escaping the local context.  */
@@ -758,18 +783,34 @@ class ovl_iterator
 /* Iterator over a (potentially) 2 dimensional overload, which is
    produced by name lookup.  */
 
-/* Note this is currently a placeholder, as the name-lookup changes
-   are not yet committed.  */
-
 class lkp_iterator : public ovl_iterator
 {
   typedef ovl_iterator parent;
 
+  tree outer;
+
  public:
-  lkp_iterator (tree o)
-    : parent (o)
+  explicit lkp_iterator (tree o)
+    : parent (o, true), outer (maybe_push ())
   {
   }
+
+ public:
+  lkp_iterator &operator++ ()
+  {
+    bool repush = !outer;
+
+    if (!parent::operator++ () && !repush)
+      {
+	pop (outer);
+	repush = true;
+      }
+
+    if (repush)
+      outer = maybe_push ();
+
+    return *this;
+  }
 };
 
 struct GTY(()) tree_template_decl {
@@ -6865,7 +6906,9 @@ extern tree ovl_make				(tree fn,
 extern tree ovl_insert				(tree fn, tree maybe_ovl,
 						 bool using_p = false);
 extern tree ovl_skip_hidden			(tree) ATTRIBUTE_PURE;
+extern void lookup_mark				(tree lookup, bool val);
 extern tree lookup_add				(tree fns, tree lookup);
+extern tree lookup_maybe_add			(tree fns, tree lookup);
 extern void lookup_keep				(tree lookup, bool keep);
 extern int is_overloaded_fn			(tree) ATTRIBUTE_PURE;
 extern bool really_overloaded_fn		(tree) ATTRIBUTE_PURE;
Index: cp/name-lookup.c
===================================================================
--- cp/name-lookup.c	(revision 248506)
+++ cp/name-lookup.c	(working copy)
@@ -161,7 +161,6 @@ public:
   int flags;	/* Lookup flags.  */
   vec<tree, va_heap, vl_embed> *scopes;
   name_lookup *previous; /* Previously active lookup.  */
-  hash_set<tree> *fn_set;
 
 protected:
   /* Marked scope stack for outermost name lookup.  */
@@ -172,13 +171,12 @@ protected:
 public:
   name_lookup (tree n, int f = 0)
   : name (n), value (NULL_TREE), type (NULL_TREE), flags (f),
-    scopes (NULL), previous (NULL), fn_set (NULL)
+    scopes (NULL), previous (NULL)
   {
     preserve_state ();
   }
   ~name_lookup ()
   {
-    gcc_checking_assert (!fn_set);
     restore_state ();
   }
 
@@ -299,6 +297,9 @@ name_lookup::preserve_state ()
 	      previous->scopes->quick_push (decl);
 	    }
 	}
+
+      /* Unmark the outer partial lookup.  */
+      lookup_mark (previous->value, false);
     }
   else
     scopes = shared_scopes;
@@ -322,6 +323,8 @@ name_lookup::restore_state ()
   active = previous;
   if (previous)
     {
+      free (scopes);
+
       unsigned length = vec_safe_length (previous->scopes);
       for (unsigned ix = 0; ix != length; ix++)
 	{
@@ -345,7 +348,8 @@ name_lookup::restore_state ()
 	  LOOKUP_SEEN_P (decl) = true;
 	}
 
-      free (scopes);
+      /* Remark the outer partial lookup.  */
+      lookup_mark (previous->value, true);
     }
   else
     shared_scopes = scopes;
@@ -403,10 +407,7 @@ name_lookup::add_value (tree new_val)
 	    && same_type_p (TREE_TYPE (value), TREE_TYPE (new_val))))
     ;
   else if (OVL_P (value) && OVL_P (new_val))
-    {
-      for (ovl_iterator iter (new_val); iter; ++iter)
-	value = lookup_add (*iter, value);
-    }
+    value = lookup_add (new_val, value);
   else
     value = ambiguous (new_val, value);
 }
@@ -684,9 +685,7 @@ name_lookup::add_fns (tree fns)
     return;
 
   /* Only add those that aren't already there.  */
-  for (ovl_iterator iter (fns); iter; ++iter)
-    if (!fn_set->add (*iter))
-      value = lookup_add (*iter, value);
+  value = lookup_maybe_add (fns, value);
 }
 
 /* Add functions of a namespace to the lookup structure.  */
@@ -987,13 +986,9 @@ name_lookup::adl_template_arg (tree arg)
 tree
 name_lookup::search_adl (tree fns, vec<tree, va_gc> *args)
 {
+  lookup_mark (fns, true);
   value = fns;
 
-  /* Add the current overload set into the hash table.  */
-  fn_set = new hash_set<tree>;
-  for (lkp_iterator iter (fns); iter; ++iter)
-    fn_set->add (*iter);
-
   unsigned ix;
   tree arg;
 
@@ -1005,10 +1000,8 @@ name_lookup::search_adl (tree fns, vec<t
     else
       adl_expr (arg);
 
-  delete fn_set;
-  fn_set = NULL;
-
   fns = value;
+  lookup_mark (fns, false);
 
   return fns;
 }
@@ -2101,7 +2094,6 @@ check_local_shadow (tree decl)
   inform (DECL_SOURCE_LOCATION (shadowed), "shadowed declaration is here");
 }
 
-
 /* DECL is being pushed inside function CTX.  Set its context, if
    needed.  */
 
@@ -2394,14 +2386,14 @@ do_pushdecl (tree decl, bool is_friend)
 }
 
 /* Record a decl-node X as belonging to the current lexical scope.
-   It's a friend if IS_FRIEND is true.  */
+   It's a friend if IS_FRIEND is true -- which affects exactly where
+   we push it.  */
 
 tree
 pushdecl (tree x, bool is_friend)
 {
-  tree ret;
   bool subtime = timevar_cond_start (TV_NAME_LOOKUP);
-  ret = do_pushdecl (x, is_friend);
+  tree ret = do_pushdecl (x, is_friend);
   timevar_cond_stop (TV_NAME_LOOKUP, subtime);
   return ret;
 }
@@ -4199,7 +4191,7 @@ set_namespace_binding (tree scope, tree
     supplement_binding (binding, val);
 }
 
-/* Set value binding og NAME in the global namespace to VAL.  Does not
+/* Set value binding of NAME in the global namespace to VAL.  Does not
    add it to the list of things in the namespace.  */
 
 void
Index: cp/pt.c
===================================================================
--- cp/pt.c	(revision 248506)
+++ cp/pt.c	(working copy)
@@ -2931,8 +2931,7 @@ check_explicit_specialization (tree decl
 
 		    /* Glue all these conversion functions together
 		       with those we already have.  */
-		    for (ovl_iterator iter (ovl); iter; ++iter)
-		      fns = lookup_add (*iter, fns);
+		    fns = lookup_add (ovl, fns);
 		  }
 	    }
 
Index: cp/ptree.c
===================================================================
--- cp/ptree.c	(revision 248506)
+++ cp/ptree.c	(working copy)
@@ -237,8 +237,10 @@ cxx_print_xnode (FILE *file, tree node,
       break;
     case OVERLOAD:
       print_node (file, "name", OVL_NAME (node), indent+4);
-      for (lkp_iterator iter (node); iter; ++iter)
-	print_node (file, "function", *iter, indent+4);
+      for (ovl_iterator iter (node, true); iter; ++iter)
+	print_node (file,
+		    TREE_CODE (*iter) == OVERLOAD ? "inner" : "function",
+		    *iter, indent+4);
       break;
     case TEMPLATE_PARM_INDEX:
       print_node (file, "decl", TEMPLATE_PARM_DECL (node), indent+4);
Index: cp/tree.c
===================================================================
--- cp/tree.c	(revision 248506)
+++ cp/tree.c	(working copy)
@@ -2150,8 +2150,8 @@ ovl_copy (tree ovl)
 }
 
 /* Add FN to the (potentially NULL) overload set OVL.  USING_P is
-   true, if FN is via a using declaration.  Overloads are ordered as
-   using, regular.  */
+   true if FN is via a using declaration.  We also pay attention to
+   DECL_HIDDEN.  Overloads are ordered as hidden, using, regular.  */
 
 tree
 ovl_insert (tree fn, tree maybe_ovl, bool using_p)
@@ -2287,6 +2287,29 @@ ovl_iterator::remove_node (tree overload
   return overload;
 }
 
+/* Mark or unmark a lookup set.  */
+
+void
+lookup_mark (tree ovl, bool val)
+{
+  /* For every node that is a lookup, mark the thing it points to.  */
+  for (; ovl && TREE_CODE (ovl) == OVERLOAD && OVL_LOOKUP_P (ovl);
+       ovl = OVL_CHAIN (ovl))
+    {
+      tree targ = OVL_FUNCTION (ovl);
+      gcc_checking_assert (LOOKUP_SEEN_P (targ) != val);
+      LOOKUP_SEEN_P (targ) = val;
+    }
+
+  if (ovl && (TREE_CODE (ovl) == OVERLOAD ||
+	      TREE_CODE (ovl) == FUNCTION_DECL))
+    {
+      /* Mark the overload itself.  */
+      gcc_checking_assert (LOOKUP_SEEN_P (ovl) != val);
+      LOOKUP_SEEN_P (ovl) = val;
+    }
+}
+
 /* Add a set of new FNS into a lookup.  */
 
 tree
@@ -2303,6 +2326,75 @@ lookup_add (tree fns, tree lookup)
   return lookup;
 }
 
+/* FNS is a new overload set, add it to LOOKUP, if it is not already
+   present there.  */
+
+tree
+lookup_maybe_add (tree fns, tree lookup)
+{
+  if (LOOKUP_SEEN_P (fns))
+    return lookup;
+
+  if (lookup && TREE_CODE (fns) == OVERLOAD)
+    {
+      /* Determine if we already have some part of this overload in
+	 the overload set.  If so fix things up so we only have the
+	 overload set once.  */
+      tree marked = NULL_TREE;
+
+      for (tree probe = fns; probe; probe = OVL_CHAIN (probe))
+	if (LOOKUP_SEEN_P (probe))
+	  {
+	    marked = probe;
+	    break;
+	  }
+	else if (TREE_CODE (probe) != OVERLOAD)
+	  break;
+
+      if (marked)
+	{
+	  /* The tail of this overload is already in the lookup
+	     set.  Stitch out the tail case, which might involve
+	     copying.  */
+	  bool rewrite = false;
+
+	  LOOKUP_SEEN_P (marked) = false;
+	  for (tree *prev = &lookup, probe = *prev;
+	       ; prev = &OVL_CHAIN (probe), probe = *prev)
+	    {
+	      if (probe == marked)
+		{
+		  *prev = NULL_TREE;
+		  break;
+		}
+	      gcc_checking_assert (OVL_LOOKUP_P (probe));
+	      if (marked == OVL_FUNCTION (probe))
+		{
+		  *prev = OVL_CHAIN (probe);
+		  break;
+		}
+
+	      /* If we're in a used part of the lookup set, copy the
+		 node, so as to not disturb stored uses.  */
+	      gcc_checking_assert (!rewrite || OVL_USED_P (probe));
+	      if (OVL_USED_P (probe))
+		{
+		  rewrite = true;
+		  probe = ovl_copy (probe);
+		  OVL_LOOKUP_P (probe) = true;
+		  *prev = probe;
+		}
+	    }
+	}
+    }
+
+  /* Finally mark the new overload and prepend it to the current
+     lookup.  */
+  LOOKUP_SEEN_P (fns) = true;
+
+  return lookup_add (fns, lookup);
+}
+
 /* If KEEP is true, preserve the contents of a lookup so that it is
    available for a later instantiation.  Otherwise release the LOOKUP
    nodes for reuse.  */

Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]