libstdc++
hashtable_policy.h
1 // Internal policy header for unordered_set and unordered_map -*- C++ -*-
2 
3 // Copyright (C) 2010-2021 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15 
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19 
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24 
25 /** @file bits/hashtable_policy.h
26  * This is an internal header file, included by other library headers.
27  * Do not attempt to use it directly.
28  * @headername{unordered_map,unordered_set}
29  */
30 
31 #ifndef _HASHTABLE_POLICY_H
32 #define _HASHTABLE_POLICY_H 1
33 
34 #include <tuple> // for std::tuple, std::forward_as_tuple
35 #include <bits/stl_algobase.h> // for std::min, std::is_permutation.
36 #include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
37 
38 namespace std _GLIBCXX_VISIBILITY(default)
39 {
40 _GLIBCXX_BEGIN_NAMESPACE_VERSION
41 
42  template<typename _Key, typename _Value, typename _Alloc,
43  typename _ExtractKey, typename _Equal,
44  typename _Hash, typename _RangeHash, typename _Unused,
45  typename _RehashPolicy, typename _Traits>
46  class _Hashtable;
47 
48 namespace __detail
49 {
50  /**
51  * @defgroup hashtable-detail Base and Implementation Classes
52  * @ingroup unordered_associative_containers
53  * @{
54  */
55  template<typename _Key, typename _Value, typename _ExtractKey,
56  typename _Equal, typename _Hash, typename _RangeHash,
57  typename _Unused, typename _Traits>
58  struct _Hashtable_base;
59 
60  // Helper function: return distance(first, last) for forward
61  // iterators, or 0/1 for input iterators.
62  template<class _Iterator>
63     inline typename std::iterator_traits<_Iterator>::difference_type
64     __distance_fw(_Iterator __first, _Iterator __last,
65 		  std::input_iterator_tag)
66  { return __first != __last ? 1 : 0; }
67 
68  template<class _Iterator>
69     inline typename std::iterator_traits<_Iterator>::difference_type
70     __distance_fw(_Iterator __first, _Iterator __last,
71 		  std::forward_iterator_tag)
72  { return std::distance(__first, __last); }
73 
74  template<class _Iterator>
75     inline typename std::iterator_traits<_Iterator>::difference_type
76     __distance_fw(_Iterator __first, _Iterator __last)
77  { return __distance_fw(__first, __last,
78  std::__iterator_category(__first)); }
79 
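  /* Example (added for this annotated listing, not part of the upstream
     header): a standalone sketch of the same tag-dispatch idea, showing why
     __distance_fw returns an exact count for forward iterators but only an
     "empty or not" answer (0/1) for single-pass input iterators.

       #include <forward_list>
       #include <iterator>
       #include <sstream>

       template<class It>
       std::ptrdiff_t distance_fw(It first, It last, std::input_iterator_tag)
       { return first != last ? 1 : 0; }

       template<class It>
       std::ptrdiff_t distance_fw(It first, It last, std::forward_iterator_tag)
       { return std::distance(first, last); }

       template<class It>
       std::ptrdiff_t distance_fw(It first, It last)
       {
         return distance_fw(first, last,
             typename std::iterator_traits<It>::iterator_category{});
       }

       int main()
       {
         std::forward_list<int> fl{1, 2, 3};
         auto n1 = distance_fw(fl.begin(), fl.end());   // 3: multi-pass range

         std::istringstream in("1 2 3");
         std::istream_iterator<int> it(in), end;
         auto n2 = distance_fw(it, end);                // 1: single-pass range
         return (n1 == 3 && n2 == 1) ? 0 : 1;
       }
  */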
80  struct _Identity
81  {
82  template<typename _Tp>
83  _Tp&&
84  operator()(_Tp&& __x) const noexcept
85  { return std::forward<_Tp>(__x); }
86  };
87 
88  struct _Select1st
89  {
90  template<typename _Tp>
91  auto
92  operator()(_Tp&& __x) const noexcept
93  -> decltype(std::get<0>(std::forward<_Tp>(__x)))
94  { return std::get<0>(std::forward<_Tp>(__x)); }
95  };
96 
97  struct _Select2nd
98  {
99  template<typename _Tp>
100  auto
101  operator()(_Tp&& __x) const noexcept
102  -> decltype(std::get<1>(std::forward<_Tp>(__x)))
103  { return std::get<1>(std::forward<_Tp>(__x)); }
104  };
105 
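  /* Example (added for this listing, not part of the upstream header): what
     the extractors above do. _Identity is used by set-like containers, where
     the element is the key itself; _Select1st is used by map-like containers,
     where the key is the first member of the stored pair. A user-level sketch
     with hypothetical names identity_extract/select1st_extract:

       #include <cassert>
       #include <tuple>
       #include <utility>

       struct identity_extract
       {
         template<typename T>
         T&& operator()(T&& x) const noexcept
         { return std::forward<T>(x); }
       };

       struct select1st_extract
       {
         template<typename T>
         auto operator()(T&& x) const noexcept
           -> decltype(std::get<0>(std::forward<T>(x)))
         { return std::get<0>(std::forward<T>(x)); }
       };

       int main()
       {
         std::pair<const int, const char*> entry{42, "answer"};
         assert(identity_extract{}(7) == 7);        // set: value is the key
         assert(select1st_extract{}(entry) == 42);  // map: key is pair.first
       }
  */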
106  template<typename _ExKey>
107  struct _NodeBuilder;
108 
109  template<>
110  struct _NodeBuilder<_Select1st>
111  {
112  template<typename _Kt, typename _Arg, typename _NodeGenerator>
113  static auto
114  _S_build(_Kt&& __k, _Arg&& __arg, const _NodeGenerator& __node_gen)
115  -> decltype(__node_gen(std::piecewise_construct,
116  std::forward_as_tuple(std::forward<_Kt>(__k)),
117  std::forward_as_tuple(_Select2nd{}(
118  std::forward<_Arg>(__arg)))))
119  {
120  return __node_gen(std::piecewise_construct,
121  std::forward_as_tuple(std::forward<_Kt>(__k)),
122  std::forward_as_tuple(_Select2nd{}(std::forward<_Arg>(__arg))));
123  }
124  };
125 
126  template<>
127  struct _NodeBuilder<_Identity>
128  {
129  template<typename _Kt, typename _Arg, typename _NodeGenerator>
130  static auto
131  _S_build(_Kt&& __k, _Arg&&, const _NodeGenerator& __node_gen)
132  -> decltype(__node_gen(std::forward<_Kt>(__k)))
133  { return __node_gen(std::forward<_Kt>(__k)); }
134  };
135 
136  template<typename _NodeAlloc>
137  struct _Hashtable_alloc;
138 
139  // Functor recycling a pool of nodes and using allocation once the pool is
140  // empty.
141  template<typename _NodeAlloc>
142  struct _ReuseOrAllocNode
143  {
144  private:
145  using __node_alloc_type = _NodeAlloc;
146  using __hashtable_alloc = _Hashtable_alloc<__node_alloc_type>;
147  using __node_alloc_traits =
148  typename __hashtable_alloc::__node_alloc_traits;
149  using __node_type = typename __hashtable_alloc::__node_type;
150 
151  public:
152  _ReuseOrAllocNode(__node_type* __nodes, __hashtable_alloc& __h)
153  : _M_nodes(__nodes), _M_h(__h) { }
154  _ReuseOrAllocNode(const _ReuseOrAllocNode&) = delete;
155 
156  ~_ReuseOrAllocNode()
157  { _M_h._M_deallocate_nodes(_M_nodes); }
158 
159  template<typename... _Args>
160  __node_type*
161  operator()(_Args&&... __args) const
162  {
163  if (_M_nodes)
164  {
165  __node_type* __node = _M_nodes;
166  _M_nodes = _M_nodes->_M_next();
167  __node->_M_nxt = nullptr;
168  auto& __a = _M_h._M_node_allocator();
169  __node_alloc_traits::destroy(__a, __node->_M_valptr());
170  __try
171  {
172  __node_alloc_traits::construct(__a, __node->_M_valptr(),
173  std::forward<_Args>(__args)...);
174  }
175  __catch(...)
176  {
177  _M_h._M_deallocate_node_ptr(__node);
178  __throw_exception_again;
179  }
180  return __node;
181  }
182  return _M_h._M_allocate_node(std::forward<_Args>(__args)...);
183  }
184 
185  private:
186  mutable __node_type* _M_nodes;
187  __hashtable_alloc& _M_h;
188  };
189 
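  /* Example (added for this listing, not part of the upstream header): the
     recycle-then-allocate idea behind _ReuseOrAllocNode, shown with a
     simplified free list of heap strings instead of hashtable nodes and
     allocators (the names reuse_or_alloc and pool are hypothetical):

       #include <cassert>
       #include <memory>
       #include <string>
       #include <vector>

       class reuse_or_alloc
       {
         std::vector<std::unique_ptr<std::string>> _pool;

       public:
         explicit reuse_or_alloc(std::vector<std::unique_ptr<std::string>> pool)
         : _pool(std::move(pool)) { }

         std::unique_ptr<std::string> operator()(const char* value)
         {
           if (!_pool.empty())                    // reuse an existing object
             {
               auto node = std::move(_pool.back());
               _pool.pop_back();
               *node = value;                     // re-initialize in place
               return node;
             }
           return std::make_unique<std::string>(value); // pool empty: allocate
         }
       };

       int main()
       {
         std::vector<std::unique_ptr<std::string>> old;
         old.push_back(std::make_unique<std::string>("stale"));
         reuse_or_alloc gen(std::move(old));
         auto a = gen("first");    // recycled from the pool
         auto b = gen("second");   // freshly allocated
         assert(*a == "first" && *b == "second");
       }
  */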
190  // Functor similar to the previous one but without any pool of nodes to
191  // recycle.
192  template<typename _NodeAlloc>
193  struct _AllocNode
194  {
195  private:
196  using __hashtable_alloc = _Hashtable_alloc<_NodeAlloc>;
197  using __node_type = typename __hashtable_alloc::__node_type;
198 
199  public:
200  _AllocNode(__hashtable_alloc& __h)
201  : _M_h(__h) { }
202 
203  template<typename... _Args>
204  __node_type*
205  operator()(_Args&&... __args) const
206  { return _M_h._M_allocate_node(std::forward<_Args>(__args)...); }
207 
208  private:
209  __hashtable_alloc& _M_h;
210  };
211 
212  // Auxiliary types used for all instantiations of _Hashtable nodes
213  // and iterators.
214 
215  /**
216  * struct _Hashtable_traits
217  *
218  * Important traits for hash tables.
219  *
220  * @tparam _Cache_hash_code Boolean value. True if the value of
221  * the hash function is stored along with the value. This is a
222  * time-space tradeoff. Storing it may improve lookup speed by
223  * reducing the number of times we need to call the _Hash or _Equal
224  * functors.
225  *
226  * @tparam _Constant_iterators Boolean value. True if iterator and
227  * const_iterator are both constant iterator types. This is true
228  * for unordered_set and unordered_multiset, false for
229  * unordered_map and unordered_multimap.
230  *
231  * @tparam _Unique_keys Boolean value. True if the return value
232  * of _Hashtable::count(k) is always at most one, false if it may
233  * be an arbitrary number. This is true for unordered_set and
234  * unordered_map, false for unordered_multiset and
235  * unordered_multimap.
236  */
237  template<bool _Cache_hash_code, bool _Constant_iterators, bool _Unique_keys>
238    struct _Hashtable_traits
239    {
240  using __hash_cached = __bool_constant<_Cache_hash_code>;
241  using __constant_iterators = __bool_constant<_Constant_iterators>;
242  using __unique_keys = __bool_constant<_Unique_keys>;
243  };
244 
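  /* Example (added for this listing, not part of the upstream header): the
     _Constant_iterators trait is what makes unordered_set elements read-only
     through iterators, while unordered_map still allows modifying the mapped
     part. Observable at the user level:

       #include <type_traits>
       #include <unordered_map>
       #include <unordered_set>
       #include <utility>

       int main()
       {
         // Dereferencing an unordered_set iterator yields a const reference.
         static_assert(
           std::is_same<decltype(*std::declval<std::unordered_set<int>&>().begin()),
                        const int&>::value,
           "set elements are immutable through iterators");

         // The mapped value of an unordered_map stays mutable.
         std::unordered_map<int, int> m{{1, 0}};
         m.begin()->second = 10;
         return m.begin()->second == 10 ? 0 : 1;
       }
  */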
245  /**
246  * struct _Hash_node_base
247  *
248  * Nodes, used to wrap elements stored in the hash table. A policy
249  * template parameter of class template _Hashtable controls whether
250  * nodes also store a hash code. In some cases (e.g. strings) this
251  * may be a performance win.
252  */
253  struct _Hash_node_base
254  {
255  _Hash_node_base* _M_nxt;
256 
257  _Hash_node_base() noexcept : _M_nxt() { }
258 
259  _Hash_node_base(_Hash_node_base* __next) noexcept : _M_nxt(__next) { }
260  };
261 
262  /**
263  * struct _Hash_node_value_base
264  *
265  * Node type with the value to store.
266  */
267  template<typename _Value>
268    struct _Hash_node_value_base
269    {
270  typedef _Value value_type;
271 
272  __gnu_cxx::__aligned_buffer<_Value> _M_storage;
273 
274  _Value*
275  _M_valptr() noexcept
276  { return _M_storage._M_ptr(); }
277 
278  const _Value*
279  _M_valptr() const noexcept
280  { return _M_storage._M_ptr(); }
281 
282  _Value&
283  _M_v() noexcept
284  { return *_M_valptr(); }
285 
286  const _Value&
287  _M_v() const noexcept
288  { return *_M_valptr(); }
289  };
290 
291  /**
292  * Primary template struct _Hash_node_code_cache.
293  */
294  template<bool _Cache_hash_code>
295    struct _Hash_node_code_cache
296    { };
297 
298  /**
299  * Specialization for node with cache, struct _Hash_node_code_cache.
300  */
301  template<>
302    struct _Hash_node_code_cache<true>
303    { std::size_t  _M_hash_code; };
304 
305  template<typename _Value, bool _Cache_hash_code>
306  struct _Hash_node_value
307  : _Hash_node_value_base<_Value>
308  , _Hash_node_code_cache<_Cache_hash_code>
309  { };
310 
311  /**
312  * Primary template struct _Hash_node.
313  */
314  template<typename _Value, bool _Cache_hash_code>
315  struct _Hash_node
316    : _Hash_node_base
317    , _Hash_node_value<_Value, _Cache_hash_code>
318  {
319  _Hash_node*
320  _M_next() const noexcept
321  { return static_cast<_Hash_node*>(this->_M_nxt); }
322  };
323 
324  /// Base class for node iterators.
325  template<typename _Value, bool _Cache_hash_code>
326    struct _Node_iterator_base
327    {
328      using __node_type = _Hash_node<_Value, _Cache_hash_code>;
329 
330  __node_type* _M_cur;
331 
332  _Node_iterator_base() : _M_cur(nullptr) { }
333  _Node_iterator_base(__node_type* __p) noexcept
334  : _M_cur(__p) { }
335 
336  void
337  _M_incr() noexcept
338  { _M_cur = _M_cur->_M_next(); }
339 
340  friend bool
341  operator==(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
342  noexcept
343  { return __x._M_cur == __y._M_cur; }
344 
345 #if __cpp_impl_three_way_comparison < 201907L
346  friend bool
347  operator!=(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
348  noexcept
349  { return __x._M_cur != __y._M_cur; }
350 #endif
351  };
352 
353  /// Node iterators, used to iterate through all the hashtable.
354  template<typename _Value, bool __constant_iterators, bool __cache>
355    struct _Node_iterator
356    : public _Node_iterator_base<_Value, __cache>
357  {
358  private:
359      using __base_type = _Node_iterator_base<_Value, __cache>;
360      using __node_type = typename __base_type::__node_type;
361 
362  public:
363  typedef _Value value_type;
364  typedef std::ptrdiff_t difference_type;
365      typedef std::forward_iterator_tag	iterator_category;
366 
367  using pointer = typename std::conditional<__constant_iterators,
368  const value_type*, value_type*>::type;
369 
370  using reference = typename std::conditional<__constant_iterators,
371  const value_type&, value_type&>::type;
372 
373  _Node_iterator() = default;
374 
375  explicit
376  _Node_iterator(__node_type* __p) noexcept
377  : __base_type(__p) { }
378 
379  reference
380  operator*() const noexcept
381  { return this->_M_cur->_M_v(); }
382 
383  pointer
384  operator->() const noexcept
385  { return this->_M_cur->_M_valptr(); }
386 
387      _Node_iterator&
388      operator++() noexcept
389  {
390  this->_M_incr();
391  return *this;
392  }
393 
394      _Node_iterator
395      operator++(int) noexcept
396  {
397  _Node_iterator __tmp(*this);
398  this->_M_incr();
399  return __tmp;
400  }
401  };
402 
403  /// Node const_iterators, used to iterate through all the hashtable.
404  template<typename _Value, bool __constant_iterators, bool __cache>
405    struct _Node_const_iterator
406    : public _Node_iterator_base<_Value, __cache>
407  {
408  private:
409      using __base_type = _Node_iterator_base<_Value, __cache>;
410      using __node_type = typename __base_type::__node_type;
411 
412  public:
413  typedef _Value value_type;
414  typedef std::ptrdiff_t difference_type;
415      typedef std::forward_iterator_tag	iterator_category;
416 
417  typedef const value_type* pointer;
418  typedef const value_type& reference;
419 
420  _Node_const_iterator() = default;
421 
422  explicit
423  _Node_const_iterator(__node_type* __p) noexcept
424  : __base_type(__p) { }
425 
426  _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
427  __cache>& __x) noexcept
428  : __base_type(__x._M_cur) { }
429 
430  reference
431  operator*() const noexcept
432  { return this->_M_cur->_M_v(); }
433 
434  pointer
435  operator->() const noexcept
436  { return this->_M_cur->_M_valptr(); }
437 
438      _Node_const_iterator&
439      operator++() noexcept
440  {
441  this->_M_incr();
442  return *this;
443  }
444 
445      _Node_const_iterator
446      operator++(int) noexcept
447  {
448  _Node_const_iterator __tmp(*this);
449  this->_M_incr();
450  return __tmp;
451  }
452  };
453 
454  // Many of class template _Hashtable's template parameters are policy
455  // classes. These are defaults for the policies.
456 
457  /// Default range hashing function: use division to fold a large number
458  /// into the range [0, N).
459  struct _Mod_range_hashing
460  {
461  typedef std::size_t first_argument_type;
462  typedef std::size_t second_argument_type;
463  typedef std::size_t result_type;
464 
465  result_type
466  operator()(first_argument_type __num,
467  second_argument_type __den) const noexcept
468  { return __num % __den; }
469  };
470 
471  /// Default ranged hash function H. In principle it should be a
472  /// function object composed from objects of type H1 and H2 such that
473  /// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
474  /// h1 and h2. So instead we'll just use a tag to tell class template
475  /// hashtable to do that composition.
476  struct _Default_ranged_hash { };
477 
478  /// Default value for rehash policy. Bucket size is (usually) the
479  /// smallest prime that keeps the load factor small enough.
480  struct _Prime_rehash_policy
481  {
482    using __has_load_factor = true_type;
483 
484  _Prime_rehash_policy(float __z = 1.0) noexcept
485  : _M_max_load_factor(__z), _M_next_resize(0) { }
486 
487  float
488  max_load_factor() const noexcept
489  { return _M_max_load_factor; }
490 
491  // Return a bucket size no smaller than n.
492  std::size_t
493  _M_next_bkt(std::size_t __n) const;
494 
495  // Return a bucket count appropriate for n elements
496  std::size_t
497  _M_bkt_for_elements(std::size_t __n) const
498  { return __builtin_ceil(__n / (double)_M_max_load_factor); }
499 
500  // __n_bkt is current bucket count, __n_elt is current element count,
501  // and __n_ins is number of elements to be inserted. Do we need to
502  // increase bucket count? If so, return make_pair(true, n), where n
503  // is the new bucket count. If not, return make_pair(false, 0).
504    std::pair<bool, std::size_t>
505    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
506  std::size_t __n_ins) const;
507 
508  typedef std::size_t _State;
509 
510  _State
511  _M_state() const
512  { return _M_next_resize; }
513 
514  void
515  _M_reset() noexcept
516  { _M_next_resize = 0; }
517 
518  void
519  _M_reset(_State __state)
520  { _M_next_resize = __state; }
521 
522  static const std::size_t _S_growth_factor = 2;
523 
524  float _M_max_load_factor;
525  mutable std::size_t _M_next_resize;
526  };
527 
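  /* Example (added for this listing, not part of the upstream header): the
     arithmetic used by _M_bkt_for_elements, seen from the container
     interface. reserve(n) asks the rehash policy for at least
     ceil(n / max_load_factor()) buckets so that n elements fit without
     another rehash:

       #include <cassert>
       #include <unordered_set>

       int main()
       {
         std::unordered_set<int> s;
         s.max_load_factor(0.5f);
         s.reserve(100);                 // needs ceil(100 / 0.5) = 200 buckets
         assert(s.bucket_count() >= 200);

         for (int i = 0; i < 100; ++i)
           s.insert(i);
         assert(s.load_factor() <= s.max_load_factor()); // invariant maintained
       }
  */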
528  /// Range hashing function assuming that second arg is a power of 2.
529  struct _Mask_range_hashing
530  {
531  typedef std::size_t first_argument_type;
532  typedef std::size_t second_argument_type;
533  typedef std::size_t result_type;
534 
535  result_type
536  operator()(first_argument_type __num,
537  second_argument_type __den) const noexcept
538  { return __num & (__den - 1); }
539  };
540 
541  /// Compute closest power of 2 not less than __n
542  inline std::size_t
543  __clp2(std::size_t __n) noexcept
544  {
545    using __gnu_cxx::__int_traits;
546    // Equivalent to return __n ? std::bit_ceil(__n) : 0;
547  if (__n < 2)
548  return __n;
549  const unsigned __lz = sizeof(size_t) > sizeof(long)
550  ? __builtin_clzll(__n - 1ull)
551  : __builtin_clzl(__n - 1ul);
552  // Doing two shifts avoids undefined behaviour when __lz == 0.
553  return (size_t(1) << (__int_traits<size_t>::__digits - __lz - 1)) << 1;
554  }
555 
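  /* Example (added for this listing, not part of the upstream header):
     contrast of the two range-hashing strategies and a portable equivalent
     of __clp2. For a power-of-two bucket count, masking with __den - 1
     selects the same bucket as the modulo used by _Mod_range_hashing, but
     without a division:

       #include <cassert>
       #include <cstddef>

       std::size_t mod_range(std::size_t num, std::size_t den)
       { return num % den; }

       std::size_t mask_range(std::size_t num, std::size_t den)
       { return num & (den - 1); }            // valid only when den is 2^k

       std::size_t next_pow2(std::size_t n)   // behaves like __clp2 / bit_ceil
       {
         if (n < 2)
           return n;
         std::size_t p = 1;
         while (p < n)
           p <<= 1;
         return p;
       }

       int main()
       {
         const std::size_t hash = 0x9e3779b9u;
         assert(mask_range(hash, 16) == mod_range(hash, 16));
         assert(next_pow2(0) == 0 && next_pow2(5) == 8 && next_pow2(8) == 8);
       }
  */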
556  /// Rehash policy providing power of 2 bucket numbers. Avoids modulo
557  /// operations.
558  struct _Power2_rehash_policy
559  {
560    using __has_load_factor = true_type;
561 
562  _Power2_rehash_policy(float __z = 1.0) noexcept
563  : _M_max_load_factor(__z), _M_next_resize(0) { }
564 
565  float
566  max_load_factor() const noexcept
567  { return _M_max_load_factor; }
568 
569  // Return a bucket size no smaller than n (as long as n is not above the
570  // highest power of 2).
571  std::size_t
572  _M_next_bkt(std::size_t __n) noexcept
573  {
574  if (__n == 0)
575  // Special case on container 1st initialization with 0 bucket count
576  // hint. We keep _M_next_resize to 0 to make sure that next time we
577  // want to add an element allocation will take place.
578  return 1;
579 
580  const auto __max_width = std::min<size_t>(sizeof(size_t), 8);
581  const auto __max_bkt = size_t(1) << (__max_width * __CHAR_BIT__ - 1);
582  std::size_t __res = __clp2(__n);
583 
584  if (__res == 0)
585  __res = __max_bkt;
586  else if (__res == 1)
587  // If __res is 1 we force it to 2 to make sure there will be an
588  // allocation so that nothing needs to be stored in the initial
589  // single bucket
590  __res = 2;
591 
592  if (__res == __max_bkt)
593  // Set next resize to the max value so that we never try to rehash again
594  // as we have already reached the biggest possible bucket number.
595  // Note that it might result in max_load_factor not being respected.
596  _M_next_resize = size_t(-1);
597  else
598  _M_next_resize
599  = __builtin_floor(__res * (double)_M_max_load_factor);
600 
601  return __res;
602  }
603 
604  // Return a bucket count appropriate for n elements
605  std::size_t
606  _M_bkt_for_elements(std::size_t __n) const noexcept
607  { return __builtin_ceil(__n / (double)_M_max_load_factor); }
608 
609  // __n_bkt is current bucket count, __n_elt is current element count,
610  // and __n_ins is number of elements to be inserted. Do we need to
611  // increase bucket count? If so, return make_pair(true, n), where n
612  // is the new bucket count. If not, return make_pair(false, 0).
613    std::pair<bool, std::size_t>
614    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
615  std::size_t __n_ins) noexcept
616  {
617  if (__n_elt + __n_ins > _M_next_resize)
618  {
619  // If _M_next_resize is 0 it means that we have nothing allocated so
620  // far and that we start inserting elements. In this case we start
621  // with an initial bucket size of 11.
622  double __min_bkts
623  = std::max<std::size_t>(__n_elt + __n_ins, _M_next_resize ? 0 : 11)
624  / (double)_M_max_load_factor;
625  if (__min_bkts >= __n_bkt)
626  return { true,
627  _M_next_bkt(std::max<std::size_t>(__builtin_floor(__min_bkts) + 1,
628  __n_bkt * _S_growth_factor)) };
629 
630  _M_next_resize
631  = __builtin_floor(__n_bkt * (double)_M_max_load_factor);
632  return { false, 0 };
633  }
634  else
635  return { false, 0 };
636  }
637 
638  typedef std::size_t _State;
639 
640  _State
641  _M_state() const noexcept
642  { return _M_next_resize; }
643 
644  void
645  _M_reset() noexcept
646  { _M_next_resize = 0; }
647 
648  void
649  _M_reset(_State __state) noexcept
650  { _M_next_resize = __state; }
651 
652  static const std::size_t _S_growth_factor = 2;
653 
654  float _M_max_load_factor;
655  std::size_t _M_next_resize;
656  };
657 
658  // Base classes for std::_Hashtable. We define these base classes
659  // because in some cases we want to do different things depending on
660  // the value of a policy class. In some cases the policy class
661  // affects which member functions and nested typedefs are defined;
662  // we handle that by specializing base class templates. Several of
663  // the base class templates need to access other members of class
664  // template _Hashtable, so we use a variant of the "Curiously
665  // Recurring Template Pattern" (CRTP) technique.
666 
667  /**
668  * Primary class template _Map_base.
669  *
670  * If the hashtable has a value type of the form pair<T1, T2> and a
671  * key extraction policy (_ExtractKey) that returns the first part
672  * of the pair, the hashtable gets a mapped_type typedef. If it
673  * satisfies those criteria and also has unique keys, then it also
674  * gets an operator[].
675  */
676  template<typename _Key, typename _Value, typename _Alloc,
677  typename _ExtractKey, typename _Equal,
678  typename _Hash, typename _RangeHash, typename _Unused,
679  typename _RehashPolicy, typename _Traits,
680  bool _Unique_keys = _Traits::__unique_keys::value>
681  struct _Map_base { };
682 
683  /// Partial specialization, __unique_keys set to false.
684  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
685  typename _Hash, typename _RangeHash, typename _Unused,
686  typename _RehashPolicy, typename _Traits>
687  struct _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
688  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
689  {
690  using mapped_type = typename std::tuple_element<1, _Pair>::type;
691  };
692 
693  /// Partial specialization, __unique_keys set to true.
694  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
695  typename _Hash, typename _RangeHash, typename _Unused,
696  typename _RehashPolicy, typename _Traits>
697  struct _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
698  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
699  {
700  private:
701  using __hashtable_base = _Hashtable_base<_Key, _Pair, _Select1st, _Equal,
702  _Hash, _RangeHash, _Unused,
703  _Traits>;
704 
705  using __hashtable = _Hashtable<_Key, _Pair, _Alloc, _Select1st, _Equal,
706  _Hash, _RangeHash,
707  _Unused, _RehashPolicy, _Traits>;
708 
709  using __hash_code = typename __hashtable_base::__hash_code;
710 
711  public:
712  using key_type = typename __hashtable_base::key_type;
713  using mapped_type = typename std::tuple_element<1, _Pair>::type;
714 
715  mapped_type&
716  operator[](const key_type& __k);
717 
718  mapped_type&
719  operator[](key_type&& __k);
720 
721  // _GLIBCXX_RESOLVE_LIB_DEFECTS
722  // DR 761. unordered_map needs an at() member function.
723  mapped_type&
724  at(const key_type& __k);
725 
726  const mapped_type&
727  at(const key_type& __k) const;
728  };
729 
730  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
731  typename _Hash, typename _RangeHash, typename _Unused,
732  typename _RehashPolicy, typename _Traits>
733  auto
734  _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
735  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
736  operator[](const key_type& __k)
737  -> mapped_type&
738  {
739  __hashtable* __h = static_cast<__hashtable*>(this);
740  __hash_code __code = __h->_M_hash_code(__k);
741  std::size_t __bkt = __h->_M_bucket_index(__code);
742  if (auto __node = __h->_M_find_node(__bkt, __k, __code))
743  return __node->_M_v().second;
744 
745  typename __hashtable::_Scoped_node __node {
746  __h,
747 	std::piecewise_construct,
748 	std::tuple<const key_type&>(__k),
749 	std::tuple<>()
750  };
751  auto __pos
752  = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
753  __node._M_node = nullptr;
754  return __pos->second;
755  }
756 
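  /* Example (added for this listing, not part of the upstream header): the
     user-visible effect of the operator[] implementation above. When the key
     is absent, a node is built with a value-initialized mapped_type (the
     empty std::tuple<>() argument) and inserted; when present, the existing
     mapped value is returned by reference:

       #include <cassert>
       #include <string>
       #include <unordered_map>

       int main()
       {
         std::unordered_map<std::string, int> m;
         assert(m["hits"] == 0);            // inserted with int{} == 0
         m["hits"] = 3;                     // returns a reference to the slot
         assert(m["hits"] == 3 && m.size() == 1);
       }
  */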
757  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
758  typename _Hash, typename _RangeHash, typename _Unused,
759  typename _RehashPolicy, typename _Traits>
760  auto
761  _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
762  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
763  operator[](key_type&& __k)
764  -> mapped_type&
765  {
766  __hashtable* __h = static_cast<__hashtable*>(this);
767  __hash_code __code = __h->_M_hash_code(__k);
768  std::size_t __bkt = __h->_M_bucket_index(__code);
769  if (auto __node = __h->_M_find_node(__bkt, __k, __code))
770  return __node->_M_v().second;
771 
772  typename __hashtable::_Scoped_node __node {
773  __h,
774 	std::piecewise_construct,
775 	std::forward_as_tuple(std::move(__k)),
776 	std::tuple<>()
777  };
778  auto __pos
779  = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
780  __node._M_node = nullptr;
781  return __pos->second;
782  }
783 
784  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
785  typename _Hash, typename _RangeHash, typename _Unused,
786  typename _RehashPolicy, typename _Traits>
787  auto
788  _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
789  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
790  at(const key_type& __k)
791  -> mapped_type&
792  {
793  __hashtable* __h = static_cast<__hashtable*>(this);
794  auto __ite = __h->find(__k);
795 
796  if (!__ite._M_cur)
797  __throw_out_of_range(__N("_Map_base::at"));
798  return __ite->second;
799  }
800 
801  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
802  typename _Hash, typename _RangeHash, typename _Unused,
803  typename _RehashPolicy, typename _Traits>
804  auto
805  _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
806  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
807  at(const key_type& __k) const
808  -> const mapped_type&
809  {
810  const __hashtable* __h = static_cast<const __hashtable*>(this);
811  auto __ite = __h->find(__k);
812 
813  if (!__ite._M_cur)
814  __throw_out_of_range(__N("_Map_base::at"));
815  return __ite->second;
816  }
817 
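  /* Example (added for this listing, not part of the upstream header): at()
     differs from operator[] only in its behaviour for missing keys; as the
     code above shows, a failed lookup throws std::out_of_range instead of
     inserting:

       #include <cassert>
       #include <stdexcept>
       #include <unordered_map>

       int main()
       {
         std::unordered_map<int, int> m{{1, 10}};
         assert(m.at(1) == 10);
         try
           {
             m.at(2);                       // not found
             return 1;
           }
         catch (const std::out_of_range&)
           {
             return 0;                      // nothing was inserted
           }
       }
  */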
818  /**
819  * Primary class template _Insert_base.
820  *
821  * Defines @c insert member functions appropriate to all _Hashtables.
822  */
823  template<typename _Key, typename _Value, typename _Alloc,
824  typename _ExtractKey, typename _Equal,
825  typename _Hash, typename _RangeHash, typename _Unused,
826  typename _RehashPolicy, typename _Traits>
827    struct _Insert_base
828    {
829  protected:
830  using __hashtable_base = _Hashtable_base<_Key, _Value, _ExtractKey,
831  _Equal, _Hash, _RangeHash,
832  _Unused, _Traits>;
833 
834  using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
835  _Hash, _RangeHash,
836  _Unused, _RehashPolicy, _Traits>;
837 
838  using __hash_cached = typename _Traits::__hash_cached;
839  using __constant_iterators = typename _Traits::__constant_iterators;
840 
841      using __hashtable_alloc = _Hashtable_alloc<
842 	__alloc_rebind<_Alloc, _Hash_node<_Value,
843  __hash_cached::value>>>;
844 
845  using value_type = typename __hashtable_base::value_type;
846  using size_type = typename __hashtable_base::size_type;
847 
848  using __unique_keys = typename _Traits::__unique_keys;
849  using __node_alloc_type = typename __hashtable_alloc::__node_alloc_type;
850  using __node_gen_type = _AllocNode<__node_alloc_type>;
851 
852  __hashtable&
853  _M_conjure_hashtable()
854  { return *(static_cast<__hashtable*>(this)); }
855 
856  template<typename _InputIterator, typename _NodeGetter>
857  void
858  _M_insert_range(_InputIterator __first, _InputIterator __last,
859  const _NodeGetter&, true_type __uks);
860 
861  template<typename _InputIterator, typename _NodeGetter>
862  void
863  _M_insert_range(_InputIterator __first, _InputIterator __last,
864  const _NodeGetter&, false_type __uks);
865 
866  public:
867  using iterator = _Node_iterator<_Value, __constant_iterators::value,
868  __hash_cached::value>;
869 
870  using const_iterator = _Node_const_iterator<_Value, __constant_iterators::value,
871  __hash_cached::value>;
872 
873  using __ireturn_type = typename std::conditional<__unique_keys::value,
874 						std::pair<iterator, bool>,
875 						iterator>::type;
876 
877  __ireturn_type
878  insert(const value_type& __v)
879  {
880  __hashtable& __h = _M_conjure_hashtable();
881  __node_gen_type __node_gen(__h);
882  return __h._M_insert(__v, __node_gen, __unique_keys{});
883  }
884 
885  iterator
886  insert(const_iterator __hint, const value_type& __v)
887  {
888  __hashtable& __h = _M_conjure_hashtable();
889  __node_gen_type __node_gen(__h);
890  return __h._M_insert(__hint, __v, __node_gen, __unique_keys{});
891  }
892 
893  template<typename _KType, typename... _Args>
894 	std::pair<iterator, bool>
895 	try_emplace(const_iterator, _KType&& __k, _Args&&... __args)
896  {
897  __hashtable& __h = _M_conjure_hashtable();
898  auto __code = __h._M_hash_code(__k);
899  std::size_t __bkt = __h._M_bucket_index(__code);
900  if (auto __node = __h._M_find_node(__bkt, __k, __code))
901  return { iterator(__node), false };
902 
903  typename __hashtable::_Scoped_node __node {
904  &__h,
905 	    std::piecewise_construct,
906 	    std::forward_as_tuple(std::forward<_KType>(__k)),
907  std::forward_as_tuple(std::forward<_Args>(__args)...)
908  };
909  auto __it
910  = __h._M_insert_unique_node(__bkt, __code, __node._M_node);
911  __node._M_node = nullptr;
912  return { __it, true };
913  }
914 
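  /* Example (added for this listing, not part of the upstream header): the
     behaviour this try_emplace helper provides at the std::unordered_map
     level. If the key already exists, the node is never built, so move-only
     arguments are left untouched (requires C++17 for structured bindings):

       #include <cassert>
       #include <memory>
       #include <unordered_map>

       int main()
       {
         std::unordered_map<int, std::unique_ptr<int>> m;
         m.try_emplace(7, std::make_unique<int>(1));     // key absent: inserted

         auto q = std::make_unique<int>(2);
         auto [it, inserted] = m.try_emplace(7, std::move(q));
         assert(!inserted && q != nullptr);              // key present: q not consumed
         assert(*it->second == 1);
       }
  */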
915  void
916  insert(initializer_list<value_type> __l)
917  { this->insert(__l.begin(), __l.end()); }
918 
919  template<typename _InputIterator>
920  void
921  insert(_InputIterator __first, _InputIterator __last)
922  {
923  __hashtable& __h = _M_conjure_hashtable();
924  __node_gen_type __node_gen(__h);
925  return _M_insert_range(__first, __last, __node_gen, __unique_keys{});
926  }
927  };
928 
929  template<typename _Key, typename _Value, typename _Alloc,
930  typename _ExtractKey, typename _Equal,
931  typename _Hash, typename _RangeHash, typename _Unused,
932  typename _RehashPolicy, typename _Traits>
933  template<typename _InputIterator, typename _NodeGetter>
934  void
935  _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
936  _Hash, _RangeHash, _Unused,
937  _RehashPolicy, _Traits>::
938  _M_insert_range(_InputIterator __first, _InputIterator __last,
939  const _NodeGetter& __node_gen, true_type __uks)
940  {
941  __hashtable& __h = _M_conjure_hashtable();
942  for (; __first != __last; ++__first)
943  __h._M_insert(*__first, __node_gen, __uks);
944  }
945 
946  template<typename _Key, typename _Value, typename _Alloc,
947  typename _ExtractKey, typename _Equal,
948  typename _Hash, typename _RangeHash, typename _Unused,
949  typename _RehashPolicy, typename _Traits>
950  template<typename _InputIterator, typename _NodeGetter>
951  void
952  _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
953  _Hash, _RangeHash, _Unused,
954  _RehashPolicy, _Traits>::
955  _M_insert_range(_InputIterator __first, _InputIterator __last,
956  const _NodeGetter& __node_gen, false_type __uks)
957  {
958  using __rehash_type = typename __hashtable::__rehash_type;
959  using __rehash_state = typename __hashtable::__rehash_state;
960  using pair_type = std::pair<bool, std::size_t>;
961 
962  size_type __n_elt = __detail::__distance_fw(__first, __last);
963  if (__n_elt == 0)
964  return;
965 
966  __hashtable& __h = _M_conjure_hashtable();
967  __rehash_type& __rehash = __h._M_rehash_policy;
968  const __rehash_state& __saved_state = __rehash._M_state();
969  pair_type __do_rehash = __rehash._M_need_rehash(__h._M_bucket_count,
970  __h._M_element_count,
971  __n_elt);
972 
973  if (__do_rehash.first)
974  __h._M_rehash(__do_rehash.second, __saved_state);
975 
976  for (; __first != __last; ++__first)
977  __h._M_insert(*__first, __node_gen, __uks);
978  }
979 
980  /**
981  * Primary class template _Insert.
982  *
983  * Defines @c insert member functions that depend on _Hashtable policies,
984  * via partial specializations.
985  */
986  template<typename _Key, typename _Value, typename _Alloc,
987  typename _ExtractKey, typename _Equal,
988  typename _Hash, typename _RangeHash, typename _Unused,
989  typename _RehashPolicy, typename _Traits,
990  bool _Constant_iterators = _Traits::__constant_iterators::value>
991  struct _Insert;
992 
993  /// Specialization.
994  template<typename _Key, typename _Value, typename _Alloc,
995  typename _ExtractKey, typename _Equal,
996  typename _Hash, typename _RangeHash, typename _Unused,
997  typename _RehashPolicy, typename _Traits>
998  struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
999  _Hash, _RangeHash, _Unused,
1000  _RehashPolicy, _Traits, true>
1001  : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1002  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
1003  {
1004  using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
1005  _Equal, _Hash, _RangeHash, _Unused,
1006  _RehashPolicy, _Traits>;
1007 
1008  using value_type = typename __base_type::value_type;
1009  using iterator = typename __base_type::iterator;
1010       using const_iterator = typename __base_type::const_iterator;
1011       using __ireturn_type = typename __base_type::__ireturn_type;
1012 
1013  using __unique_keys = typename __base_type::__unique_keys;
1014  using __hashtable = typename __base_type::__hashtable;
1015  using __node_gen_type = typename __base_type::__node_gen_type;
1016 
1017  using __base_type::insert;
1018 
1019  __ireturn_type
1020  insert(value_type&& __v)
1021  {
1022  __hashtable& __h = this->_M_conjure_hashtable();
1023  __node_gen_type __node_gen(__h);
1024  return __h._M_insert(std::move(__v), __node_gen, __unique_keys{});
1025  }
1026 
1027  iterator
1028  insert(const_iterator __hint, value_type&& __v)
1029  {
1030  __hashtable& __h = this->_M_conjure_hashtable();
1031  __node_gen_type __node_gen(__h);
1032  return __h._M_insert(__hint, std::move(__v), __node_gen,
1033  __unique_keys{});
1034  }
1035  };
1036 
1037  /// Specialization.
1038  template<typename _Key, typename _Value, typename _Alloc,
1039  typename _ExtractKey, typename _Equal,
1040  typename _Hash, typename _RangeHash, typename _Unused,
1041  typename _RehashPolicy, typename _Traits>
1042  struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1043  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
1044  : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1045  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
1046  {
1047  using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
1048  _Equal, _Hash, _RangeHash, _Unused,
1049  _RehashPolicy, _Traits>;
1050  using value_type = typename __base_type::value_type;
1051  using iterator = typename __base_type::iterator;
1052       using const_iterator = typename __base_type::const_iterator;
1053 
1054  using __unique_keys = typename __base_type::__unique_keys;
1055  using __hashtable = typename __base_type::__hashtable;
1056  using __ireturn_type = typename __base_type::__ireturn_type;
1057 
1058  using __base_type::insert;
1059 
1060  template<typename _Pair>
1061 	using __is_cons = std::is_constructible<value_type, _Pair&&>;
1062 
1063  template<typename _Pair>
1064 	using _IFcons = std::enable_if<__is_cons<_Pair>::value>;
1065 
1066  template<typename _Pair>
1067  using _IFconsp = typename _IFcons<_Pair>::type;
1068 
1069  template<typename _Pair, typename = _IFconsp<_Pair>>
1070  __ireturn_type
1071  insert(_Pair&& __v)
1072  {
1073  __hashtable& __h = this->_M_conjure_hashtable();
1074  return __h._M_emplace(__unique_keys{}, std::forward<_Pair>(__v));
1075  }
1076 
1077  template<typename _Pair, typename = _IFconsp<_Pair>>
1078  iterator
1079  insert(const_iterator __hint, _Pair&& __v)
1080  {
1081  __hashtable& __h = this->_M_conjure_hashtable();
1082  return __h._M_emplace(__hint, __unique_keys{},
1083  std::forward<_Pair>(__v));
1084  }
1085  };
1086 
1087  template<typename _Policy>
1088  using __has_load_factor = typename _Policy::__has_load_factor;
1089 
1090  /**
1091  * Primary class template _Rehash_base.
1092  *
1093  * Give hashtable the max_load_factor functions and reserve iff the
1094  * rehash policy supports it.
1095  */
1096  template<typename _Key, typename _Value, typename _Alloc,
1097  typename _ExtractKey, typename _Equal,
1098  typename _Hash, typename _RangeHash, typename _Unused,
1099  typename _RehashPolicy, typename _Traits,
1100  typename =
1101  __detected_or_t<false_type, __has_load_factor, _RehashPolicy>>
1102     struct _Rehash_base;
1103 
1104  /// Specialization when the rehash policy doesn't provide load factor management.
1105  template<typename _Key, typename _Value, typename _Alloc,
1106  typename _ExtractKey, typename _Equal,
1107  typename _Hash, typename _RangeHash, typename _Unused,
1108  typename _RehashPolicy, typename _Traits>
1109  struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1110  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
1111  false_type /* Has load factor */>
1112  {
1113  };
1114 
1115  /// Specialization when the rehash policy provides load factor management.
1116  template<typename _Key, typename _Value, typename _Alloc,
1117  typename _ExtractKey, typename _Equal,
1118  typename _Hash, typename _RangeHash, typename _Unused,
1119  typename _RehashPolicy, typename _Traits>
1120  struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1121  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
1122  true_type /* Has load factor */>
1123  {
1124  using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey,
1125  _Equal, _Hash, _RangeHash, _Unused,
1126  _RehashPolicy, _Traits>;
1127 
1128  float
1129  max_load_factor() const noexcept
1130  {
1131  const __hashtable* __this = static_cast<const __hashtable*>(this);
1132  return __this->__rehash_policy().max_load_factor();
1133  }
1134 
1135  void
1136  max_load_factor(float __z)
1137  {
1138  __hashtable* __this = static_cast<__hashtable*>(this);
1139  __this->__rehash_policy(_RehashPolicy(__z));
1140  }
1141 
1142  void
1143  reserve(std::size_t __n)
1144  {
1145  __hashtable* __this = static_cast<__hashtable*>(this);
1146  __this->rehash(__this->__rehash_policy()._M_bkt_for_elements(__n));
1147  }
1148  };
1149 
1150  /**
1151  * Primary class template _Hashtable_ebo_helper.
1152  *
1153  * Helper class using EBO when it is not forbidden (the type is not
1154  * final) and when it is worth it (the type is empty.)
1155  */
1156  template<int _Nm, typename _Tp,
1157  bool __use_ebo = !__is_final(_Tp) && __is_empty(_Tp)>
1158     struct _Hashtable_ebo_helper;
1159 
1160  /// Specialization using EBO.
1161  template<int _Nm, typename _Tp>
1162  struct _Hashtable_ebo_helper<_Nm, _Tp, true>
1163  : private _Tp
1164  {
1165  _Hashtable_ebo_helper() noexcept(noexcept(_Tp())) : _Tp() { }
1166 
1167  template<typename _OtherTp>
1168  _Hashtable_ebo_helper(_OtherTp&& __tp)
1169  : _Tp(std::forward<_OtherTp>(__tp))
1170  { }
1171 
1172  const _Tp& _M_cget() const { return static_cast<const _Tp&>(*this); }
1173  _Tp& _M_get() { return static_cast<_Tp&>(*this); }
1174  };
1175 
1176  /// Specialization not using EBO.
1177  template<int _Nm, typename _Tp>
1178  struct _Hashtable_ebo_helper<_Nm, _Tp, false>
1179  {
1180  _Hashtable_ebo_helper() = default;
1181 
1182  template<typename _OtherTp>
1183  _Hashtable_ebo_helper(_OtherTp&& __tp)
1184  : _M_tp(std::forward<_OtherTp>(__tp))
1185  { }
1186 
1187  const _Tp& _M_cget() const { return _M_tp; }
1188  _Tp& _M_get() { return _M_tp; }
1189 
1190  private:
1191  _Tp _M_tp{};
1192  };
1193 
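  /* Example (added for this listing, not part of the upstream header): why
     the EBO helper exists. Storing an empty functor (hash, key-equal,
     allocator) as a base class costs no space, while storing it as a data
     member does; mainstream ABIs such as the Itanium C++ ABI used by GCC
     apply the empty-base optimization here:

       #include <functional>

       template<typename F>
       struct with_ebo : private F      // empty base: occupies no storage
       {
         void* p;
       };

       template<typename F>
       struct without_ebo               // member: at least 1 byte + padding
       {
         F f;
         void* p;
       };

       int main()
       {
         static_assert(sizeof(with_ebo<std::hash<int>>)
                         < sizeof(without_ebo<std::hash<int>>),
                       "empty base optimization saves space");
         return 0;
       }
  */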
1194  /**
1195  * Primary class template _Local_iterator_base.
1196  *
1197  * Base class for local iterators, used to iterate within a bucket
1198  * but not between buckets.
1199  */
1200  template<typename _Key, typename _Value, typename _ExtractKey,
1201  typename _Hash, typename _RangeHash, typename _Unused,
1202  bool __cache_hash_code>
1203     struct _Local_iterator_base;
1204 
1205  /**
1206  * Primary class template _Hash_code_base.
1207  *
1208  * Encapsulates two policy issues that aren't quite orthogonal.
1209  * (1) the difference between using a ranged hash function and using
1210  * the combination of a hash function and a range-hashing function.
1211  * In the former case we don't have such things as hash codes, so
1212  * we have a dummy type as placeholder.
1213  * (2) Whether or not we cache hash codes. Caching hash codes is
1214  * meaningless if we have a ranged hash function.
1215  *
1216  * We also put the key extraction objects here, for convenience.
1217  * Each specialization derives from one or more of the template
1218  * parameters to benefit from EBO. This is important as this type
1219  * is inherited in some cases by the _Local_iterator_base type used
1220  * to implement local_iterator and const_local_iterator. As with
1221  * any iterator type we prefer to make it as small as possible.
1222  */
1223  template<typename _Key, typename _Value, typename _ExtractKey,
1224  typename _Hash, typename _RangeHash, typename _Unused,
1225  bool __cache_hash_code>
1226     struct _Hash_code_base
1227     : private _Hashtable_ebo_helper<1, _Hash>
1228  {
1229  private:
1230       using __ebo_hash = _Hashtable_ebo_helper<1, _Hash>;
1231 
1232  // Gives the local iterator implementation access to _M_bucket_index().
1233  friend struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1234  _Hash, _RangeHash, _Unused, false>;
1235 
1236  public:
1237  typedef _Hash hasher;
1238 
1239  hasher
1240  hash_function() const
1241  { return _M_hash(); }
1242 
1243  protected:
1244  typedef std::size_t __hash_code;
1245 
1246  // We need the default constructor for the local iterators and _Hashtable
1247  // default constructor.
1248  _Hash_code_base() = default;
1249 
1250  _Hash_code_base(const _Hash& __hash) : __ebo_hash(__hash) { }
1251 
1252  __hash_code
1253  _M_hash_code(const _Key& __k) const
1254  {
1255  static_assert(__is_invocable<const _Hash&, const _Key&>{},
1256  "hash function must be invocable with an argument of key type");
1257  return _M_hash()(__k);
1258  }
1259 
1260  template<typename _Kt>
1261  __hash_code
1262  _M_hash_code_tr(const _Kt& __k) const
1263  {
1264  static_assert(__is_invocable<const _Hash&, const _Kt&>{},
1265  "hash function must be invocable with an argument of key type");
1266  return _M_hash()(__k);
1267  }
1268 
1269  std::size_t
1270  _M_bucket_index(__hash_code __c, std::size_t __bkt_count) const
1271  { return _RangeHash{}(__c, __bkt_count); }
1272 
1273  std::size_t
1274  _M_bucket_index(const _Hash_node_value<_Value, false>& __n,
1275  std::size_t __bkt_count) const
1276  noexcept( noexcept(declval<const _Hash&>()(declval<const _Key&>()))
1277  && noexcept(declval<const _RangeHash&>()((__hash_code)0,
1278  (std::size_t)0)) )
1279  {
1280  return _RangeHash{}(_M_hash_code(_ExtractKey{}(__n._M_v())),
1281  __bkt_count);
1282  }
1283 
1284  std::size_t
1285  _M_bucket_index(const _Hash_node_value<_Value, true>& __n,
1286  std::size_t __bkt_count) const
1287  noexcept( noexcept(declval<const _RangeHash&>()((__hash_code)0,
1288  (std::size_t)0)) )
1289  { return _RangeHash{}(__n._M_hash_code, __bkt_count); }
1290 
1291  void
1292  _M_store_code(_Hash_node_code_cache<false>&, __hash_code) const
1293  { }
1294 
1295  void
1296  _M_copy_code(_Hash_node_code_cache<false>&,
1297  const _Hash_node_code_cache<false>&) const
1298  { }
1299 
1300  void
1301  _M_store_code(_Hash_node_code_cache<true>& __n, __hash_code __c) const
1302  { __n._M_hash_code = __c; }
1303 
1304  void
1305  _M_copy_code(_Hash_node_code_cache<true>& __to,
1306  const _Hash_node_code_cache<true>& __from) const
1307  { __to._M_hash_code = __from._M_hash_code; }
1308 
1309  void
1310  _M_swap(_Hash_code_base& __x)
1311  { std::swap(__ebo_hash::_M_get(), __x.__ebo_hash::_M_get()); }
1312 
1313  const _Hash&
1314  _M_hash() const { return __ebo_hash::_M_cget(); }
1315  };
1316 
1317  /// Partial specialization used when nodes contain a cached hash code.
1318  template<typename _Key, typename _Value, typename _ExtractKey,
1319  typename _Hash, typename _RangeHash, typename _Unused>
1320  struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1321  _Hash, _RangeHash, _Unused, true>
1322  : public _Node_iterator_base<_Value, true>
1323  {
1324  protected:
1325       using __base_node_iter = _Node_iterator_base<_Value, true>;
1326       using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1327  _Hash, _RangeHash, _Unused, true>;
1328 
1329  _Local_iterator_base() = default;
1330       _Local_iterator_base(const __hash_code_base&,
1331 			   _Hash_node<_Value, true>* __p,
1332 			   std::size_t __bkt, std::size_t __bkt_count)
1333  : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
1334  { }
1335 
1336  void
1337  _M_incr()
1338  {
1339  __base_node_iter::_M_incr();
1340  if (this->_M_cur)
1341  {
1342  std::size_t __bkt
1343  = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
1344  if (__bkt != _M_bucket)
1345  this->_M_cur = nullptr;
1346  }
1347  }
1348 
1349  std::size_t _M_bucket;
1350  std::size_t _M_bucket_count;
1351 
1352  public:
1353  std::size_t
1354  _M_get_bucket() const { return _M_bucket; } // for debug mode
1355  };
1356 
1357  // Uninitialized storage for a _Hash_code_base.
1358  // This type is DefaultConstructible and Assignable even if the
1359  // _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
1360  // can be DefaultConstructible and Assignable.
1361  template<typename _Tp, bool _IsEmpty = std::is_empty<_Tp>::value>
1362  struct _Hash_code_storage
1363  {
1364  __gnu_cxx::__aligned_buffer<_Tp> _M_storage;
1365 
1366  _Tp*
1367  _M_h() { return _M_storage._M_ptr(); }
1368 
1369  const _Tp*
1370  _M_h() const { return _M_storage._M_ptr(); }
1371  };
1372 
1373  // Empty partial specialization for empty _Hash_code_base types.
1374  template<typename _Tp>
1375  struct _Hash_code_storage<_Tp, true>
1376  {
1377  static_assert( std::is_empty<_Tp>::value, "Type must be empty" );
1378 
1379  // As _Tp is an empty type there will be no bytes written/read through
1380  // the cast pointer, so no strict-aliasing violation.
1381  _Tp*
1382  _M_h() { return reinterpret_cast<_Tp*>(this); }
1383 
1384  const _Tp*
1385  _M_h() const { return reinterpret_cast<const _Tp*>(this); }
1386  };
1387 
1388  template<typename _Key, typename _Value, typename _ExtractKey,
1389  typename _Hash, typename _RangeHash, typename _Unused>
1390  using __hash_code_for_local_iter
1391  = _Hash_code_storage<_Hash_code_base<_Key, _Value, _ExtractKey,
1392  _Hash, _RangeHash, _Unused, false>>;
1393 
1394  // Partial specialization used when hash codes are not cached
1395  template<typename _Key, typename _Value, typename _ExtractKey,
1396  typename _Hash, typename _RangeHash, typename _Unused>
1397  struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1398  _Hash, _RangeHash, _Unused, false>
1399  : __hash_code_for_local_iter<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
1400  _Unused>
1401  , _Node_iterator_base<_Value, false>
1402  {
1403  protected:
1404  using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1405  _Hash, _RangeHash, _Unused, false>;
1406  using __node_iter_base = _Node_iterator_base<_Value, false>;
1407 
1408  _Local_iterator_base() : _M_bucket_count(-1) { }
1409 
1410  _Local_iterator_base(const __hash_code_base& __base,
1411  _Hash_node<_Value, false>* __p,
1412  std::size_t __bkt, std::size_t __bkt_count)
1413  : __node_iter_base(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
1414  { _M_init(__base); }
1415 
1416  ~_Local_iterator_base()
1417  {
1418  if (_M_bucket_count != size_t(-1))
1419  _M_destroy();
1420  }
1421 
1422  _Local_iterator_base(const _Local_iterator_base& __iter)
1423  : __node_iter_base(__iter._M_cur), _M_bucket(__iter._M_bucket)
1424  , _M_bucket_count(__iter._M_bucket_count)
1425  {
1426  if (_M_bucket_count != size_t(-1))
1427  _M_init(*__iter._M_h());
1428  }
1429 
1430  _Local_iterator_base&
1431  operator=(const _Local_iterator_base& __iter)
1432  {
1433  if (_M_bucket_count != -1)
1434  _M_destroy();
1435  this->_M_cur = __iter._M_cur;
1436  _M_bucket = __iter._M_bucket;
1437  _M_bucket_count = __iter._M_bucket_count;
1438  if (_M_bucket_count != -1)
1439  _M_init(*__iter._M_h());
1440  return *this;
1441  }
1442 
1443  void
1444  _M_incr()
1445  {
1446  __node_iter_base::_M_incr();
1447  if (this->_M_cur)
1448  {
1449  std::size_t __bkt = this->_M_h()->_M_bucket_index(*this->_M_cur,
1450  _M_bucket_count);
1451  if (__bkt != _M_bucket)
1452  this->_M_cur = nullptr;
1453  }
1454  }
1455 
1456  std::size_t _M_bucket;
1457  std::size_t _M_bucket_count;
1458 
1459  void
1460  _M_init(const __hash_code_base& __base)
1461  { ::new(this->_M_h()) __hash_code_base(__base); }
1462 
1463  void
1464  _M_destroy() { this->_M_h()->~__hash_code_base(); }
1465 
1466  public:
1467  std::size_t
1468  _M_get_bucket() const { return _M_bucket; } // for debug mode
1469  };
1470 
1471  /// local iterators
1472  template<typename _Key, typename _Value, typename _ExtractKey,
1473  typename _Hash, typename _RangeHash, typename _Unused,
1474  bool __constant_iterators, bool __cache>
1475     struct _Local_iterator
1476     : public _Local_iterator_base<_Key, _Value, _ExtractKey,
1477  _Hash, _RangeHash, _Unused, __cache>
1478  {
1479  private:
1480  using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
1481  _Hash, _RangeHash, _Unused, __cache>;
1482  using __hash_code_base = typename __base_type::__hash_code_base;
1483 
1484  public:
1485  typedef _Value value_type;
1486  typedef typename std::conditional<__constant_iterators,
1487  const value_type*, value_type*>::type
1488  pointer;
1489  typedef typename std::conditional<__constant_iterators,
1490  const value_type&, value_type&>::type
1491  reference;
1492  typedef std::ptrdiff_t difference_type;
1493       typedef std::forward_iterator_tag	iterator_category;
1494 
1495  _Local_iterator() = default;
1496 
1497  _Local_iterator(const __hash_code_base& __base,
1498 		      _Hash_node<_Value, __cache>* __n,
1499 		      std::size_t __bkt, std::size_t __bkt_count)
1500  : __base_type(__base, __n, __bkt, __bkt_count)
1501  { }
1502 
1503  reference
1504  operator*() const
1505  { return this->_M_cur->_M_v(); }
1506 
1507  pointer
1508  operator->() const
1509  { return this->_M_cur->_M_valptr(); }
1510 
1511       _Local_iterator&
1512       operator++()
1513  {
1514  this->_M_incr();
1515  return *this;
1516  }
1517 
1518       _Local_iterator
1519       operator++(int)
1520  {
1521  _Local_iterator __tmp(*this);
1522  this->_M_incr();
1523  return __tmp;
1524  }
1525  };
1526 
1527  /// local const_iterators
1528  template<typename _Key, typename _Value, typename _ExtractKey,
1529  typename _Hash, typename _RangeHash, typename _Unused,
1530  bool __constant_iterators, bool __cache>
1531     struct _Local_const_iterator
1532     : public _Local_iterator_base<_Key, _Value, _ExtractKey,
1533  _Hash, _RangeHash, _Unused, __cache>
1534  {
1535  private:
1536  using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
1537  _Hash, _RangeHash, _Unused, __cache>;
1538  using __hash_code_base = typename __base_type::__hash_code_base;
1539 
1540  public:
1541  typedef _Value value_type;
1542  typedef const value_type* pointer;
1543  typedef const value_type& reference;
1544  typedef std::ptrdiff_t difference_type;
1545       typedef std::forward_iterator_tag	iterator_category;
1546 
1547  _Local_const_iterator() = default;
1548 
1549  _Local_const_iterator(const __hash_code_base& __base,
1550 			    _Hash_node<_Value, __cache>* __n,
1551 			    std::size_t __bkt, std::size_t __bkt_count)
1552  : __base_type(__base, __n, __bkt, __bkt_count)
1553  { }
1554 
1555  _Local_const_iterator(const _Local_iterator<_Key, _Value, _ExtractKey,
1556  _Hash, _RangeHash, _Unused,
1557  __constant_iterators,
1558  __cache>& __x)
1559  : __base_type(__x)
1560  { }
1561 
1562  reference
1563  operator*() const
1564  { return this->_M_cur->_M_v(); }
1565 
1566  pointer
1567  operator->() const
1568  { return this->_M_cur->_M_valptr(); }
1569 
1570       _Local_const_iterator&
1571       operator++()
1572  {
1573  this->_M_incr();
1574  return *this;
1575  }
1576 
1577       _Local_const_iterator
1578       operator++(int)
1579  {
1580  _Local_const_iterator __tmp(*this);
1581  this->_M_incr();
1582  return __tmp;
1583  }
1584  };
1585 
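  /* Example (added for this listing, not part of the upstream header): the
     local iterators defined above back the bucket interface of the
     containers. begin(b)/end(b) traverse one bucket only, stopping when the
     next node maps to a different bucket:

       #include <cassert>
       #include <cstddef>
       #include <unordered_set>

       int main()
       {
         std::unordered_set<int> s{1, 2, 3, 4, 5};
         std::size_t visited = 0;
         for (std::size_t b = 0; b < s.bucket_count(); ++b)
           for (auto it = s.begin(b); it != s.end(b); ++it)  // local iterators
             ++visited;
         assert(visited == s.size());  // every element is in exactly one bucket
       }
  */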
1586  /**
1587  * Primary class template _Hashtable_base.
1588  *
1589  * Helper class adding management of _Equal functor to
1590  * _Hash_code_base type.
1591  *
1592  * Base class templates are:
1593  * - __detail::_Hash_code_base
1594  * - __detail::_Hashtable_ebo_helper
1595  */
1596  template<typename _Key, typename _Value, typename _ExtractKey,
1597  typename _Equal, typename _Hash, typename _RangeHash,
1598  typename _Unused, typename _Traits>
1599   struct _Hashtable_base
1600   : public _Hash_code_base<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
1601  _Unused, _Traits::__hash_cached::value>,
1602  private _Hashtable_ebo_helper<0, _Equal>
1603  {
1604  public:
1605  typedef _Key key_type;
1606  typedef _Value value_type;
1607  typedef _Equal key_equal;
1608  typedef std::size_t size_type;
1609  typedef std::ptrdiff_t difference_type;
1610 
1611  using __traits_type = _Traits;
1612  using __hash_cached = typename __traits_type::__hash_cached;
1613 
1614  using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1615  _Hash, _RangeHash, _Unused,
1616  __hash_cached::value>;
1617 
1618  using __hash_code = typename __hash_code_base::__hash_code;
1619 
1620  private:
1621     using _EqualEBO = _Hashtable_ebo_helper<0, _Equal>;
1622 
1623  static bool
1624  _S_equals(__hash_code, const _Hash_node_code_cache<false>&)
1625  { return true; }
1626 
1627  static bool
1628  _S_node_equals(const _Hash_node_code_cache<false>&,
1629 		   const _Hash_node_code_cache<false>&)
1630     { return true; }
1631 
1632  static bool
1633  _S_equals(__hash_code __c, const _Hash_node_code_cache<true>& __n)
1634  { return __c == __n._M_hash_code; }
1635 
1636  static bool
1637  _S_node_equals(const _Hash_node_code_cache<true>& __lhn,
1638  const _Hash_node_code_cache<true>& __rhn)
1639  { return __lhn._M_hash_code == __rhn._M_hash_code; }
1640 
1641  protected:
1642  _Hashtable_base() = default;
1643 
1644  _Hashtable_base(const _Hash& __hash, const _Equal& __eq)
1645  : __hash_code_base(__hash), _EqualEBO(__eq)
1646  { }
1647 
1648  bool
1649  _M_equals(const _Key& __k, __hash_code __c,
1650  const _Hash_node_value<_Value, __hash_cached::value>& __n) const
1651  {
1652  static_assert(__is_invocable<const _Equal&, const _Key&, const _Key&>{},
1653  "key equality predicate must be invocable with two arguments of "
1654  "key type");
1655  return _S_equals(__c, __n) && _M_eq()(__k, _ExtractKey{}(__n._M_v()));
1656  }
1657 
1658  template<typename _Kt>
1659  bool
1660  _M_equals_tr(const _Kt& __k, __hash_code __c,
1661  const _Hash_node_value<_Value,
1662  __hash_cached::value>& __n) const
1663  {
1664  static_assert(
1665  __is_invocable<const _Equal&, const _Kt&, const _Key&>{},
1666  "key equality predicate must be invocable with two arguments of "
1667  "key type");
1668  return _S_equals(__c, __n) && _M_eq()(__k, _ExtractKey{}(__n._M_v()));
1669  }
1670 
1671  bool
1672  _M_node_equals(
1673  const _Hash_node_value<_Value, __hash_cached::value>& __lhn,
1674  const _Hash_node_value<_Value, __hash_cached::value>& __rhn) const
1675  {
1676  return _S_node_equals(__lhn, __rhn)
1677  && _M_eq()(_ExtractKey{}(__lhn._M_v()), _ExtractKey{}(__rhn._M_v()));
1678  }
1679 
1680  void
1681  _M_swap(_Hashtable_base& __x)
1682  {
1683  __hash_code_base::_M_swap(__x);
1684  std::swap(_EqualEBO::_M_get(), __x._EqualEBO::_M_get());
1685  }
1686 
1687  const _Equal&
1688  _M_eq() const { return _EqualEBO::_M_cget(); }
1689  };
1690 
1691  /**
1692  * Primary class template _Equality.
1693  *
1694  * This is for implementing equality comparison for unordered
1695  * containers, per N3068, by John Lakos and Pablo Halpern.
1696  * Algorithmically, we follow closely the reference implementations
1697  * therein.
1698  */
1699  template<typename _Key, typename _Value, typename _Alloc,
1700  typename _ExtractKey, typename _Equal,
1701  typename _Hash, typename _RangeHash, typename _Unused,
1702  typename _RehashPolicy, typename _Traits,
1703  bool _Unique_keys = _Traits::__unique_keys::value>
1704  struct _Equality;
1705 
1706  /// unordered_map and unordered_set specializations.
1707  template<typename _Key, typename _Value, typename _Alloc,
1708  typename _ExtractKey, typename _Equal,
1709  typename _Hash, typename _RangeHash, typename _Unused,
1710  typename _RehashPolicy, typename _Traits>
1711  struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1712  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
1713  {
1714  using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1715  _Hash, _RangeHash, _Unused,
1716  _RehashPolicy, _Traits>;
1717 
1718  bool
1719  _M_equal(const __hashtable&) const;
1720  };
1721 
1722  template<typename _Key, typename _Value, typename _Alloc,
1723  typename _ExtractKey, typename _Equal,
1724  typename _Hash, typename _RangeHash, typename _Unused,
1725  typename _RehashPolicy, typename _Traits>
1726  bool
1727  _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1728  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
1729  _M_equal(const __hashtable& __other) const
1730  {
1731  using __node_type = typename __hashtable::__node_type;
1732  const __hashtable* __this = static_cast<const __hashtable*>(this);
1733  if (__this->size() != __other.size())
1734  return false;
1735 
1736  for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
1737  {
1738  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
1739  auto __prev_n = __other._M_buckets[__ybkt];
1740  if (!__prev_n)
1741  return false;
1742 
1743  for (__node_type* __n = static_cast<__node_type*>(__prev_n->_M_nxt);;
1744  __n = __n->_M_next())
1745  {
1746  if (__n->_M_v() == *__itx)
1747  break;
1748 
1749  if (!__n->_M_nxt
1750  || __other._M_bucket_index(*__n->_M_next()) != __ybkt)
1751  return false;
1752  }
1753  }
1754 
1755  return true;
1756  }
1757 
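  /* Example (added for this listing, not part of the upstream header): the
     unique-key equality above compares element by element via bucket lookup,
     so equal containers need not share bucket counts or iteration order:

       #include <cassert>
       #include <unordered_map>

       int main()
       {
         std::unordered_map<int, int> a{{1, 10}, {2, 20}, {3, 30}};
         std::unordered_map<int, int> b{{3, 30}, {1, 10}, {2, 20}};
         b.rehash(64);                 // different bucket count and order
         assert(a == b);               // same key/value pairs => equal

         b[2] = 21;
         assert(a != b);
       }
  */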
1758  /// unordered_multiset and unordered_multimap specializations.
1759  template<typename _Key, typename _Value, typename _Alloc,
1760  typename _ExtractKey, typename _Equal,
1761  typename _Hash, typename _RangeHash, typename _Unused,
1762  typename _RehashPolicy, typename _Traits>
1763  struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1764  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
1765  {
1766  using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1767  _Hash, _RangeHash, _Unused,
1768  _RehashPolicy, _Traits>;
1769 
1770  bool
1771  _M_equal(const __hashtable&) const;
1772  };
1773 
1774  template<typename _Key, typename _Value, typename _Alloc,
1775  typename _ExtractKey, typename _Equal,
1776  typename _Hash, typename _RangeHash, typename _Unused,
1777  typename _RehashPolicy, typename _Traits>
1778  bool
1779  _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1780  _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>::
1781  _M_equal(const __hashtable& __other) const
1782  {
1783  using __node_type = typename __hashtable::__node_type;
1784  const __hashtable* __this = static_cast<const __hashtable*>(this);
1785  if (__this->size() != __other.size())
1786  return false;
1787 
1788  for (auto __itx = __this->begin(); __itx != __this->end();)
1789  {
1790  std::size_t __x_count = 1;
1791  auto __itx_end = __itx;
1792  for (++__itx_end; __itx_end != __this->end()
1793  && __this->key_eq()(_ExtractKey{}(*__itx),
1794  _ExtractKey{}(*__itx_end));
1795  ++__itx_end)
1796  ++__x_count;
1797 
1798  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
1799  auto __y_prev_n = __other._M_buckets[__ybkt];
1800  if (!__y_prev_n)
1801  return false;
1802 
1803  __node_type* __y_n = static_cast<__node_type*>(__y_prev_n->_M_nxt);
1804  for (;;)
1805  {
1806  if (__this->key_eq()(_ExtractKey{}(__y_n->_M_v()),
1807  _ExtractKey{}(*__itx)))
1808  break;
1809 
1810  auto __y_ref_n = __y_n;
1811  for (__y_n = __y_n->_M_next(); __y_n; __y_n = __y_n->_M_next())
1812  if (!__other._M_node_equals(*__y_ref_n, *__y_n))
1813  break;
1814 
1815  if (!__y_n || __other._M_bucket_index(*__y_n) != __ybkt)
1816  return false;
1817  }
1818 
1819  typename __hashtable::const_iterator __ity(__y_n);
1820  for (auto __ity_end = __ity; __ity_end != __other.end(); ++__ity_end)
1821  if (--__x_count == 0)
1822  break;
1823 
1824  if (__x_count != 0)
1825  return false;
1826 
1827  if (!std::is_permutation(__itx, __itx_end, __ity))
1828  return false;
1829 
1830  __itx = __itx_end;
1831  }
1832  return true;
1833  }
1834 
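For the multi containers, the code above walks each group of equivalent keys on the left-hand side, locates the matching group on the right-hand side, and compares the two ranges with std::is_permutation, so duplicate counts must agree while the order within a group does not matter. A minimal usage sketch (not part of this header):

#include <cassert>
#include <unordered_set>

int main()
{
  std::unordered_multiset<int> a{1, 1, 2, 3};
  std::unordered_multiset<int> b{3, 1, 2, 1};
  std::unordered_multiset<int> c{1, 2, 2, 3};
  assert(a == b);      // same elements with the same multiplicities
  assert(!(a == c));   // multiplicities differ: two 1s versus two 2s
}
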
1835  /**
1836  * This type deals with all allocation and keeps an allocator instance
1837  * through inheritance to benefit from EBO when possible.
1838  */
1839  template<typename _NodeAlloc>
1840  struct _Hashtable_alloc : private _Hashtable_ebo_helper<0, _NodeAlloc>
1841  {
1842  private:
1843  using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;
1844  public:
1845  using __node_type = typename _NodeAlloc::value_type;
1846  using __node_alloc_type = _NodeAlloc;
1847  // Use __gnu_cxx to benefit from _S_always_equal and al.
1848  using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;
1849 
1850  using __value_alloc_traits = typename __node_alloc_traits::template
1851  rebind_traits<typename __node_type::value_type>;
1852 
1853  using __node_ptr = __node_type*;
1854  using __node_base = _Hash_node_base;
1855  using __node_base_ptr = __node_base*;
1856  using __buckets_alloc_type =
1857  __alloc_rebind<__node_alloc_type, __node_base_ptr>;
1858  using __buckets_alloc_traits = std::allocator_traits<__buckets_alloc_type>;
1859  using __buckets_ptr = __node_base_ptr*;
1860 
1861  _Hashtable_alloc() = default;
1862  _Hashtable_alloc(const _Hashtable_alloc&) = default;
1863  _Hashtable_alloc(_Hashtable_alloc&&) = default;
1864 
1865  template<typename _Alloc>
1866  _Hashtable_alloc(_Alloc&& __a)
1867  : __ebo_node_alloc(std::forward<_Alloc>(__a))
1868  { }
1869 
1870  __node_alloc_type&
1871  _M_node_allocator()
1872  { return __ebo_node_alloc::_M_get(); }
1873 
1874  const __node_alloc_type&
1875  _M_node_allocator() const
1876  { return __ebo_node_alloc::_M_cget(); }
1877 
1878  // Allocate a node and construct an element within it.
1879  template<typename... _Args>
1880  __node_ptr
1881  _M_allocate_node(_Args&&... __args);
1882 
1883  // Destroy the element within a node and deallocate the node.
1884  void
1885  _M_deallocate_node(__node_ptr __n);
1886 
1887  // Deallocate a node.
1888  void
1889  _M_deallocate_node_ptr(__node_ptr __n);
1890 
1891  // Deallocate the linked list of nodes pointed to by __n.
1892  // The elements within the nodes are destroyed.
1893  void
1894  _M_deallocate_nodes(__node_ptr __n);
1895 
1896  __buckets_ptr
1897  _M_allocate_buckets(std::size_t __bkt_count);
1898 
1899  void
1900  _M_deallocate_buckets(__buckets_ptr, std::size_t __bkt_count);
1901  };
1902 
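_Hashtable_alloc stores the node allocator by privately inheriting the EBO helper, so a stateless allocator contributes no storage to the hashtable. A minimal sketch of that idea under hypothetical names (alloc_holder is not the library's _Hashtable_ebo_helper):

#include <memory>

template<typename Alloc>
struct alloc_holder : private Alloc   // empty base: no storage when Alloc is stateless
{
  Alloc&       get()       noexcept { return *this; }
  const Alloc& get() const noexcept { return *this; }
};

static_assert(sizeof(alloc_holder<std::allocator<int>>) == 1,
              "a stateless allocator adds no storage to its holder");
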
1903  // Definitions of class template _Hashtable_alloc's out-of-line member
1904  // functions.
1905  template<typename _NodeAlloc>
1906  template<typename... _Args>
1907  auto
1908  _Hashtable_alloc<_NodeAlloc>::_M_allocate_node(_Args&&... __args)
1909  -> __node_ptr
1910  {
1911  auto __nptr = __node_alloc_traits::allocate(_M_node_allocator(), 1);
1912  __node_ptr __n = std::__to_address(__nptr);
1913  __try
1914  {
1915  ::new ((void*)__n) __node_type;
1916  __node_alloc_traits::construct(_M_node_allocator(),
1917  __n->_M_valptr(),
1918  std::forward<_Args>(__args)...);
1919  return __n;
1920  }
1921  __catch(...)
1922  {
1923  __node_alloc_traits::deallocate(_M_node_allocator(), __nptr, 1);
1924  __throw_exception_again;
1925  }
1926  }
1927 
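The __try/__catch block above implements the usual allocate-then-construct rollback: a node is obtained from the allocator first, and if constructing the element inside it throws, the raw memory is handed back before the exception is re-thrown. A minimal sketch of the same pattern using the standard allocator interface (allocate_and_construct is a hypothetical helper, not part of this header):

#include <memory>
#include <utility>

template<typename Alloc, typename... Args>
typename std::allocator_traits<Alloc>::pointer
allocate_and_construct(Alloc& alloc, Args&&... args)
{
  using traits = std::allocator_traits<Alloc>;
  auto p = traits::allocate(alloc, 1);
  try
    {
      traits::construct(alloc, std::addressof(*p), std::forward<Args>(args)...);
      return p;
    }
  catch (...)
    {
      traits::deallocate(alloc, p, 1);   // roll back before rethrowing
      throw;
    }
}
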
1928  template<typename _NodeAlloc>
1929  void
1930  _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node(__node_ptr __n)
1931  {
1932  __node_alloc_traits::destroy(_M_node_allocator(), __n->_M_valptr());
1933  _M_deallocate_node_ptr(__n);
1934  }
1935 
1936  template<typename _NodeAlloc>
1937  void
1938  _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
1939  {
1940  typedef typename __node_alloc_traits::pointer _Ptr;
1941  auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
1942  __n->~__node_type();
1943  __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
1944  }
1945 
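Note the std::pointer_traits step above: nodes are handled through raw pointers internally, so the raw address must be converted back to the allocator's (possibly fancy) pointer type before it can be passed to deallocate. A minimal sketch of that conversion under a hypothetical name (destroy_and_deallocate is not part of this header):

#include <memory>

template<typename Alloc>
void
destroy_and_deallocate(Alloc& alloc,
                       typename std::allocator_traits<Alloc>::value_type* raw)
{
  using traits  = std::allocator_traits<Alloc>;
  using pointer = typename traits::pointer;
  pointer p = std::pointer_traits<pointer>::pointer_to(*raw); // raw -> allocator pointer
  traits::destroy(alloc, raw);
  traits::deallocate(alloc, p, 1);
}
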
1946  template<typename _NodeAlloc>
1947  void
1948  _Hashtable_alloc<_NodeAlloc>::_M_deallocate_nodes(__node_ptr __n)
1949  {
1950  while (__n)
1951  {
1952  __node_ptr __tmp = __n;
1953  __n = __n->_M_next();
1954  _M_deallocate_node(__tmp);
1955  }
1956  }
1957 
1958  template<typename _NodeAlloc>
1959  auto
1960  _Hashtable_alloc<_NodeAlloc>::_M_allocate_buckets(std::size_t __bkt_count)
1961  -> __buckets_ptr
1962  {
1963  __buckets_alloc_type __alloc(_M_node_allocator());
1964 
1965  auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
1966  __buckets_ptr __p = std::__to_address(__ptr);
1967  __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
1968  return __p;
1969  }
1970 
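_M_allocate_buckets obtains the bucket array from a copy of the node allocator rebound to __node_base_ptr and zero-fills it, so every bucket starts out empty (a null node-base pointer). A minimal sketch of the same rebind-and-zero pattern, assuming an ordinary raw-pointer allocator (allocate_zeroed_buckets is a hypothetical helper):

#include <cstddef>
#include <cstring>
#include <memory>

template<typename NodeAlloc>
void**
allocate_zeroed_buckets(NodeAlloc& node_alloc, std::size_t n)
{
  // Rebind the node allocator so it hands out an array of bucket pointers.
  using bucket_alloc_t =
    typename std::allocator_traits<NodeAlloc>::template rebind_alloc<void*>;
  bucket_alloc_t bucket_alloc(node_alloc);
  void** buckets = std::allocator_traits<bucket_alloc_t>::allocate(bucket_alloc, n);
  std::memset(buckets, 0, n * sizeof(void*));   // every bucket starts empty
  return buckets;
}
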
1971  template<typename _NodeAlloc>
1972  void
1973  _Hashtable_alloc<_NodeAlloc>::
1974  _M_deallocate_buckets(__buckets_ptr __bkts,
1975  std::size_t __bkt_count)
1976  {
1977  typedef typename __buckets_alloc_traits::pointer _Ptr;
1978  auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
1979  __buckets_alloc_type __alloc(_M_node_allocator());
1980  __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
1981  }
1982 
1983  ///@} hashtable-detail
1984 } // namespace __detail
1985 _GLIBCXX_END_NAMESPACE_VERSION
1986 } // namespace std
1987 
1988 #endif // _HASHTABLE_POLICY_H