1// Internal policy header for unordered_set and unordered_map -*- C++ -*-
2
3// Copyright (C) 2010-2021 Free Software Foundation, Inc.
4//
5// This file is part of the GNU ISO C++ Library. This library is free
6// software; you can redistribute it and/or modify it under the
7// terms of the GNU General Public License as published by the
8// Free Software Foundation; either version 3, or (at your option)
9// any later version.
10
11// This library is distributed in the hope that it will be useful,
12// but WITHOUT ANY WARRANTY; without even the implied warranty of
13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14// GNU General Public License for more details.
15
16// Under Section 7 of GPL version 3, you are granted additional
17// permissions described in the GCC Runtime Library Exception, version
18// 3.1, as published by the Free Software Foundation.
19
20// You should have received a copy of the GNU General Public License and
21// a copy of the GCC Runtime Library Exception along with this program;
22// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23// <http://www.gnu.org/licenses/>.
24
25/** @file bits/hashtable_policy.h
26 * This is an internal header file, included by other library headers.
27 * Do not attempt to use it directly.
28 * @headername{unordered_map,unordered_set}
29 */
30
31#ifndef _HASHTABLE_POLICY_H
32#define _HASHTABLE_POLICY_H 1
33
34#include <tuple> // for std::tuple, std::forward_as_tuple
35#include <bits/stl_algobase.h> // for std::min, std::is_permutation.
36#include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
37
38namespace std _GLIBCXX_VISIBILITY(default)
39{
40_GLIBCXX_BEGIN_NAMESPACE_VERSION
41
42 template<typename _Key, typename _Value, typename _Alloc,
43 typename _ExtractKey, typename _Equal,
44 typename _Hash, typename _RangeHash, typename _Unused,
45 typename _RehashPolicy, typename _Traits>
46 class _Hashtable;
47
48namespace __detail
49{
50 /**
51 * @defgroup hashtable-detail Base and Implementation Classes
52 * @ingroup unordered_associative_containers
53 * @{
54 */
55 template<typename _Key, typename _Value, typename _ExtractKey,
56 typename _Equal, typename _Hash, typename _RangeHash,
57 typename _Unused, typename _Traits>
58 struct _Hashtable_base;
59
60 // Helper function: return distance(first, last) for forward
61 // iterators, or 0/1 for input iterators.
62 template<class _Iterator>
63 inline typename std::iterator_traits<_Iterator>::difference_type
64 __distance_fw(_Iterator __first, _Iterator __last,
65 std::input_iterator_tag)
66 { return __first != __last ? 1 : 0; }
67
68 template<class _Iterator>
69 inline typename std::iterator_traits<_Iterator>::difference_type
70 __distance_fw(_Iterator __first, _Iterator __last,
71 std::forward_iterator_tag)
72 { return std::distance(__first, __last); }
73
74 template<class _Iterator>
75 inline typename std::iterator_traits<_Iterator>::difference_type
76 __distance_fw(_Iterator __first, _Iterator __last)
77 { return __distance_fw(__first, __last,
78 std::__iterator_category(__first)); }
79
80 struct _Identity
81 {
82 template<typename _Tp>
83 _Tp&&
84 operator()(_Tp&& __x) const noexcept
85 { return std::forward<_Tp>(__x); }
86 };
87
88 struct _Select1st
89 {
90 template<typename _Tp>
91 auto
92 operator()(_Tp&& __x) const noexcept
93 -> decltype(std::get<0>(std::forward<_Tp>(__x)))
94 { return std::get<0>(std::forward<_Tp>(__x)); }
95 };
96
97 template<typename _NodeAlloc>
98 struct _Hashtable_alloc;
99
  // Node "generator" used by assignment and rehash paths: recycles nodes
  // from an existing singly-linked list (the pool), falling back to fresh
  // allocation once the pool is exhausted.  Nodes still unused when the
  // generator is destroyed are deallocated.
  template<typename _NodeAlloc>
    struct _ReuseOrAllocNode
    {
    private:
      using __node_alloc_type = _NodeAlloc;
      using __hashtable_alloc = _Hashtable_alloc<__node_alloc_type>;
      using __node_alloc_traits =
	typename __hashtable_alloc::__node_alloc_traits;
      using __node_type = typename __hashtable_alloc::__node_type;

    public:
      // __nodes: head of the reusable node list (may be null).
      // __h: allocator wrapper used to destroy/construct/deallocate.
      _ReuseOrAllocNode(__node_type* __nodes, __hashtable_alloc& __h)
      : _M_nodes(__nodes), _M_h(__h) { }
      // Not copyable: two copies would double-free the pool.
      _ReuseOrAllocNode(const _ReuseOrAllocNode&) = delete;

      // Deallocate whatever is left of the pool.
      ~_ReuseOrAllocNode()
      { _M_h._M_deallocate_nodes(_M_nodes); }

      // Return a node whose value is constructed from __arg, reusing a
      // pooled node when one is available.
      template<typename _Arg>
	__node_type*
	operator()(_Arg&& __arg) const
	{
	  if (_M_nodes)
	    {
	      // Unlink the first pooled node before touching its value.
	      __node_type* __node = _M_nodes;
	      _M_nodes = _M_nodes->_M_next();
	      __node->_M_nxt = nullptr;
	      auto& __a = _M_h._M_node_allocator();
	      // Destroy the stale value, then construct the new one in place.
	      __node_alloc_traits::destroy(__a, __node->_M_valptr());
	      __try
		{
		  __node_alloc_traits::construct(__a, __node->_M_valptr(),
						 std::forward<_Arg>(__arg));
		}
	      __catch(...)
		{
		  // The node holds no value anymore; return its memory
		  // instead of putting it back in the pool.
		  _M_h._M_deallocate_node_ptr(__node);
		  __throw_exception_again;
		}
	      return __node;
	    }
	  return _M_h._M_allocate_node(std::forward<_Arg>(__arg));
	}

    private:
      // mutable: operator() is const but consumes the pool.
      mutable __node_type* _M_nodes;
      __hashtable_alloc& _M_h;
    };
150
151 // Functor similar to the previous one but without any pool of nodes to
152 // recycle.
153 template<typename _NodeAlloc>
154 struct _AllocNode
155 {
156 private:
157 using __hashtable_alloc = _Hashtable_alloc<_NodeAlloc>;
158 using __node_type = typename __hashtable_alloc::__node_type;
159
160 public:
161 _AllocNode(__hashtable_alloc& __h)
162 : _M_h(__h) { }
163
164 template<typename _Arg>
165 __node_type*
166 operator()(_Arg&& __arg) const
167 { return _M_h._M_allocate_node(std::forward<_Arg>(__arg)); }
168
169 private:
170 __hashtable_alloc& _M_h;
171 };
172
173 // Auxiliary types used for all instantiations of _Hashtable nodes
174 // and iterators.
175
176 /**
177 * struct _Hashtable_traits
178 *
179 * Important traits for hash tables.
180 *
181 * @tparam _Cache_hash_code Boolean value. True if the value of
182 * the hash function is stored along with the value. This is a
183 * time-space tradeoff. Storing it may improve lookup speed by
184 * reducing the number of times we need to call the _Hash or _Equal
185 * functors.
186 *
187 * @tparam _Constant_iterators Boolean value. True if iterator and
188 * const_iterator are both constant iterator types. This is true
189 * for unordered_set and unordered_multiset, false for
190 * unordered_map and unordered_multimap.
191 *
192 * @tparam _Unique_keys Boolean value. True if the return value
193 * of _Hashtable::count(k) is always at most one, false if it may
194 * be an arbitrary number. This is true for unordered_set and
195 * unordered_map, false for unordered_multiset and
196 * unordered_multimap.
197 */
  template<bool _Cache_hash_code, bool _Constant_iterators, bool _Unique_keys>
    struct _Hashtable_traits
    {
      // True iff each node stores its hash code alongside the value.
      using __hash_cached = __bool_constant<_Cache_hash_code>;
      // True iff iterator and const_iterator are both constant iterators
      // (set/multiset); false for map/multimap.
      using __constant_iterators = __bool_constant<_Constant_iterators>;
      // True iff count(k) is at most one (set/map vs multiset/multimap).
      using __unique_keys = __bool_constant<_Unique_keys>;
    };
205
206 /**
207 * struct _Hash_node_base
208 *
209 * Nodes, used to wrap elements stored in the hash table. A policy
210 * template parameter of class template _Hashtable controls whether
211 * nodes also store a hash code. In some cases (e.g. strings) this
212 * may be a performance win.
213 */
214 struct _Hash_node_base
215 {
216 _Hash_node_base* _M_nxt;
217
218 _Hash_node_base() noexcept : _M_nxt() { }
219
220 _Hash_node_base(_Hash_node_base* __next) noexcept : _M_nxt(__next) { }
221 };
222
223 /**
224 * struct _Hash_node_value_base
225 *
226 * Node type with the value to store.
227 */
228 template<typename _Value>
229 struct _Hash_node_value_base
230 {
231 typedef _Value value_type;
232
233 __gnu_cxx::__aligned_buffer<_Value> _M_storage;
234
235 _Value*
236 _M_valptr() noexcept
237 { return _M_storage._M_ptr(); }
238
239 const _Value*
240 _M_valptr() const noexcept
241 { return _M_storage._M_ptr(); }
242
243 _Value&
244 _M_v() noexcept
245 { return *_M_valptr(); }
246
247 const _Value&
248 _M_v() const noexcept
249 { return *_M_valptr(); }
250 };
251
252 /**
253 * Primary template struct _Hash_node_code_cache.
254 */
  /**
   *  Primary template struct _Hash_node_code_cache.
   *  Empty when the hash code is not cached, so it adds no storage to
   *  the node (empty-base optimization in _Hash_node_value).
   */
  template<bool _Cache_hash_code>
    struct _Hash_node_code_cache
    { };

  /**
   *  Specialization for node with cache, struct _Hash_node_code_cache.
   *  Stores the element's hash code so rehashing and lookups can avoid
   *  re-invoking the hash functor.
   */
  template<>
    struct _Hash_node_code_cache<true>
    { std::size_t _M_hash_code; };
265
  // Node payload: the stored value plus, when _Cache_hash_code is true,
  // the cached hash code.
  template<typename _Value, bool _Cache_hash_code>
    struct _Hash_node_value
    : _Hash_node_value_base<_Value>
    , _Hash_node_code_cache<_Cache_hash_code>
    { };
271
  /**
   *  Primary template struct _Hash_node.
   *
   *  Complete hashtable node: the intrusive list hook (_Hash_node_base)
   *  plus the value and optional cached hash code (_Hash_node_value).
   */
  template<typename _Value, bool _Cache_hash_code>
    struct _Hash_node
    : _Hash_node_base
    , _Hash_node_value<_Value, _Cache_hash_code>
    {
      // Successor node, downcast from the base-class hook pointer.
      _Hash_node*
      _M_next() const noexcept
      { return static_cast<_Hash_node*>(this->_M_nxt); }
    };
284
  /// Base class for node iterators.
  template<typename _Value, bool _Cache_hash_code>
    struct _Node_iterator_base
    {
      using __node_type = _Hash_node<_Value, _Cache_hash_code>;

      // Current node; null for the past-the-end iterator.
      __node_type* _M_cur;

      _Node_iterator_base() : _M_cur(nullptr) { }
      _Node_iterator_base(__node_type* __p) noexcept
      : _M_cur(__p) { }

      // Advance to the next node in iteration order.  Precondition:
      // _M_cur is non-null (not past-the-end).
      void
      _M_incr() noexcept
      { _M_cur = _M_cur->_M_next(); }

      friend bool
      operator==(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
      noexcept
      { return __x._M_cur == __y._M_cur; }

#if __cpp_impl_three_way_comparison < 201907L
      // In C++20 operator!= is synthesized from operator==.
      friend bool
      operator!=(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
      noexcept
      { return __x._M_cur != __y._M_cur; }
#endif
    };
313
314 /// Node iterators, used to iterate through all the hashtable.
315 template<typename _Value, bool __constant_iterators, bool __cache>
316 struct _Node_iterator
317 : public _Node_iterator_base<_Value, __cache>
318 {
319 private:
320 using __base_type = _Node_iterator_base<_Value, __cache>;
321 using __node_type = typename __base_type::__node_type;
322
323 public:
324 typedef _Value value_type;
325 typedef std::ptrdiff_t difference_type;
326 typedef std::forward_iterator_tag iterator_category;
327
328 using pointer = typename std::conditional<__constant_iterators,
329 const value_type*, value_type*>::type;
330
331 using reference = typename std::conditional<__constant_iterators,
332 const value_type&, value_type&>::type;
333
334 _Node_iterator() = default;
335
336 explicit
337 _Node_iterator(__node_type* __p) noexcept
338 : __base_type(__p) { }
339
340 reference
341 operator*() const noexcept
342 { return this->_M_cur->_M_v(); }
343
344 pointer
345 operator->() const noexcept
346 { return this->_M_cur->_M_valptr(); }
347
348 _Node_iterator&
349 operator++() noexcept
350 {
351 this->_M_incr();
352 return *this;
353 }
354
355 _Node_iterator
356 operator++(int) noexcept
357 {
358 _Node_iterator __tmp(*this);
359 this->_M_incr();
360 return __tmp;
361 }
362 };
363
364 /// Node const_iterators, used to iterate through all the hashtable.
365 template<typename _Value, bool __constant_iterators, bool __cache>
366 struct _Node_const_iterator
367 : public _Node_iterator_base<_Value, __cache>
368 {
369 private:
370 using __base_type = _Node_iterator_base<_Value, __cache>;
371 using __node_type = typename __base_type::__node_type;
372
373 public:
374 typedef _Value value_type;
375 typedef std::ptrdiff_t difference_type;
376 typedef std::forward_iterator_tag iterator_category;
377
378 typedef const value_type* pointer;
379 typedef const value_type& reference;
380
381 _Node_const_iterator() = default;
382
383 explicit
384 _Node_const_iterator(__node_type* __p) noexcept
385 : __base_type(__p) { }
386
387 _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
388 __cache>& __x) noexcept
389 : __base_type(__x._M_cur) { }
390
391 reference
392 operator*() const noexcept
393 { return this->_M_cur->_M_v(); }
394
395 pointer
396 operator->() const noexcept
397 { return this->_M_cur->_M_valptr(); }
398
399 _Node_const_iterator&
400 operator++() noexcept
401 {
402 this->_M_incr();
403 return *this;
404 }
405
406 _Node_const_iterator
407 operator++(int) noexcept
408 {
409 _Node_const_iterator __tmp(*this);
410 this->_M_incr();
411 return __tmp;
412 }
413 };
414
415 // Many of class template _Hashtable's template parameters are policy
416 // classes. These are defaults for the policies.
417
418 /// Default range hashing function: use division to fold a large number
419 /// into the range [0, N).
420 struct _Mod_range_hashing
421 {
422 typedef std::size_t first_argument_type;
423 typedef std::size_t second_argument_type;
424 typedef std::size_t result_type;
425
426 result_type
427 operator()(first_argument_type __num,
428 second_argument_type __den) const noexcept
429 { return __num % __den; }
430 };
431
  /// Default ranged hash function H.  In principle it should be a
  /// function object composed from objects of type H1 and H2 such that
  /// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
  /// h1 and h2.  So instead we'll just use a tag to tell class template
  /// hashtable to do that composition.
  // Empty tag type: carries no state and is never invoked.
  struct _Default_ranged_hash { };
438
439 /// Default value for rehash policy. Bucket size is (usually) the
440 /// smallest prime that keeps the load factor small enough.
441 struct _Prime_rehash_policy
442 {
443 using __has_load_factor = true_type;
444
445 _Prime_rehash_policy(float __z = 1.0) noexcept
446 : _M_max_load_factor(__z), _M_next_resize(0) { }
447
448 float
449 max_load_factor() const noexcept
450 { return _M_max_load_factor; }
451
452 // Return a bucket size no smaller than n.
453 std::size_t
454 _M_next_bkt(std::size_t __n) const;
455
456 // Return a bucket count appropriate for n elements
457 std::size_t
458 _M_bkt_for_elements(std::size_t __n) const
459 { return __builtin_ceil(__n / (double)_M_max_load_factor); }
460
461 // __n_bkt is current bucket count, __n_elt is current element count,
462 // and __n_ins is number of elements to be inserted. Do we need to
463 // increase bucket count? If so, return make_pair(true, n), where n
464 // is the new bucket count. If not, return make_pair(false, 0).
465 std::pair<bool, std::size_t>
466 _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
467 std::size_t __n_ins) const;
468
469 typedef std::size_t _State;
470
471 _State
472 _M_state() const
473 { return _M_next_resize; }
474
475 void
476 _M_reset() noexcept
477 { _M_next_resize = 0; }
478
479 void
480 _M_reset(_State __state)
481 { _M_next_resize = __state; }
482
483 static const std::size_t _S_growth_factor = 2;
484
485 float _M_max_load_factor;
486 mutable std::size_t _M_next_resize;
487 };
488
489 /// Range hashing function assuming that second arg is a power of 2.
490 struct _Mask_range_hashing
491 {
492 typedef std::size_t first_argument_type;
493 typedef std::size_t second_argument_type;
494 typedef std::size_t result_type;
495
496 result_type
497 operator()(first_argument_type __num,
498 second_argument_type __den) const noexcept
499 { return __num & (__den - 1); }
500 };
501
502 /// Compute closest power of 2 not less than __n
503 inline std::size_t
504 __clp2(std::size_t __n) noexcept
505 {
506 using __gnu_cxx::__int_traits;
507 // Equivalent to return __n ? std::bit_ceil(__n) : 0;
508 if (__n < 2)
509 return __n;
510 const unsigned __lz = sizeof(size_t) > sizeof(long)
511 ? __builtin_clzll(__n - 1ull)
512 : __builtin_clzl(__n - 1ul);
513 // Doing two shifts avoids undefined behaviour when __lz == 0.
514 return (size_t(1) << (__int_traits<size_t>::__digits - __lz - 1)) << 1;
515 }
516
  /// Rehash policy providing power of 2 bucket numbers.  Avoids modulo
  /// operations.  Intended to be paired with _Mask_range_hashing, which
  /// relies on the bucket count being a power of 2.
  struct _Power2_rehash_policy
  {
    using __has_load_factor = true_type;

    // __z: maximum load factor (elements per bucket) before a rehash.
    _Power2_rehash_policy(float __z = 1.0) noexcept
    : _M_max_load_factor(__z), _M_next_resize(0) { }

    float
    max_load_factor() const noexcept
    { return _M_max_load_factor; }

    // Return a bucket size no smaller than n (as long as n is not above the
    // highest power of 2).
    std::size_t
    _M_next_bkt(std::size_t __n) noexcept
    {
      if (__n == 0)
	// Special case on container 1st initialization with 0 bucket count
	// hint. We keep _M_next_resize to 0 to make sure that next time we
	// want to add an element allocation will take place.
	return 1;

      const auto __max_width = std::min<size_t>(sizeof(size_t), 8);
      const auto __max_bkt = size_t(1) << (__max_width * __CHAR_BIT__ - 1);
      std::size_t __res = __clp2(__n);

      // __clp2 returns 0 when the true power of 2 would overflow size_t:
      // clamp to the largest representable bucket count.
      if (__res == 0)
	__res = __max_bkt;
      else if (__res == 1)
	// If __res is 1 we force it to 2 to make sure there will be an
	// allocation so that nothing need to be stored in the initial
	// single bucket
	__res = 2;

      if (__res == __max_bkt)
	// Set next resize to the max value so that we never try to rehash again
	// as we already reach the biggest possible bucket number.
	// Note that it might result in max_load_factor not being respected.
	_M_next_resize = size_t(-1);
      else
	_M_next_resize
	  = __builtin_floor(__res * (double)_M_max_load_factor);

      return __res;
    }

    // Return a bucket count appropriate for n elements
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const noexcept
    { return __builtin_ceil(__n / (double)_M_max_load_factor); }

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted.  Do we need to
    // increase bucket count?  If so, return make_pair(true, n), where n
    // is the new bucket count.  If not, return make_pair(false, 0).
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
		   std::size_t __n_ins) noexcept
    {
      if (__n_elt + __n_ins > _M_next_resize)
	{
	  // If _M_next_resize is 0 it means that we have nothing allocated so
	  // far and that we start inserting elements. In this case we start
	  // with an initial bucket size of 11.
	  double __min_bkts
	    = std::max<std::size_t>(__n_elt + __n_ins, _M_next_resize ? 0 : 11)
	    / (double)_M_max_load_factor;
	  if (__min_bkts >= __n_bkt)
	    return { true,
	      _M_next_bkt(std::max<std::size_t>(__builtin_floor(__min_bkts) + 1,
						__n_bkt * _S_growth_factor)) };

	  // Enough capacity after all: just refresh the cached threshold
	  // for the current bucket count.
	  _M_next_resize
	    = __builtin_floor(__n_bkt * (double)_M_max_load_factor);
	  return { false, 0 };
	}
      else
	return { false, 0 };
    }

    typedef std::size_t _State;

    // Opaque snapshot of the policy state, saved before an insertion so
    // it can be rolled back if the insertion throws.
    _State
    _M_state() const noexcept
    { return _M_next_resize; }

    void
    _M_reset() noexcept
    { _M_next_resize = 0; }

    // Restore a previously saved state.
    void
    _M_reset(_State __state) noexcept
    { _M_next_resize = __state; }

    static const std::size_t _S_growth_factor = 2;

    float _M_max_load_factor;

    // Element count that triggers the next rehash.
    std::size_t _M_next_resize;
  };
618
619 // Base classes for std::_Hashtable. We define these base classes
620 // because in some cases we want to do different things depending on
621 // the value of a policy class. In some cases the policy class
622 // affects which member functions and nested typedefs are defined;
623 // we handle that by specializing base class templates. Several of
624 // the base class templates need to access other members of class
625 // template _Hashtable, so we use a variant of the "Curiously
626 // Recurring Template Pattern" (CRTP) technique.
627
628 /**
629 * Primary class template _Map_base.
630 *
631 * If the hashtable has a value type of the form pair<T1, T2> and a
632 * key extraction policy (_ExtractKey) that returns the first part
633 * of the pair, the hashtable gets a mapped_type typedef. If it
634 * satisfies those criteria and also has unique keys, then it also
635 * gets an operator[].
636 */
  // Primary template: empty for value types that are not pair<T1, T2>
  // extracted with _Select1st (i.e. non-map containers).
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits,
	   bool _Unique_keys = _Traits::__unique_keys::value>
    struct _Map_base { };
643
  /// Partial specialization, __unique_keys set to false (multimap):
  /// provides mapped_type but no operator[]/at.
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    {
      using mapped_type = typename std::tuple_element<1, _Pair>::type;
    };
653
  /// Partial specialization, __unique_keys set to true (map): also
  /// provides operator[] and at, defined out of line below.
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
    {
    private:
      using __hashtable_base = _Hashtable_base<_Key, _Pair, _Select1st, _Equal,
					       _Hash, _RangeHash, _Unused,
					       _Traits>;

      using __hashtable = _Hashtable<_Key, _Pair, _Alloc, _Select1st, _Equal,
				     _Hash, _RangeHash,
				     _Unused, _RehashPolicy, _Traits>;

      using __hash_code = typename __hashtable_base::__hash_code;

    public:
      using key_type = typename __hashtable_base::key_type;
      using mapped_type = typename std::tuple_element<1, _Pair>::type;

      // Insert a value-initialized element if the key is absent.
      mapped_type&
      operator[](const key_type& __k);

      mapped_type&
      operator[](key_type&& __k);

      // _GLIBCXX_RESOLVE_LIB_DEFECTS
      // DR 761. unordered_map needs an at() member function.
      // Throws std::out_of_range if the key is absent.
      mapped_type&
      at(const key_type& __k);

      const mapped_type&
      at(const key_type& __k) const;
    };
690
  // operator[] for an lvalue key: return the existing mapped value, or
  // insert a value-initialized element copied-constructed from __k.
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    auto
    _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    operator[](const key_type& __k)
    -> mapped_type&
    {
      // CRTP: this base is always embedded in a _Hashtable.
      __hashtable* __h = static_cast<__hashtable*>(this);
      __hash_code __code = __h->_M_hash_code(__k);
      std::size_t __bkt = __h->_M_bucket_index(__code);
      if (auto __node = __h->_M_find_node(__bkt, __k, __code))
	return __node->_M_v().second;

      // Not found: build the node first (RAII, freed if insertion throws),
      // value-initializing the mapped part.
      typename __hashtable::_Scoped_node __node {
	__h,
	std::piecewise_construct,
	std::tuple<const key_type&>(__k),
	std::tuple<>()
      };
      auto __pos
	= __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
      // Insertion succeeded: release ownership from the scoped guard.
      __node._M_node = nullptr;
      return __pos->second;
    }
717
  // operator[] for an rvalue key: same as the lvalue overload, but the
  // key is moved into the new element when an insertion happens.
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    auto
    _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    operator[](key_type&& __k)
    -> mapped_type&
    {
      // CRTP: this base is always embedded in a _Hashtable.
      __hashtable* __h = static_cast<__hashtable*>(this);
      __hash_code __code = __h->_M_hash_code(__k);
      std::size_t __bkt = __h->_M_bucket_index(__code);
      if (auto __node = __h->_M_find_node(__bkt, __k, __code))
	return __node->_M_v().second;

      // Not found: build the node first (RAII, freed if insertion throws).
      typename __hashtable::_Scoped_node __node {
	__h,
	std::piecewise_construct,
	std::forward_as_tuple(std::move(__k)),
	std::tuple<>()
      };
      auto __pos
	= __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
      // Insertion succeeded: release ownership from the scoped guard.
      __node._M_node = nullptr;
      return __pos->second;
    }
744
  // at(): checked access, throws std::out_of_range for an absent key.
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    auto
    _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    at(const key_type& __k)
    -> mapped_type&
    {
      __hashtable* __h = static_cast<__hashtable*>(this);
      auto __ite = __h->find(__k);

      // A null node pointer means find() returned the end iterator.
      if (!__ite._M_cur)
	__throw_out_of_range(__N("_Map_base::at"));
      return __ite->second;
    }
761
  // const overload of at(): identical semantics on a const container.
  template<typename _Key, typename _Pair, typename _Alloc, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    auto
    _Map_base<_Key, _Pair, _Alloc, _Select1st, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
    at(const key_type& __k) const
    -> const mapped_type&
    {
      const __hashtable* __h = static_cast<const __hashtable*>(this);
      auto __ite = __h->find(__k);

      // A null node pointer means find() returned the end iterator.
      if (!__ite._M_cur)
	__throw_out_of_range(__N("_Map_base::at"));
      return __ite->second;
    }
778
  /**
   *  Primary class template _Insert_base.
   *
   *  Defines @c insert member functions appropriate to all _Hashtables.
   *  Uses CRTP: _M_conjure_hashtable downcasts to the derived _Hashtable.
   */
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Insert_base
    {
    protected:
      using __hashtable_base = _Hashtable_base<_Key, _Value, _ExtractKey,
					       _Equal, _Hash, _RangeHash,
					       _Unused, _Traits>;

      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash,
				     _Unused, _RehashPolicy, _Traits>;

      using __hash_cached = typename _Traits::__hash_cached;
      using __constant_iterators = typename _Traits::__constant_iterators;

      using __hashtable_alloc = _Hashtable_alloc<
	__alloc_rebind<_Alloc, _Hash_node<_Value,
					  __hash_cached::value>>>;

      using value_type = typename __hashtable_base::value_type;
      using size_type = typename __hashtable_base::size_type;

      using __unique_keys = typename _Traits::__unique_keys;
      using __node_alloc_type = typename __hashtable_alloc::__node_alloc_type;
      using __node_gen_type = _AllocNode<__node_alloc_type>;

      // CRTP downcast: this base is always a subobject of a _Hashtable.
      __hashtable&
      _M_conjure_hashtable()
      { return *(static_cast<__hashtable*>(this)); }

      // Range insertion, dispatched on __unique_keys (defined below).
      template<typename _InputIterator, typename _NodeGetter>
	void
	_M_insert_range(_InputIterator __first, _InputIterator __last,
			const _NodeGetter&, true_type __uks);

      template<typename _InputIterator, typename _NodeGetter>
	void
	_M_insert_range(_InputIterator __first, _InputIterator __last,
			const _NodeGetter&, false_type __uks);

    public:
      using iterator = _Node_iterator<_Value, __constant_iterators::value,
				      __hash_cached::value>;

      using const_iterator = _Node_const_iterator<_Value, __constant_iterators::value,
						  __hash_cached::value>;

      // pair<iterator, bool> for unique keys, plain iterator otherwise.
      using __ireturn_type = typename std::conditional<__unique_keys::value,
						       std::pair<iterator, bool>,
						       iterator>::type;

      __ireturn_type
      insert(const value_type& __v)
      {
	__hashtable& __h = _M_conjure_hashtable();
	__node_gen_type __node_gen(__h);
	return __h._M_insert(__v, __node_gen, __unique_keys{});
      }

      // Hinted insert; the hint is forwarded to _M_insert.
      iterator
      insert(const_iterator __hint, const value_type& __v)
      {
	__hashtable& __h = _M_conjure_hashtable();
	__node_gen_type __node_gen(__h);
	return __h._M_insert(__hint, __v, __node_gen, __unique_keys{});
      }

      // try_emplace: never constructs the mapped value (or a node) when
      // the key is already present.  The hint iterator is ignored.
      template<typename _KType, typename... _Args>
	std::pair<iterator, bool>
	try_emplace(const_iterator, _KType&& __k, _Args&&... __args)
	{
	  __hashtable& __h = _M_conjure_hashtable();
	  auto __code = __h._M_hash_code(__k);
	  std::size_t __bkt = __h._M_bucket_index(__code);
	  if (auto __node = __h._M_find_node(__bkt, __k, __code))
	    return { iterator(__node), false };

	  // Build the node first (RAII, freed if insertion throws).
	  typename __hashtable::_Scoped_node __node {
	    &__h,
	    std::piecewise_construct,
	    std::forward_as_tuple(std::forward<_KType>(__k)),
	    std::forward_as_tuple(std::forward<_Args>(__args)...)
	  };
	  auto __it
	    = __h._M_insert_unique_node(__bkt, __code, __node._M_node);
	  // Insertion succeeded: release ownership from the scoped guard.
	  __node._M_node = nullptr;
	  return { __it, true };
	}

      void
      insert(initializer_list<value_type> __l)
      { this->insert(__l.begin(), __l.end()); }

      template<typename _InputIterator>
	void
	insert(_InputIterator __first, _InputIterator __last)
	{
	  __hashtable& __h = _M_conjure_hashtable();
	  __node_gen_type __node_gen(__h);
	  return _M_insert_range(__first, __last, __node_gen, __unique_keys{});
	}
    };
889
890 template<typename _Key, typename _Value, typename _Alloc,
891 typename _ExtractKey, typename _Equal,
892 typename _Hash, typename _RangeHash, typename _Unused,
893 typename _RehashPolicy, typename _Traits>
894 template<typename _InputIterator, typename _NodeGetter>
895 void
896 _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
897 _Hash, _RangeHash, _Unused,
898 _RehashPolicy, _Traits>::
899 _M_insert_range(_InputIterator __first, _InputIterator __last,
900 const _NodeGetter& __node_gen, true_type __uks)
901 {
902 __hashtable& __h = _M_conjure_hashtable();
903 for (; __first != __last; ++__first)
904 __h._M_insert(*__first, __node_gen, __uks);
905 }
906
  // Non-unique keys: every element will be inserted, so the needed
  // bucket count can be computed and reserved before the loop.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    template<typename _InputIterator, typename _NodeGetter>
      void
      _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		   _Hash, _RangeHash, _Unused,
		   _RehashPolicy, _Traits>::
      _M_insert_range(_InputIterator __first, _InputIterator __last,
		      const _NodeGetter& __node_gen, false_type __uks)
      {
	using __rehash_type = typename __hashtable::__rehash_type;
	using __rehash_state = typename __hashtable::__rehash_state;
	using pair_type = std::pair<bool, std::size_t>;

	// For input iterators this is only 0 or 1; a real count needs
	// forward iterators.
	size_type __n_elt = __detail::__distance_fw(__first, __last);
	if (__n_elt == 0)
	  return;

	__hashtable& __h = _M_conjure_hashtable();
	__rehash_type& __rehash = __h._M_rehash_policy;
	// Saved so _M_rehash can roll the policy back if it throws.
	const __rehash_state& __saved_state = __rehash._M_state();
	pair_type __do_rehash = __rehash._M_need_rehash(__h._M_bucket_count,
							__h._M_element_count,
							__n_elt);

	if (__do_rehash.first)
	  __h._M_rehash(__do_rehash.second, __saved_state);

	for (; __first != __last; ++__first)
	  __h._M_insert(*__first, __node_gen, __uks);
      }
940
  /**
   *  Primary class template _Insert.
   *
   *  Defines @c insert member functions that depend on _Hashtable policies,
   *  via partial specializations.  Dispatches on whether iterator and
   *  const_iterator are both constant iterator types (set vs map).
   */
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits,
	   bool _Constant_iterators = _Traits::__constant_iterators::value>
    struct _Insert;
953
954 /// Specialization.
955 template<typename _Key, typename _Value, typename _Alloc,
956 typename _ExtractKey, typename _Equal,
957 typename _Hash, typename _RangeHash, typename _Unused,
958 typename _RehashPolicy, typename _Traits>
959 struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
960 _Hash, _RangeHash, _Unused,
961 _RehashPolicy, _Traits, true>
962 : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
963 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
964 {
965 using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
966 _Equal, _Hash, _RangeHash, _Unused,
967 _RehashPolicy, _Traits>;
968
969 using value_type = typename __base_type::value_type;
970 using iterator = typename __base_type::iterator;
971 using const_iterator = typename __base_type::const_iterator;
972 using __ireturn_type = typename __base_type::__ireturn_type;
973
974 using __unique_keys = typename __base_type::__unique_keys;
975 using __hashtable = typename __base_type::__hashtable;
976 using __node_gen_type = typename __base_type::__node_gen_type;
977
978 using __base_type::insert;
979
980 __ireturn_type
981 insert(value_type&& __v)
982 {
983 __hashtable& __h = this->_M_conjure_hashtable();
984 __node_gen_type __node_gen(__h);
985 return __h._M_insert(std::move(__v), __node_gen, __unique_keys{});
986 }
987
988 iterator
989 insert(const_iterator __hint, value_type&& __v)
990 {
991 __hashtable& __h = this->_M_conjure_hashtable();
992 __node_gen_type __node_gen(__h);
993 return __h._M_insert(__hint, std::move(__v), __node_gen,
994 __unique_keys{});
995 }
996 };
997
998 /// Specialization.
999 template<typename _Key, typename _Value, typename _Alloc,
1000 typename _ExtractKey, typename _Equal,
1001 typename _Hash, typename _RangeHash, typename _Unused,
1002 typename _RehashPolicy, typename _Traits>
1003 struct _Insert<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1004 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
1005 : public _Insert_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1006 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits>
1007 {
1008 using __base_type = _Insert_base<_Key, _Value, _Alloc, _ExtractKey,
1009 _Equal, _Hash, _RangeHash, _Unused,
1010 _RehashPolicy, _Traits>;
1011 using value_type = typename __base_type::value_type;
1012 using iterator = typename __base_type::iterator;
1013 using const_iterator = typename __base_type::const_iterator;
1014
1015 using __unique_keys = typename __base_type::__unique_keys;
1016 using __hashtable = typename __base_type::__hashtable;
1017 using __ireturn_type = typename __base_type::__ireturn_type;
1018
1019 using __base_type::insert;
1020
1021 template<typename _Pair>
1022 using __is_cons = std::is_constructible<value_type, _Pair&&>;
1023
1024 template<typename _Pair>
1025 using _IFcons = std::enable_if<__is_cons<_Pair>::value>;
1026
1027 template<typename _Pair>
1028 using _IFconsp = typename _IFcons<_Pair>::type;
1029
1030 template<typename _Pair, typename = _IFconsp<_Pair>>
1031 __ireturn_type
1032 insert(_Pair&& __v)
1033 {
1034 __hashtable& __h = this->_M_conjure_hashtable();
1035 return __h._M_emplace(__unique_keys{}, std::forward<_Pair>(__v));
1036 }
1037
1038 template<typename _Pair, typename = _IFconsp<_Pair>>
1039 iterator
1040 insert(const_iterator __hint, _Pair&& __v)
1041 {
1042 __hashtable& __h = this->_M_conjure_hashtable();
1043 return __h._M_emplace(__hint, __unique_keys{},
1044 std::forward<_Pair>(__v));
1045 }
1046 };
1047
  // Trait alias: names the nested __has_load_factor type of a rehash
  // policy, if any.  Used below with __detected_or_t to select the
  // appropriate _Rehash_base specialization.
  template<typename _Policy>
    using __has_load_factor = typename _Policy::__has_load_factor;
1050
1051 /**
1052 * Primary class template _Rehash_base.
1053 *
1054 * Give hashtable the max_load_factor functions and reserve iff the
1055 * rehash policy supports it.
1056 */
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits,
	   // false_type unless _RehashPolicy declares __has_load_factor.
	   typename =
	     __detected_or_t<false_type, __has_load_factor, _RehashPolicy>>
    struct _Rehash_base;
1064
  /// Specialization when the rehash policy doesn't provide load factor
  /// management: no max_load_factor or reserve members are defined.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
			_Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
			false_type /* Has load factor */>
    {
    };
1075
1076 /// Specialization when rehash policy provide load factor management.
1077 template<typename _Key, typename _Value, typename _Alloc,
1078 typename _ExtractKey, typename _Equal,
1079 typename _Hash, typename _RangeHash, typename _Unused,
1080 typename _RehashPolicy, typename _Traits>
1081 struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1082 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
1083 true_type /* Has load factor */>
1084 {
1085 using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey,
1086 _Equal, _Hash, _RangeHash, _Unused,
1087 _RehashPolicy, _Traits>;
1088
1089 float
1090 max_load_factor() const noexcept
1091 {
1092 const __hashtable* __this = static_cast<const __hashtable*>(this);
1093 return __this->__rehash_policy().max_load_factor();
1094 }
1095
1096 void
1097 max_load_factor(float __z)
1098 {
1099 __hashtable* __this = static_cast<__hashtable*>(this);
1100 __this->__rehash_policy(_RehashPolicy(__z));
1101 }
1102
1103 void
1104 reserve(std::size_t __n)
1105 {
1106 __hashtable* __this = static_cast<__hashtable*>(this);
1107 __this->rehash(__this->__rehash_policy()._M_bkt_for_elements(__n));
1108 }
1109 };
1110
1111 /**
1112 * Primary class template _Hashtable_ebo_helper.
1113 *
   * Helper class using EBO when it is not forbidden (the type is not
   * final) and when it is worth it (the type is empty).
1116 */
  template<int _Nm, typename _Tp,
	   // _Nm is otherwise unused; it only makes each helper a distinct
	   // type so several helpers can be base classes of one class.
	   bool __use_ebo = !__is_final(_Tp) && __is_empty(_Tp)>
    struct _Hashtable_ebo_helper;
1120
1121 /// Specialization using EBO.
1122 template<int _Nm, typename _Tp>
1123 struct _Hashtable_ebo_helper<_Nm, _Tp, true>
1124 : private _Tp
1125 {
1126 _Hashtable_ebo_helper() noexcept(noexcept(_Tp())) : _Tp() { }
1127
1128 template<typename _OtherTp>
1129 _Hashtable_ebo_helper(_OtherTp&& __tp)
1130 : _Tp(std::forward<_OtherTp>(__tp))
1131 { }
1132
1133 const _Tp& _M_cget() const { return static_cast<const _Tp&>(*this); }
1134 _Tp& _M_get() { return static_cast<_Tp&>(*this); }
1135 };
1136
1137 /// Specialization not using EBO.
1138 template<int _Nm, typename _Tp>
1139 struct _Hashtable_ebo_helper<_Nm, _Tp, false>
1140 {
1141 _Hashtable_ebo_helper() = default;
1142
1143 template<typename _OtherTp>
1144 _Hashtable_ebo_helper(_OtherTp&& __tp)
1145 : _M_tp(std::forward<_OtherTp>(__tp))
1146 { }
1147
1148 const _Tp& _M_cget() const { return _M_tp; }
1149 _Tp& _M_get() { return _M_tp; }
1150
1151 private:
1152 _Tp _M_tp{};
1153 };
1154
1155 /**
1156 * Primary class template _Local_iterator_base.
1157 *
1158 * Base class for local iterators, used to iterate within a bucket
1159 * but not between buckets.
1160 */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   // Partially specialized below on whether nodes cache their
	   // hash code.
	   bool __cache_hash_code>
    struct _Local_iterator_base;
1165
1166 /**
1167 * Primary class template _Hash_code_base.
1168 *
1169 * Encapsulates two policy issues that aren't quite orthogonal.
1170 * (1) the difference between using a ranged hash function and using
1171 * the combination of a hash function and a range-hashing function.
1172 * In the former case we don't have such things as hash codes, so
1173 * we have a dummy type as placeholder.
1174 * (2) Whether or not we cache hash codes. Caching hash codes is
1175 * meaningless if we have a ranged hash function.
1176 *
1177 * We also put the key extraction objects here, for convenience.
1178 * Each specialization derives from one or more of the template
1179 * parameters to benefit from Ebo. This is important as this type
1180 * is inherited in some cases by the _Local_iterator_base type used
1181 * to implement local_iterator and const_local_iterator. As with
1182 * any iterator type we prefer to make it as small as possible.
1183 */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __cache_hash_code>
    struct _Hash_code_base
    : private _Hashtable_ebo_helper<1, _Hash>
    {
    private:
      using __ebo_hash = _Hashtable_ebo_helper<1, _Hash>;

      // Gives the local iterator implementation access to _M_bucket_index().
      friend struct _Local_iterator_base<_Key, _Value, _ExtractKey,
					 _Hash, _RangeHash, _Unused, false>;

    public:
      typedef _Hash hasher;

      // Return a copy of the stored hash function object.
      hasher
      hash_function() const
      { return _M_hash(); }

    protected:
      typedef std::size_t __hash_code;

      // We need the default constructor for the local iterators and _Hashtable
      // default constructor.
      _Hash_code_base() = default;

      _Hash_code_base(const _Hash& __hash) : __ebo_hash(__hash) { }

      // Hash a key of the container's key type.
      __hash_code
      _M_hash_code(const _Key& __k) const
      {
	static_assert(__is_invocable<const _Hash&, const _Key&>{},
	    "hash function must be invocable with an argument of key type");
	return _M_hash()(__k);
      }

      // Hash a key of a different type _Kt (heterogeneous lookup with a
      // transparent hash function).
      template<typename _Kt>
	__hash_code
	_M_hash_code_tr(const _Kt& __k) const
	{
	  static_assert(__is_invocable<const _Hash&, const _Kt&>{},
	      "hash function must be invocable with an argument of key type");
	  return _M_hash()(__k);
	}

      // Map an already-computed hash code to a bucket index.
      std::size_t
      _M_bucket_index(__hash_code __c, std::size_t __bkt_count) const
      { return _RangeHash{}(__c, __bkt_count); }

      // Bucket index of a node without a cached hash code: the code must
      // be recomputed from the node's key, so the hash function's
      // noexcept-ness participates in this function's noexcept.
      std::size_t
      _M_bucket_index(const _Hash_node_value<_Value, false>& __n,
		      std::size_t __bkt_count) const
	noexcept( noexcept(declval<const _Hash&>()(declval<const _Key&>()))
		  && noexcept(declval<const _RangeHash&>()((__hash_code)0,
							   (std::size_t)0)) )
      {
	return _RangeHash{}(_M_hash_code(_ExtractKey{}(__n._M_v())),
			    __bkt_count);
      }

      // Bucket index of a node with a cached hash code: no re-hashing.
      std::size_t
      _M_bucket_index(const _Hash_node_value<_Value, true>& __n,
		      std::size_t __bkt_count) const
	noexcept( noexcept(declval<const _RangeHash&>()((__hash_code)0,
							(std::size_t)0)) )
      { return _RangeHash{}(__n._M_hash_code, __bkt_count); }

      // Storing/copying a hash code is a no-op for nodes that don't
      // cache it...
      void
      _M_store_code(_Hash_node_code_cache<false>&, __hash_code) const
      { }

      void
      _M_copy_code(_Hash_node_code_cache<false>&,
		   const _Hash_node_code_cache<false>&) const
      { }

      // ...and writes the cached member for nodes that do.
      void
      _M_store_code(_Hash_node_code_cache<true>& __n, __hash_code __c) const
      { __n._M_hash_code = __c; }

      void
      _M_copy_code(_Hash_node_code_cache<true>& __to,
		   const _Hash_node_code_cache<true>& __from) const
      { __to._M_hash_code = __from._M_hash_code; }

      // Exchange the stored hash function objects.
      void
      _M_swap(_Hash_code_base& __x)
      { std::swap(__ebo_hash::_M_get(), __x.__ebo_hash::_M_get()); }

      // Reference to the stored hash function object.
      const _Hash&
      _M_hash() const { return __ebo_hash::_M_cget(); }
    };
1277
1278 /// Partial specialization used when nodes contain a cached hash code.
1279 template<typename _Key, typename _Value, typename _ExtractKey,
1280 typename _Hash, typename _RangeHash, typename _Unused>
1281 struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1282 _Hash, _RangeHash, _Unused, true>
1283 : public _Node_iterator_base<_Value, true>
1284 {
1285 protected:
1286 using __base_node_iter = _Node_iterator_base<_Value, true>;
1287 using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1288 _Hash, _RangeHash, _Unused, true>;
1289
1290 _Local_iterator_base() = default;
1291 _Local_iterator_base(const __hash_code_base&,
1292 _Hash_node<_Value, true>* __p,
1293 std::size_t __bkt, std::size_t __bkt_count)
1294 : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
1295 { }
1296
1297 void
1298 _M_incr()
1299 {
1300 __base_node_iter::_M_incr();
1301 if (this->_M_cur)
1302 {
1303 std::size_t __bkt
1304 = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
1305 if (__bkt != _M_bucket)
1306 this->_M_cur = nullptr;
1307 }
1308 }
1309
1310 std::size_t _M_bucket;
1311 std::size_t _M_bucket_count;
1312
1313 public:
1314 std::size_t
1315 _M_get_bucket() const { return _M_bucket; } // for debug mode
1316 };
1317
1318 // Uninitialized storage for a _Hash_code_base.
1319 // This type is DefaultConstructible and Assignable even if the
1320 // _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
1321 // can be DefaultConstructible and Assignable.
  template<typename _Tp, bool _IsEmpty = std::is_empty<_Tp>::value>
    struct _Hash_code_storage
    {
      // Raw, suitably-aligned storage; the _Tp object inside is created
      // and destroyed explicitly via _M_h() by _Local_iterator_base.
      __gnu_cxx::__aligned_buffer<_Tp> _M_storage;

      _Tp*
      _M_h() { return _M_storage._M_ptr(); }

      const _Tp*
      _M_h() const { return _M_storage._M_ptr(); }
    };
1333
  // Empty partial specialization for empty _Hash_code_base types.
  template<typename _Tp>
    struct _Hash_code_storage<_Tp, true>
    {
      static_assert( std::is_empty<_Tp>::value, "Type must be empty" );

      // No buffer member at all: the (stateless) object is conjured from
      // the address of *this.
      // As _Tp is an empty type there will be no bytes written/read through
      // the cast pointer, so no strict-aliasing violation.
      _Tp*
      _M_h() { return reinterpret_cast<_Tp*>(this); }

      const _Tp*
      _M_h() const { return reinterpret_cast<const _Tp*>(this); }
    };
1348
  // Storage for the (non-caching) _Hash_code_base that a local iterator
  // needs in order to recompute bucket indexes while iterating.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused>
    using __hash_code_for_local_iter
      = _Hash_code_storage<_Hash_code_base<_Key, _Value, _ExtractKey,
					   _Hash, _RangeHash, _Unused, false>>;
1354
1355 // Partial specialization used when hash codes are not cached
1356 template<typename _Key, typename _Value, typename _ExtractKey,
1357 typename _Hash, typename _RangeHash, typename _Unused>
1358 struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1359 _Hash, _RangeHash, _Unused, false>
1360 : __hash_code_for_local_iter<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
1361 _Unused>
1362 , _Node_iterator_base<_Value, false>
1363 {
1364 protected:
1365 using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1366 _Hash, _RangeHash, _Unused, false>;
1367 using __node_iter_base = _Node_iterator_base<_Value, false>;
1368
1369 _Local_iterator_base() : _M_bucket_count(-1) { }
1370
1371 _Local_iterator_base(const __hash_code_base& __base,
1372 _Hash_node<_Value, false>* __p,
1373 std::size_t __bkt, std::size_t __bkt_count)
1374 : __node_iter_base(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
1375 { _M_init(__base); }
1376
1377 ~_Local_iterator_base()
1378 {
1379 if (_M_bucket_count != size_t(-1))
1380 _M_destroy();
1381 }
1382
1383 _Local_iterator_base(const _Local_iterator_base& __iter)
1384 : __node_iter_base(__iter._M_cur), _M_bucket(__iter._M_bucket)
1385 , _M_bucket_count(__iter._M_bucket_count)
1386 {
1387 if (_M_bucket_count != size_t(-1))
1388 _M_init(*__iter._M_h());
1389 }
1390
1391 _Local_iterator_base&
1392 operator=(const _Local_iterator_base& __iter)
1393 {
1394 if (_M_bucket_count != -1)
1395 _M_destroy();
1396 this->_M_cur = __iter._M_cur;
1397 _M_bucket = __iter._M_bucket;
1398 _M_bucket_count = __iter._M_bucket_count;
1399 if (_M_bucket_count != -1)
1400 _M_init(*__iter._M_h());
1401 return *this;
1402 }
1403
1404 void
1405 _M_incr()
1406 {
1407 __node_iter_base::_M_incr();
1408 if (this->_M_cur)
1409 {
1410 std::size_t __bkt = this->_M_h()->_M_bucket_index(*this->_M_cur,
1411 _M_bucket_count);
1412 if (__bkt != _M_bucket)
1413 this->_M_cur = nullptr;
1414 }
1415 }
1416
1417 std::size_t _M_bucket;
1418 std::size_t _M_bucket_count;
1419
1420 void
1421 _M_init(const __hash_code_base& __base)
1422 { ::new(this->_M_h()) __hash_code_base(__base); }
1423
1424 void
1425 _M_destroy() { this->_M_h()->~__hash_code_base(); }
1426
1427 public:
1428 std::size_t
1429 _M_get_bucket() const { return _M_bucket; } // for debug mode
1430 };
1431
1432 /// local iterators
1433 template<typename _Key, typename _Value, typename _ExtractKey,
1434 typename _Hash, typename _RangeHash, typename _Unused,
1435 bool __constant_iterators, bool __cache>
1436 struct _Local_iterator
1437 : public _Local_iterator_base<_Key, _Value, _ExtractKey,
1438 _Hash, _RangeHash, _Unused, __cache>
1439 {
1440 private:
1441 using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
1442 _Hash, _RangeHash, _Unused, __cache>;
1443 using __hash_code_base = typename __base_type::__hash_code_base;
1444
1445 public:
1446 typedef _Value value_type;
1447 typedef typename std::conditional<__constant_iterators,
1448 const value_type*, value_type*>::type
1449 pointer;
1450 typedef typename std::conditional<__constant_iterators,
1451 const value_type&, value_type&>::type
1452 reference;
1453 typedef std::ptrdiff_t difference_type;
1454 typedef std::forward_iterator_tag iterator_category;
1455
1456 _Local_iterator() = default;
1457
1458 _Local_iterator(const __hash_code_base& __base,
1459 _Hash_node<_Value, __cache>* __n,
1460 std::size_t __bkt, std::size_t __bkt_count)
1461 : __base_type(__base, __n, __bkt, __bkt_count)
1462 { }
1463
1464 reference
1465 operator*() const
1466 { return this->_M_cur->_M_v(); }
1467
1468 pointer
1469 operator->() const
1470 { return this->_M_cur->_M_valptr(); }
1471
1472 _Local_iterator&
1473 operator++()
1474 {
1475 this->_M_incr();
1476 return *this;
1477 }
1478
1479 _Local_iterator
1480 operator++(int)
1481 {
1482 _Local_iterator __tmp(*this);
1483 this->_M_incr();
1484 return __tmp;
1485 }
1486 };
1487
1488 /// local const_iterators
1489 template<typename _Key, typename _Value, typename _ExtractKey,
1490 typename _Hash, typename _RangeHash, typename _Unused,
1491 bool __constant_iterators, bool __cache>
1492 struct _Local_const_iterator
1493 : public _Local_iterator_base<_Key, _Value, _ExtractKey,
1494 _Hash, _RangeHash, _Unused, __cache>
1495 {
1496 private:
1497 using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
1498 _Hash, _RangeHash, _Unused, __cache>;
1499 using __hash_code_base = typename __base_type::__hash_code_base;
1500
1501 public:
1502 typedef _Value value_type;
1503 typedef const value_type* pointer;
1504 typedef const value_type& reference;
1505 typedef std::ptrdiff_t difference_type;
1506 typedef std::forward_iterator_tag iterator_category;
1507
1508 _Local_const_iterator() = default;
1509
1510 _Local_const_iterator(const __hash_code_base& __base,
1511 _Hash_node<_Value, __cache>* __n,
1512 std::size_t __bkt, std::size_t __bkt_count)
1513 : __base_type(__base, __n, __bkt, __bkt_count)
1514 { }
1515
1516 _Local_const_iterator(const _Local_iterator<_Key, _Value, _ExtractKey,
1517 _Hash, _RangeHash, _Unused,
1518 __constant_iterators,
1519 __cache>& __x)
1520 : __base_type(__x)
1521 { }
1522
1523 reference
1524 operator*() const
1525 { return this->_M_cur->_M_v(); }
1526
1527 pointer
1528 operator->() const
1529 { return this->_M_cur->_M_valptr(); }
1530
1531 _Local_const_iterator&
1532 operator++()
1533 {
1534 this->_M_incr();
1535 return *this;
1536 }
1537
1538 _Local_const_iterator
1539 operator++(int)
1540 {
1541 _Local_const_iterator __tmp(*this);
1542 this->_M_incr();
1543 return __tmp;
1544 }
1545 };
1546
1547 /**
1548 * Primary class template _Hashtable_base.
1549 *
1550 * Helper class adding management of _Equal functor to
1551 * _Hash_code_base type.
1552 *
1553 * Base class templates are:
1554 * - __detail::_Hash_code_base
1555 * - __detail::_Hashtable_ebo_helper
1556 */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Equal, typename _Hash, typename _RangeHash,
	   typename _Unused, typename _Traits>
    struct _Hashtable_base
    : public _Hash_code_base<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
			     _Unused, _Traits::__hash_cached::value>,
      private _Hashtable_ebo_helper<0, _Equal>
    {
    public:
      typedef _Key key_type;
      typedef _Value value_type;
      typedef _Equal key_equal;
      typedef std::size_t size_type;
      typedef std::ptrdiff_t difference_type;

      using __traits_type = _Traits;
      using __hash_cached = typename __traits_type::__hash_cached;

      using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused,
					       __hash_cached::value>;

      using __hash_code = typename __hash_code_base::__hash_code;

    private:
      using _EqualEBO = _Hashtable_ebo_helper<0, _Equal>;

      // Without cached hash codes there is nothing to pre-compare, so
      // the code check trivially succeeds and only the equality
      // predicate decides.
      static bool
      _S_equals(__hash_code, const _Hash_node_code_cache<false>&)
      { return true; }

      static bool
      _S_node_equals(const _Hash_node_code_cache<false>&,
		     const _Hash_node_code_cache<false>&)
      { return true; }

      // With cached hash codes, compare the codes first as a cheap
      // filter before calling the equality predicate.
      static bool
      _S_equals(__hash_code __c, const _Hash_node_code_cache<true>& __n)
      { return __c == __n._M_hash_code; }

      static bool
      _S_node_equals(const _Hash_node_code_cache<true>& __lhn,
		     const _Hash_node_code_cache<true>& __rhn)
      { return __lhn._M_hash_code == __rhn._M_hash_code; }

    protected:
      _Hashtable_base() = default;

      _Hashtable_base(const _Hash& __hash, const _Equal& __eq)
      : __hash_code_base(__hash), _EqualEBO(__eq)
      { }

      // Return true if key __k (with hash code __c) is equivalent to the
      // key stored in node __n.
      bool
      _M_equals(const _Key& __k, __hash_code __c,
		const _Hash_node_value<_Value, __hash_cached::value>& __n) const
      {
	static_assert(__is_invocable<const _Equal&, const _Key&, const _Key&>{},
	  "key equality predicate must be invocable with two arguments of "
	  "key type");
	return _S_equals(__c, __n) && _M_eq()(__k, _ExtractKey{}(__n._M_v()));
      }

      // Heterogeneous version of _M_equals, for a key of type _Kt with a
      // transparent equality predicate.
      template<typename _Kt>
	bool
	_M_equals_tr(const _Kt& __k, __hash_code __c,
		     const _Hash_node_value<_Value,
					    __hash_cached::value>& __n) const
	{
	  static_assert(
	    __is_invocable<const _Equal&, const _Kt&, const _Key&>{},
	    "key equality predicate must be invocable with two arguments of "
	    "key type");
	  return _S_equals(__c, __n) && _M_eq()(__k, _ExtractKey{}(__n._M_v()));
	}

      // Return true if the keys held by nodes __lhn and __rhn are
      // equivalent.
      bool
      _M_node_equals(
	const _Hash_node_value<_Value, __hash_cached::value>& __lhn,
	const _Hash_node_value<_Value, __hash_cached::value>& __rhn) const
      {
	return _S_node_equals(__lhn, __rhn)
	  && _M_eq()(_ExtractKey{}(__lhn._M_v()), _ExtractKey{}(__rhn._M_v()));
      }

      // Swap both the hash function (in the base) and the equality
      // predicate (in the EBO helper).
      void
      _M_swap(_Hashtable_base& __x)
      {
	__hash_code_base::_M_swap(__x);
	std::swap(_EqualEBO::_M_get(), __x._EqualEBO::_M_get());
      }

      // Reference to the stored equality predicate.
      const _Equal&
      _M_eq() const { return _EqualEBO::_M_cget(); }
    };
1651
1652 /**
1653 * Primary class template _Equality.
1654 *
1655 * This is for implementing equality comparison for unordered
1656 * containers, per N3068, by John Lakos and Pablo Halpern.
1657 * Algorithmically, we follow closely the reference implementations
1658 * therein.
1659 */
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits,
	   // true for unique-key containers, false for the multi variants.
	   bool _Unique_keys = _Traits::__unique_keys::value>
    struct _Equality;
1666
  /// unordered_map and unordered_set specializations.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // Return true if *this and __other hold equal elements; defined
      // out of line below.
      bool
      _M_equal(const __hashtable&) const;
    };
1682
1683 template<typename _Key, typename _Value, typename _Alloc,
1684 typename _ExtractKey, typename _Equal,
1685 typename _Hash, typename _RangeHash, typename _Unused,
1686 typename _RehashPolicy, typename _Traits>
1687 bool
1688 _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
1689 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
1690 _M_equal(const __hashtable& __other) const
1691 {
1692 using __node_type = typename __hashtable::__node_type;
1693 const __hashtable* __this = static_cast<const __hashtable*>(this);
1694 if (__this->size() != __other.size())
1695 return false;
1696
1697 for (auto __itx = __this->begin(); __itx != __this->end(); ++__itx)
1698 {
1699 std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
1700 auto __prev_n = __other._M_buckets[__ybkt];
1701 if (!__prev_n)
1702 return false;
1703
1704 for (__node_type* __n = static_cast<__node_type*>(__prev_n->_M_nxt);;
1705 __n = __n->_M_next())
1706 {
1707 if (__n->_M_v() == *__itx)
1708 break;
1709
1710 if (!__n->_M_nxt
1711 || __other._M_bucket_index(*__n->_M_next()) != __ybkt)
1712 return false;
1713 }
1714 }
1715
1716 return true;
1717 }
1718
  /// unordered_multiset and unordered_multimap specializations.
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    struct _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
		     _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
    {
      using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey, _Equal,
				     _Hash, _RangeHash, _Unused,
				     _RehashPolicy, _Traits>;

      // Return true if *this and __other hold permutations of the same
      // elements; defined out of line below.
      bool
      _M_equal(const __hashtable&) const;
    };
1734
  template<typename _Key, typename _Value, typename _Alloc,
	   typename _ExtractKey, typename _Equal,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   typename _RehashPolicy, typename _Traits>
    bool
    _Equality<_Key, _Value, _Alloc, _ExtractKey, _Equal,
	      _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>::
    _M_equal(const __hashtable& __other) const
    {
      using __node_type = typename __hashtable::__node_type;
      const __hashtable* __this = static_cast<const __hashtable*>(this);
      if (__this->size() != __other.size())
	return false;

      for (auto __itx = __this->begin(); __itx != __this->end();)
	{
	  // Measure the run [__itx, __itx_end) of elements with keys
	  // equivalent to *__itx (the scan assumes equivalent elements
	  // are stored adjacently).
	  std::size_t __x_count = 1;
	  auto __itx_end = __itx;
	  for (++__itx_end; __itx_end != __this->end()
		 && __this->key_eq()(_ExtractKey{}(*__itx),
				     _ExtractKey{}(*__itx_end));
	       ++__itx_end)
	    ++__x_count;

	  // Locate the bucket of __other that would hold *__itx.
	  std::size_t __ybkt = __other._M_bucket_index(*__itx._M_cur);
	  auto __y_prev_n = __other._M_buckets[__ybkt];
	  if (!__y_prev_n)
	    return false;

	  // Walk that bucket's chain looking for the first node with an
	  // equivalent key, skipping whole runs of other keys at a time.
	  __node_type* __y_n = static_cast<__node_type*>(__y_prev_n->_M_nxt);
	  for (;;)
	    {
	      if (__this->key_eq()(_ExtractKey{}(__y_n->_M_v()),
				   _ExtractKey{}(*__itx)))
		break;

	      // Skip past nodes equivalent to __y_ref_n (a key that
	      // didn't match).
	      auto __y_ref_n = __y_n;
	      for (__y_n = __y_n->_M_next(); __y_n; __y_n = __y_n->_M_next())
		if (!__other._M_node_equals(*__y_ref_n, *__y_n))
		  break;

	      if (!__y_n || __other._M_bucket_index(*__y_n) != __ybkt)
		return false;
	    }

	  // Check __other has at least __x_count elements from __ity on.
	  typename __hashtable::const_iterator __ity(__y_n);
	  for (auto __ity_end = __ity; __ity_end != __other.end(); ++__ity_end)
	    if (--__x_count == 0)
	      break;

	  if (__x_count != 0)
	    return false;

	  // Finally compare the two equivalent-key ranges as multisets.
	  if (!std::is_permutation(__itx, __itx_end, __ity))
	    return false;

	  __itx = __itx_end;
	}
      return true;
    }
1795
1796 /**
1797 * This type deals with all allocation and keeps an allocator instance
1798 * through inheritance to benefit from EBO when possible.
1799 */
  template<typename _NodeAlloc>
    struct _Hashtable_alloc : private _Hashtable_ebo_helper<0, _NodeAlloc>
    {
    private:
      using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;
    public:
      // The node type allocated by this allocator.
      using __node_type = typename _NodeAlloc::value_type;
      using __node_alloc_type = _NodeAlloc;
      // Use __gnu_cxx to benefit from _S_always_equal and al.
      using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;

      // Traits for constructing/destroying the values held inside nodes.
      using __value_alloc_traits = typename __node_alloc_traits::template
	rebind_traits<typename __node_type::value_type>;

      using __node_ptr = __node_type*;
      using __node_base = _Hash_node_base;
      using __node_base_ptr = __node_base*;
      // Rebound allocator used for the bucket array of node-base pointers.
      using __buckets_alloc_type =
	__alloc_rebind<__node_alloc_type, __node_base_ptr>;
      using __buckets_alloc_traits = std::allocator_traits<__buckets_alloc_type>;
      using __buckets_ptr = __node_base_ptr*;

      _Hashtable_alloc() = default;
      _Hashtable_alloc(const _Hashtable_alloc&) = default;
      _Hashtable_alloc(_Hashtable_alloc&&) = default;

      template<typename _Alloc>
	_Hashtable_alloc(_Alloc&& __a)
	: __ebo_node_alloc(std::forward<_Alloc>(__a))
	{ }

      // Access the stored node allocator.
      __node_alloc_type&
      _M_node_allocator()
      { return __ebo_node_alloc::_M_get(); }

      const __node_alloc_type&
      _M_node_allocator() const
      { return __ebo_node_alloc::_M_cget(); }

      // Allocate a node and construct an element within it.
      template<typename... _Args>
	__node_ptr
	_M_allocate_node(_Args&&... __args);

      // Destroy the element within a node and deallocate the node.
      void
      _M_deallocate_node(__node_ptr __n);

      // Deallocate a node.
      void
      _M_deallocate_node_ptr(__node_ptr __n);

      // Deallocate the linked list of nodes pointed to by __n.
      // The elements within the nodes are destroyed.
      void
      _M_deallocate_nodes(__node_ptr __n);

      // Allocate a zero-initialized array of __bkt_count buckets.
      __buckets_ptr
      _M_allocate_buckets(std::size_t __bkt_count);

      // Free a bucket array previously obtained from _M_allocate_buckets.
      void
      _M_deallocate_buckets(__buckets_ptr, std::size_t __bkt_count);
    };
1863
1864 // Definitions of class template _Hashtable_alloc's out-of-line member
1865 // functions.
  template<typename _NodeAlloc>
    template<typename... _Args>
      auto
      _Hashtable_alloc<_NodeAlloc>::_M_allocate_node(_Args&&... __args)
      -> __node_ptr
      {
	auto __nptr = __node_alloc_traits::allocate(_M_node_allocator(), 1);
	__node_ptr __n = std::__to_address(__nptr);
	__try
	  {
	    // Construct the node itself, then construct the value inside
	    // it from __args.  If construction throws, give the storage
	    // back to the allocator and rethrow.
	    ::new ((void*)__n) __node_type;
	    __node_alloc_traits::construct(_M_node_allocator(),
					   __n->_M_valptr(),
					   std::forward<_Args>(__args)...);
	    return __n;
	  }
	__catch(...)
	  {
	    __node_alloc_traits::deallocate(_M_node_allocator(), __nptr, 1);
	    __throw_exception_again;
	  }
      }
1888
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node(__node_ptr __n)
    {
      // Destroy the stored value first, then destroy and free the node.
      __node_alloc_traits::destroy(_M_node_allocator(), __n->_M_valptr());
      _M_deallocate_node_ptr(__n);
    }
1896
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
    {
      typedef typename __node_alloc_traits::pointer _Ptr;
      // Recover the allocator's (possibly fancy) pointer type from the
      // raw pointer before handing the memory back.
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
      __n->~__node_type();
      __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
    }
1906
1907 template<typename _NodeAlloc>
1908 void
1909 _Hashtable_alloc<_NodeAlloc>::_M_deallocate_nodes(__node_ptr __n)
1910 {
1911 while (__n)
1912 {
1913 __node_ptr __tmp = __n;
1914 __n = __n->_M_next();
1915 _M_deallocate_node(__tmp);
1916 }
1917 }
1918
  template<typename _NodeAlloc>
    auto
    _Hashtable_alloc<_NodeAlloc>::_M_allocate_buckets(std::size_t __bkt_count)
    -> __buckets_ptr
    {
      __buckets_alloc_type __alloc(_M_node_allocator());

      auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
      __buckets_ptr __p = std::__to_address(__ptr);
      // Every bucket starts out empty (null node-base pointer).
      __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
      return __p;
    }
1931
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::
    _M_deallocate_buckets(__buckets_ptr __bkts,
			  std::size_t __bkt_count)
    {
      typedef typename __buckets_alloc_traits::pointer _Ptr;
      // Convert the raw pointer back to the allocator's pointer type
      // before deallocating.
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
      __buckets_alloc_type __alloc(_M_node_allocator());
      __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
    }
1943
1944 ///@} hashtable-detail
1945} // namespace __detail
1946_GLIBCXX_END_NAMESPACE_VERSION
1947} // namespace std
1948
1949#endif // _HASHTABLE_POLICY_H
1950