libstdc++
hashtable_policy.h
Go to the documentation of this file.
1// Internal policy header for unordered_set and unordered_map -*- C++ -*-
2
3// Copyright (C) 2010-2024 Free Software Foundation, Inc.
4//
5// This file is part of the GNU ISO C++ Library. This library is free
6// software; you can redistribute it and/or modify it under the
7// terms of the GNU General Public License as published by the
8// Free Software Foundation; either version 3, or (at your option)
9// any later version.
10
11// This library is distributed in the hope that it will be useful,
12// but WITHOUT ANY WARRANTY; without even the implied warranty of
13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14// GNU General Public License for more details.
15
16// Under Section 7 of GPL version 3, you are granted additional
17// permissions described in the GCC Runtime Library Exception, version
18// 3.1, as published by the Free Software Foundation.
19
20// You should have received a copy of the GNU General Public License and
21// a copy of the GCC Runtime Library Exception along with this program;
22// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23// <http://www.gnu.org/licenses/>.
24
25/** @file bits/hashtable_policy.h
26 * This is an internal header file, included by other library headers.
27 * Do not attempt to use it directly.
28 * @headername{unordered_map,unordered_set}
29 */
30
31#ifndef _HASHTABLE_POLICY_H
32#define _HASHTABLE_POLICY_H 1
33
34#include <tuple> // for std::tuple, std::forward_as_tuple
35#include <bits/functional_hash.h> // for __is_fast_hash
36#include <bits/stl_algobase.h> // for std::min
37#include <bits/stl_pair.h> // for std::pair
38#include <ext/aligned_buffer.h> // for __gnu_cxx::__aligned_buffer
39#include <ext/alloc_traits.h> // for std::__alloc_rebind
40#include <ext/numeric_traits.h> // for __gnu_cxx::__int_traits
41
42namespace std _GLIBCXX_VISIBILITY(default)
43{
44_GLIBCXX_BEGIN_NAMESPACE_VERSION
45/// @cond undocumented
46
47 template<typename _Key, typename _Value, typename _Alloc,
48 typename _ExtractKey, typename _Equal,
49 typename _Hash, typename _RangeHash, typename _Unused,
50 typename _RehashPolicy, typename _Traits>
51 class _Hashtable;
52
53namespace __detail
54{
55 /**
56 * @defgroup hashtable-detail Base and Implementation Classes
57 * @ingroup unordered_associative_containers
58 * @{
59 */
60 template<typename _Key, typename _Value, typename _ExtractKey,
61 typename _Equal, typename _Hash, typename _RangeHash,
62 typename _Unused, typename _Traits>
63 struct _Hashtable_base;
64
65#pragma GCC diagnostic push
66#pragma GCC diagnostic ignored "-Wc++17-extensions" // if constexpr
67 // Helper function: return distance(first, last) for forward
68 // iterators, or 0/1 for input iterators.
69 template<typename _Iterator>
71 __distance_fw(_Iterator __first, _Iterator __last)
72 {
74 if constexpr (is_convertible<_Cat, forward_iterator_tag>::value)
75 return std::distance(__first, __last);
76 else
77 return __first != __last ? 1 : 0;
78 }
79#pragma GCC diagnostic pop
80
81 struct _Identity
82 {
83 template<typename _Tp>
84 _Tp&&
85 operator()(_Tp&& __x) const noexcept
86 { return std::forward<_Tp>(__x); }
87 };
88
89 struct _Select1st
90 {
91 template<typename _Pair>
92 struct __1st_type;
93
94 template<typename _Tp, typename _Up>
95 struct __1st_type<pair<_Tp, _Up>>
96 { using type = _Tp; };
97
98 template<typename _Tp, typename _Up>
99 struct __1st_type<const pair<_Tp, _Up>>
100 { using type = const _Tp; };
101
102 template<typename _Pair>
103 struct __1st_type<_Pair&>
104 { using type = typename __1st_type<_Pair>::type&; };
105
106 template<typename _Tp>
107 typename __1st_type<_Tp>::type&&
108 operator()(_Tp&& __x) const noexcept
109 { return std::forward<_Tp>(__x).first; }
110 };
111
112 template<typename _ExKey>
113 struct _NodeBuilder;
114
115 template<>
116 struct _NodeBuilder<_Select1st>
117 {
118 template<typename _Kt, typename _Arg, typename _NodeGenerator>
119 static auto
120 _S_build(_Kt&& __k, _Arg&& __arg, _NodeGenerator& __node_gen)
121 -> typename _NodeGenerator::__node_ptr
122 {
123 return __node_gen(std::forward<_Kt>(__k),
124 std::forward<_Arg>(__arg).second);
125 }
126 };
127
128 template<>
129 struct _NodeBuilder<_Identity>
130 {
131 template<typename _Kt, typename _Arg, typename _NodeGenerator>
132 static auto
133 _S_build(_Kt&& __k, _Arg&&, _NodeGenerator& __node_gen)
134 -> typename _NodeGenerator::__node_ptr
135 { return __node_gen(std::forward<_Kt>(__k)); }
136 };
137
138 template<typename _HashtableAlloc, typename _NodePtr>
139 struct _NodePtrGuard
140 {
141 _HashtableAlloc& _M_h;
142 _NodePtr _M_ptr;
143
144 ~_NodePtrGuard()
145 {
146 if (_M_ptr)
147 _M_h._M_deallocate_node_ptr(_M_ptr);
148 }
149 };
150
151 template<typename _NodeAlloc>
152 struct _Hashtable_alloc;
153
154 // Functor recycling a pool of nodes and using allocation once the pool is
155 // empty.
156 template<typename _NodeAlloc>
157 struct _ReuseOrAllocNode
158 {
159 private:
160 using __node_alloc_type = _NodeAlloc;
161 using __hashtable_alloc = _Hashtable_alloc<__node_alloc_type>;
162 using __node_alloc_traits =
163 typename __hashtable_alloc::__node_alloc_traits;
164
165 public:
166 using __node_ptr = typename __hashtable_alloc::__node_ptr;
167
168 _ReuseOrAllocNode(__node_ptr __nodes, __hashtable_alloc& __h)
169 : _M_nodes(__nodes), _M_h(__h) { }
170 _ReuseOrAllocNode(const _ReuseOrAllocNode&) = delete;
171
172 ~_ReuseOrAllocNode()
173 { _M_h._M_deallocate_nodes(_M_nodes); }
174
175#pragma GCC diagnostic push
176#pragma GCC diagnostic ignored "-Wc++17-extensions" // if constexpr
177 template<typename _Arg>
178 __node_ptr
179 operator()(_Arg&& __arg)
180 {
181 if (!_M_nodes)
182 return _M_h._M_allocate_node(std::forward<_Arg>(__arg));
183
184 using value_type = typename _NodeAlloc::value_type::value_type;
185
186 __node_ptr __node = _M_nodes;
187 if constexpr (is_assignable<value_type&, _Arg>::value)
188 {
189 __node->_M_v() = std::forward<_Arg>(__arg);
190 _M_nodes = _M_nodes->_M_next();
191 __node->_M_nxt = nullptr;
192 }
193 else
194 {
195 _M_nodes = _M_nodes->_M_next();
196 __node->_M_nxt = nullptr;
197 auto& __a = _M_h._M_node_allocator();
198 __node_alloc_traits::destroy(__a, __node->_M_valptr());
199 _NodePtrGuard<__hashtable_alloc, __node_ptr>
200 __guard{ _M_h, __node };
201 __node_alloc_traits::construct(__a, __node->_M_valptr(),
202 std::forward<_Arg>(__arg));
203 __guard._M_ptr = nullptr;
204 }
205 return __node;
206 }
207#pragma GCC diagnostic pop
208
209 private:
210 __node_ptr _M_nodes;
211 __hashtable_alloc& _M_h;
212 };
213
214 // Functor similar to the previous one but without any pool of nodes to
215 // recycle.
216 template<typename _NodeAlloc>
217 struct _AllocNode
218 {
219 private:
220 using __hashtable_alloc = _Hashtable_alloc<_NodeAlloc>;
221
222 public:
223 using __node_ptr = typename __hashtable_alloc::__node_ptr;
224
225 _AllocNode(__hashtable_alloc& __h)
226 : _M_h(__h) { }
227
228 template<typename... _Args>
229 __node_ptr
230 operator()(_Args&&... __args) const
231 { return _M_h._M_allocate_node(std::forward<_Args>(__args)...); }
232
233 private:
234 __hashtable_alloc& _M_h;
235 };
236
237 // Auxiliary types used for all instantiations of _Hashtable nodes
238 // and iterators.
239
240 /**
241 * struct _Hashtable_traits
242 *
243 * Important traits for hash tables.
244 *
245 * @tparam _Cache_hash_code Boolean value. True if the value of
246 * the hash function is stored along with the value. This is a
247 * time-space tradeoff. Storing it may improve lookup speed by
248 * reducing the number of times we need to call the _Hash or _Equal
249 * functors.
250 *
251 * @tparam _Constant_iterators Boolean value. True if iterator and
252 * const_iterator are both constant iterator types. This is true
253 * for unordered_set and unordered_multiset, false for
254 * unordered_map and unordered_multimap.
255 *
256 * @tparam _Unique_keys Boolean value. True if the return value
257 * of _Hashtable::count(k) is always at most one, false if it may
258 * be an arbitrary number. This is true for unordered_set and
259 * unordered_map, false for unordered_multiset and
260 * unordered_multimap.
261 */
262 template<bool _Cache_hash_code, bool _Constant_iterators, bool _Unique_keys>
263 struct _Hashtable_traits
264 {
265 using __hash_cached = __bool_constant<_Cache_hash_code>;
266 using __constant_iterators = __bool_constant<_Constant_iterators>;
267 using __unique_keys = __bool_constant<_Unique_keys>;
268 };
269
270 /**
271 * struct _Hashtable_hash_traits
272 *
273 * Important traits for hash tables depending on associated hasher.
274 *
275 */
276 template<typename _Hash>
277 struct _Hashtable_hash_traits
278 {
279 static constexpr std::size_t
280 __small_size_threshold() noexcept
281 { return std::__is_fast_hash<_Hash>::value ? 0 : 20; }
282 };
283
284 /**
285 * struct _Hash_node_base
286 *
287 * Nodes, used to wrap elements stored in the hash table. A policy
288 * template parameter of class template _Hashtable controls whether
289 * nodes also store a hash code. In some cases (e.g. strings) this
290 * may be a performance win.
291 */
292 struct _Hash_node_base
293 {
294 _Hash_node_base* _M_nxt;
295
296 _Hash_node_base() noexcept : _M_nxt() { }
297
298 _Hash_node_base(_Hash_node_base* __next) noexcept : _M_nxt(__next) { }
299 };
300
301 /**
302 * struct _Hash_node_value_base
303 *
304 * Node type with the value to store.
305 */
306 template<typename _Value>
307 struct _Hash_node_value_base
308 {
309 typedef _Value value_type;
310
311 __gnu_cxx::__aligned_buffer<_Value> _M_storage;
312
313 [[__gnu__::__always_inline__]]
314 _Value*
315 _M_valptr() noexcept
316 { return _M_storage._M_ptr(); }
317
318 [[__gnu__::__always_inline__]]
319 const _Value*
320 _M_valptr() const noexcept
321 { return _M_storage._M_ptr(); }
322
323 [[__gnu__::__always_inline__]]
324 _Value&
325 _M_v() noexcept
326 { return *_M_valptr(); }
327
328 [[__gnu__::__always_inline__]]
329 const _Value&
330 _M_v() const noexcept
331 { return *_M_valptr(); }
332 };
333
334 /**
335 * Primary template struct _Hash_node_code_cache.
336 */
337 template<bool _Cache_hash_code>
338 struct _Hash_node_code_cache
339 { };
340
341 /**
342 * Specialization for node with cache, struct _Hash_node_code_cache.
343 */
344 template<>
345 struct _Hash_node_code_cache<true>
346 { std::size_t _M_hash_code; };
347
348 template<typename _Value, bool _Cache_hash_code>
349 struct _Hash_node_value
350 : _Hash_node_value_base<_Value>
351 , _Hash_node_code_cache<_Cache_hash_code>
352 { };
353
354 /**
355 * Primary template struct _Hash_node.
356 */
357 template<typename _Value, bool _Cache_hash_code>
358 struct _Hash_node
359 : _Hash_node_base
360 , _Hash_node_value<_Value, _Cache_hash_code>
361 {
362 _Hash_node*
363 _M_next() const noexcept
364 { return static_cast<_Hash_node*>(this->_M_nxt); }
365 };
366
367 /// Base class for node iterators.
368 template<typename _Value, bool _Cache_hash_code>
369 struct _Node_iterator_base
370 {
371 using __node_type = _Hash_node<_Value, _Cache_hash_code>;
372
373 __node_type* _M_cur;
374
375 _Node_iterator_base() : _M_cur(nullptr) { }
376 _Node_iterator_base(__node_type* __p) noexcept
377 : _M_cur(__p) { }
378
379 void
380 _M_incr() noexcept
381 { _M_cur = _M_cur->_M_next(); }
382
383 friend bool
384 operator==(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
385 noexcept
386 { return __x._M_cur == __y._M_cur; }
387
388#if __cpp_impl_three_way_comparison < 201907L
389 friend bool
390 operator!=(const _Node_iterator_base& __x, const _Node_iterator_base& __y)
391 noexcept
392 { return __x._M_cur != __y._M_cur; }
393#endif
394 };
395
396 /// Node iterators, used to iterate through all the hashtable.
397 template<typename _Value, bool __constant_iterators, bool __cache>
398 struct _Node_iterator
399 : public _Node_iterator_base<_Value, __cache>
400 {
401 private:
402 using __base_type = _Node_iterator_base<_Value, __cache>;
403 using __node_type = typename __base_type::__node_type;
404
405 public:
406 using value_type = _Value;
407 using difference_type = std::ptrdiff_t;
408 using iterator_category = std::forward_iterator_tag;
409
410 using pointer = __conditional_t<__constant_iterators,
411 const value_type*, value_type*>;
412
413 using reference = __conditional_t<__constant_iterators,
414 const value_type&, value_type&>;
415
416 _Node_iterator() = default;
417
418 explicit
419 _Node_iterator(__node_type* __p) noexcept
420 : __base_type(__p) { }
421
422 reference
423 operator*() const noexcept
424 { return this->_M_cur->_M_v(); }
425
426 pointer
427 operator->() const noexcept
428 { return this->_M_cur->_M_valptr(); }
429
430 _Node_iterator&
431 operator++() noexcept
432 {
433 this->_M_incr();
434 return *this;
435 }
436
437 _Node_iterator
438 operator++(int) noexcept
439 {
440 _Node_iterator __tmp(*this);
441 this->_M_incr();
442 return __tmp;
443 }
444
445#if __cpp_impl_three_way_comparison >= 201907L
446 friend bool
447 operator==(const _Node_iterator&, const _Node_iterator&) = default;
448#else
449 friend bool
450 operator==(const _Node_iterator& __x, const _Node_iterator& __y) noexcept
451 {
452 const __base_type& __bx = __x;
453 const __base_type& __by = __y;
454 return __bx == __by;
455 }
456
457 friend bool
458 operator!=(const _Node_iterator& __x, const _Node_iterator& __y) noexcept
459 { return !(__x == __y); }
460#endif
461 };
462
463 /// Node const_iterators, used to iterate through all the hashtable.
464 template<typename _Value, bool __constant_iterators, bool __cache>
465 struct _Node_const_iterator
466 : public _Node_iterator_base<_Value, __cache>
467 {
468 private:
469 using __base_type = _Node_iterator_base<_Value, __cache>;
470 using __node_type = typename __base_type::__node_type;
471
472 // The corresponding non-const iterator.
473 using __iterator
474 = _Node_iterator<_Value, __constant_iterators, __cache>;
475
476 public:
477 typedef _Value value_type;
478 typedef std::ptrdiff_t difference_type;
479 typedef std::forward_iterator_tag iterator_category;
480
481 typedef const value_type* pointer;
482 typedef const value_type& reference;
483
484 _Node_const_iterator() = default;
485
486 explicit
487 _Node_const_iterator(__node_type* __p) noexcept
488 : __base_type(__p) { }
489
490 _Node_const_iterator(const __iterator& __x) noexcept
491 : __base_type(__x._M_cur) { }
492
493 reference
494 operator*() const noexcept
495 { return this->_M_cur->_M_v(); }
496
497 pointer
498 operator->() const noexcept
499 { return this->_M_cur->_M_valptr(); }
500
501 _Node_const_iterator&
502 operator++() noexcept
503 {
504 this->_M_incr();
505 return *this;
506 }
507
508 _Node_const_iterator
509 operator++(int) noexcept
510 {
511 _Node_const_iterator __tmp(*this);
512 this->_M_incr();
513 return __tmp;
514 }
515
516#if __cpp_impl_three_way_comparison >= 201907L
517 friend bool
518 operator==(const _Node_const_iterator&,
519 const _Node_const_iterator&) = default;
520
521 friend bool
522 operator==(const _Node_const_iterator& __x, const __iterator& __y)
523 {
524 const __base_type& __bx = __x;
525 const __base_type& __by = __y;
526 return __bx == __by;
527 }
528#else
529 friend bool
530 operator==(const _Node_const_iterator& __x,
531 const _Node_const_iterator& __y) noexcept
532 {
533 const __base_type& __bx = __x;
534 const __base_type& __by = __y;
535 return __bx == __by;
536 }
537
538 friend bool
539 operator!=(const _Node_const_iterator& __x,
540 const _Node_const_iterator& __y) noexcept
541 { return !(__x == __y); }
542
543 friend bool
544 operator==(const _Node_const_iterator& __x,
545 const __iterator& __y) noexcept
546 {
547 const __base_type& __bx = __x;
548 const __base_type& __by = __y;
549 return __bx == __by;
550 }
551
552 friend bool
553 operator!=(const _Node_const_iterator& __x,
554 const __iterator& __y) noexcept
555 { return !(__x == __y); }
556
557 friend bool
558 operator==(const __iterator& __x,
559 const _Node_const_iterator& __y) noexcept
560 {
561 const __base_type& __bx = __x;
562 const __base_type& __by = __y;
563 return __bx == __by;
564 }
565
566 friend bool
567 operator!=(const __iterator& __x,
568 const _Node_const_iterator& __y) noexcept
569 { return !(__x == __y); }
570#endif
571 };
572
573 // Many of class template _Hashtable's template parameters are policy
574 // classes. These are defaults for the policies.
575
576 /// Default range hashing function: use division to fold a large number
577 /// into the range [0, N).
578 struct _Mod_range_hashing
579 {
580 typedef std::size_t first_argument_type;
581 typedef std::size_t second_argument_type;
582 typedef std::size_t result_type;
583
584 result_type
585 operator()(first_argument_type __num,
586 second_argument_type __den) const noexcept
587 { return __num % __den; }
588 };
589
590 /// Default ranged hash function H. In principle it should be a
591 /// function object composed from objects of type H1 and H2 such that
592 /// h(k, N) = h2(h1(k), N), but that would mean making extra copies of
593 /// h1 and h2. So instead we'll just use a tag to tell class template
594 /// hashtable to do that composition.
595 struct _Default_ranged_hash { };
596
597 /// Default value for rehash policy. Bucket size is (usually) the
598 /// smallest prime that keeps the load factor small enough.
599 struct _Prime_rehash_policy
600 {
601 using __has_load_factor = true_type;
602
603 _Prime_rehash_policy(float __z = 1.0) noexcept
604 : _M_max_load_factor(__z), _M_next_resize(0) { }
605
606 float
607 max_load_factor() const noexcept
608 { return _M_max_load_factor; }
609
610 // Return a bucket size no smaller than n.
611 // TODO: 'const' qualifier is kept for abi compatibility reason.
612 std::size_t
613 _M_next_bkt(std::size_t __n) const;
614
615 // Return a bucket count appropriate for n elements
616 std::size_t
617 _M_bkt_for_elements(std::size_t __n) const
618 { return __builtin_ceil(__n / (double)_M_max_load_factor); }
619
620 // __n_bkt is current bucket count, __n_elt is current element count,
621 // and __n_ins is number of elements to be inserted. Do we need to
622 // increase bucket count? If so, return make_pair(true, n), where n
623 // is the new bucket count. If not, return make_pair(false, 0).
624 // TODO: 'const' qualifier is kept for abi compatibility reason.
626 _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
627 std::size_t __n_ins) const;
628
629 typedef std::size_t _State;
630
631 _State
632 _M_state() const
633 { return _M_next_resize; }
634
635 void
636 _M_reset() noexcept
637 { _M_next_resize = 0; }
638
639 void
640 _M_reset(_State __state)
641 { _M_next_resize = __state; }
642
643 static const std::size_t _S_growth_factor = 2;
644
645 float _M_max_load_factor;
646
647 // TODO: 'mutable' kept for abi compatibility reason.
648 mutable std::size_t _M_next_resize;
649 };
650
651 /// Range hashing function assuming that second arg is a power of 2.
652 struct _Mask_range_hashing
653 {
654 typedef std::size_t first_argument_type;
655 typedef std::size_t second_argument_type;
656 typedef std::size_t result_type;
657
658 result_type
659 operator()(first_argument_type __num,
660 second_argument_type __den) const noexcept
661 { return __num & (__den - 1); }
662 };
663
664 /// Compute closest power of 2 not less than __n
665 inline std::size_t
666 __clp2(std::size_t __n) noexcept
667 {
669 // Equivalent to return __n ? std::bit_ceil(__n) : 0;
670 if (__n < 2)
671 return __n;
672 const unsigned __lz = sizeof(size_t) > sizeof(long)
673 ? __builtin_clzll(__n - 1ull)
674 : __builtin_clzl(__n - 1ul);
675 // Doing two shifts avoids undefined behaviour when __lz == 0.
676 return (size_t(1) << (__int_traits<size_t>::__digits - __lz - 1)) << 1;
677 }
678
679 /// Rehash policy providing power of 2 bucket numbers. Avoids modulo
680 /// operations.
681 struct _Power2_rehash_policy
682 {
683 using __has_load_factor = true_type;
684
685 _Power2_rehash_policy(float __z = 1.0) noexcept
686 : _M_max_load_factor(__z), _M_next_resize(0) { }
687
688 float
689 max_load_factor() const noexcept
690 { return _M_max_load_factor; }
691
692 // Return a bucket size no smaller than n (as long as n is not above the
693 // highest power of 2).
694 std::size_t
695 _M_next_bkt(std::size_t __n) noexcept
696 {
697 if (__n == 0)
698 // Special case on container 1st initialization with 0 bucket count
699 // hint. We keep _M_next_resize to 0 to make sure that next time we
700 // want to add an element allocation will take place.
701 return 1;
702
703 const auto __max_width = std::min<size_t>(sizeof(size_t), 8);
704 const auto __max_bkt = size_t(1) << (__max_width * __CHAR_BIT__ - 1);
705 std::size_t __res = __clp2(__n);
706
707 if (__res == 0)
708 __res = __max_bkt;
709 else if (__res == 1)
710 // If __res is 1 we force it to 2 to make sure there will be an
711 // allocation so that nothing need to be stored in the initial
712 // single bucket
713 __res = 2;
714
715 if (__res == __max_bkt)
716 // Set next resize to the max value so that we never try to rehash again
717 // as we already reach the biggest possible bucket number.
718 // Note that it might result in max_load_factor not being respected.
719 _M_next_resize = size_t(-1);
720 else
721 _M_next_resize
722 = __builtin_floor(__res * (double)_M_max_load_factor);
723
724 return __res;
725 }
726
727 // Return a bucket count appropriate for n elements
728 std::size_t
729 _M_bkt_for_elements(std::size_t __n) const noexcept
730 { return __builtin_ceil(__n / (double)_M_max_load_factor); }
731
732 // __n_bkt is current bucket count, __n_elt is current element count,
733 // and __n_ins is number of elements to be inserted. Do we need to
734 // increase bucket count? If so, return make_pair(true, n), where n
735 // is the new bucket count. If not, return make_pair(false, 0).
737 _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
738 std::size_t __n_ins) noexcept
739 {
740 if (__n_elt + __n_ins > _M_next_resize)
741 {
742 // If _M_next_resize is 0 it means that we have nothing allocated so
743 // far and that we start inserting elements. In this case we start
744 // with an initial bucket size of 11.
745 double __min_bkts
746 = std::max<std::size_t>(__n_elt + __n_ins, _M_next_resize ? 0 : 11)
747 / (double)_M_max_load_factor;
748 if (__min_bkts >= __n_bkt)
749 return { true,
750 _M_next_bkt(std::max<std::size_t>(__builtin_floor(__min_bkts) + 1,
751 __n_bkt * _S_growth_factor)) };
752
753 _M_next_resize
754 = __builtin_floor(__n_bkt * (double)_M_max_load_factor);
755 return { false, 0 };
756 }
757 else
758 return { false, 0 };
759 }
760
761 typedef std::size_t _State;
762
763 _State
764 _M_state() const noexcept
765 { return _M_next_resize; }
766
767 void
768 _M_reset() noexcept
769 { _M_next_resize = 0; }
770
771 void
772 _M_reset(_State __state) noexcept
773 { _M_next_resize = __state; }
774
775 static const std::size_t _S_growth_factor = 2;
776
777 float _M_max_load_factor;
778 std::size_t _M_next_resize;
779 };
780
781 template<typename _RehashPolicy>
782 struct _RehashStateGuard
783 {
784 _RehashPolicy* _M_guarded_obj;
785 typename _RehashPolicy::_State _M_prev_state;
786
787 _RehashStateGuard(_RehashPolicy& __policy)
788 : _M_guarded_obj(std::__addressof(__policy))
789 , _M_prev_state(__policy._M_state())
790 { }
791 _RehashStateGuard(const _RehashStateGuard&) = delete;
792
793 ~_RehashStateGuard()
794 {
795 if (_M_guarded_obj)
796 _M_guarded_obj->_M_reset(_M_prev_state);
797 }
798 };
799
800 // Base classes for std::_Hashtable. We define these base classes
801 // because in some cases we want to do different things depending on
802 // the value of a policy class. In some cases the policy class
803 // affects which member functions and nested typedefs are defined;
804 // we handle that by specializing base class templates. Several of
805 // the base class templates need to access other members of class
806 // template _Hashtable, so we use a variant of the "Curiously
807 // Recurring Template Pattern" (CRTP) technique.
808
809 /**
810 * Primary class template _Map_base.
811 *
812 * If the hashtable has a value type of the form pair<const T1, T2> and
813 * a key extraction policy (_ExtractKey) that returns the first part
814 * of the pair, the hashtable gets a mapped_type typedef. If it
815 * satisfies those criteria and also has unique keys, then it also
816 * gets an operator[].
817 */
818 template<typename _Key, typename _Value, typename _Alloc,
819 typename _ExtractKey, typename _Equal,
820 typename _Hash, typename _RangeHash, typename _Unused,
821 typename _RehashPolicy, typename _Traits,
822 bool _Unique_keys = _Traits::__unique_keys::value>
823 struct _Map_base { };
824
825 /// Partial specialization, __unique_keys set to false, std::pair value type.
826 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
827 typename _Hash, typename _RangeHash, typename _Unused,
828 typename _RehashPolicy, typename _Traits>
829 struct _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
830 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, false>
831 {
832 using mapped_type = _Val;
833 };
834
835 /// Partial specialization, __unique_keys set to true.
836 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
837 typename _Hash, typename _RangeHash, typename _Unused,
838 typename _RehashPolicy, typename _Traits>
839 struct _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
840 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>
841 {
842 private:
843 using __hashtable_base = _Hashtable_base<_Key, pair<const _Key, _Val>,
844 _Select1st, _Equal, _Hash,
845 _RangeHash, _Unused,
846 _Traits>;
847
848 using __hashtable = _Hashtable<_Key, pair<const _Key, _Val>, _Alloc,
849 _Select1st, _Equal, _Hash, _RangeHash,
850 _Unused, _RehashPolicy, _Traits>;
851
852 using __hash_code = typename __hashtable_base::__hash_code;
853
854 public:
855 using key_type = typename __hashtable_base::key_type;
856 using mapped_type = _Val;
857
858 mapped_type&
859 operator[](const key_type& __k);
860
861 mapped_type&
862 operator[](key_type&& __k);
863
864 // _GLIBCXX_RESOLVE_LIB_DEFECTS
865 // DR 761. unordered_map needs an at() member function.
866 mapped_type&
867 at(const key_type& __k)
868 {
869 auto __ite = static_cast<__hashtable*>(this)->find(__k);
870 if (!__ite._M_cur)
871 __throw_out_of_range(__N("unordered_map::at"));
872 return __ite->second;
873 }
874
875 const mapped_type&
876 at(const key_type& __k) const
877 {
878 auto __ite = static_cast<const __hashtable*>(this)->find(__k);
879 if (!__ite._M_cur)
880 __throw_out_of_range(__N("unordered_map::at"));
881 return __ite->second;
882 }
883 };
884
885 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
886 typename _Hash, typename _RangeHash, typename _Unused,
887 typename _RehashPolicy, typename _Traits>
888 auto
889 _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
890 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
891 operator[](const key_type& __k)
892 -> mapped_type&
893 {
894 __hashtable* __h = static_cast<__hashtable*>(this);
895 __hash_code __code = __h->_M_hash_code(__k);
896 std::size_t __bkt = __h->_M_bucket_index(__code);
897 if (auto __node = __h->_M_find_node(__bkt, __k, __code))
898 return __node->_M_v().second;
899
900 typename __hashtable::_Scoped_node __node {
901 __h,
905 };
906 auto __pos
907 = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
908 __node._M_node = nullptr;
909 return __pos->second;
910 }
911
912 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
913 typename _Hash, typename _RangeHash, typename _Unused,
914 typename _RehashPolicy, typename _Traits>
915 auto
916 _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal,
917 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits, true>::
918 operator[](key_type&& __k)
919 -> mapped_type&
920 {
921 __hashtable* __h = static_cast<__hashtable*>(this);
922 __hash_code __code = __h->_M_hash_code(__k);
923 std::size_t __bkt = __h->_M_bucket_index(__code);
924 if (auto __node = __h->_M_find_node(__bkt, __k, __code))
925 return __node->_M_v().second;
926
927 typename __hashtable::_Scoped_node __node {
928 __h,
932 };
933 auto __pos
934 = __h->_M_insert_unique_node(__bkt, __code, __node._M_node);
935 __node._M_node = nullptr;
936 return __pos->second;
937 }
938
939 // Partial specialization for unordered_map<const T, U>, see PR 104174.
940 template<typename _Key, typename _Val, typename _Alloc, typename _Equal,
941 typename _Hash, typename _RangeHash, typename _Unused,
942 typename _RehashPolicy, typename _Traits, bool __uniq>
943 struct _Map_base<const _Key, pair<const _Key, _Val>,
944 _Alloc, _Select1st, _Equal, _Hash,
945 _RangeHash, _Unused, _RehashPolicy, _Traits, __uniq>
946 : _Map_base<_Key, pair<const _Key, _Val>, _Alloc, _Select1st, _Equal, _Hash,
947 _RangeHash, _Unused, _RehashPolicy, _Traits, __uniq>
948 { };
949
950 template<typename _Policy>
951 using __has_load_factor = typename _Policy::__has_load_factor;
952
953 /**
954 * Primary class template _Rehash_base.
955 *
956 * Give hashtable the max_load_factor functions and reserve iff the
957 * rehash policy supports it.
958 */
959 template<typename _Key, typename _Value, typename _Alloc,
960 typename _ExtractKey, typename _Equal,
961 typename _Hash, typename _RangeHash, typename _Unused,
962 typename _RehashPolicy, typename _Traits,
963 typename =
964 __detected_or_t<false_type, __has_load_factor, _RehashPolicy>>
965 struct _Rehash_base;
966
967 /// Specialization when rehash policy doesn't provide load factor management.
968 template<typename _Key, typename _Value, typename _Alloc,
969 typename _ExtractKey, typename _Equal,
970 typename _Hash, typename _RangeHash, typename _Unused,
971 typename _RehashPolicy, typename _Traits>
972 struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
973 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
974 false_type /* Has load factor */>
975 {
976 };
977
978 /// Specialization when rehash policy provide load factor management.
979 template<typename _Key, typename _Value, typename _Alloc,
980 typename _ExtractKey, typename _Equal,
981 typename _Hash, typename _RangeHash, typename _Unused,
982 typename _RehashPolicy, typename _Traits>
983 struct _Rehash_base<_Key, _Value, _Alloc, _ExtractKey, _Equal,
984 _Hash, _RangeHash, _Unused, _RehashPolicy, _Traits,
985 true_type /* Has load factor */>
986 {
987 private:
988 using __hashtable = _Hashtable<_Key, _Value, _Alloc, _ExtractKey,
989 _Equal, _Hash, _RangeHash, _Unused,
990 _RehashPolicy, _Traits>;
991
992 public:
993 float
994 max_load_factor() const noexcept
995 {
996 const __hashtable* __this = static_cast<const __hashtable*>(this);
997 return __this->__rehash_policy().max_load_factor();
998 }
999
1000 void
1001 max_load_factor(float __z)
1002 {
1003 __hashtable* __this = static_cast<__hashtable*>(this);
1004 __this->__rehash_policy(_RehashPolicy(__z));
1005 }
1006
1007 void
1008 reserve(std::size_t __n)
1009 {
1010 __hashtable* __this = static_cast<__hashtable*>(this);
1011 __this->rehash(__this->__rehash_policy()._M_bkt_for_elements(__n));
1012 }
1013 };
1014
1015 /**
1016 * Primary class template _Hashtable_ebo_helper.
1017 *
1018 * Helper class using EBO when it is not forbidden (the type is not
1019 * final) and when it is worth it (the type is empty.)
1020 */
1021 template<int _Nm, typename _Tp,
1022 bool __use_ebo = !__is_final(_Tp) && __is_empty(_Tp)>
1023 struct _Hashtable_ebo_helper;
1024
1025 /// Specialization using EBO.
1026 template<int _Nm, typename _Tp>
1027 struct _Hashtable_ebo_helper<_Nm, _Tp, true>
1028 : private _Tp
1029 {
1030 _Hashtable_ebo_helper() noexcept(noexcept(_Tp())) : _Tp() { }
1031
1032 template<typename _OtherTp>
1033 _Hashtable_ebo_helper(_OtherTp&& __tp)
1034 : _Tp(std::forward<_OtherTp>(__tp))
1035 { }
1036
1037 const _Tp& _M_cget() const { return static_cast<const _Tp&>(*this); }
1038 _Tp& _M_get() { return static_cast<_Tp&>(*this); }
1039 };
1040
1041 /// Specialization not using EBO.
1042 template<int _Nm, typename _Tp>
1043 struct _Hashtable_ebo_helper<_Nm, _Tp, false>
1044 {
1045 _Hashtable_ebo_helper() = default;
1046
1047 template<typename _OtherTp>
1048 _Hashtable_ebo_helper(_OtherTp&& __tp)
1049 : _M_tp(std::forward<_OtherTp>(__tp))
1050 { }
1051
1052 const _Tp& _M_cget() const { return _M_tp; }
1053 _Tp& _M_get() { return _M_tp; }
1054
1055 private:
1056 _Tp _M_tp{};
1057 };
1058
  /**
   *  Primary class template _Local_iterator_base.
   *
   *  Base class for local iterators, used to iterate within a bucket
   *  but not between buckets.  Only declared here: the primary template
   *  is never defined, so every instantiation matches one of the two
   *  partial specializations (hash code cached / not cached) below.
   */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __cache_hash_code>
    struct _Local_iterator_base;
1069
  /**
   *  Primary class template _Hash_code_base.
   *
   *  Encapsulates two policy issues that aren't quite orthogonal.
   *    (1) the difference between using a ranged hash function and using
   *        the combination of a hash function and a range-hashing function.
   *        In the former case we don't have such things as hash codes, so
   *        we have a dummy type as placeholder.
   *    (2) Whether or not we cache hash codes.  Caching hash codes is
   *        meaningless if we have a ranged hash function.
   *
   *  We also put the key extraction objects here, for convenience.
   *  Each specialization derives from one or more of the template
   *  parameters to benefit from Ebo.  This is important as this type
   *  is inherited in some cases by the _Local_iterator_base type used
   *  to implement local_iterator and const_local_iterator.  As with
   *  any iterator type we prefer to make it as small as possible.
   */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __cache_hash_code>
    struct _Hash_code_base
    : private _Hashtable_ebo_helper<1, _Hash>
    {
    private:
      using __ebo_hash = _Hashtable_ebo_helper<1, _Hash>;

      // Gives the local iterator implementation access to _M_bucket_index().
      friend struct _Local_iterator_base<_Key, _Value, _ExtractKey,
					 _Hash, _RangeHash, _Unused, false>;

    public:
      typedef _Hash					hasher;

      // Return a copy of the stored hash functor.
      hasher
      hash_function() const
      { return _M_hash(); }

    protected:
      typedef std::size_t 				__hash_code;

      // We need the default constructor for the local iterators and _Hashtable
      // default constructor.
      _Hash_code_base() = default;

      _Hash_code_base(const _Hash& __hash) : __ebo_hash(__hash) { }

      // Compute the hash code of key __k with the stored hash functor.
      __hash_code
      _M_hash_code(const _Key& __k) const
      {
	static_assert(__is_invocable<const _Hash&, const _Key&>{},
	    "hash function must be invocable with an argument of key type");
	return _M_hash()(__k);
      }

      // Heterogeneous-lookup overload: hash a key-like value of type _Kt.
      template<typename _Kt>
	__hash_code
	_M_hash_code_tr(const _Kt& __k) const
	{
	  static_assert(__is_invocable<const _Hash&, const _Kt&>{},
	      "hash function must be invocable with an argument of key type");
	  return _M_hash()(__k);
	}

      // Node without a cached code: recompute the hash from its key.
      __hash_code
      _M_hash_code(const _Hash_node_value<_Value, false>& __n) const
      { return _M_hash_code(_ExtractKey{}(__n._M_v())); }

      // Node with a cached code: just read it back, no hashing needed.
      __hash_code
      _M_hash_code(const _Hash_node_value<_Value, true>& __n) const
      { return __n._M_hash_code; }

      // Map a hash code into [0, __bkt_count) via the range-hash policy.
      std::size_t
      _M_bucket_index(__hash_code __c, std::size_t __bkt_count) const
      { return _RangeHash{}(__c, __bkt_count); }

      // Bucket index of a node without a cached code; noexcept exactly
      // when invoking the hash functor on a key cannot throw.
      std::size_t
      _M_bucket_index(const _Hash_node_value<_Value, false>& __n,
		      std::size_t __bkt_count) const
	noexcept( noexcept(declval<const _Hash&>()(declval<const _Key&>())) )
      {
	return _RangeHash{}(_M_hash_code(_ExtractKey{}(__n._M_v())),
			    __bkt_count);
      }

      // Bucket index of a node with a cached code; never hashes.
      std::size_t
      _M_bucket_index(const _Hash_node_value<_Value, true>& __n,
		      std::size_t __bkt_count) const noexcept
      { return _RangeHash{}(__n._M_hash_code, __bkt_count); }

      // Storing/copying a hash code is a no-op when nodes don't cache it.
      void
      _M_store_code(_Hash_node_code_cache<false>&, __hash_code) const
      { }

      void
      _M_copy_code(_Hash_node_code_cache<false>&,
		   const _Hash_node_code_cache<false>&) const
      { }

      void
      _M_store_code(_Hash_node_code_cache<true>& __n, __hash_code __c) const
      { __n._M_hash_code = __c; }

      void
      _M_copy_code(_Hash_node_code_cache<true>& __to,
		   const _Hash_node_code_cache<true>& __from) const
      { __to._M_hash_code = __from._M_hash_code; }

      // Swap the stored hash functors.
      void
      _M_swap(_Hash_code_base& __x)
      { std::swap(__ebo_hash::_M_get(), __x.__ebo_hash::_M_get()); }

      // Read-only access to the stored hash functor.
      const _Hash&
      _M_hash() const { return __ebo_hash::_M_cget(); }
    };
1185
  /// Partial specialization used when nodes contain a cached hash code.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused>
    struct _Local_iterator_base<_Key, _Value, _ExtractKey,
				_Hash, _RangeHash, _Unused, true>
    : public _Node_iterator_base<_Value, true>
    {
    protected:
      using __base_node_iter = _Node_iterator_base<_Value, true>;
      using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, true>;

      _Local_iterator_base() = default;

      // The __hash_code_base argument is unused here: the hash code cached
      // in each node is enough to recompute bucket indices.
      _Local_iterator_base(const __hash_code_base&,
			   _Hash_node<_Value, true>* __p,
			   std::size_t __bkt, std::size_t __bkt_count)
      : __base_node_iter(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
      { }

      // Advance to the next node; become past-the-end (null) when the
      // next node belongs to a different bucket.
      void
      _M_incr()
      {
	__base_node_iter::_M_incr();
	if (this->_M_cur)
	  {
	    std::size_t __bkt
	      = _RangeHash{}(this->_M_cur->_M_hash_code, _M_bucket_count);
	    if (__bkt != _M_bucket)
	      this->_M_cur = nullptr;
	  }
      }

      std::size_t _M_bucket;        // bucket this iterator is confined to
      std::size_t _M_bucket_count;  // total bucket count of the table

    public:
      std::size_t
      _M_get_bucket() const { return _M_bucket; } // for debug mode
    };
1225
  // Uninitialized storage for a _Hash_code_base.
  // This type is DefaultConstructible and Assignable even if the
  // _Hash_code_base type isn't, so that _Local_iterator_base<..., false>
  // can be DefaultConstructible and Assignable.
  template<typename _Tp, bool _IsEmpty = std::is_empty<_Tp>::value>
    struct _Hash_code_storage
    {
      // Raw, suitably-aligned storage; the owner explicitly
      // placement-constructs and destroys a _Tp inside it.
      __gnu_cxx::__aligned_buffer<_Tp> _M_storage;

      _Tp*
      _M_h() { return _M_storage._M_ptr(); }

      const _Tp*
      _M_h() const { return _M_storage._M_ptr(); }
    };
1241
  // Empty partial specialization for empty _Hash_code_base types: no
  // storage is needed at all, so `this` itself serves as the object's
  // address.
  template<typename _Tp>
    struct _Hash_code_storage<_Tp, true>
    {
      static_assert( std::is_empty<_Tp>::value, "Type must be empty" );

      // As _Tp is an empty type there will be no bytes written/read through
      // the cast pointer, so no strict-aliasing violation.
      _Tp*
      _M_h() { return reinterpret_cast<_Tp*>(this); }

      const _Tp*
      _M_h() const { return reinterpret_cast<const _Tp*>(this); }
    };
1256
  // Convenience alias: storage for the non-caching _Hash_code_base that
  // local iterators need in order to recompute bucket indices.
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused>
    using __hash_code_for_local_iter
      = _Hash_code_storage<_Hash_code_base<_Key, _Value, _ExtractKey,
					   _Hash, _RangeHash, _Unused, false>>;
1262
1263 // Partial specialization used when hash codes are not cached
1264 template<typename _Key, typename _Value, typename _ExtractKey,
1265 typename _Hash, typename _RangeHash, typename _Unused>
1266 struct _Local_iterator_base<_Key, _Value, _ExtractKey,
1267 _Hash, _RangeHash, _Unused, false>
1268 : __hash_code_for_local_iter<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
1269 _Unused>
1270 , _Node_iterator_base<_Value, false>
1271 {
1272 protected:
1273 using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
1274 _Hash, _RangeHash, _Unused, false>;
1275 using __node_iter_base = _Node_iterator_base<_Value, false>;
1276
1277 _Local_iterator_base() : _M_bucket_count(-1) { }
1278
1279 _Local_iterator_base(const __hash_code_base& __base,
1280 _Hash_node<_Value, false>* __p,
1281 std::size_t __bkt, std::size_t __bkt_count)
1282 : __node_iter_base(__p), _M_bucket(__bkt), _M_bucket_count(__bkt_count)
1283 { _M_init(__base); }
1284
1285 ~_Local_iterator_base()
1286 {
1287 if (_M_bucket_count != size_t(-1))
1288 _M_destroy();
1289 }
1290
1291 _Local_iterator_base(const _Local_iterator_base& __iter)
1292 : __node_iter_base(__iter._M_cur), _M_bucket(__iter._M_bucket)
1293 , _M_bucket_count(__iter._M_bucket_count)
1294 {
1295 if (_M_bucket_count != size_t(-1))
1296 _M_init(*__iter._M_h());
1297 }
1298
1299 _Local_iterator_base&
1300 operator=(const _Local_iterator_base& __iter)
1301 {
1302 if (_M_bucket_count != -1)
1303 _M_destroy();
1304 this->_M_cur = __iter._M_cur;
1305 _M_bucket = __iter._M_bucket;
1306 _M_bucket_count = __iter._M_bucket_count;
1307 if (_M_bucket_count != -1)
1308 _M_init(*__iter._M_h());
1309 return *this;
1310 }
1311
1312 void
1313 _M_incr()
1314 {
1315 __node_iter_base::_M_incr();
1316 if (this->_M_cur)
1317 {
1318 std::size_t __bkt = this->_M_h()->_M_bucket_index(*this->_M_cur,
1319 _M_bucket_count);
1320 if (__bkt != _M_bucket)
1321 this->_M_cur = nullptr;
1322 }
1323 }
1324
1325 std::size_t _M_bucket;
1326 std::size_t _M_bucket_count;
1327
1328 void
1329 _M_init(const __hash_code_base& __base)
1330 { ::new(this->_M_h()) __hash_code_base(__base); }
1331
1332 void
1333 _M_destroy() { this->_M_h()->~__hash_code_base(); }
1334
1335 public:
1336 std::size_t
1337 _M_get_bucket() const { return _M_bucket; } // for debug mode
1338 };
1339
  /// local iterators
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __constant_iterators, bool __cache>
    struct _Local_iterator
    : public _Local_iterator_base<_Key, _Value, _ExtractKey,
				  _Hash, _RangeHash, _Unused, __cache>
    {
    private:
      using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, __cache>;
      using __hash_code_base = typename __base_type::__hash_code_base;

    public:
      using value_type = _Value;
      // When __constant_iterators (set-like containers) even the mutable
      // local_iterator only grants const access to elements.
      using pointer = __conditional_t<__constant_iterators,
				      const value_type*, value_type*>;
      using reference = __conditional_t<__constant_iterators,
					const value_type&, value_type&>;
      using difference_type = ptrdiff_t;
      using iterator_category = forward_iterator_tag;

      _Local_iterator() = default;

      _Local_iterator(const __hash_code_base& __base,
		      _Hash_node<_Value, __cache>* __n,
		      std::size_t __bkt, std::size_t __bkt_count)
      : __base_type(__base, __n, __bkt, __bkt_count)
      { }

      reference
      operator*() const
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const
      { return this->_M_cur->_M_valptr(); }

      _Local_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      _Local_iterator
      operator++(int)
      {
	_Local_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
1393
  /// local const_iterators
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Hash, typename _RangeHash, typename _Unused,
	   bool __constant_iterators, bool __cache>
    struct _Local_const_iterator
    : public _Local_iterator_base<_Key, _Value, _ExtractKey,
				  _Hash, _RangeHash, _Unused, __cache>
    {
    private:
      using __base_type = _Local_iterator_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused, __cache>;
      using __hash_code_base = typename __base_type::__hash_code_base;

    public:
      typedef _Value					value_type;
      typedef const value_type*				pointer;
      typedef const value_type&				reference;
      typedef std::ptrdiff_t				difference_type;
      typedef std::forward_iterator_tag			iterator_category;

      _Local_const_iterator() = default;

      _Local_const_iterator(const __hash_code_base& __base,
			    _Hash_node<_Value, __cache>* __n,
			    std::size_t __bkt, std::size_t __bkt_count)
      : __base_type(__base, __n, __bkt, __bkt_count)
      { }

      // Implicit conversion from the mutable local iterator.
      _Local_const_iterator(const _Local_iterator<_Key, _Value, _ExtractKey,
						  _Hash, _RangeHash, _Unused,
						  __constant_iterators,
						  __cache>& __x)
      : __base_type(__x)
      { }

      reference
      operator*() const
      { return this->_M_cur->_M_v(); }

      pointer
      operator->() const
      { return this->_M_cur->_M_valptr(); }

      _Local_const_iterator&
      operator++()
      {
	this->_M_incr();
	return *this;
      }

      _Local_const_iterator
      operator++(int)
      {
	_Local_const_iterator __tmp(*this);
	this->_M_incr();
	return __tmp;
      }
    };
1452
  /**
   *  Primary class template _Hashtable_base.
   *
   *  Helper class adding management of _Equal functor to
   *  _Hash_code_base type.
   *
   *  Base class templates are:
   *    - __detail::_Hash_code_base
   *    - __detail::_Hashtable_ebo_helper
   */
  template<typename _Key, typename _Value, typename _ExtractKey,
	   typename _Equal, typename _Hash, typename _RangeHash,
	   typename _Unused, typename _Traits>
    struct _Hashtable_base
    : public _Hash_code_base<_Key, _Value, _ExtractKey, _Hash, _RangeHash,
			     _Unused, _Traits::__hash_cached::value>,
      private _Hashtable_ebo_helper<0, _Equal>
    {
    public:
      typedef _Key					key_type;
      typedef _Value					value_type;
      typedef _Equal					key_equal;
      typedef std::size_t				size_type;
      typedef std::ptrdiff_t				difference_type;

      using __traits_type = _Traits;
      using __hash_cached = typename __traits_type::__hash_cached;

      using __hash_code_base = _Hash_code_base<_Key, _Value, _ExtractKey,
					       _Hash, _RangeHash, _Unused,
					       __hash_cached::value>;

      using __hash_code = typename __hash_code_base::__hash_code;

    private:
      using _EqualEBO = _Hashtable_ebo_helper<0, _Equal>;

    protected:
      _Hashtable_base() = default;

      _Hashtable_base(const _Hash& __hash, const _Equal& __eq)
      : __hash_code_base(__hash), _EqualEBO(__eq)
      { }

      // True iff __k compares equal to the key of node __n.
      bool
      _M_key_equals(const _Key& __k,
		    const _Hash_node_value<_Value,
					   __hash_cached::value>& __n) const
      {
	static_assert(__is_invocable<const _Equal&, const _Key&, const _Key&>{},
	  "key equality predicate must be invocable with two arguments of "
	  "key type");
	return _M_eq()(__k, _ExtractKey{}(__n._M_v()));
      }

      // Heterogeneous-lookup variant of _M_key_equals.
      template<typename _Kt>
	bool
	_M_key_equals_tr(const _Kt& __k,
			 const _Hash_node_value<_Value,
						__hash_cached::value>& __n) const
	{
	  static_assert(
	    __is_invocable<const _Equal&, const _Kt&, const _Key&>{},
	    "key equality predicate must be invocable with the argument type "
	    "and the key type");
	  return _M_eq()(__k, _ExtractKey{}(__n._M_v()));
	}

#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wc++17-extensions" // if constexpr
      // Compare __k against node __n, using the cached hash code (when
      // nodes cache one) as a cheap reject before invoking the (possibly
      // expensive) equality predicate.
      bool
      _M_equals(const _Key& __k, __hash_code __c,
		const _Hash_node_value<_Value, __hash_cached::value>& __n) const
      {
	if constexpr (__hash_cached::value)
	  if (__c != __n._M_hash_code)
	    return false;

	return _M_key_equals(__k, __n);
      }

      // Heterogeneous-lookup variant of _M_equals.
      template<typename _Kt>
	bool
	_M_equals_tr(const _Kt& __k, __hash_code __c,
		     const _Hash_node_value<_Value,
					    __hash_cached::value>& __n) const
	{
	  if constexpr (__hash_cached::value)
	    if (__c != __n._M_hash_code)
	      return false;

	  return _M_key_equals_tr(__k, __n);
	}

      // Node-to-node equality, with the same cached-hash-code fast path.
      bool
      _M_node_equals(
	const _Hash_node_value<_Value, __hash_cached::value>& __lhn,
	const _Hash_node_value<_Value, __hash_cached::value>& __rhn) const
      {
	if constexpr (__hash_cached::value)
	  if (__lhn._M_hash_code != __rhn._M_hash_code)
	    return false;

	return _M_key_equals(_ExtractKey{}(__lhn._M_v()), __rhn);
      }
#pragma GCC diagnostic pop

      // Swap both the hash functor (via the base) and the equality
      // predicate.
      void
      _M_swap(_Hashtable_base& __x)
      {
	__hash_code_base::_M_swap(__x);
	std::swap(_EqualEBO::_M_get(), __x._EqualEBO::_M_get());
      }

      // Read-only access to the stored equality predicate.
      const _Equal&
      _M_eq() const { return _EqualEBO::_M_cget(); }
    };
1570
  /**
   *  This type deals with all allocation and keeps an allocator instance
   *  through inheritance to benefit from EBO when possible.
   */
  template<typename _NodeAlloc>
    struct _Hashtable_alloc : private _Hashtable_ebo_helper<0, _NodeAlloc>
    {
    private:
      using __ebo_node_alloc = _Hashtable_ebo_helper<0, _NodeAlloc>;

      // Extract the element type _Val from a node type
      // _Hash_node<_Val, _Cache_hash_code>.
      template<typename>
	struct __get_value_type;
      template<typename _Val, bool _Cache_hash_code>
	struct __get_value_type<_Hash_node<_Val, _Cache_hash_code>>
	{ using type = _Val; };

    public:
      using __node_type = typename _NodeAlloc::value_type;
      using __node_alloc_type = _NodeAlloc;
      // Use __gnu_cxx to benefit from _S_always_equal and al.
      using __node_alloc_traits = __gnu_cxx::__alloc_traits<__node_alloc_type>;

      using __value_alloc_traits = typename __node_alloc_traits::template
	rebind_traits<typename __get_value_type<__node_type>::type>;

      using __node_ptr = __node_type*;
      using __node_base = _Hash_node_base;
      using __node_base_ptr = __node_base*;
      // The bucket array stores pointers to node bases, allocated through
      // a rebound copy of the node allocator.
      using __buckets_alloc_type =
	__alloc_rebind<__node_alloc_type, __node_base_ptr>;
      using __buckets_alloc_traits = std::allocator_traits<__buckets_alloc_type>;
      using __buckets_ptr = __node_base_ptr*;

      _Hashtable_alloc() = default;
      _Hashtable_alloc(const _Hashtable_alloc&) = default;
      _Hashtable_alloc(_Hashtable_alloc&&) = default;

      template<typename _Alloc>
	_Hashtable_alloc(_Alloc&& __a)
	: __ebo_node_alloc(std::forward<_Alloc>(__a))
	{ }

      __node_alloc_type&
      _M_node_allocator()
      { return __ebo_node_alloc::_M_get(); }

      const __node_alloc_type&
      _M_node_allocator() const
      { return __ebo_node_alloc::_M_cget(); }

      // Allocate a node and construct an element within it.
      template<typename... _Args>
	__node_ptr
	_M_allocate_node(_Args&&... __args);

      // Destroy the element within a node and deallocate the node.
      void
      _M_deallocate_node(__node_ptr __n);

      // Deallocate a node (the element is expected to be destroyed already).
      void
      _M_deallocate_node_ptr(__node_ptr __n);

      // Deallocate the linked list of nodes pointed to by __n.
      // The elements within the nodes are destroyed.
      void
      _M_deallocate_nodes(__node_ptr __n);

      __buckets_ptr
      _M_allocate_buckets(std::size_t __bkt_count);

      void
      _M_deallocate_buckets(__buckets_ptr, std::size_t __bkt_count);
    };
1645
  // Definitions of class template _Hashtable_alloc's out-of-line member
  // functions.
  template<typename _NodeAlloc>
    template<typename... _Args>
      auto
      _Hashtable_alloc<_NodeAlloc>::_M_allocate_node(_Args&&... __args)
      -> __node_ptr
      {
	auto& __alloc = _M_node_allocator();
	// allocate() may return a fancy pointer; keep it for deallocate()
	// and work through the raw address for construction.
	auto __nptr = __node_alloc_traits::allocate(__alloc, 1);
	__node_ptr __n = std::__to_address(__nptr);
	__try
	  {
	    // Construct the node object first, then the element inside it
	    // via the allocator (which may have a custom construct()).
	    ::new ((void*)__n) __node_type;
	    __node_alloc_traits::construct(__alloc, __n->_M_valptr(),
					   std::forward<_Args>(__args)...);
	    return __n;
	  }
	__catch(...)
	  {
	    // Element construction threw: destroy the node object and
	    // return the memory before rethrowing, so nothing leaks.
	    __n->~__node_type();
	    __node_alloc_traits::deallocate(__alloc, __nptr, 1);
	    __throw_exception_again;
	  }
      }
1671
  // Destroy the element held in __n, then destroy and deallocate the
  // node itself.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node(__node_ptr __n)
    {
      __node_alloc_traits::destroy(_M_node_allocator(), __n->_M_valptr());
      _M_deallocate_node_ptr(__n);
    }
1679
  // Destroy the node object (element already destroyed) and return its
  // memory to the allocator.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::_M_deallocate_node_ptr(__node_ptr __n)
    {
      typedef typename __node_alloc_traits::pointer _Ptr;
      // Convert the raw pointer back to the allocator's (possibly fancy)
      // pointer type before passing it to deallocate().
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__n);
      __n->~__node_type();
      __node_alloc_traits::deallocate(_M_node_allocator(), __ptr, 1);
    }
1689
1690 template<typename _NodeAlloc>
1691 void
1692 _Hashtable_alloc<_NodeAlloc>::_M_deallocate_nodes(__node_ptr __n)
1693 {
1694 while (__n)
1695 {
1696 __node_ptr __tmp = __n;
1697 __n = __n->_M_next();
1698 _M_deallocate_node(__tmp);
1699 }
1700 }
1701
  // Allocate and zero an array of __bkt_count bucket pointers.
  template<typename _NodeAlloc>
    auto
    _Hashtable_alloc<_NodeAlloc>::_M_allocate_buckets(std::size_t __bkt_count)
    -> __buckets_ptr
    {
      // Buckets are allocated through a rebound copy of the node allocator.
      __buckets_alloc_type __alloc(_M_node_allocator());

      auto __ptr = __buckets_alloc_traits::allocate(__alloc, __bkt_count);
      __buckets_ptr __p = std::__to_address(__ptr);
      // Zero the array: an empty bucket is represented by a null pointer.
      __builtin_memset(__p, 0, __bkt_count * sizeof(__node_base_ptr));
      return __p;
    }
1714
  // Return the bucket array's memory to the (rebound) allocator.
  template<typename _NodeAlloc>
    void
    _Hashtable_alloc<_NodeAlloc>::
    _M_deallocate_buckets(__buckets_ptr __bkts,
			  std::size_t __bkt_count)
    {
      typedef typename __buckets_alloc_traits::pointer _Ptr;
      // Recover the allocator's (possibly fancy) pointer type from the
      // raw pointer before deallocating.
      auto __ptr = std::pointer_traits<_Ptr>::pointer_to(*__bkts);
      __buckets_alloc_type __alloc(_M_node_allocator());
      __buckets_alloc_traits::deallocate(__alloc, __ptr, __bkt_count);
    }
1726
1727 ///@} hashtable-detail
1728} // namespace __detail
1729/// @endcond
1730_GLIBCXX_END_NAMESPACE_VERSION
1731} // namespace std
1732
1733#endif // _HASHTABLE_POLICY_H
constexpr complex< _Tp > operator*(const complex< _Tp > &__x, const complex< _Tp > &__y)
Return new complex value x times y.
Definition complex:405
__bool_constant< true > true_type
The type used as a compile-time boolean with true value.
Definition type_traits:116
__bool_constant< false > false_type
The type used as a compile-time boolean with false value.
Definition type_traits:119
constexpr tuple< _Elements &&... > forward_as_tuple(_Elements &&... __args) noexcept
Create a tuple of lvalue or rvalue references to the arguments.
Definition tuple:2678
constexpr std::remove_reference< _Tp >::type && move(_Tp &&__t) noexcept
Convert a value to an rvalue.
Definition move.h:127
constexpr piecewise_construct_t piecewise_construct
Tag for piecewise construction of std::pair objects.
Definition stl_pair.h:82
constexpr _Tp * __addressof(_Tp &__r) noexcept
Same as C++11 std::addressof.
Definition move.h:51
constexpr _Tp && forward(typename std::remove_reference< _Tp >::type &__t) noexcept
Forward an lvalue.
Definition move.h:70
ISO C++ entities toplevel namespace is std.
constexpr iterator_traits< _InputIterator >::difference_type distance(_InputIterator __first, _InputIterator __last)
A generalization of pointer arithmetic.
__numeric_traits_integer< _Tp > __int_traits
Convenience alias for __numeric_traits<integer-type>.
constexpr _Iterator __base(_Iterator __it)
Primary class template, tuple.
Definition tuple:834
is_empty
Definition type_traits:948
Uniform interface to all allocator types.
Uniform interface to all pointer-like types.
Definition ptr_traits.h:178
Struct holding two objects of arbitrary type.
Definition stl_pair.h:286
Forward iterators support a superset of input iterator operations.
Traits class for iterators.
Uniform interface to C++98 and C++11 allocators.