// Internal policy header for TR1 unordered_set and unordered_map -*- C++ -*-

// Copyright (C) 2010-2021 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file tr1/hashtable_policy.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly.
 *  @headername{tr1/unordered_map, tr1/unordered_set}
 */

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

namespace tr1
{
namespace __detail
{
  // Helper function: return distance(first, last) for forward
  // iterators, or 0 for input iterators.
  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
                  std::input_iterator_tag)
    { return 0; }

  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last,
                  std::forward_iterator_tag)
    { return std::distance(__first, __last); }

  template<class _Iterator>
    inline typename std::iterator_traits<_Iterator>::difference_type
    __distance_fw(_Iterator __first, _Iterator __last)
    {
      typedef typename std::iterator_traits<_Iterator>::iterator_category _Tag;
      return __distance_fw(__first, __last, _Tag());
    }
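
  // Illustration: _Hashtable's range constructor passes its iterator pair
  // through __distance_fw, so a multi-pass range lets it size the bucket
  // array up front while a single-pass range costs nothing extra.  Roughly:
  //
  //   std::vector<int> __v(10);
  //   __distance_fw(__v.begin(), __v.end());        // 10 (forward iterators)
  //
  //   std::istream_iterator<int> __in(std::cin), __eof;
  //   __distance_fw(__in, __eof);                    // 0 (input iterators)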

  // Auxiliary types used for all instantiations of _Hashtable: nodes
  // and iterators.

  // Nodes, used to wrap elements stored in the hash table.  A policy
  // template parameter of class template _Hashtable controls whether
  // nodes also store a hash code.  In some cases (e.g. strings) this
  // may be a performance win.
  template<typename _Value, bool __cache_hash_code>
    struct _Hash_node;

  template<typename _Value>
    struct _Hash_node<_Value, true>
    {
      _Value       _M_v;
      std::size_t  _M_hash_code;
      _Hash_node*  _M_next;
    };

  template<typename _Value>
    struct _Hash_node<_Value, false>
    {
      _Value       _M_v;
      _Hash_node*  _M_next;
    };
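
  // Illustration: caching trades one std::size_t per node for cheaper
  // rehashing and lookups.  The _Hash_code_base specializations further down
  // use the stored value in _M_bucket_index and _M_compare instead of
  // re-hashing or re-comparing the key:
  //
  //   _Hash_node<std::string, true> __n;
  //   // __n._M_hash_code is written once by _M_store_code and then reused
  //   // both when rehashing and when rejecting non-matching nodes in find().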

  // Local iterators, used to iterate within a bucket but not between
  // buckets.
  template<typename _Value, bool __cache>
    struct _Node_iterator_base
    {
      _Node_iterator_base(_Hash_node<_Value, __cache>* __p)
      : _M_cur(__p) { }

      void
      _M_incr()
      { _M_cur = _M_cur->_M_next; }

      _Hash_node<_Value, __cache>*  _M_cur;
    };

  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur == __y._M_cur; }

  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Node_iterator_base<_Value, __cache>& __x,
               const _Node_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur != __y._M_cur; }

  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value value_type;
      typedef typename
        __gnu_cxx::__conditional_type<__constant_iterators,
                                      const _Value*, _Value*>::__type
        pointer;
      typedef typename
        __gnu_cxx::__conditional_type<__constant_iterators,
                                      const _Value&, _Value&>::__type
        reference;
      typedef std::ptrdiff_t            difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Node_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur->_M_v); }

      _Node_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Node_iterator
      operator++(int)
      {
        _Node_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };

  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Node_const_iterator
    : public _Node_iterator_base<_Value, __cache>
    {
      typedef _Value                    value_type;
      typedef const _Value*             pointer;
      typedef const _Value&             reference;
      typedef std::ptrdiff_t            difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Node_const_iterator()
      : _Node_iterator_base<_Value, __cache>(0) { }

      explicit
      _Node_const_iterator(_Hash_node<_Value, __cache>* __p)
      : _Node_iterator_base<_Value, __cache>(__p) { }

      _Node_const_iterator(const _Node_iterator<_Value, __constant_iterators,
                           __cache>& __x)
      : _Node_iterator_base<_Value, __cache>(__x._M_cur) { }

      reference
      operator*() const
      { return this->_M_cur->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur->_M_v); }

      _Node_const_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Node_const_iterator
      operator++(int)
      {
        _Node_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };
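
  // Illustration: these node iterators surface as the unordered containers'
  // local_iterator / const_local_iterator types, which walk a single bucket.
  // Assuming a populated std::tr1::unordered_set<int> __s:
  //
  //   for (std::size_t __b = 0; __b < __s.bucket_count(); ++__b)
  //     for (std::tr1::unordered_set<int>::const_local_iterator
  //            __it = __s.begin(__b); __it != __s.end(__b); ++__it)
  //       { /* *__it is stored in bucket __b */ }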

  template<typename _Value, bool __cache>
    struct _Hashtable_iterator_base
    {
      _Hashtable_iterator_base(_Hash_node<_Value, __cache>* __node,
                               _Hash_node<_Value, __cache>** __bucket)
      : _M_cur_node(__node), _M_cur_bucket(__bucket) { }

      void
      _M_incr()
      {
        _M_cur_node = _M_cur_node->_M_next;
        if (!_M_cur_node)
          _M_incr_bucket();
      }

      void
      _M_incr_bucket();

      _Hash_node<_Value, __cache>*   _M_cur_node;
      _Hash_node<_Value, __cache>**  _M_cur_bucket;
    };

  // Global iterators, used for arbitrary iteration within a hash
  // table.  Larger and more expensive than local iterators.
  template<typename _Value, bool __cache>
    void
    _Hashtable_iterator_base<_Value, __cache>::
    _M_incr_bucket()
    {
      ++_M_cur_bucket;

      // This loop requires the bucket array to have a non-null sentinel.
      while (!*_M_cur_bucket)
        ++_M_cur_bucket;
      _M_cur_node = *_M_cur_bucket;
    }

  template<typename _Value, bool __cache>
    inline bool
    operator==(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node == __y._M_cur_node; }

  template<typename _Value, bool __cache>
    inline bool
    operator!=(const _Hashtable_iterator_base<_Value, __cache>& __x,
               const _Hashtable_iterator_base<_Value, __cache>& __y)
    { return __x._M_cur_node != __y._M_cur_node; }

  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value value_type;
      typedef typename
        __gnu_cxx::__conditional_type<__constant_iterators,
                                      const _Value*, _Value*>::__type
        pointer;
      typedef typename
        __gnu_cxx::__conditional_type<__constant_iterators,
                                      const _Value&, _Value&>::__type
        reference;
      typedef std::ptrdiff_t            difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Hashtable_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_iterator(_Hash_node<_Value, __cache>* __p,
                          _Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      explicit
      _Hashtable_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur_node->_M_v); }

      _Hashtable_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Hashtable_iterator
      operator++(int)
      {
        _Hashtable_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };

  template<typename _Value, bool __constant_iterators, bool __cache>
    struct _Hashtable_const_iterator
    : public _Hashtable_iterator_base<_Value, __cache>
    {
      typedef _Value                    value_type;
      typedef const _Value*             pointer;
      typedef const _Value&             reference;
      typedef std::ptrdiff_t            difference_type;
      typedef std::forward_iterator_tag iterator_category;

      _Hashtable_const_iterator()
      : _Hashtable_iterator_base<_Value, __cache>(0, 0) { }

      _Hashtable_const_iterator(_Hash_node<_Value, __cache>* __p,
                                _Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(__p, __b) { }

      explicit
      _Hashtable_const_iterator(_Hash_node<_Value, __cache>** __b)
      : _Hashtable_iterator_base<_Value, __cache>(*__b, __b) { }

      _Hashtable_const_iterator(const _Hashtable_iterator<_Value,
                                __constant_iterators, __cache>& __x)
      : _Hashtable_iterator_base<_Value, __cache>(__x._M_cur_node,
                                                  __x._M_cur_bucket) { }

      reference
      operator*() const
      { return this->_M_cur_node->_M_v; }

      pointer
      operator->() const
      { return std::__addressof(this->_M_cur_node->_M_v); }

      _Hashtable_const_iterator&
      operator++()
      {
        this->_M_incr();
        return *this;
      }

      _Hashtable_const_iterator
      operator++(int)
      {
        _Hashtable_const_iterator __tmp(*this);
        this->_M_incr();
        return __tmp;
      }
    };


  // Many of class template _Hashtable's template parameters are policy
  // classes.  These are defaults for the policies.

  // Default range hashing function: use division to fold a large number
  // into the range [0, N).
  struct _Mod_range_hashing
  {
    typedef std::size_t first_argument_type;
    typedef std::size_t second_argument_type;
    typedef std::size_t result_type;

    result_type
    operator()(first_argument_type __num, second_argument_type __den) const
    { return __num % __den; }
  };

  // Default ranged hash function H.  In principle it should be a
  // function object composed from objects of type H1 and H2 such that
  // h(k, N) = h2(h1(k), N), but that would mean making extra copies of
  // h1 and h2.  So instead we'll just use a tag to tell class template
  // hashtable to do that composition.
  struct _Default_ranged_hash { };
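
  // Illustration: with the default policies the ranged hash is the
  // composition h(k, N) = h2(h1(k), N), i.e. "hash the key, then fold the
  // code into the bucket range by modulus":
  //
  //   std::tr1::hash<int> __h1;
  //   _Mod_range_hashing  __h2;
  //   std::size_t __bkt = __h2(__h1(42), 11);   // same as __h1(42) % 11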

  // Default value for rehash policy.  Bucket size is (usually) the
  // smallest prime that keeps the load factor small enough.
  struct _Prime_rehash_policy
  {
    _Prime_rehash_policy(float __z = 1.0)
    : _M_max_load_factor(__z), _M_growth_factor(2.f), _M_next_resize(0) { }

    float
    max_load_factor() const
    { return _M_max_load_factor; }

    // Return a bucket size no smaller than n.
    std::size_t
    _M_next_bkt(std::size_t __n) const;

    // Return a bucket count appropriate for n elements.
    std::size_t
    _M_bkt_for_elements(std::size_t __n) const;

    // __n_bkt is current bucket count, __n_elt is current element count,
    // and __n_ins is number of elements to be inserted.  Do we need to
    // increase bucket count?  If so, return make_pair(true, n), where n
    // is the new bucket count.  If not, return make_pair(false, 0).
    std::pair<bool, std::size_t>
    _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                   std::size_t __n_ins) const;

    enum { _S_n_primes = sizeof(unsigned long) != 8 ? 256 : 256 + 48 };

    float                _M_max_load_factor;
    float                _M_growth_factor;
    mutable std::size_t  _M_next_resize;
  };

  extern const unsigned long __prime_list[];

  // XXX This is a hack.  There's no good reason for any of
  // _Prime_rehash_policy's member functions to be inline.

  // Return a prime no smaller than n.
  inline std::size_t
  _Prime_rehash_policy::
  _M_next_bkt(std::size_t __n) const
  {
    // Don't include the last prime in the search, so that anything
    // higher than the second-to-last prime returns a past-the-end
    // iterator that can be dereferenced to get the last prime.
    const unsigned long* __p
      = std::lower_bound(__prime_list, __prime_list + _S_n_primes - 1, __n);
    _M_next_resize =
      static_cast<std::size_t>(__builtin_ceil(*__p * _M_max_load_factor));
    return *__p;
  }

  // Return the smallest prime p such that alpha p >= n, where alpha
  // is the load factor.
  inline std::size_t
  _Prime_rehash_policy::
  _M_bkt_for_elements(std::size_t __n) const
  {
    const float __min_bkts = __n / _M_max_load_factor;
    return _M_next_bkt(__builtin_ceil(__min_bkts));
  }
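
  // Illustration (assuming the usual contents of __prime_list): with the
  // default maximum load factor of 1.0,
  //
  //   _Prime_rehash_policy __pol;
  //   __pol._M_bkt_for_elements(10);   // needs >= ceil(10 / 1.0) buckets,
  //                                    // so returns the next prime, 11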

  // Finds the smallest prime p such that alpha p > __n_elt + __n_ins.
  // If p > __n_bkt, return make_pair(true, p); otherwise return
  // make_pair(false, 0).  In principle this isn't very different from
  // _M_bkt_for_elements.

  // The only tricky part is that we're caching the element count at
  // which we need to rehash, so we don't have to do a floating-point
  // multiply for every insertion.

  inline std::pair<bool, std::size_t>
  _Prime_rehash_policy::
  _M_need_rehash(std::size_t __n_bkt, std::size_t __n_elt,
                 std::size_t __n_ins) const
  {
    if (__n_elt + __n_ins > _M_next_resize)
      {
        float __min_bkts = ((float(__n_ins) + float(__n_elt))
                            / _M_max_load_factor);
        if (__min_bkts > __n_bkt)
          {
            __min_bkts = std::max(__min_bkts, _M_growth_factor * __n_bkt);
            return std::make_pair(true,
                                  _M_next_bkt(__builtin_ceil(__min_bkts)));
          }
        else
          {
            _M_next_resize = static_cast<std::size_t>
              (__builtin_ceil(__n_bkt * _M_max_load_factor));
            return std::make_pair(false, 0);
          }
      }
    else
      return std::make_pair(false, 0);
  }
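
  // Illustration (assuming the usual contents of __prime_list): inserting
  // one more element into a full 11-bucket table with the default policy
  // forces a grow.  min_bkts = (11 + 1) / 1.0 = 12 exceeds 11, is raised to
  // max(12, 2 * 11) = 22 by the growth factor, and is then rounded up to a
  // prime:
  //
  //   _Prime_rehash_policy __pol;
  //   std::pair<bool, std::size_t> __r = __pol._M_need_rehash(11, 11, 1);
  //   // __r.first == true, __r.second == 23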

  // Base classes for std::tr1::_Hashtable.  We define these base
  // classes because in some cases we want to do different things
  // depending on the value of a policy class.  In some cases the
  // policy class affects which member functions and nested typedefs
  // are defined; we handle that by specializing base class templates.
  // Several of the base class templates need to access other members
  // of class template _Hashtable, so we use the "curiously recurring
  // template pattern" for them.

  // class template _Map_base.  If the hashtable has a value type of the
  // form pair<T1, T2> and a key extraction policy that returns the
  // first part of the pair, the hashtable gets a mapped_type typedef.
  // If it satisfies those criteria and also has unique keys, then it
  // also gets an operator[].
  template<typename _Key, typename _Value, typename _Ex, bool __unique,
           typename _Hashtable>
    struct _Map_base { };

  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, false, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;
    };

  template<typename _Key, typename _Pair, typename _Hashtable>
    struct _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>
    {
      typedef typename _Pair::second_type mapped_type;

      mapped_type&
      operator[](const _Key& __k);
    };

  template<typename _Key, typename _Pair, typename _Hashtable>
    typename _Map_base<_Key, _Pair, std::_Select1st<_Pair>,
                       true, _Hashtable>::mapped_type&
    _Map_base<_Key, _Pair, std::_Select1st<_Pair>, true, _Hashtable>::
    operator[](const _Key& __k)
    {
      _Hashtable* __h = static_cast<_Hashtable*>(this);
      typename _Hashtable::_Hash_code_type __code = __h->_M_hash_code(__k);
      std::size_t __n = __h->_M_bucket_index(__k, __code,
                                             __h->_M_bucket_count);

      typename _Hashtable::_Node* __p =
        __h->_M_find_node(__h->_M_buckets[__n], __k, __code);
      if (!__p)
        return __h->_M_insert_bucket(std::make_pair(__k, mapped_type()),
                                     __n, __code)->second;
      return (__p->_M_v).second;
    }
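
  // Illustration: this operator[] is what gives std::tr1::unordered_map its
  // map-like access; a lookup miss inserts a value-initialized mapped_type
  // and returns a reference to the new slot:
  //
  //   std::tr1::unordered_map<std::string, int> __m;
  //   ++__m["apple"];   // missing: inserts ("apple", 0), then increments to 1
  //   ++__m["apple"];   // present: returns the existing slot, now 2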

  // class template _Rehash_base.  Give hashtable the max_load_factor
  // functions iff the rehash policy is _Prime_rehash_policy.
  template<typename _RehashPolicy, typename _Hashtable>
    struct _Rehash_base { };

  template<typename _Hashtable>
    struct _Rehash_base<_Prime_rehash_policy, _Hashtable>
    {
      float
      max_load_factor() const
      {
        const _Hashtable* __this = static_cast<const _Hashtable*>(this);
        return __this->__rehash_policy().max_load_factor();
      }

      void
      max_load_factor(float __z)
      {
        _Hashtable* __this = static_cast<_Hashtable*>(this);
        __this->__rehash_policy(_Prime_rehash_policy(__z));
      }
    };

  // Class template _Hash_code_base.  Encapsulates two policy issues that
  // aren't quite orthogonal.
  //   (1) the difference between using a ranged hash function and using
  //       the combination of a hash function and a range-hashing function.
  //       In the former case we don't have such things as hash codes, so
  //       we have a dummy type as placeholder.
  //   (2) Whether or not we cache hash codes.  Caching hash codes is
  //       meaningless if we have a ranged hash function.
  // We also put the key extraction and equality comparison function
  // objects here, for convenience.

  // Primary template: unused except as a hook for specializations.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash,
           bool __cache_hash_code>
    struct _Hash_code_base;

  // Specialization: ranged hash function, no caching hash codes.  H1
  // and H2 are provided but ignored.  We define a dummy hash code type.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Hash, false>
    {
    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1&, const _H2&, const _Hash& __h)
      : _M_extract(__ex), _M_eq(__eq), _M_ranged_hash(__h) { }

      typedef void* _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __key) const
      { return 0; }

      std::size_t
      _M_bucket_index(const _Key& __k, _Hash_code_type,
                      std::size_t __n) const
      { return _M_ranged_hash(__k, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
                      std::size_t __n) const
      { return _M_ranged_hash(_M_extract(__p->_M_v), __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type,
                 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
                   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_ranged_hash, __x._M_ranged_hash);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _Hash        _M_ranged_hash;
    };


  // No specialization for ranged hash function while caching hash codes.
  // That combination is meaningless, and trying to do it is an error.


  // Specialization: ranged hash function, cache hash codes.  This
  // combination is meaningless, so we provide only a declaration
  // and no definition.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2, typename _Hash>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Hash, true>;

  // Specialization: hash function and range-hashing function, no
  // caching of hash codes.  H is provided but ignored.  Provides
  // typedef and accessor required by TR1.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Default_ranged_hash, false>
    {
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1& __h1, const _H2& __h2,
                      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
                      std::size_t __n) const
      { return _M_h2(__c, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, false>* __p,
                      std::size_t __n) const
      { return _M_h2(_M_h1(_M_extract(__p->_M_v)), __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type,
                 _Hash_node<_Value, false>* __n) const
      { return _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, false>*, _Hash_code_type) const
      { }

      void
      _M_copy_code(_Hash_node<_Value, false>*,
                   const _Hash_node<_Value, false>*) const
      { }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_h1, __x._M_h1);
        std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _H1          _M_h1;
      _H2          _M_h2;
    };

  // Specialization: hash function and range-hashing function,
  // caching hash codes.  H is provided but ignored.  Provides
  // typedef and accessor required by TR1.
  template<typename _Key, typename _Value,
           typename _ExtractKey, typename _Equal,
           typename _H1, typename _H2>
    struct _Hash_code_base<_Key, _Value, _ExtractKey, _Equal, _H1, _H2,
                           _Default_ranged_hash, true>
    {
      typedef _H1 hasher;

      hasher
      hash_function() const
      { return _M_h1; }

    protected:
      _Hash_code_base(const _ExtractKey& __ex, const _Equal& __eq,
                      const _H1& __h1, const _H2& __h2,
                      const _Default_ranged_hash&)
      : _M_extract(__ex), _M_eq(__eq), _M_h1(__h1), _M_h2(__h2) { }

      typedef std::size_t _Hash_code_type;

      _Hash_code_type
      _M_hash_code(const _Key& __k) const
      { return _M_h1(__k); }

      std::size_t
      _M_bucket_index(const _Key&, _Hash_code_type __c,
                      std::size_t __n) const
      { return _M_h2(__c, __n); }

      std::size_t
      _M_bucket_index(const _Hash_node<_Value, true>* __p,
                      std::size_t __n) const
      { return _M_h2(__p->_M_hash_code, __n); }

      bool
      _M_compare(const _Key& __k, _Hash_code_type __c,
                 _Hash_node<_Value, true>* __n) const
      { return __c == __n->_M_hash_code && _M_eq(__k, _M_extract(__n->_M_v)); }

      void
      _M_store_code(_Hash_node<_Value, true>* __n, _Hash_code_type __c) const
      { __n->_M_hash_code = __c; }

      void
      _M_copy_code(_Hash_node<_Value, true>* __to,
                   const _Hash_node<_Value, true>* __from) const
      { __to->_M_hash_code = __from->_M_hash_code; }

      void
      _M_swap(_Hash_code_base& __x)
      {
        std::swap(_M_extract, __x._M_extract);
        std::swap(_M_eq, __x._M_eq);
        std::swap(_M_h1, __x._M_h1);
        std::swap(_M_h2, __x._M_h2);
      }

    protected:
      _ExtractKey  _M_extract;
      _Equal       _M_eq;
      _H1          _M_h1;
      _H2          _M_h2;
    };
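
  // Illustration: in this caching specialization a lookup composes the
  // members above as
  //
  //   __code = _M_h1(__k);                        // _M_hash_code
  //   __bkt  = _M_h2(__code, __bucket_count);     // _M_bucket_index
  //   match  = __code == __node->_M_hash_code
  //            && _M_eq(__k, _M_extract(__node->_M_v));   // _M_compare
  //
  // so nodes whose cached code differs are rejected without invoking _M_eq.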
} // namespace __detail
}

_GLIBCXX_END_NAMESPACE_VERSION
}