#ifndef __TBB__concurrent_unordered_impl_H
#define __TBB__concurrent_unordered_impl_H

#if !defined(__TBB_concurrent_unordered_map_H) && !defined(__TBB_concurrent_unordered_set_H) && !defined(__TBB_concurrent_hash_map_H)
#error Do not #include this internal file directly; use public TBB headers instead.
#endif

#include "../tbb_stddef.h"

#include <iterator>
#include <utility>      // Need std::pair
#include <functional>   // Need std::equal_to (in ../concurrent_unordered_*.h)
#include <cstring>      // Need std::memset
#include __TBB_STD_SWAP_HEADER

#include "../atomic.h"
#include "../tbb_exception.h"
#include "../tbb_allocator.h"

#if __TBB_INITIALIZER_LISTS_PRESENT
#include <initializer_list>
#endif

#if __TBB_CPP11_RVALUE_REF_PRESENT && !__TBB_IMPLICIT_COPY_DELETION_BROKEN
#define __TBB_UNORDERED_NODE_HANDLE_PRESENT 1
#endif

#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
#include "_node_handle_impl.h"
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

namespace tbb {
namespace interface5 {
//! @cond INTERNAL
namespace internal {
// Forward declarations
template <typename T, typename Allocator>
class split_ordered_list;

template <typename Traits>
class concurrent_unordered_base;
// Forward list iterator over all nodes (dummy and real) of a split-ordered list
template<class Solist, typename Value>
class flist_iterator : public std::iterator<std::forward_iterator_tag, Value>
{
    template <typename T, typename Allocator>
    friend class split_ordered_list;

    template <typename Traits>
    friend class concurrent_unordered_base;

    template<class M, typename V>
    friend class flist_iterator;

    typedef typename Solist::nodeptr_t nodeptr_t;
public:
    typedef typename Solist::value_type value_type;
    typedef typename Solist::difference_type difference_type;
    typedef typename Solist::pointer pointer;
    typedef typename Solist::reference reference;

    flist_iterator() : my_node_ptr(0) {}
    flist_iterator( const flist_iterator<Solist, typename Solist::value_type> &other )
        : my_node_ptr(other.my_node_ptr) {}

    flist_iterator& operator=( const flist_iterator<Solist, typename Solist::value_type> &other ) {
        my_node_ptr = other.my_node_ptr;
        return *this;
    }

    reference operator*() const { return my_node_ptr->my_element; }
    pointer operator->() const { return &**this; }

    flist_iterator& operator++() {
        my_node_ptr = my_node_ptr->my_next;
        return *this;
    }

    flist_iterator operator++(int) {
        flist_iterator tmp = *this;
        ++*this;
        return tmp;
    }

protected:
    flist_iterator(nodeptr_t pnode) : my_node_ptr(pnode) {}
    nodeptr_t get_node_ptr() const { return my_node_ptr; }

    nodeptr_t my_node_ptr;

    template<typename M, typename T, typename U>
    friend bool operator==( const flist_iterator<M,T>& i, const flist_iterator<M,U>& j );

    template<typename M, typename T, typename U>
    friend bool operator!=( const flist_iterator<M,T>& i, const flist_iterator<M,U>& j );
};
template<typename Solist, typename T, typename U>
bool operator==( const flist_iterator<Solist,T>& i, const flist_iterator<Solist,U>& j ) {
    return i.my_node_ptr == j.my_node_ptr;
}
template<typename Solist, typename T, typename U>
bool operator!=( const flist_iterator<Solist,T>& i, const flist_iterator<Solist,U>& j ) {
    return i.my_node_ptr != j.my_node_ptr;
}
// Solist iterator that skips dummy nodes, exposing only the real elements
template<class Solist, typename Value>
class solist_iterator : public flist_iterator<Solist, Value>
{
    typedef flist_iterator<Solist, Value> base_type;
    typedef typename Solist::nodeptr_t nodeptr_t;

    using base_type::get_node_ptr;

    template <typename T, typename Allocator>
    friend class split_ordered_list;

    template<class M, typename V>
    friend class solist_iterator;

    template <typename Traits>
    friend class concurrent_unordered_base;

    template<typename M, typename T, typename U>
    friend bool operator==( const solist_iterator<M,T>& i, const solist_iterator<M,U>& j );

    template<typename M, typename T, typename U>
    friend bool operator!=( const solist_iterator<M,T>& i, const solist_iterator<M,U>& j );

    const Solist *my_list_ptr;
    solist_iterator(nodeptr_t pnode, const Solist *plist) : base_type(pnode), my_list_ptr(plist) {}

public:
    typedef typename Solist::value_type value_type;
    typedef typename Solist::difference_type difference_type;
    typedef typename Solist::pointer pointer;
    typedef typename Solist::reference reference;

    solist_iterator() {}
    solist_iterator( const solist_iterator<Solist, typename Solist::value_type> &other )
        : base_type(other), my_list_ptr(other.my_list_ptr) {}

    reference operator*() const {
        return this->base_type::operator*();
    }

    pointer operator->() const {
        return (&**this);
    }

    solist_iterator& operator++() {
        // Advance the underlying raw iterator, then skip any dummy nodes
        do ++(*(base_type *)this);
        while (get_node_ptr() != NULL && get_node_ptr()->is_dummy());
        return (*this);
    }

    solist_iterator operator++(int) {
        solist_iterator tmp = *this;
        do ++*this;
        while (get_node_ptr() != NULL && get_node_ptr()->is_dummy());
        return (tmp);
    }
};
template<typename Solist, typename T, typename U>
bool operator==( const solist_iterator<Solist,T>& i, const solist_iterator<Solist,U>& j ) {
    return i.my_node_ptr == j.my_node_ptr && i.my_list_ptr == j.my_list_ptr;
}
template<typename Solist, typename T, typename U>
bool operator!=( const solist_iterator<Solist,T>& i, const solist_iterator<Solist,U>& j ) {
    return i.my_node_ptr != j.my_node_ptr || i.my_list_ptr != j.my_list_ptr;
}
// Order keys are hashes whose bits have been reversed
typedef size_t sokey_t;

// Split-ordered list: a single sorted forward list into which dummy nodes
// (bucket heads) and real elements are interleaved
template <typename T, typename Allocator>
class split_ordered_list
{
public:
    typedef split_ordered_list<T, Allocator> self_type;
    typedef typename tbb::internal::allocator_rebind<Allocator, T>::type allocator_type;
    struct node;
    typedef node *nodeptr_t;

    typedef typename tbb::internal::allocator_traits<allocator_type>::value_type value_type;
    typedef typename tbb::internal::allocator_traits<allocator_type>::size_type size_type;
    typedef typename tbb::internal::allocator_traits<allocator_type>::difference_type difference_type;
    typedef typename tbb::internal::allocator_traits<allocator_type>::pointer pointer;
    typedef typename tbb::internal::allocator_traits<allocator_type>::const_pointer const_pointer;
    // No support for reference/const_reference in allocator traits
    typedef typename allocator_type::value_type& reference;
    typedef const typename allocator_type::value_type& const_reference;

    // Raw iterators walk every node, including dummies; public iterators skip dummies
    typedef flist_iterator<self_type, const value_type> raw_const_iterator;
    typedef flist_iterator<self_type, value_type> raw_iterator;
    typedef solist_iterator<self_type, const value_type> const_iterator;
    typedef solist_iterator<self_type, value_type> iterator;
    // List node
    struct node : tbb::internal::no_assign
    {
        // Initialize the node with the given order key
        void init(sokey_t order_key) {
            my_order_key = order_key;
            my_next = NULL;
        }

        // Return the order key (needed for hashing)
        sokey_t get_order_key() const {
            return my_order_key;
        }

        // Common interface for accessing the stored element (used by node handles)
        value_type* storage() {
            return reinterpret_cast<value_type*>(&my_element);
        }

        value_type& value() {
            return *storage();
        }

        // Atomically splice new_node after this node, but only if my_next
        // still equals the cached current_node
        nodeptr_t atomic_set_next(nodeptr_t new_node, nodeptr_t current_node)
        {
            nodeptr_t exchange_node = tbb::internal::as_atomic(my_next).compare_and_swap(new_node, current_node);

            if (exchange_node == current_node)
            {
                // Operation succeeded, return the new node
                return new_node;
            }
            else
            {
                // Operation failed, return the "interfering" node
                return exchange_node;
            }
        }

        // Dummy (bucket head) nodes have even order keys;
        // real elements have odd order keys
        bool is_dummy() const {
            return (my_order_key & 0x1) == 0;
        }

        nodeptr_t  my_next;      // Next element in the list
        value_type my_element;   // Element storage
        sokey_t    my_order_key; // Order key for this element
    };
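    // Illustrative sketch of the order-key convention (assuming 8-bit keys
    // for brevity; this comment is not part of the original header). A real
    // element's order key is its bit-reversed hash with the low bit set; a
    // bucket's dummy key is the bit-reversed bucket index with the low bit
    // clear:
    //
    //   hash(x)        = 0b00000110
    //   bit-reversed   = 0b01100000
    //   element key    = 0b01100001   // LSB set   -> is_dummy() == false
    //   dummy key      = 0b01100000   // LSB clear -> is_dummy() == true
    //
    // Sorting by these keys places each dummy node immediately before the
    // elements of its bucket, so every bucket is a contiguous run of the list.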
    // Allocate a new node with the given order key; used to allocate dummy nodes
    nodeptr_t create_node(sokey_t order_key) {
        nodeptr_t pnode = my_node_allocator.allocate(1);
        pnode->init(order_key);
        return (pnode);
    }

    // Allocate a new node with the given order key and value
    template<typename Arg>
    nodeptr_t create_node(sokey_t order_key, __TBB_FORWARDING_REF(Arg) t,
                          /*AllowCreate=*/tbb::internal::true_type = tbb::internal::true_type()) {
        nodeptr_t pnode = my_node_allocator.allocate(1);

        // If the element constructor throws, deallocate the node instead of leaking it
        __TBB_TRY {
            new(static_cast<void*>(&pnode->my_element)) T(tbb::internal::forward<Arg>(t));
            pnode->init(order_key);
        } __TBB_CATCH(...) {
            my_node_allocator.deallocate(pnode, 1);
            __TBB_RETHROW();
        }

        return (pnode);
    }

    // A compile-time dispatch helper that is never intended to be called
    template<typename Arg>
    nodeptr_t create_node(sokey_t, __TBB_FORWARDING_REF(Arg), /*AllowCreate=*/tbb::internal::false_type) {
        __TBB_ASSERT(false, "This compile-time helper should never get called");
        return nodeptr_t();
    }

    // Allocate a new node, constructing its element from the given arguments
    template<typename __TBB_PARAMETER_PACK Args>
    nodeptr_t create_node_v(__TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK args) {
        nodeptr_t pnode = my_node_allocator.allocate(1);

        __TBB_TRY {
            new(static_cast<void*>(&pnode->my_element)) T(__TBB_PACK_EXPANSION(tbb::internal::forward<Args>(args)));
        } __TBB_CATCH(...) {
            my_node_allocator.deallocate(pnode, 1);
            __TBB_RETHROW();
        }

        return (pnode);
    }
    split_ordered_list(allocator_type a = allocator_type())
        : my_node_allocator(a), my_element_count(0)
    {
        // Immediately allocate a dummy node with order key of 0. This node
        // will always be the head of the list.
        my_head = create_node(sokey_t(0));
    }

    ~split_ordered_list()
    {
        // Clear the list
        clear();

        // Remove the head element, which is not cleared by clear()
        nodeptr_t pnode = my_head;
        my_head = NULL;

        __TBB_ASSERT(pnode != NULL && pnode->my_next == NULL, "Invalid head list node");

        destroy_node(pnode);
    }

    // Common forward list functions
    allocator_type get_allocator() const {
        return (my_node_allocator);
    }

    void clear() {
        nodeptr_t pnext;
        nodeptr_t pnode = my_head;

        __TBB_ASSERT(my_head != NULL, "Invalid head list node");
        pnext = pnode->my_next;
        pnode->my_next = NULL;
        pnode = pnext;

        while (pnode != NULL)
        {
            pnext = pnode->my_next;
            destroy_node(pnode);
            pnode = pnext;
        }

        my_element_count = 0;
    }
    // Returns a first non-dummy element in the SOL
    iterator begin() {
        return first_real_iterator(raw_begin());
    }

    // Returns a first non-dummy element in the SOL
    const_iterator begin() const {
        return first_real_iterator(raw_begin());
    }

    iterator end() {
        return (iterator(0, this));
    }

    const_iterator end() const {
        return (const_iterator(0, this));
    }

    const_iterator cbegin() const {
        return (((const self_type *)this)->begin());
    }

    const_iterator cend() const {
        return (((const self_type *)this)->end());
    }

    // Checks if the number of elements (non-dummy) is 0
    bool empty() const {
        return (my_element_count == 0);
    }

    // Returns the number of non-dummy elements in the list
    size_type size() const {
        return my_element_count;
    }

    // Returns the maximum size of the list, determined by the allocator
    size_type max_size() const {
        return my_node_allocator.max_size();
    }

    // Swaps 'this' list with the passed-in one
    void swap(self_type& other)
    {
        if (this == &other)
        {
            // Nothing to do
            return;
        }

        std::swap(my_element_count, other.my_element_count);
        std::swap(my_head, other.my_head);
    }

    // Split-order list functions

    // Returns a first element in the SOL, which is always a dummy
    raw_iterator raw_begin() {
        return raw_iterator(my_head);
    }

    // Returns a first element in the SOL, which is always a dummy
    raw_const_iterator raw_begin() const {
        return raw_const_iterator(my_head);
    }

    raw_iterator raw_end() {
        return raw_iterator(0);
    }

    raw_const_iterator raw_end() const {
        return raw_const_iterator(0);
    }
    // Returns the order key of the node the iterator points to
    static sokey_t get_order_key(const raw_const_iterator& it) {
        return it.get_node_ptr()->get_order_key();
    }

    // As above, but treats the end iterator as having the maximum order key
    static sokey_t get_safe_order_key(const raw_const_iterator& it) {
        if( !it.get_node_ptr() ) return ~sokey_t(0);
        return it.get_node_ptr()->get_order_key();
    }

    // Returns a public iterator version of the internal iterator. Public iterator
    // must not be a dummy private iterator.
    iterator get_iterator(raw_iterator it) {
        __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
        return iterator(it.get_node_ptr(), this);
    }

    const_iterator get_iterator(raw_const_iterator it) const {
        __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(), "Invalid user node (dummy)");
        return const_iterator(it.get_node_ptr(), this);
    }

    // Returns a non-const version of the raw_iterator
    raw_iterator get_iterator(raw_const_iterator it) {
        return raw_iterator(it.get_node_ptr());
    }

    // Returns a non-const version of the iterator
    static iterator get_iterator(const_iterator it) {
        return iterator(it.my_node_ptr, it.my_list_ptr);
    }

    // Returns a public iterator version of the first non-dummy internal iterator
    // at or after the one passed in
    iterator first_real_iterator(raw_iterator it)
    {
        // Skip all dummy, internal-only iterators
        while (it != raw_end() && it.get_node_ptr()->is_dummy())
            ++it;

        return iterator(it.get_node_ptr(), this);
    }

    const_iterator first_real_iterator(raw_const_iterator it) const
    {
        // Skip all dummy, internal-only iterators
        while (it != raw_end() && it.get_node_ptr()->is_dummy())
            ++it;

        return const_iterator(it.get_node_ptr(), this);
    }

    // Erase an element using the allocator
    void destroy_node(nodeptr_t pnode) {
        if (!pnode->is_dummy()) my_node_allocator.destroy(pnode);
        my_node_allocator.deallocate(pnode, 1);
    }
    // Try to insert a new element in the list.
    // If insert fails, return the node that was inserted instead.
    static nodeptr_t try_insert_atomic(nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node) {
        new_node->my_next = current_node;
        return previous->atomic_set_next(new_node, current_node);
    }
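    // For reference, a minimal standalone sketch of the same CAS-linking idea
    // in C++11 atomics (illustrative only; not part of this header):
    //
    //   struct Node { std::atomic<Node*> next; };
    //
    //   // Link new_node after prev, provided prev->next is still expected_next.
    //   bool cas_link(Node* prev, Node* new_node, Node* expected_next) {
    //       new_node->next.store(expected_next, std::memory_order_relaxed);
    //       return prev->next.compare_exchange_strong(expected_next, new_node);
    //   }
    //
    // try_insert_atomic returns the winning node rather than a bool so a caller
    // can tell "we inserted" apart from "another thread got here first" and
    // restart its search from a safe position.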
    // Insert a new element between the passed-in iterators
    std::pair<iterator, bool> try_insert(raw_iterator it, raw_iterator next, nodeptr_t pnode, size_type *new_count)
    {
        nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), pnode, next.get_node_ptr());

        if (inserted_node == pnode)
        {
            // If the insert succeeded, check that the order is correct and
            // report the new element count to the caller
            check_range(it, next);
            *new_count = tbb::internal::as_atomic(my_element_count).fetch_and_increment() + 1;
            return std::pair<iterator, bool>(iterator(pnode, this), true);
        }
        else
        {
            return std::pair<iterator, bool>(end(), false);
        }
    }
    // Insert a new dummy element, starting the search at a parent dummy element
    raw_iterator insert_dummy(raw_iterator it, sokey_t order_key)
    {
        raw_iterator last = raw_end();
        raw_iterator where = it;

        __TBB_ASSERT(where != last, "Invalid head node");

        // Create a dummy element up front, even though it may be discarded
        // (due to concurrent insertion)
        nodeptr_t dummy_node = create_node(order_key);

        for (++where;;)
        {
            __TBB_ASSERT(it != last, "Invalid head list node");

            // If the iterator is at the end of the list, or past the point where
            // this dummy node needs to be inserted, then try to insert it.
            if (where == last || get_order_key(where) > order_key)
            {
                __TBB_ASSERT(get_order_key(it) < order_key, "Invalid node order in the list");

                // Try to insert it in the right place
                nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), dummy_node, where.get_node_ptr());

                if (inserted_node == dummy_node)
                {
                    // Insertion succeeded, check the list for order violations
                    check_range(it, where);
                    return raw_iterator(dummy_node);
                }
                else
                {
                    // Insertion failed: either a dummy node was inserted by another
                    // thread, or a real element was inserted at exactly this place.
                    // Resume the search from the previous position, whose order key
                    // is known to be smaller.
                    where = it;
                    ++where;
                }
            }
            else if (get_order_key(where) == order_key)
            {
                // Another dummy node with the same order key exists; discard the new one
                destroy_node(dummy_node);
                return where;
            }
            else
            {
                // Move the iterator forward
                it = where;
                ++where;
            }
        }
    }
    // This erase function can handle both real and dummy nodes
    nodeptr_t erase_node_impl(raw_iterator previous, raw_const_iterator& where) {
        nodeptr_t pnode = (where++).get_node_ptr();
        nodeptr_t prevnode = previous.get_node_ptr();
        __TBB_ASSERT(prevnode->my_next == pnode, "Erase must take consecutive iterators");
        prevnode->my_next = pnode->my_next;
        return pnode;
    }

    void erase_node(raw_iterator previous, raw_const_iterator& where,
                    /*allow_destroy*/tbb::internal::true_type)
    {
        nodeptr_t pnode = erase_node_impl(previous, where);
        destroy_node(pnode);
    }

    void erase_node(raw_iterator previous, raw_const_iterator& where,
                    /*allow_destroy*/tbb::internal::false_type)
    {
        erase_node_impl(previous, where);
    }

    void erase_node(raw_iterator previous, raw_const_iterator& where) {
        erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
    }

    template<typename AllowDestroy>
    iterator erase_node(raw_iterator previous, const_iterator where, AllowDestroy)
    {
        raw_const_iterator it = where;
        erase_node(previous, it, AllowDestroy());

        return get_iterator(first_real_iterator(it));
    }

    iterator erase_node(raw_iterator previous, const_iterator& where) {
        return erase_node(previous, where, /*allow_destroy*/tbb::internal::true_type());
    }
    // Move all elements from the passed-in split-ordered list to this one
    void move_all(self_type& source)
    {
        raw_const_iterator first = source.raw_begin();
        raw_const_iterator last = source.raw_end();

        if (first == last)
            return;

        nodeptr_t previous_node = my_head;
        raw_const_iterator begin_iterator = first++;

        // Copy all elements one by one, including dummy ones
        for (raw_const_iterator it = first; it != last;)
        {
            nodeptr_t pnode = it.get_node_ptr();

            nodeptr_t dummy_node = pnode->is_dummy() ? create_node(pnode->get_order_key())
                                                     : create_node(pnode->get_order_key(), pnode->my_element);
            previous_node = try_insert_atomic(previous_node, dummy_node, NULL);
            __TBB_ASSERT(previous_node != NULL, "Insertion must succeed");
            raw_const_iterator where = it++;
            source.erase_node(get_iterator(begin_iterator), where);
        }
        check_range();
    }
private:
    // Private helper functions
    template <typename Traits>
    friend class concurrent_unordered_base;

    // Check the list for order violations (debug builds only)
    void check_range( raw_iterator first, raw_iterator last )
    {
#if TBB_USE_ASSERT
        for (raw_iterator it = first; it != last; ++it)
        {
            raw_iterator next = it;
            ++next;

            __TBB_ASSERT(next == raw_end() || get_order_key(next) >= get_order_key(it),
                         "!!! List order inconsistency !!!");
        }
#else
        tbb::internal::suppress_unused_warning(first, last);
#endif
    }
    void check_range()
    {
#if TBB_USE_ASSERT
        check_range( raw_begin(), raw_end() );
#endif
    }

    typename tbb::internal::allocator_rebind<allocator_type, node>::type my_node_allocator; // allocator object for nodes
    size_type my_element_count;  // Total item count, not counting dummy nodes
    nodeptr_t my_head;           // pointer to head node
};
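// Illustrative layout sketch (this comment is not part of the original
// header). With four buckets, the single ordered list interleaves dummy and
// real nodes; bit-reversal of bucket indices puts bucket 2 before bucket 1:
//
//   [dummy b0] -> [item] -> [item] -> [dummy b2] -> [item] -> [dummy b1] -> [dummy b3] -> [item]
//
// Because order keys are bit-reversed hashes, doubling the bucket count never
// moves an existing node: each new dummy node simply splits an existing run
// in place, which is what makes concurrent resizing cheap.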
#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
// Suppress "conditional expression is constant" warning.
#pragma warning(push)
#pragma warning(disable: 4127)
#endif

template <typename Traits>
class concurrent_unordered_base : public Traits
{
protected:
    // Type definitions
    typedef concurrent_unordered_base<Traits> self_type;
    typedef typename Traits::value_type value_type;
    typedef typename Traits::key_type key_type;
    typedef typename Traits::hash_compare hash_compare;
    typedef typename Traits::allocator_type allocator_type;
    typedef typename hash_compare::hasher hasher;
    typedef typename hash_compare::key_equal key_equal;

    typedef typename tbb::internal::allocator_traits<allocator_type>::size_type size_type;
    typedef typename tbb::internal::allocator_traits<allocator_type>::difference_type difference_type;
    typedef typename tbb::internal::allocator_traits<allocator_type>::pointer pointer;
    typedef typename tbb::internal::allocator_traits<allocator_type>::const_pointer const_pointer;
    // No support for reference/const_reference in allocator traits
    typedef typename allocator_type::value_type& reference;
    typedef const typename allocator_type::value_type& const_reference;

    typedef split_ordered_list<value_type, typename Traits::allocator_type> solist_t;
    typedef typename solist_t::nodeptr_t nodeptr_t;
    // Iterators that walk the entire split-ordered list, including dummy nodes
    typedef typename solist_t::raw_iterator raw_iterator;
    typedef typename solist_t::raw_const_iterator raw_const_iterator;
    typedef typename solist_t::iterator iterator;
    typedef typename solist_t::const_iterator const_iterator;
    typedef iterator local_iterator;
    typedef const_iterator const_local_iterator;
#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    typedef typename Traits::node_type node_type;
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT
    using Traits::my_hash_compare;
    using Traits::get_key;
    using Traits::allow_multimapping;

    // Scoped guard: clears the buckets of the given container on destruction
    // unless dismiss() was called (used for exception safety in constructors)
    class call_internal_clear_on_exit {
        concurrent_unordered_base* my_instance;
    public:
        call_internal_clear_on_exit(concurrent_unordered_base* instance) : my_instance(instance) {}
        void dismiss() { my_instance = NULL; }
        ~call_internal_clear_on_exit() {
            if (my_instance) my_instance->internal_clear();
        }
    };

    static const size_type initial_bucket_number = 8; // Initial number of buckets
private:
    template<typename OtherTraits>
    friend class concurrent_unordered_base;

    typedef std::pair<iterator, iterator> pairii_t;
    typedef std::pair<const_iterator, const_iterator> paircc_t;

    static size_type const pointers_per_table = sizeof(size_type) * 8; // One segment per bit
    static const size_type initial_bucket_load = 4; // Initial maximum number of elements per bucket
protected:
    // Constructors/Destructors
    concurrent_unordered_base(size_type n_of_buckets = initial_bucket_number,
        const hash_compare& hc = hash_compare(), const allocator_type& a = allocator_type())
        : Traits(hc), my_solist(a),
          my_allocator(a), my_maximum_bucket_size((float) initial_bucket_load)
    {
        if( n_of_buckets == 0) ++n_of_buckets; // at least one bucket
        // Round the requested number of buckets up to a power of two
        my_number_of_buckets = size_type(1)<<__TBB_Log2((uintptr_t)n_of_buckets*2-1);
        internal_init();
    }
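    // Worked example of the rounding above (illustrative comment, not part of
    // the original header): n_of_buckets = 100 gives __TBB_Log2(199) == 7, so
    // the table starts with 1<<7 == 128 buckets; an exact power of two is
    // preserved (n_of_buckets = 64 -> __TBB_Log2(127) == 6 -> 64 buckets).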
    concurrent_unordered_base(const concurrent_unordered_base& right, const allocator_type& a)
        : Traits(right.my_hash_compare), my_solist(a), my_allocator(a)
    {
        internal_init();
        internal_copy(right);
    }

    concurrent_unordered_base(const concurrent_unordered_base& right)
        : Traits(right.my_hash_compare), my_solist(right.get_allocator()), my_allocator(right.get_allocator())
    {
        internal_init();
        internal_copy(right);
    }
#if __TBB_CPP11_RVALUE_REF_PRESENT
    concurrent_unordered_base(concurrent_unordered_base&& right)
        : Traits(right.my_hash_compare), my_solist(right.get_allocator()), my_allocator(right.get_allocator()),
          my_maximum_bucket_size(float(initial_bucket_load))
    {
        my_number_of_buckets = initial_bucket_number;
        internal_init();
        swap(right);
    }
    concurrent_unordered_base(concurrent_unordered_base&& right, const allocator_type& a)
        : Traits(right.my_hash_compare), my_solist(a), my_allocator(a)
    {
        call_internal_clear_on_exit clear_buckets_on_exception(this);

        internal_init();
        if (a == right.get_allocator()){
            my_number_of_buckets = initial_bucket_number;
            my_maximum_bucket_size = float(initial_bucket_load);
            this->swap(right);
        }else{
            my_maximum_bucket_size = right.my_maximum_bucket_size;
            my_number_of_buckets = right.my_number_of_buckets;
            my_solist.my_element_count = right.my_solist.my_element_count;

            if (! right.my_solist.empty()){
                nodeptr_t previous_node = my_solist.my_head;

                // Move all elements one by one, including dummy ones
                for (raw_const_iterator it = ++(right.my_solist.raw_begin()),
                     last = right.my_solist.raw_end(); it != last; ++it)
                {
                    const nodeptr_t pnode = it.get_node_ptr();
                    nodeptr_t node;
                    if (pnode->is_dummy()) {
                        node = my_solist.create_node(pnode->get_order_key());
                        size_type bucket = __TBB_ReverseBits(pnode->get_order_key()) % my_number_of_buckets;
                        set_bucket(bucket, node);
                    }else{
                        node = my_solist.create_node(pnode->get_order_key(), std::move(pnode->my_element));
                    }

                    previous_node = my_solist.try_insert_atomic(previous_node, node, NULL);
                    __TBB_ASSERT(previous_node != NULL,
                                 "Insertion of node failed. Concurrent inserts in constructor ?");
                }
                my_solist.check_range();
            }
        }

        clear_buckets_on_exception.dismiss();
    }
#endif // __TBB_CPP11_RVALUE_REF_PRESENT

    concurrent_unordered_base& operator=(const concurrent_unordered_base& right) {
        if (this != &right)
            internal_copy(right);
        return (*this);
    }
#if __TBB_CPP11_RVALUE_REF_PRESENT
    concurrent_unordered_base& operator=(concurrent_unordered_base&& other)
    {
        if (this != &other)
        {
            typedef typename tbb::internal::allocator_traits<allocator_type>::
                             propagate_on_container_move_assignment pocma_t;
            if (pocma_t::value || this->my_allocator == other.my_allocator) {
                concurrent_unordered_base trash (std::move(*this));
                swap(other);
                if (pocma_t::value) {
                    using std::swap;
                    swap(this->my_solist.my_node_allocator, other.my_solist.my_node_allocator);
                    swap(this->my_allocator, other.my_allocator);
                }
            } else {
                concurrent_unordered_base moved_copy(std::move(other), this->my_allocator);
                this->swap(moved_copy);
            }
        }
        return *this;
    }
#endif // __TBB_CPP11_RVALUE_REF_PRESENT

#if __TBB_INITIALIZER_LISTS_PRESENT
    //! Assignment from an initializer list
    concurrent_unordered_base& operator=(std::initializer_list<value_type> il)
    {
        this->clear();
        this->insert(il.begin(),il.end());
        return (*this);
    }
#endif // __TBB_INITIALIZER_LISTS_PRESENT

    ~concurrent_unordered_base() {
        // Delete all node segments
        internal_clear();
    }

#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    template<typename SourceType>
    void internal_merge(SourceType& source) {
        typedef typename SourceType::iterator source_iterator;
        __TBB_STATIC_ASSERT((tbb::internal::is_same_type<node_type,
                            typename SourceType::node_type>::value),
                            "Incompatible containers cannot be merged");

        for (source_iterator it = source.begin(); it != source.end();) {
            source_iterator where = it++;
            if (allow_multimapping || find(get_key(*where)) == end()) {
                std::pair<node_type, raw_iterator> extract_result = source.internal_extract(where);

                // Remember the old order key, because a failed insertion into
                // *this would leave it overwritten
                sokey_t old_order_key = extract_result.first.my_node->get_order_key();

                // The insertion fails only if an equal key is already present;
                // in that case the node must be returned to the source container
                if (!insert(std::move(extract_result.first)).second) {
                    raw_iterator next = extract_result.second;
                    raw_iterator current = next++;

                    // Restore the order key that the failed insert changed
                    extract_result.first.my_node->init(old_order_key);

                    __TBB_ASSERT(extract_result.first.my_node->get_order_key() >=
                                 current.get_node_ptr()->get_order_key(),
                                 "Wrong nodes order in source container");
                    __TBB_ASSERT(next == source.my_solist.raw_end() ||
                                 extract_result.first.my_node->get_order_key() <=
                                 next.get_node_ptr()->get_order_key(),
                                 "Wrong nodes order in source container");

                    size_t new_count = 0; // To use try_insert()
                    bool insert_result =
                        source.my_solist.try_insert(current, next, extract_result.first.my_node, &new_count).second;
                    __TBB_ASSERT_EX(insert_result, "Return to source must be successful. "
                                                   "Changing source container while merging is unsafe.");
                }
                extract_result.first.deactivate();
            }
        }
    }
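    // Usage sketch for the merge support above (illustrative only; relies on
    // the C++17-style merge()/unsafe_extract() operations that the public
    // tbb::concurrent_unordered_* containers expose when node handles are
    // available):
    //
    //   tbb::concurrent_unordered_multimap<int, int> src;
    //   tbb::concurrent_unordered_map<int, int> dst;
    //   dst.merge(src);   // nodes move without copying the elements;
    //                     // keys already present in dst remain in src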
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

public:
    allocator_type get_allocator() const {
        return my_solist.get_allocator();
    }

    // Size and capacity functions
    bool empty() const {
        return my_solist.empty();
    }

    size_type size() const {
        return my_solist.size();
    }

    size_type max_size() const {
        return my_solist.max_size();
    }

    // Iterators
    iterator begin() {
        return my_solist.begin();
    }

    const_iterator begin() const {
        return my_solist.begin();
    }

    iterator end() {
        return my_solist.end();
    }

    const_iterator end() const {
        return my_solist.end();
    }

    const_iterator cbegin() const {
        return my_solist.cbegin();
    }

    const_iterator cend() const {
        return my_solist.cend();
    }
    // Parallel traversal support
    class const_range_type : tbb::internal::no_assign {
        const concurrent_unordered_base &my_table;
        raw_const_iterator my_begin_node;
        raw_const_iterator my_end_node;
        mutable raw_const_iterator my_midpoint_node;
    public:
        //! Type for size of a range
        typedef typename concurrent_unordered_base::size_type size_type;
        typedef typename concurrent_unordered_base::value_type value_type;
        typedef typename concurrent_unordered_base::reference reference;
        typedef typename concurrent_unordered_base::difference_type difference_type;
        typedef typename concurrent_unordered_base::const_iterator iterator;

        //! True if range is empty.
        bool empty() const {return my_begin_node == my_end_node;}

        //! True if range can be partitioned into two subranges.
        bool is_divisible() const {
            return my_midpoint_node != my_end_node;
        }
        //! Split range.
        const_range_type( const_range_type &r, split ) :
            my_table(r.my_table), my_end_node(r.my_end_node)
        {
            r.my_end_node = my_begin_node = r.my_midpoint_node;
            __TBB_ASSERT( !empty(), "Splitting despite the range is not divisible" );
            __TBB_ASSERT( !r.empty(), "Splitting despite the range is not divisible" );
            set_midpoint();
            r.set_midpoint();
        }
        //! Init range with container and grainsize specified
        const_range_type( const concurrent_unordered_base &a_table ) :
            my_table(a_table), my_begin_node(a_table.my_solist.begin()),
            my_end_node(a_table.my_solist.end())
        {
            set_midpoint();
        }
        iterator begin() const { return my_table.my_solist.get_iterator(my_begin_node); }
        iterator end() const { return my_table.my_solist.get_iterator(my_end_node); }
        //! The grain size for this range.
        size_type grainsize() const { return 1; }

        //! Set my_midpoint_node to point approximately half way between my_begin_node and my_end_node.
        void set_midpoint() const {
            if( my_begin_node == my_end_node ) // not divisible
                my_midpoint_node = my_end_node;
            else {
                sokey_t begin_key = solist_t::get_safe_order_key(my_begin_node);
                sokey_t end_key = solist_t::get_safe_order_key(my_end_node);
                size_t mid_bucket = __TBB_ReverseBits( begin_key + (end_key-begin_key)/2 ) % my_table.my_number_of_buckets;
                while ( !my_table.is_initialized(mid_bucket) ) mid_bucket = my_table.get_parent(mid_bucket);
                if (__TBB_ReverseBits(mid_bucket) > begin_key) {
                    // Found a dummy node between begin and end
                    my_midpoint_node = my_table.my_solist.first_real_iterator(my_table.get_bucket(mid_bucket));
                }
                else {
                    // Didn't find a dummy node between begin and end
                    my_midpoint_node = my_end_node;
                }
#if TBB_USE_ASSERT
                {
                    sokey_t mid_key = solist_t::get_safe_order_key(my_midpoint_node);
                    __TBB_ASSERT( begin_key < mid_key, "my_begin_node is after my_midpoint_node" );
                    __TBB_ASSERT( mid_key <= end_key, "my_midpoint_node is after my_end_node" );
                }
#endif // TBB_USE_ASSERT
            }
        }
    };

    class range_type : public const_range_type {
    public:
        typedef typename concurrent_unordered_base::iterator iterator;
        //! Split range.
        range_type( range_type &r, split ) : const_range_type( r, split() ) {}
        //! Init range with container and grainsize specified
        range_type( const concurrent_unordered_base &a_table ) : const_range_type(a_table) {}

        iterator begin() const { return solist_t::get_iterator( const_range_type::begin() ); }
        iterator end() const { return solist_t::get_iterator( const_range_type::end() ); }
    };

    range_type range() {
        return range_type( *this );
    }

    const_range_type range() const {
        return const_range_type( *this );
    }
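    // Usage sketch (illustrative only; assumes tbb/parallel_for.h and a public
    // container such as tbb::concurrent_unordered_map built on this base):
    //
    //   tbb::concurrent_unordered_map<int, int> m;
    //   typedef tbb::concurrent_unordered_map<int, int>::range_type range_t;
    //   tbb::parallel_for(m.range(), [](const range_t& r) {
    //       for (range_t::iterator it = r.begin(); it != r.end(); ++it)
    //           process(*it);   // 'process' is a user-supplied function
    //   });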
    // Modifiers
    std::pair<iterator, bool> insert(const value_type& value) {
        return internal_insert</*AllowCreate=*/tbb::internal::true_type,
                               /*AllowDestroy=*/tbb::internal::true_type>(value);
    }

    iterator insert(const_iterator, const value_type& value) {
        // Ignore hint
        return insert(value).first;
    }
#if __TBB_CPP11_RVALUE_REF_PRESENT
    std::pair<iterator, bool> insert(value_type&& value) {
        return internal_insert</*AllowCreate=*/tbb::internal::true_type,
                               /*AllowDestroy=*/tbb::internal::true_type>(std::move(value));
    }

    iterator insert(const_iterator, value_type&& value) {
        // Ignore hint
        return insert(std::move(value)).first;
    }
#endif // __TBB_CPP11_RVALUE_REF_PRESENT

#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    std::pair<iterator, bool> insert(node_type&& nh) {
        if (!nh.empty()) {
            nodeptr_t handled_node = nh.my_node;
            std::pair<iterator, bool> insert_result =
                internal_insert</*AllowCreate=*/tbb::internal::false_type,
                                /*AllowDestroy=*/tbb::internal::false_type>
                               (handled_node->my_element, handled_node);
            if (insert_result.second)
                nh.deactivate();
            return insert_result;
        }
        return std::pair<iterator, bool>(end(), false);
    }
    iterator insert(const_iterator, node_type&& nh) {
        // Ignore hint
        return insert(std::move(nh)).first;
    }
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

#if __TBB_CPP11_VARIADIC_TEMPLATES_PRESENT && __TBB_CPP11_RVALUE_REF_PRESENT
    template<typename... Args>
    std::pair<iterator, bool> emplace(Args&&... args) {
        nodeptr_t pnode = my_solist.create_node_v(tbb::internal::forward<Args>(args)...);
        const sokey_t hashed_element_key = (sokey_t) my_hash_compare(get_key(pnode->my_element));
        const sokey_t order_key = split_order_key_regular(hashed_element_key);
        pnode->init(order_key);

        return internal_insert</*AllowCreate=*/tbb::internal::false_type,
                               /*AllowDestroy=*/tbb::internal::true_type>(pnode->my_element, pnode);
    }

    template<typename... Args>
    iterator emplace_hint(const_iterator, Args&&... args) {
        // Ignore hint
        return emplace(tbb::internal::forward<Args>(args)...).first;
    }
#endif // __TBB_CPP11_VARIADIC_TEMPLATES_PRESENT && __TBB_CPP11_RVALUE_REF_PRESENT

    template<class Iterator>
    void insert(Iterator first, Iterator last) {
        for (Iterator it = first; it != last; ++it)
            insert(*it);
    }

#if __TBB_INITIALIZER_LISTS_PRESENT
    //! Insert an initializer list of elements
    void insert(std::initializer_list<value_type> il) {
        insert(il.begin(), il.end());
    }
#endif // __TBB_INITIALIZER_LISTS_PRESENT
    iterator unsafe_erase(const_iterator where) {
        return internal_erase(where);
    }

    iterator unsafe_erase(const_iterator first, const_iterator last) {
        while (first != last)
            unsafe_erase(first++);
        return my_solist.get_iterator(first);
    }

    size_type unsafe_erase(const key_type& key) {
        pairii_t where = equal_range(key);
        size_type item_count = internal_distance(where.first, where.second);
        unsafe_erase(where.first, where.second);
        return item_count;
    }
#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    node_type unsafe_extract(const_iterator where) {
        return internal_extract(where).first;
    }

    node_type unsafe_extract(const key_type& key) {
        pairii_t where = equal_range(key);
        if (where.first == end()) return node_type(); // element was not found
        return internal_extract(where.first).first;
    }
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

    void swap(concurrent_unordered_base& right) {
        if (this != &right) {
            std::swap(my_hash_compare, right.my_hash_compare);
            my_solist.swap(right.my_solist);
            internal_swap_buckets(right);
            std::swap(my_number_of_buckets, right.my_number_of_buckets);
            std::swap(my_maximum_bucket_size, right.my_maximum_bucket_size);
        }
    }
    // Observers
    hasher hash_function() const {
        return my_hash_compare.my_hash_object;
    }

    key_equal key_eq() const {
        return my_hash_compare.my_key_compare_object;
    }

    void clear() {
        // Clear the list
        my_solist.clear();

        // Clear the buckets
        internal_clear();

        // Re-initialize bucket 0
        __TBB_ASSERT(my_buckets[0] == NULL, NULL);
        raw_iterator dummy_node = my_solist.raw_begin();
        set_bucket(0, dummy_node);
    }
    // Lookup
    iterator find(const key_type& key) {
        return internal_find(key);
    }

    const_iterator find(const key_type& key) const {
        return const_cast<self_type*>(this)->internal_find(key);
    }

    size_type count(const key_type& key) const {
        if(allow_multimapping) {
            paircc_t answer = equal_range(key);
            size_type item_count = internal_distance(answer.first, answer.second);
            return item_count;
        } else {
            return const_cast<self_type*>(this)->internal_find(key) == end() ? 0 : 1;
        }
    }

    std::pair<iterator, iterator> equal_range(const key_type& key) {
        return internal_equal_range(key);
    }

    std::pair<const_iterator, const_iterator> equal_range(const key_type& key) const {
        return const_cast<self_type*>(this)->internal_equal_range(key);
    }
    // Bucket interface - for debugging
    size_type unsafe_bucket_count() const {
        return my_number_of_buckets;
    }

    size_type unsafe_max_bucket_count() const {
        return segment_size(pointers_per_table-1);
    }

    size_type unsafe_bucket_size(size_type bucket) {
        size_type item_count = 0;
        if (is_initialized(bucket)) {
            raw_iterator it = get_bucket(bucket);
            ++it;
            for (; it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy(); ++it)
                ++item_count;
        }
        return item_count;
    }
    size_type unsafe_bucket(const key_type& key) const {
        sokey_t order_key = (sokey_t) my_hash_compare(key);
        size_type bucket = order_key % my_number_of_buckets;
        return bucket;
    }
    // If the bucket is initialized, return a first non-dummy element in it
    local_iterator unsafe_begin(size_type bucket) {
        if (!is_initialized(bucket))
            return end();

        raw_iterator it = get_bucket(bucket);
        return my_solist.first_real_iterator(it);
    }

    // If the bucket is initialized, return a first non-dummy element in it
    const_local_iterator unsafe_begin(size_type bucket) const
    {
        if (!is_initialized(bucket))
            return end();

        raw_const_iterator it = get_bucket(bucket);
        return my_solist.first_real_iterator(it);
    }

    // Returns the iterator after the last non-dummy element in the bucket
    local_iterator unsafe_end(size_type bucket)
    {
        if (!is_initialized(bucket))
            return end();

        raw_iterator it = get_bucket(bucket);

        // Find the end of the bucket, denoted by the dummy element
        do ++it;
        while(it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy());

        // Return the first real element past the end of the bucket
        return my_solist.first_real_iterator(it);
    }

    // Returns the iterator after the last non-dummy element in the bucket
    const_local_iterator unsafe_end(size_type bucket) const
    {
        if (!is_initialized(bucket))
            return end();

        raw_const_iterator it = get_bucket(bucket);

        // Find the end of the bucket, denoted by the dummy element
        do ++it;
        while(it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy());

        // Return the first real element past the end of the bucket
        return my_solist.first_real_iterator(it);
    }

    const_local_iterator unsafe_cbegin(size_type bucket) const {
        return ((const self_type *) this)->unsafe_begin(bucket);
    }

    const_local_iterator unsafe_cend(size_type bucket) const {
        return ((const self_type *) this)->unsafe_end(bucket);
    }
    // Hash policy
    float load_factor() const {
        return (float) size() / (float) unsafe_bucket_count();
    }

    float max_load_factor() const {
        return my_maximum_bucket_size;
    }

    void max_load_factor(float newmax) {
        // Reject NaN (a NaN compares unequal to itself) and negative values
        if (newmax != newmax || newmax < 0)
            tbb::internal::throw_exception(tbb::internal::eid_invalid_load_factor);
        my_maximum_bucket_size = newmax;
    }
    // This function is a no-op with respect to existing nodes: the underlying
    // split-ordered list is already sorted, so an increase of the bucket count
    // takes effect lazily, the next time an affected bucket is touched.
    void rehash(size_type buckets) {
        size_type current_buckets = my_number_of_buckets;
        if (current_buckets >= buckets)
            return;
        // Round the requested number of buckets up to a power of two
        my_number_of_buckets = size_type(1)<<__TBB_Log2((uintptr_t)buckets*2-1);
    }
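    // Example (illustrative comment): after rehash(1000) on a 128-bucket
    // table, unsafe_bucket_count() reports 1024 (1000 rounded up to a power
    // of two), while rehash(64) on the same table is a no-op because the
    // bucket count never shrinks.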
private:
    // Initialize the hash and keep the first bucket open
    void internal_init() {
        // Initialize the array of segment pointers
        memset(my_buckets, 0, sizeof(my_buckets));

        // Initialize bucket 0
        raw_iterator dummy_node = my_solist.raw_begin();
        set_bucket(0, dummy_node);
    }

    void internal_clear() {
        for (size_type index = 0; index < pointers_per_table; ++index) {
            if (my_buckets[index] != NULL) {
                size_type sz = segment_size(index);
                for (size_type index2 = 0; index2 < sz; ++index2)
                    my_allocator.destroy(&my_buckets[index][index2]);
                my_allocator.deallocate(my_buckets[index], sz);
                my_buckets[index] = 0;
            }
        }
    }
    // Copy all elements from the passed-in table
    void internal_copy(const self_type& right) {
        clear();

        my_maximum_bucket_size = right.my_maximum_bucket_size;
        my_number_of_buckets = right.my_number_of_buckets;

        __TBB_TRY {
            insert(right.begin(), right.end());
            my_hash_compare = right.my_hash_compare;
        } __TBB_CATCH(...) {
            my_solist.clear();
            __TBB_RETHROW();
        }
    }

    void internal_swap_buckets(concurrent_unordered_base& right)
    {
        // Swap all bucket segments
        for (size_type index = 0; index < pointers_per_table; ++index)
        {
            raw_iterator * iterator_pointer = my_buckets[index];
            my_buckets[index] = right.my_buckets[index];
            right.my_buckets[index] = iterator_pointer;
        }
    }

    // Hash APIs
    static size_type internal_distance(const_iterator first, const_iterator last)
    {
        size_type num = 0;

        for (const_iterator it = first; it != last; ++it)
            ++num;

        return num;
    }
    // Insert an element in the hash given its value
    template<typename AllowCreate, typename AllowDestroy, typename ValueType>
    std::pair<iterator, bool> internal_insert(__TBB_FORWARDING_REF(ValueType) value, nodeptr_t pnode = NULL)
    {
        const key_type *pkey = &get_key(value);
        sokey_t hash_key = (sokey_t) my_hash_compare(*pkey);
        size_type new_count = 0;
        sokey_t order_key = split_order_key_regular(hash_key);
        raw_iterator previous = prepare_bucket(hash_key);
        raw_iterator last = my_solist.raw_end();
        __TBB_ASSERT(previous != last, "Invalid head node");

        if (pnode) {
            // Set the new order key on the passed-in node
            pnode->init(order_key);
        }

        // First node is a dummy node
        for (raw_iterator where = previous;;)
        {
            ++where;
            if (where == last || solist_t::get_order_key(where) > order_key ||
                // If multimapped, insert the element before the first item with an equal key
                (allow_multimapping && solist_t::get_order_key(where) == order_key &&
                 !my_hash_compare(get_key(*where), *pkey)))
            {
                if (!pnode) {
                    pnode = my_solist.create_node(order_key, tbb::internal::forward<ValueType>(value), AllowCreate());
                    // If the value was moved, the known reference to the key might be invalid
                    pkey = &get_key(pnode->my_element);
                }

                // Try to insert 'pnode' between 'previous' and 'where'
                std::pair<iterator, bool> result = my_solist.try_insert(previous, where, pnode, &new_count);

                if (result.second)
                {
                    // Insertion succeeded, adjust the table size, if needed
                    adjust_table_size(new_count, my_number_of_buckets);
                    return result;
                }
                else
                {
                    // Insertion failed: either the same node was inserted by another
                    // thread, or another element was inserted at exactly this place.
                    // Resume the search from the previous location, whose order key
                    // is known to be smaller (legal only because there is no safe
                    // concurrent erase operation).
                    where = previous;
                    continue;
                }
            }
            else if (!allow_multimapping && solist_t::get_order_key(where) == order_key &&
                     !my_hash_compare(get_key(*where), *pkey))
            {
                // An element with the same key is already in the list; return it
                if (pnode && AllowDestroy::value)
                    my_solist.destroy_node(pnode);
                return std::pair<iterator, bool>(my_solist.get_iterator(where), false);
            }

            // Move the iterator forward
            previous = where;
        }
    }
    // Find an element in the split-ordered list
    iterator internal_find(const key_type& key)
    {
        sokey_t hash_key = (sokey_t) my_hash_compare(key);
        sokey_t order_key = split_order_key_regular(hash_key);
        raw_iterator last = my_solist.raw_end();

        for (raw_iterator it = prepare_bucket(hash_key); it != last; ++it)
        {
            if (solist_t::get_order_key(it) > order_key)
            {
                // Walked past the requested order key: the element is not in the table
                return end();
            }
            else if (solist_t::get_order_key(it) == order_key)
            {
                // Matching order keys do not guarantee a match: compare the keys
                // themselves, and keep searching while the order key stays equal.
                if (!my_hash_compare(get_key(*it), key))
                    return my_solist.get_iterator(it);
            }
        }

        return end();
    }
    // Erase an element from the list. This is not a concurrency-safe function.
    iterator internal_erase(const_iterator it)
    {
        sokey_t hash_key = (sokey_t) my_hash_compare(get_key(*it));
        raw_iterator previous = prepare_bucket(hash_key);
        raw_iterator last = my_solist.raw_end();
        __TBB_ASSERT(previous != last, "Invalid head node");

        // Move 'previous' just ahead of the element to erase
        for (raw_iterator where = previous; where != last; previous = where) {
            ++where;
            if (my_solist.get_iterator(where) == it)
                return my_solist.erase_node(previous, it);
        }
        return end();
    }
#if __TBB_UNORDERED_NODE_HANDLE_PRESENT
    std::pair<node_type, raw_iterator> internal_extract(const_iterator it) {
        sokey_t hash_key = sokey_t(my_hash_compare(get_key(*it)));
        raw_iterator previous = prepare_bucket(hash_key);
        raw_iterator last = my_solist.raw_end();
        __TBB_ASSERT(previous != last, "Invalid head node");

        for(raw_iterator where = previous; where != last; previous = where) {
            ++where;
            if (my_solist.get_iterator(where) == it) {
                const_iterator result = it;
                // Unlink the node without destroying it; ownership passes to the handle
                my_solist.erase_node(previous, it, /*allow_destroy*/tbb::internal::false_type());
                return std::pair<node_type, raw_iterator>( node_type(result.get_node_ptr()),
                                                           previous );
            }
        }
        return std::pair<node_type, iterator>(node_type(), end());
    }
#endif // __TBB_UNORDERED_NODE_HANDLE_PRESENT

    // Return the [begin, end) pair of iterators with the same key values.
    // This operation makes sense only if the mapping is many-to-one.
    pairii_t internal_equal_range(const key_type& key)
    {
        sokey_t hash_key = (sokey_t) my_hash_compare(key);
        sokey_t order_key = split_order_key_regular(hash_key);
        raw_iterator end_it = my_solist.raw_end();

        for (raw_iterator it = prepare_bucket(hash_key); it != end_it; ++it)
        {
            if (solist_t::get_order_key(it) > order_key)
            {
                // There is no element with the given key
                return pairii_t(end(), end());
            }
            else if (solist_t::get_order_key(it) == order_key &&
                     !my_hash_compare(get_key(*it), key))
            {
                iterator first = my_solist.get_iterator(it);
                iterator last = first;
                do ++last;
                while( allow_multimapping && last != end() && !my_hash_compare(get_key(*last), key) );
                return pairii_t(first, last);
            }
        }

        return pairii_t(end(), end());
    }
    // Bucket APIs
    void init_bucket(size_type bucket)
    {
        // Bucket 0 has no parent.
        __TBB_ASSERT( bucket != 0, "The first bucket must always be initialized");

        size_type parent_bucket = get_parent(bucket);

        // All parent buckets have to be initialized before this bucket is
        if (!is_initialized(parent_bucket))
            init_bucket(parent_bucket);

        raw_iterator parent = get_bucket(parent_bucket);

        // Create a dummy first node in this bucket
        raw_iterator dummy_node = my_solist.insert_dummy(parent, split_order_key_dummy(bucket));
        set_bucket(bucket, dummy_node);
    }
    void adjust_table_size(size_type total_elements, size_type current_size)
    {
        // Grow the table by a factor of 2 if possible and needed
        if ( ((float) total_elements / (float) current_size) > my_maximum_bucket_size )
        {
            // Double the size of the hash only if the size has not changed in between loads
            my_number_of_buckets.compare_and_swap(2u*current_size, current_size);
        }
    }
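    // Example (illustrative): with the default maximum load factor of 4.0 and
    // 8 buckets, growth triggers once the observed element count exceeds 32;
    // my_number_of_buckets is then CASed from 8 to 16. A thread racing with a
    // stale current_size simply fails the CAS and leaves the table unchanged.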
    // Returns the parent bucket, obtained by clearing the most significant
    // turned-on bit of the bucket index
    size_type get_parent(size_type bucket) const
    {
        size_type msb = __TBB_Log2((uintptr_t)bucket);
        return bucket & ~(size_type(1) << msb);
    }
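    // For example, bucket 6 (0b110) has parent 2 (0b010) and bucket 5 (0b101)
    // has parent 1 (0b001); repeatedly clearing the most significant set bit
    // always terminates at bucket 0, which is initialized at construction.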
    // Dynamic sized array (segments)

    //! @return segment index of the given index in the array
    static size_type segment_index_of( size_type index ) {
        return size_type( __TBB_Log2( uintptr_t(index|1) ) );
    }

    //! @return the first array index of the given segment
    static size_type segment_base( size_type k ) {
        return (size_type(1)<<k & ~size_type(1));
    }

    //! @return segment size
    static size_type segment_size( size_type k ) {
        return k ? size_type(1)<<k : 2;
    }
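    // Worked example (illustrative): bucket index 9 lives in segment
    // segment_index_of(9) == __TBB_Log2(9|1) == 3, which spans indices
    // [segment_base(3), segment_base(3) + segment_size(3)) == [8, 16), so its
    // slot within the segment is 9 - 8 == 1. Segment 0 is special: it holds
    // the two indices 0 and 1.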
    raw_iterator get_bucket(size_type bucket) const {
        size_type segment = segment_index_of(bucket);
        bucket -= segment_base(segment);
        __TBB_ASSERT( my_buckets[segment], "bucket must be in an allocated segment" );
        return my_buckets[segment][bucket];
    }
    // Returns the dummy node of the bucket that the given hash key falls into,
    // initializing the bucket on first touch
    raw_iterator prepare_bucket(sokey_t hash_key) {
        size_type bucket = hash_key % my_number_of_buckets;
        size_type segment = segment_index_of(bucket);
        size_type index = bucket - segment_base(segment);
        if (my_buckets[segment] == NULL || my_buckets[segment][index].get_node_ptr() == NULL)
            init_bucket(bucket);
        return my_buckets[segment][index];
    }
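    // Example (illustrative): with 16 buckets, a key hashing to 27 maps to
    // bucket 27 % 16 == 11. If bucket 11 was never touched, init_bucket(11)
    // first ensures parent bucket 3 exists (and, recursively, buckets 1 and
    // 0), then splices bucket 11's dummy node into the list.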
    void set_bucket(size_type bucket, raw_iterator dummy_head) {
        size_type segment = segment_index_of(bucket);
        bucket -= segment_base(segment);

        if (my_buckets[segment] == NULL) {
            size_type sz = segment_size(segment);
            raw_iterator * new_segment = my_allocator.allocate(sz);
            std::memset(static_cast<void*>(new_segment), 0, sz*sizeof(raw_iterator));

            // If another thread beats us to reserving this segment, deallocate ours
            if (my_buckets[segment].compare_and_swap( new_segment, NULL) != NULL)
                my_allocator.deallocate(new_segment, sz);
        }

        my_buckets[segment][bucket] = dummy_head;
    }
    bool is_initialized(size_type bucket) const {
        size_type segment = segment_index_of(bucket);
        bucket -= segment_base(segment);

        if (my_buckets[segment] == NULL)
            return false;

        raw_iterator it = my_buckets[segment][bucket];
        return (it.get_node_ptr() != NULL);
    }

    // Utilities for keys

    // A regular order key has its original hash value reversed and the last bit set
    sokey_t split_order_key_regular(sokey_t order_key) const {
        return __TBB_ReverseBits(order_key) | 0x1;
    }

    // A dummy order key has its original hash value reversed and the last bit unset
    sokey_t split_order_key_dummy(sokey_t order_key) const {
        return __TBB_ReverseBits(order_key) & ~sokey_t(0x1);
    }

    // Shared variables
    atomic<size_type> my_number_of_buckets;                 // Current table size
    solist_t my_solist;                                     // List where all the elements are kept
    typename tbb::internal::allocator_rebind<allocator_type, raw_iterator>::type my_allocator; // Allocator object for segments
    float my_maximum_bucket_size;                           // Maximum size of the bucket
    atomic<raw_iterator*> my_buckets[pointers_per_table];   // The segment table
};
#if defined(_MSC_VER) && !defined(__INTEL_COMPILER)
#pragma warning(pop) // warning 4127 is back
#endif

} // namespace internal
//! @endcond
} // namespace interface5
} // namespace tbb
#endif // __TBB__concurrent_unordered_impl_H