24 #ifndef __TBB__concurrent_unordered_impl_H 25 #define __TBB__concurrent_unordered_impl_H 26 #if !defined(__TBB_concurrent_unordered_map_H) && !defined(__TBB_concurrent_unordered_set_H) && !defined(__TBB_concurrent_hash_map_H) 27 #error Do not #include this internal file directly; use public TBB headers instead. 30 #include "../tbb_stddef.h" 32 #if !TBB_USE_EXCEPTIONS && _MSC_VER 34 #pragma warning (push) 35 #pragma warning (disable: 4530) 45 #if !TBB_USE_EXCEPTIONS && _MSC_VER 49 #include "../atomic.h" 50 #include "../tbb_exception.h" 51 #include "../tbb_allocator.h" 53 #if __TBB_INITIALIZER_LISTS_PRESENT 54 #include <initializer_list> 57 #include "_tbb_hash_compare_impl.h" 60 namespace interface5 {
64 template <
typename T,
typename Allocator>
65 class split_ordered_list;
66 template <
typename Traits>
67 class concurrent_unordered_base;
// Forward-only iterator over the raw (dummy-inclusive) split-ordered list.
// Visits every node, including the dummy order-enforcing nodes; the
// solist_iterator below builds on this and skips dummies.
// NOTE(review): extraction damage repaired — the pre/post increment operators
// were missing their `return` statements and closing braces.
template<class Solist, typename Value>
class flist_iterator : public std::iterator<std::forward_iterator_tag, Value>
{
    template <typename T, typename Allocator>
    friend class split_ordered_list;
    template <typename Traits>
    friend class concurrent_unordered_base;
    template<class M, typename V>
    friend class flist_iterator;

    typedef typename Solist::nodeptr_t nodeptr_t;
public:
    typedef typename Solist::value_type value_type;
    typedef typename Solist::difference_type difference_type;
    typedef typename Solist::pointer pointer;
    typedef typename Solist::reference reference;

    flist_iterator() : my_node_ptr(0) {}
    // Converting constructor: lets a mutable iterator convert to a const one.
    flist_iterator( const flist_iterator<Solist, typename Solist::value_type> &other )
        : my_node_ptr(other.my_node_ptr) {}

    reference operator*() const { return my_node_ptr->my_element; }
    pointer operator->() const { return &**this; }

    // Pre-increment: step to the next node, dummy or not.
    flist_iterator& operator++() {
        my_node_ptr = my_node_ptr->my_next;
        return *this;
    }

    // Post-increment: copy, advance, return the copy.
    flist_iterator operator++(int) {
        flist_iterator tmp = *this;
        ++*this;
        return tmp;
    }

protected:
    // Construction from a raw node pointer is reserved for the containers.
    flist_iterator(nodeptr_t pnode) : my_node_ptr(pnode) {}
    nodeptr_t get_node_ptr() const { return my_node_ptr; }

    nodeptr_t my_node_ptr;

    template<typename M, typename T, typename U>
    friend bool operator==( const flist_iterator<M,T> &i, const flist_iterator<M,U> &j );
    template<typename M, typename T, typename U>
    friend bool operator!=( const flist_iterator<M,T>& i, const flist_iterator<M,U>& j );
};
117 template<
typename Solist,
typename T,
typename U>
118 bool operator==(
const flist_iterator<Solist,T> &i,
const flist_iterator<Solist,U> &j ) {
119 return i.my_node_ptr == j.my_node_ptr;
121 template<
typename Solist,
typename T,
typename U>
122 bool operator!=(
const flist_iterator<Solist,T>& i,
const flist_iterator<Solist,U>& j ) {
123 return i.my_node_ptr != j.my_node_ptr;
127 template<
class Solist,
typename Value>
128 class solist_iterator :
public flist_iterator<Solist, Value>
130 typedef flist_iterator<Solist, Value> base_type;
131 typedef typename Solist::nodeptr_t nodeptr_t;
132 using base_type::get_node_ptr;
133 template <
typename T,
typename Allocator>
134 friend class split_ordered_list;
135 template<
class M,
typename V>
136 friend class solist_iterator;
137 template<
typename M,
typename T,
typename U>
138 friend bool operator==(
const solist_iterator<M,T> &i,
const solist_iterator<M,U> &j );
139 template<
typename M,
typename T,
typename U>
140 friend bool operator!=(
const solist_iterator<M,T>& i,
const solist_iterator<M,U>& j );
142 const Solist *my_list_ptr;
143 solist_iterator(nodeptr_t pnode,
const Solist *plist) : base_type(pnode), my_list_ptr(plist) {}
146 typedef typename Solist::value_type value_type;
147 typedef typename Solist::difference_type difference_type;
148 typedef typename Solist::pointer pointer;
149 typedef typename Solist::reference reference;
152 solist_iterator(
const solist_iterator<Solist, typename Solist::value_type> &other )
153 : base_type(other), my_list_ptr(other.my_list_ptr) {}
155 reference operator*()
const {
156 return this->base_type::operator*();
159 pointer operator->()
const {
163 solist_iterator& operator++() {
164 do ++(*(base_type *)
this);
165 while (get_node_ptr() != NULL && get_node_ptr()->is_dummy());
170 solist_iterator operator++(
int) {
171 solist_iterator tmp = *
this;
173 while (get_node_ptr() != NULL && get_node_ptr()->is_dummy());
179 template<
typename Solist,
typename T,
typename U>
180 bool operator==(
const solist_iterator<Solist,T> &i,
const solist_iterator<Solist,U> &j ) {
181 return i.my_node_ptr == j.my_node_ptr && i.my_list_ptr == j.my_list_ptr;
183 template<
typename Solist,
typename T,
typename U>
184 bool operator!=(
const solist_iterator<Solist,T>& i,
const solist_iterator<Solist,U>& j ) {
185 return i.my_node_ptr != j.my_node_ptr || i.my_list_ptr != j.my_list_ptr;
189 typedef size_t sokey_t;
193 template <
typename T,
typename Allocator>
194 class split_ordered_list
197 typedef split_ordered_list<T, Allocator> self_type;
198 typedef typename Allocator::template rebind<T>::other allocator_type;
200 typedef node *nodeptr_t;
202 typedef typename allocator_type::size_type size_type;
203 typedef typename allocator_type::difference_type difference_type;
204 typedef typename allocator_type::pointer pointer;
205 typedef typename allocator_type::const_pointer const_pointer;
206 typedef typename allocator_type::reference reference;
207 typedef typename allocator_type::const_reference const_reference;
208 typedef typename allocator_type::value_type value_type;
210 typedef solist_iterator<self_type, const value_type> const_iterator;
211 typedef solist_iterator<self_type, value_type> iterator;
212 typedef flist_iterator<self_type, const value_type> raw_const_iterator;
213 typedef flist_iterator<self_type, value_type> raw_iterator;
216 struct node : tbb::internal::no_assign
223 void init(sokey_t order_key) {
224 my_order_key = order_key;
229 sokey_t get_order_key()
const {
234 nodeptr_t atomic_set_next(nodeptr_t new_node, nodeptr_t current_node)
237 nodeptr_t exchange_node = tbb::internal::as_atomic(my_next).compare_and_swap(new_node, current_node);
239 if (exchange_node == current_node)
247 return exchange_node;
253 bool is_dummy()
const {
254 return (my_order_key & 0x1) == 0;
259 value_type my_element;
260 sokey_t my_order_key;
264 nodeptr_t create_node(sokey_t order_key) {
265 nodeptr_t pnode = my_node_allocator.allocate(1);
266 pnode->init(order_key);
271 template<
typename Arg>
272 nodeptr_t create_node(sokey_t order_key, __TBB_FORWARDING_REF(Arg) t){
273 nodeptr_t pnode = my_node_allocator.allocate(1);
277 new(
static_cast<void*
>(&pnode->my_element)) T(tbb::internal::forward<Arg>(t));
278 pnode->init(order_key);
280 my_node_allocator.deallocate(pnode, 1);
288 template<
typename __TBB_PARAMETER_PACK Args>
289 nodeptr_t create_node_v( __TBB_FORWARDING_REF(Args) __TBB_PARAMETER_PACK args){
290 nodeptr_t pnode = my_node_allocator.allocate(1);
294 new(
static_cast<void*
>(&pnode->my_element)) T(__TBB_PACK_EXPANSION(tbb::internal::forward<Args>(args)));
296 my_node_allocator.deallocate(pnode, 1);
303 split_ordered_list(allocator_type a = allocator_type())
304 : my_node_allocator(a), my_element_count(0)
308 my_head = create_node(sokey_t(0));
311 ~split_ordered_list()
317 nodeptr_t pnode = my_head;
320 __TBB_ASSERT(pnode != NULL && pnode->my_next == NULL,
"Invalid head list node");
327 allocator_type get_allocator()
const {
328 return (my_node_allocator);
333 nodeptr_t pnode = my_head;
335 __TBB_ASSERT(my_head != NULL,
"Invalid head list node");
336 pnext = pnode->my_next;
337 pnode->my_next = NULL;
340 while (pnode != NULL)
342 pnext = pnode->my_next;
347 my_element_count = 0;
352 return first_real_iterator(raw_begin());
356 const_iterator begin()
const {
357 return first_real_iterator(raw_begin());
361 return (iterator(0,
this));
364 const_iterator end()
const {
365 return (const_iterator(0,
this));
368 const_iterator cbegin()
const {
369 return (((
const self_type *)
this)->begin());
372 const_iterator cend()
const {
373 return (((
const self_type *)
this)->end());
378 return (my_element_count == 0);
382 size_type size()
const {
383 return my_element_count;
387 size_type max_size()
const {
388 return my_node_allocator.max_size();
392 void swap(self_type& other)
400 std::swap(my_element_count, other.my_element_count);
401 std::swap(my_head, other.my_head);
407 raw_iterator raw_begin() {
408 return raw_iterator(my_head);
412 raw_const_iterator raw_begin()
const {
413 return raw_const_iterator(my_head);
416 raw_iterator raw_end() {
417 return raw_iterator(0);
420 raw_const_iterator raw_end()
const {
421 return raw_const_iterator(0);
424 static sokey_t get_order_key(
const raw_const_iterator& it) {
425 return it.get_node_ptr()->get_order_key();
428 static sokey_t get_safe_order_key(
const raw_const_iterator& it) {
429 if( !it.get_node_ptr() )
return ~sokey_t(0);
430 return it.get_node_ptr()->get_order_key();
435 iterator get_iterator(raw_iterator it) {
436 __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(),
"Invalid user node (dummy)");
437 return iterator(it.get_node_ptr(),
this);
442 const_iterator get_iterator(raw_const_iterator it)
const {
443 __TBB_ASSERT(it.get_node_ptr() == NULL || !it.get_node_ptr()->is_dummy(),
"Invalid user node (dummy)");
444 return const_iterator(it.get_node_ptr(),
this);
448 raw_iterator get_iterator(raw_const_iterator it) {
449 return raw_iterator(it.get_node_ptr());
453 static iterator get_iterator(const_iterator it) {
454 return iterator(it.my_node_ptr, it.my_list_ptr);
459 iterator first_real_iterator(raw_iterator it)
462 while (it != raw_end() && it.get_node_ptr()->is_dummy())
465 return iterator(it.get_node_ptr(),
this);
470 const_iterator first_real_iterator(raw_const_iterator it)
const 473 while (it != raw_end() && it.get_node_ptr()->is_dummy())
476 return const_iterator(it.get_node_ptr(),
this);
480 void destroy_node(nodeptr_t pnode) {
481 if (!pnode->is_dummy()) my_node_allocator.destroy(pnode);
482 my_node_allocator.deallocate(pnode, 1);
487 static nodeptr_t try_insert_atomic(nodeptr_t previous, nodeptr_t new_node, nodeptr_t current_node) {
488 new_node->my_next = current_node;
489 return previous->atomic_set_next(new_node, current_node);
493 std::pair<iterator, bool> try_insert(raw_iterator it, raw_iterator next, nodeptr_t pnode, size_type *new_count)
495 nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), pnode, next.get_node_ptr());
497 if (inserted_node == pnode)
500 check_range(it, next);
501 *new_count = tbb::internal::as_atomic(my_element_count).fetch_and_increment();
502 return std::pair<iterator, bool>(iterator(pnode,
this),
true);
506 return std::pair<iterator, bool>(end(),
false);
511 raw_iterator insert_dummy(raw_iterator it, sokey_t order_key)
513 raw_iterator last = raw_end();
514 raw_iterator where = it;
516 __TBB_ASSERT(where != last,
"Invalid head node");
521 nodeptr_t dummy_node = create_node(order_key);
525 __TBB_ASSERT(it != last,
"Invalid head list node");
529 if (where == last || get_order_key(where) > order_key)
531 __TBB_ASSERT(get_order_key(it) < order_key,
"Invalid node order in the list");
534 nodeptr_t inserted_node = try_insert_atomic(it.get_node_ptr(), dummy_node, where.get_node_ptr());
536 if (inserted_node == dummy_node)
539 check_range(it, where);
540 return raw_iterator(dummy_node);
554 else if (get_order_key(where) == order_key)
557 destroy_node(dummy_node);
569 void erase_node(raw_iterator previous, raw_const_iterator& where)
571 nodeptr_t pnode = (where++).get_node_ptr();
572 nodeptr_t prevnode = previous.get_node_ptr();
573 __TBB_ASSERT(prevnode->my_next == pnode,
"Erase must take consecutive iterators");
574 prevnode->my_next = pnode->my_next;
580 iterator erase_node(raw_iterator previous, const_iterator where)
582 raw_const_iterator it = where;
583 erase_node(previous, it);
586 return get_iterator(first_real_iterator(it));
590 void move_all(self_type& source)
592 raw_const_iterator first = source.raw_begin();
593 raw_const_iterator last = source.raw_end();
598 nodeptr_t previous_node = my_head;
599 raw_const_iterator begin_iterator = first++;
602 for (raw_const_iterator it = first; it != last;)
604 nodeptr_t pnode = it.get_node_ptr();
606 nodeptr_t dummy_node = pnode->is_dummy() ? create_node(pnode->get_order_key()) : create_node(pnode->get_order_key(), pnode->my_element);
607 previous_node = try_insert_atomic(previous_node, dummy_node, NULL);
608 __TBB_ASSERT(previous_node != NULL,
"Insertion must succeed");
609 raw_const_iterator where = it++;
610 source.erase_node(get_iterator(begin_iterator), where);
618 template <
typename Traits>
619 friend class concurrent_unordered_base;
622 void check_range( raw_iterator first, raw_iterator last )
625 for (raw_iterator it = first; it != last; ++it)
627 raw_iterator next = it;
630 __TBB_ASSERT(next == raw_end() || get_order_key(next) >= get_order_key(it),
"!!! List order inconsistency !!!");
633 tbb::internal::suppress_unused_warning(first, last);
639 check_range( raw_begin(), raw_end() );
643 typename allocator_type::template rebind<node>::other my_node_allocator;
644 size_type my_element_count;
648 #if defined(_MSC_VER) && !defined(__INTEL_COMPILER) 649 #pragma warning(push) 650 #pragma warning(disable: 4127) // warning C4127: conditional expression is constant 653 template <
typename Traits>
654 class concurrent_unordered_base :
public Traits
658 typedef concurrent_unordered_base<Traits> self_type;
659 typedef typename Traits::value_type value_type;
660 typedef typename Traits::key_type key_type;
661 typedef typename Traits::hash_compare hash_compare;
662 typedef typename Traits::value_compare value_compare;
663 typedef typename Traits::allocator_type allocator_type;
664 typedef typename hash_compare::hasher hasher;
665 typedef typename hash_compare::key_equal key_equal;
666 typedef typename allocator_type::pointer pointer;
667 typedef typename allocator_type::const_pointer const_pointer;
668 typedef typename allocator_type::reference reference;
669 typedef typename allocator_type::const_reference const_reference;
670 typedef typename allocator_type::size_type size_type;
671 typedef typename allocator_type::difference_type difference_type;
672 typedef split_ordered_list<value_type, typename Traits::allocator_type> solist_t;
673 typedef typename solist_t::nodeptr_t nodeptr_t;
675 typedef typename solist_t::raw_iterator raw_iterator;
676 typedef typename solist_t::raw_const_iterator raw_const_iterator;
677 typedef typename solist_t::iterator iterator;
678 typedef typename solist_t::const_iterator const_iterator;
679 typedef iterator local_iterator;
680 typedef const_iterator const_local_iterator;
681 using Traits::my_hash_compare;
682 using Traits::get_key;
683 using Traits::allow_multimapping;
685 static const size_type initial_bucket_number = 8;
687 typedef std::pair<iterator, iterator> pairii_t;
688 typedef std::pair<const_iterator, const_iterator> paircc_t;
690 static size_type
const pointers_per_table =
sizeof(size_type) * 8;
691 static const size_type initial_bucket_load = 4;
693 struct call_internal_clear_on_exit{
694 concurrent_unordered_base* my_instance;
695 call_internal_clear_on_exit(concurrent_unordered_base* instance) : my_instance(instance) {}
696 void dismiss(){ my_instance = NULL;}
697 ~call_internal_clear_on_exit(){
699 my_instance->internal_clear();
705 concurrent_unordered_base(size_type n_of_buckets = initial_bucket_number,
706 const hash_compare& hc = hash_compare(),
const allocator_type& a = allocator_type())
707 : Traits(hc), my_solist(a),
708 my_allocator(a), my_maximum_bucket_size((float) initial_bucket_load)
710 if( n_of_buckets == 0) ++n_of_buckets;
711 my_number_of_buckets = size_type(1)<<__TBB_Log2((uintptr_t)n_of_buckets*2-1);
715 concurrent_unordered_base(
const concurrent_unordered_base& right,
const allocator_type& a)
716 : Traits(right.my_hash_compare), my_solist(a), my_allocator(a)
719 internal_copy(right);
722 concurrent_unordered_base(
const concurrent_unordered_base& right)
723 : Traits(right.my_hash_compare), my_solist(right.get_allocator()), my_allocator(right.get_allocator())
727 internal_copy(right);
730 #if __TBB_CPP11_RVALUE_REF_PRESENT 731 concurrent_unordered_base(concurrent_unordered_base&& right)
732 : Traits(right.my_hash_compare), my_solist(right.get_allocator()), my_allocator(right.get_allocator())
738 concurrent_unordered_base(concurrent_unordered_base&& right,
const allocator_type& a)
739 : Traits(right.my_hash_compare), my_solist(a), my_allocator(a)
741 call_internal_clear_on_exit clear_buckets_on_exception(
this);
744 if (a == right.get_allocator()){
747 my_maximum_bucket_size = right.my_maximum_bucket_size;
748 my_number_of_buckets = right.my_number_of_buckets;
749 my_solist.my_element_count = right.my_solist.my_element_count;
751 if (! right.my_solist.empty()){
752 nodeptr_t previous_node = my_solist.my_head;
755 for (raw_const_iterator it = ++(right.my_solist.raw_begin()), last = right.my_solist.raw_end(); it != last; ++it)
757 const nodeptr_t pnode = it.get_node_ptr();
759 if (pnode->is_dummy()) {
760 node = my_solist.create_node(pnode->get_order_key());
761 size_type bucket = __TBB_ReverseBits(pnode->get_order_key()) % my_number_of_buckets;
762 set_bucket(bucket, node);
764 node = my_solist.create_node(pnode->get_order_key(), std::move(pnode->my_element));
767 previous_node = my_solist.try_insert_atomic(previous_node, node, NULL);
768 __TBB_ASSERT(previous_node != NULL,
"Insertion of node failed. Concurrent inserts in constructor ?");
770 my_solist.check_range();
774 clear_buckets_on_exception.dismiss();
777 #endif // __TBB_CPP11_RVALUE_REF_PRESENT 779 concurrent_unordered_base& operator=(
const concurrent_unordered_base& right) {
781 internal_copy(right);
785 #if __TBB_CPP11_RVALUE_REF_PRESENT 786 concurrent_unordered_base& operator=(concurrent_unordered_base&& other)
789 typedef typename tbb::internal::allocator_traits<allocator_type>::propagate_on_container_move_assignment pocma_t;
790 if(pocma_t::value || this->my_allocator == other.my_allocator) {
791 concurrent_unordered_base trash (std::move(*
this));
793 if (pocma_t::value) {
796 swap(this->my_solist.my_node_allocator, other.my_solist.my_node_allocator);
797 swap(this->my_allocator, other.my_allocator);
800 concurrent_unordered_base moved_copy(std::move(other),this->my_allocator);
801 this->swap(moved_copy);
807 #endif // __TBB_CPP11_RVALUE_REF_PRESENT 809 #if __TBB_INITIALIZER_LISTS_PRESENT 810 concurrent_unordered_base& operator=(std::initializer_list<value_type> il)
814 this->insert(il.begin(),il.end());
817 #endif // __TBB_INITIALIZER_LISTS_PRESENT 820 ~concurrent_unordered_base() {
826 allocator_type get_allocator()
const {
827 return my_solist.get_allocator();
832 return my_solist.empty();
835 size_type size()
const {
836 return my_solist.size();
839 size_type max_size()
const {
840 return my_solist.max_size();
845 return my_solist.begin();
848 const_iterator begin()
const {
849 return my_solist.begin();
853 return my_solist.end();
856 const_iterator end()
const {
857 return my_solist.end();
860 const_iterator cbegin()
const {
861 return my_solist.cbegin();
864 const_iterator cend()
const {
865 return my_solist.cend();
869 class const_range_type : tbb::internal::no_assign {
870 const concurrent_unordered_base &my_table;
871 raw_const_iterator my_begin_node;
872 raw_const_iterator my_end_node;
873 mutable raw_const_iterator my_midpoint_node;
876 typedef typename concurrent_unordered_base::size_type size_type;
877 typedef typename concurrent_unordered_base::value_type value_type;
878 typedef typename concurrent_unordered_base::reference reference;
879 typedef typename concurrent_unordered_base::difference_type difference_type;
880 typedef typename concurrent_unordered_base::const_iterator iterator;
883 bool empty()
const {
return my_begin_node == my_end_node;}
886 bool is_divisible()
const {
887 return my_midpoint_node != my_end_node;
890 const_range_type( const_range_type &r, split ) :
891 my_table(r.my_table), my_end_node(r.my_end_node)
893 r.my_end_node = my_begin_node = r.my_midpoint_node;
894 __TBB_ASSERT( !empty(),
"Splitting despite the range is not divisible" );
895 __TBB_ASSERT( !r.empty(),
"Splitting despite the range is not divisible" );
900 const_range_type(
const concurrent_unordered_base &a_table ) :
901 my_table(a_table), my_begin_node(a_table.my_solist.begin()),
902 my_end_node(a_table.my_solist.end())
906 iterator begin()
const {
return my_table.my_solist.get_iterator(my_begin_node); }
907 iterator end()
const {
return my_table.my_solist.get_iterator(my_end_node); }
909 size_type grainsize()
const {
return 1; }
912 void set_midpoint()
const {
913 if( my_begin_node == my_end_node )
914 my_midpoint_node = my_end_node;
916 sokey_t begin_key = solist_t::get_safe_order_key(my_begin_node);
917 sokey_t end_key = solist_t::get_safe_order_key(my_end_node);
918 size_t mid_bucket = __TBB_ReverseBits( begin_key + (end_key-begin_key)/2 ) % my_table.my_number_of_buckets;
919 while ( !my_table.is_initialized(mid_bucket) ) mid_bucket = my_table.get_parent(mid_bucket);
920 if(__TBB_ReverseBits(mid_bucket) > begin_key) {
922 my_midpoint_node = my_table.my_solist.first_real_iterator(my_table.get_bucket( mid_bucket ));
926 my_midpoint_node = my_end_node;
930 sokey_t mid_key = solist_t::get_safe_order_key(my_midpoint_node);
931 __TBB_ASSERT( begin_key < mid_key,
"my_begin_node is after my_midpoint_node" );
932 __TBB_ASSERT( mid_key <= end_key,
"my_midpoint_node is after my_end_node" );
934 #endif // TBB_USE_ASSERT 939 class range_type :
public const_range_type {
941 typedef typename concurrent_unordered_base::iterator iterator;
943 range_type( range_type &r, split ) : const_range_type( r, split() ) {}
945 range_type(
const concurrent_unordered_base &a_table ) : const_range_type(a_table) {}
947 iterator begin()
const {
return solist_t::get_iterator( const_range_type::begin() ); }
948 iterator end()
const {
return solist_t::get_iterator( const_range_type::end() ); }
952 return range_type( *
this );
955 const_range_type range()
const {
956 return const_range_type( *
this );
960 std::pair<iterator, bool> insert(
const value_type& value) {
961 return internal_insert(value);
964 iterator insert(const_iterator,
const value_type& value) {
966 return insert(value).first;
969 #if __TBB_CPP11_RVALUE_REF_PRESENT 970 std::pair<iterator, bool> insert(value_type&& value) {
971 return internal_insert(std::move(value));
974 iterator insert(const_iterator, value_type&& value) {
976 return insert(std::move(value)).first;
979 #if __TBB_CPP11_VARIADIC_TEMPLATES_PRESENT 980 template<
typename... Args>
981 std::pair<iterator, bool> emplace(Args&&... args) {
982 nodeptr_t pnode = my_solist.create_node_v(tbb::internal::forward<Args>(args)...);
983 const sokey_t hashed_element_key = (sokey_t) my_hash_compare(get_key(pnode->my_element));
984 const sokey_t order_key = split_order_key_regular(hashed_element_key);
985 pnode->init(order_key);
987 return internal_insert(pnode->my_element, pnode);
990 template<
typename... Args>
991 iterator emplace_hint(const_iterator, Args&&... args) {
993 return emplace(tbb::internal::forward<Args>(args)...).first;
996 #endif // __TBB_CPP11_VARIADIC_TEMPLATES_PRESENT 997 #endif // __TBB_CPP11_RVALUE_REF_PRESENT 999 template<
class Iterator>
1000 void insert(Iterator first, Iterator last) {
1001 for (Iterator it = first; it != last; ++it)
1005 #if __TBB_INITIALIZER_LISTS_PRESENT 1006 void insert(std::initializer_list<value_type> il) {
1008 insert(il.begin(), il.end());
1012 iterator unsafe_erase(const_iterator where) {
1013 return internal_erase(where);
1016 iterator unsafe_erase(const_iterator first, const_iterator last) {
1017 while (first != last)
1018 unsafe_erase(first++);
1019 return my_solist.get_iterator(first);
1022 size_type unsafe_erase(
const key_type& key) {
1023 pairii_t where = equal_range(key);
1024 size_type item_count = internal_distance(where.first, where.second);
1025 unsafe_erase(where.first, where.second);
1029 void swap(concurrent_unordered_base& right) {
1030 if (
this != &right) {
1031 std::swap(my_hash_compare, right.my_hash_compare);
1032 my_solist.swap(right.my_solist);
1033 internal_swap_buckets(right);
1034 std::swap(my_number_of_buckets, right.my_number_of_buckets);
1035 std::swap(my_maximum_bucket_size, right.my_maximum_bucket_size);
1040 hasher hash_function()
const {
1041 return my_hash_compare.my_hash_object;
1044 key_equal key_eq()
const {
1045 return my_hash_compare.my_key_compare_object;
1056 __TBB_ASSERT(my_buckets[0] == NULL, NULL);
1057 raw_iterator dummy_node = my_solist.raw_begin();
1058 set_bucket(0, dummy_node);
1062 iterator find(
const key_type& key) {
1063 return internal_find(key);
1066 const_iterator find(
const key_type& key)
const {
1067 return const_cast<self_type*
>(
this)->internal_find(key);
1070 size_type count(
const key_type& key)
const {
1071 if(allow_multimapping) {
1072 paircc_t answer = equal_range(key);
1073 size_type item_count = internal_distance(answer.first, answer.second);
1076 return const_cast<self_type*
>(
this)->internal_find(key) == end()?0:1;
1080 std::pair<iterator, iterator> equal_range(
const key_type& key) {
1081 return internal_equal_range(key);
1084 std::pair<const_iterator, const_iterator> equal_range(
const key_type& key)
const {
1085 return const_cast<self_type*
>(
this)->internal_equal_range(key);
1089 size_type unsafe_bucket_count()
const {
1090 return my_number_of_buckets;
1093 size_type unsafe_max_bucket_count()
const {
1094 return segment_size(pointers_per_table-1);
1097 size_type unsafe_bucket_size(size_type bucket) {
1098 size_type item_count = 0;
1099 if (is_initialized(bucket)) {
1100 raw_iterator it = get_bucket(bucket);
1102 for (; it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy(); ++it)
1108 size_type unsafe_bucket(
const key_type& key)
const {
1109 sokey_t order_key = (sokey_t) my_hash_compare(key);
1110 size_type bucket = order_key % my_number_of_buckets;
1115 local_iterator unsafe_begin(size_type bucket) {
1116 if (!is_initialized(bucket))
1119 raw_iterator it = get_bucket(bucket);
1120 return my_solist.first_real_iterator(it);
1124 const_local_iterator unsafe_begin(size_type bucket)
const 1126 if (!is_initialized(bucket))
1129 raw_const_iterator it = get_bucket(bucket);
1130 return my_solist.first_real_iterator(it);
1135 local_iterator unsafe_end(size_type bucket)
1137 if (!is_initialized(bucket))
1140 raw_iterator it = get_bucket(bucket);
1144 while(it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy());
1147 return my_solist.first_real_iterator(it);
1152 const_local_iterator unsafe_end(size_type bucket)
const 1154 if (!is_initialized(bucket))
1157 raw_const_iterator it = get_bucket(bucket);
1161 while(it != my_solist.raw_end() && !it.get_node_ptr()->is_dummy());
1164 return my_solist.first_real_iterator(it);
1167 const_local_iterator unsafe_cbegin(size_type bucket)
const {
1168 return ((
const self_type *)
this)->unsafe_begin(bucket);
1171 const_local_iterator unsafe_cend(size_type bucket)
const {
1172 return ((
const self_type *)
this)->unsafe_end(bucket);
1176 float load_factor()
const {
1177 return (
float) size() / (float) unsafe_bucket_count();
1180 float max_load_factor()
const {
1181 return my_maximum_bucket_size;
1184 void max_load_factor(
float newmax) {
1185 if (newmax != newmax || newmax < 0)
1186 tbb::internal::throw_exception(tbb::internal::eid_invalid_load_factor);
1187 my_maximum_bucket_size = newmax;
1193 void rehash(size_type buckets) {
1194 size_type current_buckets = my_number_of_buckets;
1195 if (current_buckets >= buckets)
1197 my_number_of_buckets = size_type(1)<<__TBB_Log2((uintptr_t)buckets*2-1);
1203 void internal_init() {
1205 memset(my_buckets, 0, pointers_per_table *
sizeof(
void *));
1208 raw_iterator dummy_node = my_solist.raw_begin();
1209 set_bucket(0, dummy_node);
1212 void internal_clear() {
1213 for (size_type index = 0; index < pointers_per_table; ++index) {
1214 if (my_buckets[index] != NULL) {
1215 size_type sz = segment_size(index);
1216 for (size_type index2 = 0; index2 < sz; ++index2)
1217 my_allocator.destroy(&my_buckets[index][index2]);
1218 my_allocator.deallocate(my_buckets[index], sz);
1219 my_buckets[index] = 0;
1224 void internal_copy(
const self_type& right) {
1227 my_maximum_bucket_size = right.my_maximum_bucket_size;
1228 my_number_of_buckets = right.my_number_of_buckets;
1231 insert(right.begin(), right.end());
1232 my_hash_compare = right.my_hash_compare;
1233 } __TBB_CATCH(...) {
1239 void internal_swap_buckets(concurrent_unordered_base& right)
1242 for (size_type index = 0; index < pointers_per_table; ++index)
1244 raw_iterator * iterator_pointer = my_buckets[index];
1245 my_buckets[index] = right.my_buckets[index];
1246 right.my_buckets[index] = iterator_pointer;
1252 static size_type internal_distance(const_iterator first, const_iterator last)
1256 for (const_iterator it = first; it != last; ++it)
1263 template<
typename ValueType>
1264 std::pair<iterator, bool> internal_insert( __TBB_FORWARDING_REF(ValueType) value, nodeptr_t pnode = NULL)
1266 sokey_t order_key = (sokey_t) my_hash_compare(get_key(value));
1267 size_type bucket = order_key % my_number_of_buckets;
1271 if (!is_initialized(bucket))
1272 init_bucket(bucket);
1274 size_type new_count = 0;
1275 order_key = split_order_key_regular(order_key);
1276 raw_iterator it = get_bucket(bucket);
1277 raw_iterator last = my_solist.raw_end();
1278 raw_iterator where = it;
1280 __TBB_ASSERT(where != last,
"Invalid head node");
1287 if (where == last || solist_t::get_order_key(where) > order_key ||
1289 (allow_multimapping && solist_t::get_order_key(where) == order_key &&
1290 !my_hash_compare(get_key(*where), get_key(value))))
1293 pnode = my_solist.create_node(order_key, tbb::internal::forward<ValueType>(value));
1296 std::pair<iterator, bool> result = my_solist.try_insert(it, where, pnode, &new_count);
1301 adjust_table_size(new_count, my_number_of_buckets);
1316 else if (!allow_multimapping && solist_t::get_order_key(where) == order_key &&
1317 my_hash_compare(get_key(*where), get_key(value)) == 0)
1320 my_solist.destroy_node(pnode);
1321 return std::pair<iterator, bool>(my_solist.get_iterator(where),
false);
1330 iterator internal_find(
const key_type& key)
1332 sokey_t order_key = (sokey_t) my_hash_compare(key);
1333 size_type bucket = order_key % my_number_of_buckets;
1336 if (!is_initialized(bucket))
1337 init_bucket(bucket);
1339 order_key = split_order_key_regular(order_key);
1340 raw_iterator last = my_solist.raw_end();
1342 for (raw_iterator it = get_bucket(bucket); it != last; ++it)
1344 if (solist_t::get_order_key(it) > order_key)
1350 else if (solist_t::get_order_key(it) == order_key)
1355 if (!my_hash_compare(get_key(*it), key))
1356 return my_solist.get_iterator(it);
1364 iterator internal_erase(const_iterator it)
1367 const key_type& key = get_key(*it);
1368 sokey_t order_key = (sokey_t) my_hash_compare(key);
1369 size_type bucket = order_key % my_number_of_buckets;
1372 if (!is_initialized(bucket))
1373 init_bucket(bucket);
1375 order_key = split_order_key_regular(order_key);
1377 raw_iterator previous = get_bucket(bucket);
1378 raw_iterator last = my_solist.raw_end();
1379 raw_iterator where = previous;
1381 __TBB_ASSERT(where != last,
"Invalid head node");
1389 else if (my_solist.get_iterator(where) == it)
1390 return my_solist.erase_node(previous, it);
1400 pairii_t internal_equal_range(
const key_type& key)
1402 sokey_t order_key = (sokey_t) my_hash_compare(key);
1403 size_type bucket = order_key % my_number_of_buckets;
1406 if (!is_initialized(bucket))
1407 init_bucket(bucket);
1409 order_key = split_order_key_regular(order_key);
1410 raw_iterator end_it = my_solist.raw_end();
1412 for (raw_iterator it = get_bucket(bucket); it != end_it; ++it)
1414 if (solist_t::get_order_key(it) > order_key)
1417 return pairii_t(end(), end());
1419 else if (solist_t::get_order_key(it) == order_key && !my_hash_compare(get_key(*it), key))
1421 iterator first = my_solist.get_iterator(it);
1422 iterator last = first;
1423 do ++last;
while( allow_multimapping && last != end() && !my_hash_compare(get_key(*last), key) );
1424 return pairii_t(first, last);
1428 return pairii_t(end(), end());
1432 void init_bucket(size_type bucket)
1435 __TBB_ASSERT( bucket != 0,
"The first bucket must always be initialized");
1437 size_type parent_bucket = get_parent(bucket);
1440 if (!is_initialized(parent_bucket))
1441 init_bucket(parent_bucket);
1443 raw_iterator parent = get_bucket(parent_bucket);
1446 raw_iterator dummy_node = my_solist.insert_dummy(parent, split_order_key_dummy(bucket));
1447 set_bucket(bucket, dummy_node);
1450 void adjust_table_size(size_type total_elements, size_type current_size)
1453 if ( ((
float) total_elements / (
float) current_size) > my_maximum_bucket_size )
1456 my_number_of_buckets.compare_and_swap(2u*current_size, current_size);
1462 size_type get_parent(size_type bucket)
const 1465 size_type msb = __TBB_Log2((uintptr_t)bucket);
1466 return bucket & ~(size_type(1) << msb);
1472 static size_type segment_index_of( size_type index ) {
1473 return size_type( __TBB_Log2( uintptr_t(index|1) ) );
1477 static size_type segment_base( size_type k ) {
1478 return (size_type(1)<<k & ~size_type(1));
1482 static size_type segment_size( size_type k ) {
1483 return k? size_type(1)<<k : 2;
1486 raw_iterator get_bucket(size_type bucket)
const {
1487 size_type segment = segment_index_of(bucket);
1488 bucket -= segment_base(segment);
1489 __TBB_ASSERT( my_buckets[segment],
"bucket must be in an allocated segment" );
1490 return my_buckets[segment][bucket];
1493 void set_bucket(size_type bucket, raw_iterator dummy_head) {
1494 size_type segment = segment_index_of(bucket);
1495 bucket -= segment_base(segment);
1497 if (my_buckets[segment] == NULL) {
1498 size_type sz = segment_size(segment);
1499 raw_iterator * new_segment = my_allocator.allocate(sz);
1500 std::memset(new_segment, 0, sz*
sizeof(raw_iterator));
1502 if (my_buckets[segment].compare_and_swap( new_segment, NULL) != NULL)
1503 my_allocator.deallocate(new_segment, sz);
1506 my_buckets[segment][bucket] = dummy_head;
1509 bool is_initialized(size_type bucket)
const {
1510 size_type segment = segment_index_of(bucket);
1511 bucket -= segment_base(segment);
1513 if (my_buckets[segment] == NULL)
1516 raw_iterator it = my_buckets[segment][bucket];
1517 return (it.get_node_ptr() != NULL);
1523 sokey_t split_order_key_regular(sokey_t order_key)
const {
1524 return __TBB_ReverseBits(order_key) | 0x1;
1528 sokey_t split_order_key_dummy(sokey_t order_key)
const {
1529 return __TBB_ReverseBits(order_key) & ~sokey_t(0x1);
1533 atomic<size_type> my_number_of_buckets;
1535 typename allocator_type::template rebind<raw_iterator>::other my_allocator;
1536 float my_maximum_bucket_size;
1537 atomic<raw_iterator*> my_buckets[pointers_per_table];
1539 #if defined(_MSC_VER) && !defined(__INTEL_COMPILER) 1540 #pragma warning(pop) // warning 4127 is back 1547 #endif // __TBB__concurrent_unordered_impl_H Definition: _flow_graph_async_msg_impl.h:32
The namespace tbb contains all components of the library.
Definition: parallel_for.h:44