#ifndef EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_
#define EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_

#include <atomic>
#include <cstdint>
#include <mutex>
#include <vector>

namespace Eigen {

// RunQueue is a fixed-size, partially non-blocking deque of Work items.
// Operations on the front of the queue must be done by a single (owner)
// thread; operations on the back can be done by multiple threads concurrently.
// Remote threads operating on the back are serialized by a mutex, and each
// element carries a state (kEmpty, kBusy, kReady): threads claim an element
// with a CAS (empty->busy, ready->busy) and release it with a plain store.
template <typename Work, unsigned kSize>
class RunQueue {
 public:
  RunQueue() : front_(0), back_(0) {
    // kSize must be a power of two so that index masking works.
    eigen_assert((kSize & (kSize - 1)) == 0);
    eigen_assert(kSize > 2);            // queues of size 1 or 2 are not useful
    eigen_assert(kSize <= (64 << 10));  // leave room for the counter bits
    for (unsigned i = 0; i < kSize; i++)
      array_[i].state.store(kEmpty, std::memory_order_relaxed);
  }
  ~RunQueue() { eigen_assert(Size() == 0); }
  // PushFront inserts w at the beginning of the queue.
  // If the queue is full, returns w; otherwise returns a default-constructed Work.
  Work PushFront(Work w) {
    unsigned front = front_.load(std::memory_order_relaxed);
    Elem* e = &array_[front & kMask];
    uint8_t s = e->state.load(std::memory_order_relaxed);
    if (s != kEmpty ||
        !e->state.compare_exchange_strong(s, kBusy, std::memory_order_acquire))
      return w;
    front_.store(front + 1 + (kSize << 1), std::memory_order_relaxed);
    e->w = std::move(w);
    e->state.store(kReady, std::memory_order_release);
    return Work();
  }
  // PopFront removes and returns the first element in the queue.
  // If the queue was empty, returns a default-constructed Work.
  Work PopFront() {
    unsigned front = front_.load(std::memory_order_relaxed);
    Elem* e = &array_[(front - 1) & kMask];
    uint8_t s = e->state.load(std::memory_order_relaxed);
    if (s != kReady ||
        !e->state.compare_exchange_strong(s, kBusy, std::memory_order_acquire))
      return Work();
    Work w = std::move(e->w);
    e->state.store(kEmpty, std::memory_order_release);
    front = ((front - 1) & kMask2) | (front & ~kMask2);
    front_.store(front, std::memory_order_relaxed);
    return w;
  }
  // PushBack adds w at the end of the queue.
  // If the queue is full, returns w; otherwise returns a default-constructed Work.
  Work PushBack(Work w) {
    std::unique_lock<std::mutex> lock(mutex_);
    unsigned back = back_.load(std::memory_order_relaxed);
    Elem* e = &array_[(back - 1) & kMask];
    uint8_t s = e->state.load(std::memory_order_relaxed);
    if (s != kEmpty ||
        !e->state.compare_exchange_strong(s, kBusy, std::memory_order_acquire))
      return w;
    back = ((back - 1) & kMask2) | (back & ~kMask2);
    back_.store(back, std::memory_order_relaxed);
    e->w = std::move(w);
    e->state.store(kReady, std::memory_order_release);
    return Work();
  }
  // PopBack removes and returns the last element in the queue.
  // Can fail spuriously if the mutex is contended.
  Work PopBack() {
    if (Empty()) return Work();
    std::unique_lock<std::mutex> lock(mutex_, std::try_to_lock);
    if (!lock) return Work();
    unsigned back = back_.load(std::memory_order_relaxed);
    Elem* e = &array_[back & kMask];
    uint8_t s = e->state.load(std::memory_order_relaxed);
    if (s != kReady ||
        !e->state.compare_exchange_strong(s, kBusy, std::memory_order_acquire))
      return Work();
    Work w = std::move(e->w);
    e->state.store(kEmpty, std::memory_order_release);
    back_.store(back + 1 + (kSize << 1), std::memory_order_relaxed);
    return w;
  }
  // PopBackHalf removes roughly half of the elements at the back of the queue
  // and appends them to *result. Returns the number of elements removed.
  // Can fail spuriously if the mutex is contended.
  unsigned PopBackHalf(std::vector<Work>* result) {
    if (Empty()) return 0;
    std::unique_lock<std::mutex> lock(mutex_, std::try_to_lock);
    if (!lock) return 0;
    unsigned back = back_.load(std::memory_order_relaxed);
    unsigned size = Size();
    unsigned mid = back;
    if (size > 1) mid = back + (size - 1) / 2;
    unsigned n = 0;
    unsigned start = 0;
    for (; static_cast<int>(mid - back) >= 0; mid--) {
      Elem* e = &array_[mid & kMask];
      uint8_t s = e->state.load(std::memory_order_relaxed);
      if (n == 0) {
        if (s != kReady ||
            !e->state.compare_exchange_strong(s, kBusy,
                                              std::memory_order_acquire))
          continue;
        start = mid;
      } else {
        // No need to store a temporary kBusy state: the remaining elements
        // are already exclusively owned by this thread.
        eigen_assert(s == kReady);
      }
      result->push_back(std::move(e->w));
      e->state.store(kEmpty, std::memory_order_release);
      n++;
    }
    if (n != 0) back_.store(start + 1 + (kSize << 1), std::memory_order_relaxed);
    return n;
  }
  // Size returns the current queue size.
  // Can be called by any thread at any time.
  unsigned Size() const {
    // Loop until a consistent snapshot of front_/back_ is obtained. The
    // modification counters stored in the high bits make this check ABA-free.
    for (;;) {
      unsigned front = front_.load(std::memory_order_acquire);
      unsigned back = back_.load(std::memory_order_acquire);
      unsigned front1 = front_.load(std::memory_order_relaxed);
      if (front != front1) continue;
      int size = (front & kMask2) - (back & kMask2);
      // Fix wraparound on the 2 * kSize position ring.
      if (size < 0) size += 2 * kSize;
      // During concurrent modifications the computed size can briefly reach
      // kSize + 1, so clamp it.
      if (size > static_cast<int>(kSize)) size = kSize;
      return size;
    }
  }
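  // Worked example (illustrative, not from the original source): with
  // kSize = 4 we have kMask2 = 7, so positions live on a ring of
  // 2 * kSize = 8 slots. If the queue holds 3 items with back & kMask2 == 6
  // and front & kMask2 == 1 (occupied ring positions 6, 7, 0), then
  //   size = 1 - 6 = -5, and -5 + 2 * kSize = 3,
  // which is the correct element count despite the wraparound.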
  // Empty tests whether the queue is empty.
  // Can be called by any thread at any time.
  bool Empty() const { return Size() == 0; }
 private:
  static const unsigned kMask = kSize - 1;
  static const unsigned kMask2 = (kSize << 1) - 1;
  struct Elem {
    std::atomic<uint8_t> state;  // kEmpty, kBusy or kReady
    Work w;
  };
  enum { kEmpty, kBusy, kReady };
  std::mutex mutex_;  // serializes operations on the back of the queue
  Elem array_[kSize];
  // The low bits of front_/back_ hold the rolling ring position; the high
  // bits hold modification counters that distinguish an empty queue from a
  // full one and make Size() ABA-free.
  std::atomic<unsigned> front_;
  std::atomic<unsigned> back_;

  RunQueue(const RunQueue&) = delete;
  void operator=(const RunQueue&) = delete;
};
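// ---------------------------------------------------------------------------
// Usage sketch (illustrative, not part of the original header). The Work type
// (std::function<void()>), the queue size (1024), and the lambdas below are
// assumptions chosen for the example; the class itself only requires Work to
// be movable and default-constructible.
//
//   RunQueue<std::function<void()>, 1024> q;
//
//   // Owner thread: lock-free push/pop at the front.
//   std::function<void()> overflow = q.PushFront([] { /* do work */ });
//   if (overflow) { /* queue was full; run or re-queue the task */ }
//   std::function<void()> w = q.PopFront();
//   if (w) w();
//
//   // Any other thread: steal from the back (serialized by a mutex,
//   // may fail spuriously and return an empty Work).
//   std::function<void()> stolen = q.PopBack();
//   if (stolen) stolen();
// ---------------------------------------------------------------------------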
}  // namespace Eigen

#endif  // EIGEN_CXX11_THREADPOOL_RUNQUEUE_H_