#ifndef LINTEL_ATOMIC_COUNTER_HPP
#define LINTEL_ATOMIC_COUNTER_HPP

// ... (backend selection elided; in this listing the asm backend is selected)
#define LINTEL_USE_GCC_ASM_ATOMICS 1

#if (LINTEL_USE_STD_ATOMICS + LINTEL_USE_GCC_BUILTIN_SYNC_ATOMICS + LINTEL_USE_GCC_ASM_ATOMICS != 1)
#    error Choose exactly one of the above
#endif

#if defined(__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 406) // need at least gcc 4.6
#    define LINTEL_HAS_STD_ATOMICS 1
#endif

#if defined(__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ < 407) // need at least gcc 4.7
#    if defined(LINTEL_USE_STD_ATOMICS)
#        error gcc-4.6 + std::atomic = bug (http://gcc.gnu.org/ml/gcc-bugs/2012-10/msg00158.html)
#    endif
#endif

#if defined(__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 401) // need at least gcc 4.1
#    if defined(__i386__) // Plain __i386__ does not have a __sync_add_and_fetch that can return a value.
#        if defined(__i486__) || defined(__i586__) || defined(__i686__)
#            define LINTEL_HAS_GCC_BUILTIN_SYNC_ATOMICS 1
#            define LINTEL_HAS_GCC_ASM_ATOMICS 1
#        endif
#    elif defined(__x86_64) || defined(__x86_64__) // (__i386__ is not defined on x86_64)
#        define LINTEL_HAS_GCC_BUILTIN_SYNC_ATOMICS 1
#        define LINTEL_HAS_GCC_ASM_ATOMICS 1
#    else // Assume all non-i386 architectures have it.
#        define LINTEL_HAS_GCC_BUILTIN_SYNC_ATOMICS 1
#    endif
#else
#    if defined(LINTEL_USE_GCC_BUILTIN_SYNC_ATOMICS)
#        error detected platform does not have __sync_* primitives
#    endif
#endif

#if defined(LINTEL_USE_GCC_BUILTIN_SYNC_ATOMICS) && not defined(LINTEL_HAS_GCC_BUILTIN_SYNC_ATOMICS)
#    error detected platform does not have __sync builtins
#endif

#if defined(LINTEL_USE_STD_ATOMICS) && not defined(LINTEL_HAS_STD_ATOMICS)
#    error detected platform does not have std::atomic<>
#endif

#if defined(LINTEL_USE_GCC_ASM_ATOMICS) && not defined(LINTEL_HAS_GCC_ASM_ATOMICS)
#    error detected platform does not have asm atomics
#endif

#if defined(LINTEL_USE_STD_ATOMICS)
#    include <atomic>
#endif

// Map the selected backend onto a single set of LINTEL_* macros.
#if defined(LINTEL_USE_STD_ATOMICS)
#    define LINTEL_ATOMIC_FETCH(op, var, amount) ::std::atomic_fetch_##op(var, amount)
#    define LINTEL_ATOMIC_LOAD(ptr) ::std::atomic_load(ptr)
#    define LINTEL_ATOMIC_STORE(ptr, val) ::std::atomic_store(ptr, val)
#    define LINTEL_ATOMIC_EXCHANGE(ptr, val) ::std::atomic_exchange(ptr, val)
#    define LINTEL_COMPARE_EXCHANGE(current, expected, desired) \
         ::std::atomic_compare_exchange_strong(current, expected, desired)
#    define LINTEL_ATOMIC_THREAD_FENCE(order) ::std::atomic_thread_fence(order)
#elif defined(LINTEL_USE_GCC_BUILTIN_SYNC_ATOMICS)
#    define LINTEL_ATOMIC_FETCH(op, var, amount) ::__sync_fetch_and_##op(var, amount)
#    define LINTEL_ATOMIC_LOAD(ptr) ({__typeof(*(ptr)) t = *(ptr); ::__sync_synchronize(); t;})
#    define LINTEL_ATOMIC_STORE(ptr, val) {::__sync_synchronize(); *(ptr) = val; ::__sync_synchronize();}
#    define LINTEL_ATOMIC_EXCHANGE(ptr, val) ::__sync_lock_test_and_set(ptr, val)
#    define LINTEL_COMPARE_EXCHANGE(current, expected, desired) \
         ({__typeof(*(expected)) val = *(expected); \
           val == (*(expected) = ::__sync_val_compare_and_swap(current, val, desired));})
#    define LINTEL_ATOMIC_THREAD_FENCE(order) {::__sync_synchronize(); (void)order;}
#elif defined(LINTEL_USE_GCC_ASM_ATOMICS)
#    define LINTEL_ATOMIC_FETCH(op, var, amount) lintel::x86Gcc_atomic_fetch_##op(var, amount)
#    define LINTEL_ATOMIC_LOAD(ptr) lintel::x86Gcc_atomic_load(ptr)
#    define LINTEL_ATOMIC_STORE(ptr, val) lintel::x86Gcc_atomic_store(ptr, val)
#    define LINTEL_ATOMIC_EXCHANGE(ptr, val) lintel::x86Gcc_atomic_exchange(ptr, val)
#    define LINTEL_COMPARE_EXCHANGE(current, expected, desired) \
         lintel::x86Gcc_compare_exchange(current, expected, desired)
#    define LINTEL_ATOMIC_THREAD_FENCE(order) lintel::x86Gcc_atomic_thread_fence(order)
#endif

namespace lintel {

#if defined(LINTEL_USE_STD_ATOMICS)
    // ... (memory_order is taken from <atomic>)
#else
    // ... (a local lintel::memory_order enumeration is defined here)
#endif

#if defined(LINTEL_USE_GCC_ASM_ATOMICS)

    template <typename T>
    static T x86Gcc_atomic_load(const T *counter) {
        T v;
        // On x86 an ordinary load already has acquire semantics; the empty asm is
        // a compiler-only barrier that keeps later accesses from being hoisted
        // above the load.
        asm volatile ("mov %1, %0" : "=r" (v) : "m" (*counter));
        asm volatile ("" ::: "memory");
        return v;
    }
    template <typename T>
    static void x86Gcc_atomic_store(T *counter, T v) {
        // xchg with a memory operand is implicitly locked, so this is both an
        // atomic store and a full memory barrier.
        asm volatile ("xchg %1, %0" : "+m" (*counter), "+r" (v) : : "memory");
    }
    template <typename T>
    static T x86Gcc_atomic_exchange(T *counter, T v) {
        // Atomically swap *counter and v; the previous contents of *counter come
        // back in v.
        asm volatile ("xchg %1, %0" : "+m" (*counter), "+r" (v) : : "memory");
        return v;
    }
    template <typename T>
    static bool x86Gcc_compare_exchange(T *current, T *expected, T desired) {
        bool result;
        // lock cmpxchg compares *current with the accumulator (*expected): on
        // success it stores desired, on failure it leaves the value it actually
        // observed in *expected.  setz copies the zero flag, i.e. whether the
        // swap happened, into result.
        asm volatile ("lock; cmpxchg %3,%0\n\t"
                      "setz %2"
                      : "+m" (*current), "+a" (*expected), "=rm" (result)
                      : "r" (desired)
                      : "memory", "cc");
        return result;
    }
    template <typename T>
    static T x86Gcc_atomic_fetch_add(T *counter, T v) {
        // lock xadd adds v to *counter and leaves the previous value in v.
        asm volatile ("lock xadd %1, %0" : "+m" (*counter), "+r" (v) : : "memory");
        return v;
    }

    template <typename T>
    static T x86Gcc_atomic_fetch_sub(T *counter, T v) {
        return x86Gcc_atomic_fetch_add<T>(counter, -v);
    }
    // x86Gcc_atomic_fetch_or / _and / _xor have no single x86 instruction that
    // also returns the previous value, so each is a compare-exchange retry loop
    // (a sketch of the full shape follows below).  Their update steps are:
    //
    //     desired = expected | v;   // x86Gcc_atomic_fetch_or
    //     desired = expected & v;   // x86Gcc_atomic_fetch_and
    //     desired = expected ^ v;   // x86Gcc_atomic_fetch_xor
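    // Sketch (a reconstruction, not the verbatim original): the shape shared by
    // the three bitwise fetch functions, shown here for fetch_or.  The helper
    // name is hypothetical; the real x86Gcc_atomic_fetch_or may differ in detail,
    // but it has to loop on x86Gcc_compare_exchange and return the previous value.
    template <typename T>
    static T example_fetch_or(T *counter, T v) {
        T expected = x86Gcc_atomic_load(counter);
        T desired;
        do {
            desired = expected | v;                   // the update step shown above
        } while (!x86Gcc_compare_exchange(counter, &expected, desired));
        return expected;                              // value seen just before our update
    }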
    static void x86Gcc_atomic_thread_fence(memory_order order) {
        // The '#'-prefixed mnemonics are comments in the emitted assembly: for
        // those orderings x86 already gives the required hardware guarantees, so
        // only the "memory" clobber (a compiler-level barrier) is needed.  Only
        // the sequentially consistent case pays for a real mfence.
        // ...
        asm volatile ("#lfence" ::: "memory");   // acquire
        // ...
        asm volatile ("#sfence" ::: "memory");   // release
        // ...
        asm volatile ("#mfence" ::: "memory");   // acquire + release
        // ...
        asm volatile ("mfence" ::: "memory");    // seq_cst
        // ...
    }

#endif // LINTEL_USE_GCC_ASM_ATOMICS

    inline void atomic_thread_fence(memory_order order) {
        LINTEL_ATOMIC_THREAD_FENCE(order);
    }

    inline void atomic_signal_fence(memory_order) {
        // Compiler-only barrier: no instruction is emitted, but the compiler may
        // not move memory accesses across it.
        asm volatile ("" ::: "memory");
    }
    /// An atomic counter that avoids using locks.
    template <typename T>
    class Atomic {
    public:
        // ...

        T addThenFetch(T amount) {
            return *this += amount;
        }

        // ...
#if defined (LINTEL_USE_STD_ATOMICS)
        // ...
#elif defined (LINTEL_USE_GCC_BUILTIN_SYNC_ATOMICS) || defined (LINTEL_USE_GCC_ASM_ATOMICS)
        T counter;
#endif
    };

    typedef Atomic<int> AtomicCounter;

// The free-function wrappers below mirror the std::atomic free-function
// interface; strict-aliasing optimizations are disabled around them.
#if defined(__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ > 404)
#pragma GCC push_options
#pragma GCC optimize ("no-strict-aliasing")
#endif

    // ... (function bodies elided)

    template <typename T>
    T atomic_load(const T *object);

    template <typename T, typename C>
    void atomic_store(T *object, C desired);

    template <typename T, typename C>
    T atomic_exchange(T *object, C desired);

    template <typename T, typename C>
    bool atomic_compare_exchange_strong(T *object, T *expected, C desired);

    template <typename T, typename C>
    T atomic_fetch_add(T *object, C operand);

    template <typename T, typename C>
    T atomic_fetch_sub(T *object, C operand);

    template <typename T, typename C>
    T atomic_fetch_or(T *object, C operand);

    template <typename T, typename C>
    T atomic_fetch_and(T *object, C operand);

    template <typename T, typename C>
    T atomic_fetch_xor(T *object, C operand);

#if defined(__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ > 404)
#pragma GCC pop_options
#endif

} // namespace lintel

#endif // LINTEL_ATOMIC_COUNTER_HPP
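// Usage sketch (illustrative, not part of AtomicCounter.hpp).  The include path
// Lintel/AtomicCounter.hpp is assumed; the functions and the variable are
// hypothetical.  AtomicCounter is the Atomic<int> typedef above, and
// incThenFetch / decThenFetch are its documented increment / decrement members.
#include <Lintel/AtomicCounter.hpp>

static lintel::AtomicCounter active_requests(0);

void example_begin_request() {
    active_requests.incThenFetch();              // atomic ++, returns the new value
}

void example_end_request() {
    if (active_requests.decThenFetch() == 0) {   // atomic --, test the new value
        // the last in-flight request just finished
    }
}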