22 #if !defined(__TBB_machine_H) || defined(__TBB_machine_sunos_sparc_H) 23 #error Do not #include this internal file directly; use public TBB headers instead. 26 #define __TBB_machine_sunos_sparc_H 31 #define __TBB_WORDSIZE 8 34 #define __TBB_ENDIANNESS __TBB_ENDIAN_BIG 38 #define __TBB_compiler_fence() __asm__ __volatile__ ("": : :"memory") 39 #define __TBB_control_consistency_helper() __TBB_compiler_fence() 40 #define __TBB_acquire_consistency_helper() __TBB_compiler_fence() 41 #define __TBB_release_consistency_helper() __TBB_compiler_fence() 42 #define __TBB_full_memory_fence() __asm__ __volatile__("membar #LoadLoad|#LoadStore|#StoreStore|#StoreLoad": : : "memory") 55 static inline int32_t __TBB_machine_cmpswp4(
volatile void *ptr, int32_t value, int32_t comparand ){
59 :
"=m"(*(int32_t *)ptr),
"=r"(result)
60 :
"m"(*(int32_t *)ptr),
"1"(value),
"r"(comparand),
"r"(ptr)
/** 64-bit atomic compare-and-swap.
    If *ptr equals comparand, stores value into *ptr; in all cases returns
    the pre-operation contents of *ptr.
    @param ptr 8-byte aligned location to operate on
    @param value replacement written on a successful compare
    @param comparand value expected at *ptr
    @return original contents of *ptr */
static inline int64_t __TBB_machine_cmpswp8(volatile void *ptr, int64_t value, int64_t comparand ){
    int64_t result;
    // SPARC V9 "casx": 64-bit form of cas; the value register (%1, tied to
    // result via the "1" constraint) receives the old memory contents.
    __asm__ __volatile__(
                         "casx\t[%5],%4,%1"
                         : "=m"(*(int64_t *)ptr), "=r"(result)
                         : "m"(*(int64_t *)ptr), "1"(value), "r"(comparand), "r"(ptr)
                         : "memory");
    return result;
}
//------------------------------------------------------------------------
// Fetch and add
//------------------------------------------------------------------------

/** 32-bit atomic fetch-and-add implemented as a cas retry loop.
    @param ptr 4-byte aligned location to operate on
    @param addend value to add to *ptr
    @return contents of *ptr immediately before the addition */
static inline int32_t __TBB_machine_fetchadd4(volatile void *ptr, int32_t addend){
    int32_t result;
    __asm__ __volatile__ (
                          "0:\t add\t %3, %4, %0\n"   // result = old + addend
                          "\t cas\t [%2], %3, %0\n"   // publish if memory still holds old; %0 gets actual old
                          "\t cmp\t %3, %0\n"         // did another thread race us?
                          "\t bne,a,pn\t %%icc, 0b\n" // yes: retry (annulled, predicted not-taken)
                          "\t mov %0, %3\n"           // branch delay slot: adopt freshly observed value
                          : "=&r"(result), "=m"(*(int32_t *)ptr)
                          : "r"(ptr), "r"(*(int32_t *)ptr), "r"(addend), "m"(*(int32_t *)ptr)
                          : "ccr", "memory");         // cmp writes the condition codes
    return result;
}
/** 64-bit atomic fetch-and-add implemented as a casx retry loop.
    @param ptr 8-byte aligned location to operate on
    @param addend value to add to *ptr
    @return contents of *ptr immediately before the addition */
static inline int64_t __TBB_machine_fetchadd8(volatile void *ptr, int64_t addend){
    int64_t result;
    __asm__ __volatile__ (
                          "0:\t add\t %3, %4, %0\n"   // result = old + addend
                          "\t casx\t [%2], %3, %0\n"  // publish if memory still holds old; %0 gets actual old
                          "\t cmp\t %3, %0\n"         // did another thread race us?
                          "\t bne,a,pn\t %%xcc, 0b\n" // yes: retry on 64-bit condition codes
                          "\t mov %0, %3\n"           // branch delay slot: adopt freshly observed value
                          : "=&r"(result), "=m"(*(int64_t *)ptr)
                          : "r"(ptr), "r"(*(int64_t *)ptr), "r"(addend), "m"(*(int64_t *)ptr)
                          : "ccr", "memory");         // cmp writes the condition codes
    return result;
}
130 static inline int64_t __TBB_machine_lg( uint64_t x ) {
131 __TBB_ASSERT(x,
"__TBB_Log2(0) undefined");
141 __asm__ (
"popc %1, %0" :
"=r"(count) :
"r"(x) );
/** Atomically OR value into the 64-bit word at *ptr (casx retry loop).
    Uses the %g1 scratch register for the candidate result.
    @param ptr 8-byte aligned location to operate on
    @param value bit mask to OR into *ptr */
static inline void __TBB_machine_or( volatile void *ptr, uint64_t value ) {
    __asm__ __volatile__ (
                          "0:\t or\t %2, %3, %%g1\n"  // g1 = old | value
                          "\t casx\t [%1], %2, %%g1\n"// publish if memory still holds old; g1 gets actual old
                          "\t cmp\t %2, %%g1\n"       // did another thread race us?
                          "\t bne,a,pn\t %%xcc, 0b\n" // yes: retry (annulled, predicted not-taken)
                          "\t mov %%g1, %2\n"         // branch delay slot: adopt freshly observed value
                          : "=m"(*(int64_t *)ptr)
                          : "r"(ptr), "r"(*(int64_t *)ptr), "r"(value), "m"(*(int64_t *)ptr)
                          : "ccr", "g1", "memory");   // cmp writes ccr; g1 is clobbered
}
/** Atomically AND value into the 64-bit word at *ptr (casx retry loop).
    Uses the %g1 scratch register for the candidate result.
    @param ptr 8-byte aligned location to operate on
    @param value bit mask to AND into *ptr */
static inline void __TBB_machine_and( volatile void *ptr, uint64_t value ) {
    __asm__ __volatile__ (
                          "0:\t and\t %2, %3, %%g1\n" // g1 = old & value
                          "\t casx\t [%1], %2, %%g1\n"// publish if memory still holds old; g1 gets actual old
                          "\t cmp\t %2, %%g1\n"       // did another thread race us?
                          "\t bne,a,pn\t %%xcc, 0b\n" // yes: retry (annulled, predicted not-taken)
                          "\t mov %%g1, %2\n"         // branch delay slot: adopt freshly observed value
                          : "=m"(*(int64_t *)ptr)
                          : "r"(ptr), "r"(*(int64_t *)ptr), "r"(value), "m"(*(int64_t *)ptr)
                          : "ccr", "g1", "memory");   // cmp writes ccr; g1 is clobbered
}
/** Spin-wait hint. Intentionally a no-op on this port: SPARC has no
    dedicated pause instruction here, and the inlined empty body is cheap.
    @param delay requested pause length (ignored) */
static inline void __TBB_machine_pause( int32_t delay ) {
    (void)delay; // do nothing, inlined, doesn't matter
}
/** Try to acquire a byte lock.
    "ldstub" atomically stores 0xFF into the flag byte and returns its
    previous value. The generic TBB path stores 0x01 instead, but any
    nonzero value means "locked", so 0xFF is compatible.
    @param flag the lock byte (0 == free)
    @return true iff the lock was free and is now held by the caller */
static inline bool __TBB_machine_trylockbyte(unsigned char &flag){
    unsigned char result;
    __asm__ __volatile__ (
            "ldstub\t [%2], %0\n"
            : "=r"(result), "=m"(flag)
            : "r"(&flag), "m"(flag)
            : "memory");
    return result == 0;
}
// Fall back to the generic tbb_machine.h implementations for the
// operations this port does not provide natively.
#define __TBB_USE_GENERIC_PART_WORD_CAS                     1
#define __TBB_USE_GENERIC_PART_WORD_FETCH_ADD               1
#define __TBB_USE_GENERIC_FETCH_STORE                       1
#define __TBB_USE_GENERIC_HALF_FENCED_LOAD_STORE            1
#define __TBB_USE_GENERIC_RELAXED_LOAD_STORE                1
#define __TBB_USE_GENERIC_SEQUENTIAL_CONSISTENCY_LOAD_STORE 1

// Bind the port-specific primitives defined above to the TBB machine API.
#define __TBB_AtomicOR(P,V)  __TBB_machine_or(P,V)
#define __TBB_AtomicAND(P,V) __TBB_machine_and(P,V)

#define __TBB_Pause(V) __TBB_machine_pause(V)
#define __TBB_Log2(V)  __TBB_machine_lg(V)

#define __TBB_TryLockByte(P) __TBB_machine_trylockbyte(P)