/*
 * AkThreadYield() - yield the calling thread.
 *
 * Sleeps for the smallest representable interval (1 ns); the kernel
 * rounds this up to its scheduling granularity, which gives other
 * runnable threads a chance to execute.
 *
 * Wrapped in do { ... } while( 0 ) so the macro expands to a single
 * statement and composes safely with unbraced if/else.  (The pasted
 * original was missing the closing brace of the block entirely.)
 */
#define AkThreadYield() do { \
	struct timespec _akthreadyieldts; \
	_akthreadyieldts.tv_sec = 0; \
	_akthreadyieldts.tv_nsec = 1; \
	nanosleep( &_akthreadyieldts, NULL ); \
} while( 0 )
/*
 * AkSpinHint() - CPU hint for busy-wait (spin) loops.
 *
 * Emits the architecture's spin-wait instruction ("pause" on x86,
 * "yield" on ARM), which reduces power use and pipeline contention
 * while spinning.  The "memory" clobber additionally acts as a
 * compiler barrier so the spun-on variable is re-read each iteration.
 *
 * NOTE(review): the pasted original lacked the #define lines and the
 * #else/#endif of this conditional; reconstructed here so the #error
 * branch is reachable only on genuinely unsupported targets.
 */
#if defined( __x86_64__ ) || defined( __i386__ )
#define AkSpinHint() __asm__ volatile( "pause" ::: "memory" )
#elif defined( __aarch64__ ) || ( defined( __arm__ ) && __ARM_ARCH >= 7 )
#define AkSpinHint() __asm__ volatile( "yield" ::: "memory" )
#else
#error Unsupported platform for AkSpinHint
#endif
/*
 * Full (sequentially consistent) memory barrier.
 *
 * No trailing semicolon in the expansion: callers write the semicolon
 * themselves, so the macro behaves like a normal statement and does
 * not break unbraced if/else constructs.
 */
#define AK_ATOMIC_FENCE_FULL_BARRIER() __sync_synchronize()
64 static inline int32_t
AkAtomicLoad32(
AkAtomic32* pSrc ) { int32_t ret; __atomic_load( ( int32_t* )pSrc, &ret, __ATOMIC_SEQ_CST);
return ret; }
65 static inline void AkAtomicStore32(
AkAtomic32* pDest, int32_t value ) { __atomic_store( ( int32_t* )pDest, &value, __ATOMIC_SEQ_CST); }
69 static inline int32_t
AkAtomicAdd32(
AkAtomic32* pDest, int32_t value ) {
return __sync_add_and_fetch( ( int32_t* )pDest, value ); }
70 static inline int32_t
AkAtomicSub32(
AkAtomic32* pDest, int32_t value ) {
return __sync_sub_and_fetch( ( int32_t* )pDest, value ); }
71 static inline int32_t
AkAtomicAnd32(
AkAtomic32* pDest, int32_t value ) {
return __sync_and_and_fetch( ( int32_t* )pDest, value ); }
72 static inline int32_t
AkAtomicOr32(
AkAtomic32* pDest, int32_t value ) {
return __sync_or_and_fetch( ( int32_t* )pDest, value ); }
73 static inline int AkAtomicCas32(
AkAtomic32* pDest, int32_t proposed, int32_t expected ) {
return __sync_bool_compare_and_swap( ( int32_t* )pDest, expected, proposed ); }
75 #if defined( __i386 ) || ( defined( __ARM_ARCH ) && ( __ARM_ARCH <= 7 ) )
77 static inline void AkAtomicStore64(
AkAtomic64* pDest, int64_t value ) { int64_t tmp;
do { tmp = *pDest; }
while( __sync_val_compare_and_swap( pDest, tmp, value ) != tmp ); }
79 static inline int64_t
AkAtomicLoad64(
AkAtomic64* pSrc ) { int64_t ret; __atomic_load( ( int64_t* )pSrc, &ret, __ATOMIC_SEQ_CST);
return ret; }
80 static inline void AkAtomicStore64(
AkAtomic64* pDest, int64_t value ) { __atomic_store( ( int64_t* )pDest, &value, __ATOMIC_SEQ_CST ); }
86 static inline int64_t
AkAtomicAdd64(
AkAtomic64* pDest, int64_t value ) {
return __sync_add_and_fetch( ( int64_t* )pDest, value ); }
87 static inline int64_t
AkAtomicSub64(
AkAtomic64* pDest, int64_t value ) {
return __sync_sub_and_fetch( ( int64_t* )pDest, value ); }
88 static inline int64_t
AkAtomicAnd64(
AkAtomic64* pDest, int64_t value ) {
return __sync_and_and_fetch( ( int64_t* )pDest, value ); }
89 static inline int64_t
AkAtomicOr64(
AkAtomic64* pDest, int64_t value ) {
return __sync_or_and_fetch( ( int64_t* )pDest, value ); }
90 static inline int AkAtomicCas64(
AkAtomic64* pDest, int64_t proposed, int64_t expected ) {
return __sync_bool_compare_and_swap( ( int64_t* )pDest, expected, proposed ); }
92 static inline void*
AkAtomicLoadPtr(
AkAtomicPtr* pSrc ) {
size_t ret; __atomic_load( (
size_t* )pSrc, &ret, __ATOMIC_SEQ_CST);
return (
void* )ret; }
93 static inline void AkAtomicStorePtr(
AkAtomicPtr* pDest,
void* value ) { __atomic_store( (
size_t* )pDest, (
size_t* )&value, __ATOMIC_SEQ_CST); }
95 static inline int AkAtomicCasPtr(
AkAtomicPtr* pDest,
void* proposed,
void* expected ) {
return __sync_bool_compare_and_swap( (
void** )pDest, expected, proposed ); }