Index: src/atomic.h
==================================================================
--- src/atomic.h
+++ src/atomic.h
@@ -966,11 +966,11 @@
 # error of_atomic_ptr_cmpswap not implemented!
 #endif
 }
 
 static OF_INLINE void
-of_memory_barrier(void)
+of_memory_barrier_sync(void)
 {
 #if !defined(OF_HAVE_THREADS)
 	/* nop */
 #elif defined(OF_X86_64_ASM)
 	__asm__ __volatile__ (
@@ -992,17 +992,37 @@
-# error of_memory_barrier not implemented!
+# error of_memory_barrier_sync not implemented!
 #endif
 }
 
 static OF_INLINE void
-of_memory_enter_barrier(void)
+of_memory_barrier_enter(void)
+{
+	of_memory_barrier_sync();
+}
+
+static OF_INLINE void
+of_memory_barrier_exit(void)
+{
+	of_memory_barrier_sync();
+}
+
+static OF_INLINE void
+of_memory_barrier_producer(void)
 {
-	of_memory_barrier();
+#if defined(OF_X86_64_ASM)
+	__asm__ __volatile__ ("sfence" ::: "memory");
+#else
+	of_memory_barrier_sync();
+#endif
 }
 
 static OF_INLINE void
-of_memory_leave_barrier(void)
+of_memory_barrier_consumer(void)
 {
-	of_memory_barrier();
+#if defined(OF_X86_64_ASM)
+	__asm__ __volatile__ ("lfence" ::: "memory");
+#else
+	of_memory_barrier_sync();
+#endif
 }
 
 OF_ASSUME_NONNULL_END
Index: src/threading.h
==================================================================
--- src/threading.h
+++ src/threading.h
@@ -182,11 +182,11 @@
 static OF_INLINE bool
 of_spinlock_trylock(of_spinlock_t *spinlock)
 {
 #if defined(OF_HAVE_ATOMIC_OPS)
 	if (of_atomic_int_cmpswap(spinlock, 0, 1)) {
-		of_memory_enter_barrier();
+		of_memory_barrier_enter();
 		return true;
 	}
 
 	return false;
 #elif defined(OF_HAVE_PTHREAD_SPINLOCKS)
@@ -221,11 +221,11 @@
 of_spinlock_unlock(of_spinlock_t *spinlock)
 {
 #if defined(OF_HAVE_ATOMIC_OPS)
 	bool ret = of_atomic_int_cmpswap(spinlock, 1, 0);
 
-	of_memory_leave_barrier();
+	of_memory_barrier_exit();
 
 	return ret;
 #elif defined(OF_HAVE_PTHREAD_SPINLOCKS)
 	return !pthread_spin_unlock(spinlock);
 #else