ObjFW Check-in [96a128f954]

Overview
Comment: atomic.h: Improve memory barrier

Replace of_memory_read_barrier() and of_memory_write_barrier() - which
are quite unspecific - with of_memory_enter_barrier() and
of_memory_leave_barrier().

Also add an assembly implementation for ARM and ARM64.

SHA3-256: 96a128f95416d10874cdc3525cf46411862ed745f529a8a291e97f00d9698e46
User & Date: js on 2016-07-30 21:18:37
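The renamed pair is meant to bracket a critical section: of_memory_enter_barrier() right after acquiring a lock and of_memory_leave_barrier() right before releasing it. In this check-in both still expand to the full of_memory_barrier(); the new names only make the intent explicit. The sketch below is illustrative and not part of the check-in: the lock flag, the function name and the __atomic_* compiler builtins are stand-ins chosen so the example is self-contained, while the of_memory_*_barrier() calls are the ones declared in src/atomic.h.

#include "atomic.h"	/* of_memory_enter_barrier(), of_memory_leave_barrier() */

static int lockFlag = 0;	/* hypothetical spin flag: 0 = free, 1 = held */
static int sharedCounter = 0;

static void
incrementLocked(void)
{
	/* Spin until we own the flag (relaxed exchange, GCC/Clang builtin). */
	while (__atomic_exchange_n(&lockFlag, 1, __ATOMIC_RELAXED) != 0);

	/* Entering the critical section: later accesses must not move above. */
	of_memory_enter_barrier();

	sharedCounter++;

	/* Leaving: the store above must be visible before the flag is cleared. */
	of_memory_leave_barrier();

	__atomic_store_n(&lockFlag, 0, __ATOMIC_RELAXED);
}

The follow-up check-in 8e7c6ddf8c (listed under Context) applies the same idea to of_spinlock_(un)lock.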
Context
2016-07-30 21:22  of_spinlock_(un)lock: Add memory barrier  check-in: 8e7c6ddf8c  user: js  tags: trunk
2016-07-30 21:18  atomic.h: Improve memory barrier  check-in: 96a128f954  user: js  tags: trunk  (this check-in)
2016-07-30 17:09  runtime/exception.m: Small fix for SjLj and SEH  check-in: 3c71107e06  user: js  tags: trunk
Changes

Modified src/atomic.h from [17afd94567] to [c6a5af4a20].

Old version (lines 967-1017):

#endif
}

static OF_INLINE void
of_memory_barrier(void)
{
#if !defined(OF_HAVE_THREADS)
#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
	__asm__ __volatile__ (
	    "mfence"
	);
#elif defined(OF_POWERPC_ASM)
	__asm__ __volatile__ (
	    "sync"




	);
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
	__sync_synchronize();
#elif defined(OF_HAVE_OSATOMIC)
	OSMemoryBarrier();
#else
# error of_memory_barrier not implemented!
#endif
}

static OF_INLINE void
of_memory_read_barrier(void)
{
#if !defined(OF_HAVE_THREADS)
#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
	__asm__ __volatile__ (
	    "lfence"
	);
#else
	of_memory_barrier();
#endif
}

static OF_INLINE void
of_memory_write_barrier(void)
{
#if !defined(OF_HAVE_THREADS)
#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
	__asm__ __volatile__ (
	    "sfence"
	);
#else
	of_memory_barrier();
#endif
}

OF_ASSUME_NONNULL_END

New version (lines 967-1008):

#endif
}

static OF_INLINE void
of_memory_barrier(void)
{
#if !defined(OF_HAVE_THREADS)
	/* nop */
#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
	__asm__ __volatile__ (
	    "mfence" ::: "memory"
	);
#elif defined(OF_POWERPC_ASM)
	__asm__ __volatile__ (
	    "sync" ::: "memory"
	);
#elif defined(OF_ARMV7_ASM) || defined(OF_ARM64_ASM)
	__asm__ __volatile__ (
	    "dmb" ::: "memory"
	);
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
	__sync_synchronize();
#elif defined(OF_HAVE_OSATOMIC)
	OSMemoryBarrier();
#else
# error of_memory_barrier not implemented!
#endif
}

static OF_INLINE void
of_memory_enter_barrier(void)
{
	of_memory_barrier();
}

static OF_INLINE void
of_memory_leave_barrier(void)
{
	of_memory_barrier();
}

OF_ASSUME_NONNULL_END
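
A further detail of the new implementation: every inline assembly statement now carries a "memory" clobber ("mfence" ::: "memory", "sync" ::: "memory" and the new ARM/ARM64 "dmb" ::: "memory"). The clobber makes the statement a barrier for the compiler as well: GCC and Clang must assume memory was modified, so they may neither cache memory values in registers across the statement nor reorder memory accesses around it. The sketch below is not ObjFW code; compiler_fence(), publish() and consume() are made-up names used only to show what the clobber alone buys and where the full barriers fit:

#include "atomic.h"	/* of_memory_enter_barrier(), of_memory_leave_barrier() */

/*
 * Compiler-only fence: emits no instruction, but the "memory" clobber
 * keeps GCC/Clang from moving loads or stores across it and from reusing
 * stale register copies of memory afterwards.
 */
#define compiler_fence() __asm__ __volatile__ ("" ::: "memory")

static int payload;
static int ready;

static void
publish(int value)
{
	payload = value;
	/* CPU + compiler barrier: payload must be visible before the flag. */
	of_memory_leave_barrier();
	ready = 1;
}

static int
consume(void)
{
	/* The fence forces ready to be re-read on every iteration. */
	while (ready == 0)
		compiler_fence();

	/* Do not read payload before the flag was actually observed. */
	of_memory_enter_barrier();
	return payload;
}

For a publish/consume pattern like this, x86's comparatively strong memory ordering means the compiler-level effect of the clobber is the critical part there, while on ARM, ARM64 and PowerPC the dmb/sync instruction is also required so other cores observe the stores in order.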