ObjFW  Check-in [48ffea9a9a]

Overview
Comment: Clean up memory barriers
Downloads: Tarball | ZIP archive | SQL archive
Timelines: family | ancestors | descendants | both | trunk
Files: files | file ages | folders
SHA3-256: 48ffea9a9a39898158765d4789f6d673ed4e3134f847850b90dba1fbf69932d1
User & Date: js on 2017-04-13 13:19:02
Other Links: manifest | tags
Context
2017-04-13
13:30
-[OFObject release]: Add memory barriers check-in: 05e2b4b851 user: js tags: trunk
13:19
Clean up memory barriers check-in: 48ffea9a9a user: js tags: trunk
12:57
OFSandbox: Don't waste memory on bools check-in: 0fc70a4510 user: js tags: trunk
Changes

Modified src/atomic_builtins.h from [06ac7a3e57] to [07554058fd].

10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
 *
 * Alternatively, it may be distributed under the terms of the GNU General
 * Public License, either version 2 or 3, which can be found in the file
 * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this
 * file.
 */

OF_ASSUME_NONNULL_BEGIN

static OF_INLINE int
of_atomic_int_add(volatile int *_Nonnull p, int i)
{
	return __atomic_add_fetch(p, i, __ATOMIC_RELAXED);
}

static OF_INLINE int32_t







<
<







10
11
12
13
14
15
16


17
18
19
20
21
22
23
 *
 * Alternatively, it may be distributed under the terms of the GNU General
 * Public License, either version 2 or 3, which can be found in the file
 * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this
 * file.
 */



static OF_INLINE int
of_atomic_int_add(volatile int *_Nonnull p, int i)
{
	return __atomic_add_fetch(p, i, __ATOMIC_RELAXED);
}

static OF_INLINE int32_t
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
    void *_Nullable o, void *_Nullable n)
{
	return __atomic_compare_exchange(p, &o, &n, false,
	    __ATOMIC_RELAXED, __ATOMIC_RELAXED);
}

static OF_INLINE void
of_memory_barrier_sync(void)
{
	__atomic_thread_fence(__ATOMIC_SEQ_CST);
}

static OF_INLINE void
of_memory_barrier_enter(void)
{
	__atomic_thread_fence(__ATOMIC_SEQ_CST);
}

static OF_INLINE void
of_memory_barrier_exit(void)
{
	__atomic_thread_fence(__ATOMIC_SEQ_CST);
}

static OF_INLINE void
of_memory_barrier_producer(void)
{
	__atomic_thread_fence(__ATOMIC_SEQ_CST);
}

static OF_INLINE void
of_memory_barrier_consumer(void)
{
	__atomic_thread_fence(__ATOMIC_SEQ_CST);
}

OF_ASSUME_NONNULL_END







|





|

|



|

|

<
<
<
<
<
<
<
<
<
<
<
<
<
<
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151














    void *_Nullable o, void *_Nullable n)
{
	return __atomic_compare_exchange(p, &o, &n, false,
	    __ATOMIC_RELAXED, __ATOMIC_RELAXED);
}

static OF_INLINE void
of_memory_barrier_full(void)
{
	__atomic_thread_fence(__ATOMIC_SEQ_CST);
}

static OF_INLINE void
of_memory_barrier_acquire(void)
{
	__atomic_thread_fence(__ATOMIC_ACQUIRE);
}

static OF_INLINE void
of_memory_barrier_release(void)
{
	__atomic_thread_fence(__ATOMIC_RELEASE);
}














Modified src/atomic_no_threads.h from [cc41e9df60] to [d6831a1ea4].

10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
 *
 * Alternatively, it may be distributed under the terms of the GNU General
 * Public License, either version 2 or 3, which can be found in the file
 * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this
 * file.
 */

OF_ASSUME_NONNULL_BEGIN

static OF_INLINE int
of_atomic_int_add(volatile int *_Nonnull p, int i)
{
	return (*p += i);
}

static OF_INLINE int32_t







<
<







10
11
12
13
14
15
16


17
18
19
20
21
22
23
 *
 * Alternatively, it may be distributed under the terms of the GNU General
 * Public License, either version 2 or 3, which can be found in the file
 * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this
 * file.
 */



static OF_INLINE int
of_atomic_int_add(volatile int *_Nonnull p, int i)
{
	return (*p += i);
}

static OF_INLINE int32_t
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
		return true;
	}

	return false;
}

static OF_INLINE void
of_memory_barrier_sync(void)
{
	/* nop */
}

static OF_INLINE void
of_memory_barrier_enter(void)
{
	/* nop */
}

static OF_INLINE void
of_memory_barrier_exit(void)
{
	/* nop */
}

static OF_INLINE void
of_memory_barrier_producer(void)
{
	/* nop */
}

static OF_INLINE void
of_memory_barrier_consumer(void)
{
	/* nop */
}

OF_ASSUME_NONNULL_END







|





|





|



<
<
<
<
<
<
<
<
<
<
<
<
<
<
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163














		return true;
	}

	return false;
}

static OF_INLINE void
of_memory_barrier(void)
{
	/* nop */
}

static OF_INLINE void
of_memory_barrier_acquire(void)
{
	/* nop */
}

static OF_INLINE void
of_memory_barrier_release(void)
{
	/* nop */
}














Modified src/atomic_osatomic.h from [267428ae70] to [eb1a6c5a77].

12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
 * Public License, either version 2 or 3, which can be found in the file
 * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this
 * file.
 */

#include <libkern/OSAtomic.h>

OF_ASSUME_NONNULL_BEGIN

static OF_INLINE int
of_atomic_int_add(volatile int *_Nonnull p, int i)
{
	return OSAtomicAdd32(i, p);
}

static OF_INLINE int32_t







<
<







12
13
14
15
16
17
18


19
20
21
22
23
24
25
 * Public License, either version 2 or 3, which can be found in the file
 * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this
 * file.
 */

#include <libkern/OSAtomic.h>



static OF_INLINE int
of_atomic_int_add(volatile int *_Nonnull p, int i)
{
	return OSAtomicAdd32(i, p);
}

static OF_INLINE int32_t
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
of_atomic_ptr_cmpswap(void *volatile _Nullable *_Nonnull p,
    void *_Nullable o, void *_Nullable n)
{
	return OSAtomicCompareAndSwapPtr(o, n, p);
}

static OF_INLINE void
of_memory_barrier_sync(void)
{
	OSMemoryBarrier();
}

static OF_INLINE void
of_memory_barrier_enter(void)
{
	OSMemoryBarrier();
}

static OF_INLINE void
of_memory_barrier_exit(void)
{
	OSMemoryBarrier();
}

static OF_INLINE void
of_memory_barrier_producer(void)
{
	OSMemoryBarrier();
}

static OF_INLINE void
of_memory_barrier_consumer(void)
{
	OSMemoryBarrier();
}

OF_ASSUME_NONNULL_END







|





|





|



<
<
<
<
<
<
<
<
<
<
<
<
<
<
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158














of_atomic_ptr_cmpswap(void *volatile _Nullable *_Nonnull p,
    void *_Nullable o, void *_Nullable n)
{
	return OSAtomicCompareAndSwapPtr(o, n, p);
}

static OF_INLINE void
of_memory_barrier(void)
{
	OSMemoryBarrier();
}

static OF_INLINE void
of_memory_barrier_acquire(void)
{
	OSMemoryBarrier();
}

static OF_INLINE void
of_memory_barrier_release(void)
{
	OSMemoryBarrier();
}














Modified src/atomic_powerpc.h from [965a4d4675] to [3babc72639].

10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
 *
 * Alternatively, it may be distributed under the terms of the GNU General
 * Public License, either version 2 or 3, which can be found in the file
 * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this
 * file.
 */

OF_ASSUME_NONNULL_BEGIN

static OF_INLINE int
of_atomic_int_add(volatile int *_Nonnull p, int i)
{
	__asm__ __volatile__ (
	    "0:\n\t"
	    "lwarx	%0, 0, %2\n\t"
	    "add	%0, %0, %1\n\t"







<
<







10
11
12
13
14
15
16


17
18
19
20
21
22
23
 *
 * Alternatively, it may be distributed under the terms of the GNU General
 * Public License, either version 2 or 3, which can be found in the file
 * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this
 * file.
 */



static OF_INLINE int
of_atomic_int_add(volatile int *_Nonnull p, int i)
{
	__asm__ __volatile__ (
	    "0:\n\t"
	    "lwarx	%0, 0, %2\n\t"
	    "add	%0, %0, %1\n\t"
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
	    : "cc"
	);

	return r;
}

static OF_INLINE void
of_memory_barrier_sync(void)
{
	__asm__ __volatile__ (
	    "sync" ::: "memory"
	);
}

static OF_INLINE void
of_memory_barrier_enter(void)
{
	__asm__ __volatile__ (
	    "sync" ::: "memory"
	);
}

static OF_INLINE void
of_memory_barrier_exit(void)
{
	__asm__ __volatile__ (
	    "sync" ::: "memory"
	);
}

static OF_INLINE void
of_memory_barrier_producer(void)
{
	__asm__ __volatile__ (
	    "sync" ::: "memory"
	);
}

static OF_INLINE void
of_memory_barrier_consumer(void)
{
	__asm__ __volatile__ (
	    "sync" ::: "memory"
	);
}

OF_ASSUME_NONNULL_END







|


|




|


|




|


|


<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382


















	    : "cc"
	);

	return r;
}

static OF_INLINE void
of_memory_barrier(void)
{
	__asm__ __volatile__ (
	    "lwsync" ::: "memory"
	);
}

static OF_INLINE void
of_memory_barrier_acquire(void)
{
	__asm__ __volatile__ (
	    "lwsync" ::: "memory"
	);
}

static OF_INLINE void
of_memory_barrier_release(void)
{
	__asm__ __volatile__ (
	    "lwsync" ::: "memory"
	);
}


















Modified src/atomic_sync_builtins.h from [2cee3b1977] to [cd9ea173f9].

10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
 *
 * Alternatively, it may be distributed under the terms of the GNU General
 * Public License, either version 2 or 3, which can be found in the file
 * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this
 * file.
 */

OF_ASSUME_NONNULL_BEGIN

static OF_INLINE int
of_atomic_int_add(volatile int *_Nonnull p, int i)
{
	return __sync_add_and_fetch(p, i);
}

static OF_INLINE int32_t







<
<







10
11
12
13
14
15
16


17
18
19
20
21
22
23
 *
 * Alternatively, it may be distributed under the terms of the GNU General
 * Public License, either version 2 or 3, which can be found in the file
 * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this
 * file.
 */



static OF_INLINE int
of_atomic_int_add(volatile int *_Nonnull p, int i)
{
	return __sync_add_and_fetch(p, i);
}

static OF_INLINE int32_t
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
of_atomic_ptr_cmpswap(void *volatile _Nullable *_Nonnull p,
    void *_Nullable o, void *_Nullable n)
{
	return __sync_bool_compare_and_swap(p, o, n);
}

static OF_INLINE void
of_memory_barrier_sync(void)
{
	__sync_synchronize();
}

static OF_INLINE void
of_memory_barrier_enter(void)
{
	__sync_synchronize();
}

static OF_INLINE void
of_memory_barrier_exit(void)
{
	__sync_synchronize();
}

static OF_INLINE void
of_memory_barrier_producer(void)
{
	__sync_synchronize();
}

static OF_INLINE void
of_memory_barrier_consumer(void)
{
	__sync_synchronize();
}

OF_ASSUME_NONNULL_END







|





|





|



<
<
<
<
<
<
<
<
<
<
<
<
<
<
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148














of_atomic_ptr_cmpswap(void *volatile _Nullable *_Nonnull p,
    void *_Nullable o, void *_Nullable n)
{
	return __sync_bool_compare_and_swap(p, o, n);
}

static OF_INLINE void
of_memory_barrier(void)
{
	__sync_synchronize();
}

static OF_INLINE void
of_memory_barrier_acquire(void)
{
	__sync_synchronize();
}

static OF_INLINE void
of_memory_barrier_release(void)
{
	__sync_synchronize();
}














Modified src/atomic_x86.h from [4c024af6ca] to [3c92b1ba0d].

477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
	    : "cc"
	);

	return r;
}

static OF_INLINE void
of_memory_barrier_sync(void)
{
	__asm__ __volatile__ (
	    "mfence" ::: "memory"
	);
}

static OF_INLINE void
of_memory_barrier_enter(void)
{
	__asm__ __volatile__ (
	    "mfence" ::: "memory"
	);
}

static OF_INLINE void
of_memory_barrier_exit(void)
{
	__asm__ __volatile__ (
	    "mfence" ::: "memory"
	);
}

static OF_INLINE void
of_memory_barrier_producer(void)
{
	__asm__ __volatile__ (
	    "sfence" ::: "memory"
	);
}

static OF_INLINE void
of_memory_barrier_consumer(void)
{
	__asm__ __volatile__ (
	    "lfence" ::: "memory"
	);
}

OF_ASSUME_NONNULL_END







|







|

|
<
<



|

|
<
<


<
<
<
<
<
<
<
<
<
<
<
<
<
<
<
<

477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494


495
496
497
498
499
500


501
502
















503
	    : "cc"
	);

	return r;
}

static OF_INLINE void
of_memory_barrier(void)
{
	__asm__ __volatile__ (
	    "mfence" ::: "memory"
	);
}

static OF_INLINE void
of_memory_barrier_acquire(void)
{
	__asm__ __volatile__ ("" ::: "memory");


}

static OF_INLINE void
of_memory_barrier_release(void)
{
	__asm__ __volatile__ ("" ::: "memory");


}

















OF_ASSUME_NONNULL_END

Modified src/threading.h from [394f138c54] to [774a47eec4].

180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
}

static OF_INLINE bool
of_spinlock_trylock(of_spinlock_t *spinlock)
{
#if defined(OF_HAVE_ATOMIC_OPS)
	if (of_atomic_int_cmpswap(spinlock, 0, 1)) {
		of_memory_barrier_enter();
		return true;
	}

	return false;
#elif defined(OF_HAVE_PTHREAD_SPINLOCKS)
	return !pthread_spin_trylock(spinlock);
#else







|







180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
}

static OF_INLINE bool
of_spinlock_trylock(of_spinlock_t *spinlock)
{
#if defined(OF_HAVE_ATOMIC_OPS)
	if (of_atomic_int_cmpswap(spinlock, 0, 1)) {
		of_memory_barrier_acquire();
		return true;
	}

	return false;
#elif defined(OF_HAVE_PTHREAD_SPINLOCKS)
	return !pthread_spin_trylock(spinlock);
#else
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233

static OF_INLINE bool
of_spinlock_unlock(of_spinlock_t *spinlock)
{
#if defined(OF_HAVE_ATOMIC_OPS)
	bool ret = of_atomic_int_cmpswap(spinlock, 1, 0);

	of_memory_barrier_exit();

	return ret;
#elif defined(OF_HAVE_PTHREAD_SPINLOCKS)
	return !pthread_spin_unlock(spinlock);
#else
	return of_mutex_unlock(spinlock);
#endif







|







219
220
221
222
223
224
225
226
227
228
229
230
231
232
233

static OF_INLINE bool
of_spinlock_unlock(of_spinlock_t *spinlock)
{
#if defined(OF_HAVE_ATOMIC_OPS)
	bool ret = of_atomic_int_cmpswap(spinlock, 1, 0);

	of_memory_barrier_release();

	return ret;
#elif defined(OF_HAVE_PTHREAD_SPINLOCKS)
	return !pthread_spin_unlock(spinlock);
#else
	return of_mutex_unlock(spinlock);
#endif

Modified src/threading.m from [493fa84215] to [df2361523e].

29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
#ifndef OF_HAVE_PTHREADS
void
of_once(of_once_t *control, void (*func)(void))
{
	if (of_atomic_int_cmpswap(control, 0, 1)) {
		func();

		of_memory_barrier_sync();

		of_atomic_int_inc(control);
	} else
		while (*control == 1)
			of_thread_yield();
}
#endif







|







29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
#ifndef OF_HAVE_PTHREADS
void
of_once(of_once_t *control, void (*func)(void))
{
	if (of_atomic_int_cmpswap(control, 0, 1)) {
		func();

		of_memory_barrier();

		of_atomic_int_inc(control);
	} else
		while (*control == 1)
			of_thread_yield();
}
#endif