ObjFW Diff

Differences From Artifact [cc8d24b757]:

  • File src/atomic.h — part of check-in [b7097a67b6] at 2015-06-14 10:45:10 on branch trunk — Add OF_NONNULL / OF_NULLABLE and use those instead

    Using __nonnull directly doesn't work on systems using glibc, as glibc
    defines __nonnull as a parameterized macro. While this does not fix the
    problem of Clang introducing __nonnull even though it conflicts with
    glibc, it does make it possible again to compile with versions of Clang
    that don't support __nonnull on systems using glibc. (user: js, size: 20341)
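    For context, the wrapper this check-in describes follows the usual
    feature-test pattern sketched below. This is a hedged illustration, not
    the verbatim contents of ObjFW's macros.h; the exact guards used there
    may differ:

        /* Sketch only: compilers without __has_feature() get a stub, */
        /* so the #if below still parses. */
        #ifndef __has_feature
        # define __has_feature(x) 0
        #endif

        /* Map the OF_* names onto Clang's nullability qualifiers where
         * available, and to nothing elsewhere, so glibc's parameterized
         * __nonnull(...) macro is never used directly. */
        #if __has_feature(nullability)
        # define OF_NONNULL __nonnull
        # define OF_NULLABLE __nullable
        #else
        # define OF_NONNULL
        # define OF_NULLABLE
        #endif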

To Artifact [6e95bb1842]:


@@ -10,24 +10,22 @@
  *
  * Alternatively, it may be distributed under the terms of the GNU General
  * Public License, either version 2 or 3, which can be found in the file
  * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this
  * file.
  */
 
-#import "objfw-defs.h"
+#include <stdlib.h>
+
+#import "macros.h"
 
 #ifndef OF_HAVE_ATOMIC_OPS
 # error No atomic operations available!
 #endif
 
-#include <stdlib.h>
-
-#import "macros.h"
-
 #ifdef OF_HAVE_OSATOMIC
 # include <libkern/OSAtomic.h>
 #endif
 
 OF_ASSUME_NONNULL_BEGIN
 
 static OF_INLINE int
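    (The hunk above reorders the top of the file: <stdlib.h> and "macros.h"
    are now included before the OF_HAVE_ATOMIC_OPS check, and the separate
    "objfw-defs.h" import is dropped. This is presumably safe because
    macros.h pulls in objfw-defs.h itself. All remaining hunks are a
    mechanical rename of OF_PPC_ASM to OF_POWERPC_ASM.)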
@@ -56,15 +54,15 @@
 		    : "r"(i), "m"(*p)
 		);
 # endif
 	else
 		abort();
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "add	%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -92,15 +90,15 @@
 	    "xaddl	%0, %2\n\t"
 	    "addl	%1, %0"
 	    : "+&r"(i)
 	    : "r"(i), "m"(*p)
 	);
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "add	%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -138,15 +136,15 @@
 	    "xaddl	%0, %2\n\t"
 	    "addl	%1, %0"
 	    : "+&r"(i)
 	    : "r"(i), "m"(*p)
 	);
 
 	return (void*)i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "add	%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -193,15 +191,15 @@
 		    : "r"(i), "m"(*p)
 		);
 # endif
 	else
 		abort();
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "sub	%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -230,15 +228,15 @@
 	    "xaddl	%0, %2\n\t"
 	    "subl	%1, %0"
 	    : "+&r"(i)
 	    : "r"(i), "m"(*p)
 	);
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "sub	%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -278,15 +276,15 @@
 	    "xaddl	%0, %2\n\t"
 	    "subl	%1, %0"
 	    : "+&r"(i)
 	    : "r"(i), "m"(*p)
 	);
 
 	return (void*)i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "sub	%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -337,15 +335,15 @@
 		    : "m"(*p)
 		);
 # endif
 	else
 		abort();
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	int i;
 
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %1\n\t"
 	    "addi	%0, %0, 1\n\t"
 	    "stwcx.	%0, 0, %1\n\t"
@@ -379,15 +377,15 @@
 	    "xaddl	%0, %1\n\t"
 	    "incl	%0"
 	    : "=&r"(i)
 	    : "m"(*p)
 	);
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	int32_t i;
 
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %1\n\t"
 	    "addi	%0, %0, 1\n\t"
 	    "stwcx.	%0, 0, %1\n\t"
@@ -436,15 +434,15 @@
 		    : "m"(*p)
 		);
 # endif
 	else
 		abort();
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	int i;
 
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %1\n\t"
 	    "subi	%0, %0, 1\n\t"
 	    "stwcx.	%0, 0, %1\n\t"
@@ -478,15 +476,15 @@
 	    "xaddl	%0, %1\n\t"
 	    "decl	%0"
 	    : "=&r"(i)
 	    : "m"(*p)
 	);
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	int32_t i;
 
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %1\n\t"
 	    "subi	%0, %0, 1\n\t"
 	    "stwcx.	%0, 0, %1\n\t"
@@ -539,15 +537,15 @@
 		    : "rax", "cc"
 		);
 # endif
 	else
 		abort();
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "or		%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -580,15 +578,15 @@
 	    "jne	0b"
 	    : "=&r"(i)
 	    : "r"(i), "m"(*p)
 	    : "eax", "cc"
 	);
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "or		%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -639,15 +637,15 @@
 		    : "rax", "cc"
 		);
 # endif
 	else
 		abort();
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "and	%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -680,15 +678,15 @@
 	    "jne	0b"
 	    : "=&r"(i)
 	    : "r"(i), "m"(*p)
 	    : "eax", "cc"
 	);
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "and	%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -739,15 +737,15 @@
 		    : "rax", "cc"
 		);
 # endif
 	else
 		abort();
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "xor	%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -780,15 +778,15 @@
 	    "jne	0b"
 	    : "=&r"(i)
 	    : "r"(i), "m"(*p)
 	    : "eax", "cc"
 	);
 
 	return i;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %2\n\t"
 	    "xor	%0, %0, %1\n\t"
 	    "stwcx.	%0, 0, %2\n\t"
 	    "bne-	0b"
 	    : "=&r"(i)
@@ -825,15 +823,15 @@
 	    "movzbl	%b0, %0"
 	    : "=&d"(r), "+a"(o)	/* use d instead of r to avoid a gcc bug */
 	    : "r"(n), "m"(*p)
 	    : "cc"
 	);
 
 	return r;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	int r;
 
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %3\n\t"
 	    "cmpw	%0, %1\n\t"
 	    "bne	1f\n\t"
@@ -880,15 +878,15 @@
 	    "movzbl	%b0, %0"
 	    : "=&d"(r), "+a"(o)	/* use d instead of r to avoid a gcc bug */
 	    : "r"(n), "m"(*p)
 	    : "cc"
 	);
 
 	return r;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	int r;
 
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %3\n\t"
 	    "cmpw	%0, %1\n\t"
 	    "bne	1f\n\t"
@@ -936,15 +934,15 @@
 	    "movzbl	%b0, %0"
 	    : "=&d"(r), "+a"(o)	/* use d instead of r to avoid a gcc bug */
 	    : "r"(n), "m"(*p)
 	    : "cc"
 	);
 
 	return r;
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	int r;
 
 	__asm__ __volatile__ (
 	    "0:\n\t"
 	    "lwarx	%0, 0, %3\n\t"
 	    "cmpw	%0, %1\n\t"
 	    "bne	1f\n\t"
@@ -973,15 +971,15 @@
 of_memory_barrier(void)
 {
 #if !defined(OF_HAVE_THREADS)
 #elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
 	__asm__ __volatile__ (
 	    "mfence"
 	);
-#elif defined(OF_PPC_ASM)
+#elif defined(OF_POWERPC_ASM)
 	__asm__ __volatile__ (
 	    "sync"
 	);
 #elif defined(OF_HAVE_GCC_ATOMIC_OPS)
 	__sync_synchronize();
 #elif defined(OF_HAVE_OSATOMIC)
 	OSMemoryBarrier();
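    As the final hunk shows, of_memory_barrier() is a full barrier: mfence
    on x86/x86_64, sync on PowerPC, with __sync_synchronize() and
    OSMemoryBarrier() as fallbacks, and a no-op when ObjFW is built without
    thread support. A purely illustrative use is publishing data to another
    thread (variable and function names are hypothetical):

        /* Illustrative sketch only. */
        static int shared_data;
        static volatile int ready;

        static void
        publish(void)
        {
        	shared_data = 42;
        	of_memory_barrier();	/* order the data write before the flag */
        	ready = 1;
        }

    The reading side needs a matching barrier between observing ready and
    reading shared_data.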