ObjFW: Check-in [6ea2424dad]

Overview
Comment: Fix x86(_64) asm for of_atomic_{add,sub,inc,dec}_32.
of_atomic_{or,and,xor}_32 to follow.
SHA3-256: 6ea2424dad914532ae8b0edf18bbcb4a46b7a1efcec622271f982149d9d7f344
User & Date: js on 2010-10-05 19:31:37
Context
2010-10-05 19:36  Small fix in tests' Makefile. (check-in: b04c382563, user: js, tags: trunk)
2010-10-05 19:31  Fix x86(_64) asm for of_atomic_{add,sub,inc,dec}_32. of_atomic_{or,and,xor}_32 to follow. (this check-in: 6ea2424dad, user: js, tags: trunk)
2010-09-26 14:40  Add -[stringByXMLUnescapingWithBlock:]. (check-in: 76903ee63f, user: js, tags: trunk)
Changes

Modified src/atomic.h from [527e2eb067] to [842f2e4fa9].

@@ -22,85 +22,98 @@
 static OF_INLINE int32_t
 of_atomic_add_32(volatile int32_t *p, int32_t i)
 {
 #if !defined(OF_THREADS)
 	return (*p += i);
 #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
-	int32_t r = *p + i;
-	__asm__ volatile (
+	__asm__ (
 	    "lock\n\t"
-	    "addl %0, (%1)"
-	    :
-	    : "r"(i), "r"(p), "m"(*p)
+	    "xaddl	%0, %2\n\t"
+	    "addl	%1, %0"
+	    : "+&r"(i)
+	    : "r"(i), "m"(*p)
 	);
-	return r;
+
+	return i;
 #elif defined(OF_HAVE_GCC_ATOMIC_OPS)
 	return __sync_add_and_fetch(p, i);
 #elif defined(OF_HAVE_LIBKERN_OSATOMIC_H)
 	return OSAtomicAdd32Barrier(i, p);
 #endif
 }
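Why the xaddl version is correct (a note on the technique, with an illustrative sketch below): the old asm did perform the addl on memory under lock, but the value handed back to the caller came from a separate, unsynchronized read of *p, so two racing adders could return the same result. lock xaddl is a fetch-and-add: it stores the old value of *p into the register while adding, and adding i once more in the register reproduces exactly the value now in memory. A minimal standalone sketch of the same technique, assuming GCC-style inline asm on x86(_64); fetch_then_add is an illustrative name, not ObjFW API:

#include <stdint.h>
#include <stdio.h>

static int32_t
fetch_then_add(volatile int32_t *p, int32_t i)
{
	int32_t old = i;

	/* xadd writes the previous *p into "old" while adding i to *p. */
	__asm__ (
	    "lock\n\t"
	    "xaddl	%0, %1"
	    : "+r"(old), "+m"(*p)
	);

	return old + i;	/* the add-and-fetch result, matching memory */
}

int
main(void)
{
	volatile int32_t x = 40;
	int32_t new = fetch_then_add(&x, 2);

	printf("returned %d, memory now %d\n", (int)new, (int)x); /* 42, 42 */
	return 0;
}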

 static OF_INLINE int32_t
 of_atomic_sub_32(volatile int32_t *p, int32_t i)
 {
 #if !defined(OF_THREADS)
 	return (*p -= i);
 #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
-	int32_t r = *p - i;
-	__asm__ volatile (
+	__asm__ (
+	    "negl	%0\n\t"
 	    "lock\n\t"
-	    "subl %0, (%1)"
-	    :
-	    : "r"(i), "r"(p), "m"(*p)
+	    "xaddl	%0, %2\n\t"
+	    "subl	%1, %0"
+	    : "+&r"(i)
+	    : "r"(i), "m"(*p)
 	);
-	return r;
+
+	return i;
 #elif defined(OF_HAVE_GCC_ATOMIC_OPS)
 	return __sync_sub_and_fetch(p, i);
 #elif defined(OF_HAVE_LIBKERN_OSATOMIC_H)
 	return OSAtomicAdd32Barrier(-i, p);
 #endif
 }
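of_atomic_sub_32 needs one extra instruction because xadd can only add: negl flips i, the fetch-and-add then adds -i to memory, and subl turns the returned old value into the new one. A single-threaded, plain-C model of what the new sequence computes (illustrative only; the real code performs the memory update atomically):

#include <stdint.h>

static int32_t
model_sub(int32_t *mem, int32_t i)
{
	int32_t reg = i;
	int32_t old;

	reg = -reg;		/* negl	%0 */
	old = *mem;		/* lock xaddl %0, %2: memory += -i ... */
	*mem = old + reg;
	reg = old;		/* ... and the register receives the old value */
	reg -= i;		/* subl	%1, %0: old - i is the new value */

	return reg;
}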

 static OF_INLINE int32_t
 of_atomic_inc_32(volatile int32_t *p)
 {
 #if !defined(OF_THREADS)
 	return ++*p;
 #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
-	int32_t r = *p + 1;
-	__asm__ volatile (
+	uint32_t i;
+
+	__asm__ (
+	    "xorl	%0, %0\n\t"
+	    "incl	%0\n\t"
 	    "lock\n\t"
-	    "incl (%0)"
-	    :
-	    : "r"(p), "m"(*p)
+	    "xaddl	%0, %1\n\t"
+	    "incl	%0"
+	    : "=&r"(i)
+	    : "m"(*p)
 	);
-	return r;
+
+	return i;
 #elif defined(OF_HAVE_GCC_ATOMIC_OPS)
 	return __sync_add_and_fetch(p, 1);
 #elif defined(OF_HAVE_LIBKERN_OSATOMIC_H)
 	return OSAtomicIncrement32Barrier(p);
 #endif
 }
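of_atomic_inc_32 takes no operand, so the scratch register is built up in place: xorl zeroes it, incl makes it 1, xaddl fetch-and-adds it, and a final incl turns the returned old value into the new one (of_atomic_dec_32 below is the mirror image with decl). A sanity-check sketch, assuming this header is included and a GCC with __sync builtins, comparing the asm path against the builtin path it must agree with:

#include <assert.h>
#include <stdint.h>
#include "atomic.h"	/* assumed include path */

int
main(void)
{
	volatile int32_t a = 7;
	int32_t b = 7;

	/* Both paths return the new (post-increment) value. */
	assert(of_atomic_inc_32(&a) == __sync_add_and_fetch(&b, 1));
	assert(a == 8 && b == 8);
	return 0;
}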

 static OF_INLINE int32_t
 of_atomic_dec_32(volatile int32_t *p)
 {
 #if !defined(OF_THREADS)
 	return --*p;
 #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
-	int32_t r = *p - 1;
-	__asm__ volatile (
+	uint32_t i;
+
+	__asm__ (
+	    "xorl	%0, %0\n\t"
+	    "decl	%0\n\t"
 	    "lock\n\t"
-	    "decl (%0)"
-	    :
-	    : "r"(p), "m"(*p)
+	    "xaddl	%0, %1\n\t"
+	    "decl	%0"
+	    : "=&r"(i)
+	    : "m"(*p)
 	);
-	return r;
+
+	return i;
 #elif defined(OF_HAVE_GCC_ATOMIC_OPS)
 	return __sync_sub_and_fetch(p, 1);
 #elif defined(OF_HAVE_LIBKERN_OSATOMIC_H)
 	return OSAtomicDecrement32Barrier(p);
 #endif
 }
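All four primitives return the post-operation value, which is what reference counting wants: the thread that drives the count to zero, and only that thread, observes zero. A hypothetical caller sketching that pattern (illustrative, not from this check-in):

#include <stdint.h>
#include <stdlib.h>

typedef struct object {
	volatile int32_t ref_count;
	/* ... payload ... */
} object_t;

static void
object_retain(object_t *obj)
{
	of_atomic_inc_32(&obj->ref_count);
}

static void
object_release(object_t *obj)
{
	/* of_atomic_dec_32 returns the new count, so exactly one
	 * releasing thread sees 0 and frees the object. */
	if (of_atomic_dec_32(&obj->ref_count) == 0)
		free(obj);
}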

@@ -175,15 +188,15 @@
 		*p = n;
 		return YES;
 	}
 
 	return NO;
 #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
 	uint32_t r;
-	__asm__ volatile (
+	__asm__ (
 	    "lock; cmpxchg %2, (%3)\n\t"
 	    "lahf\n\t"
 	    "andb	$64, %%ah\n\t"
 	    "shrb	$6, %%ah\n\t"
 	    "movzx	%%ah, %0\n\t"
 	    : "=a"(r)
 	    : "a"(o), "r"(n), "r"(p), "m"(*p)
@@ -204,15 +217,15 @@
 		*p = n;
 		return YES;
 	}
 
 	return NO;
 #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
 	uint32_t r;
-	__asm__ volatile (
+	__asm__ (
 	    "lock; cmpxchg %2, (%3)\n\t"
 	    "lahf\n\t"
 	    "andb	$64, %%ah\n\t"
 	    "shrb	$6, %%ah\n\t"
 	    "movzx	%%ah, %0\n\t"
 	    : "=a"(r)
 	    : "a"(o), "q"(n), "q"(p), "m"(*p)