@@ -78,15 +78,15 @@
 #endif
 }
 
 static OF_INLINE void*
 of_atomic_add_ptr(void* volatile *p, intptr_t i)
 {
 #if !defined(OF_THREADS)
-	return (*p += i);
+	return (*(char* volatile*)p += i);
 #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
 	__asm__ (
 	    "lock\n\t"
 	    "xaddl %0, %2\n\t"
 	    "addl %1, %0"
 	    : "+&r"(i)
 	    : "r"(i), "m"(*p)
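The change itself is one line: ISO C does not allow arithmetic on a void* lvalue (GNU C permits it as an extension that behaves byte-wise), so the plain-C fallback now does the += through a char* volatile* lvalue, which is the same byte-wise arithmetic expressed portably. The assembly path is unchanged: lock xadd leaves the old value of *p in the register while storing *p + i to memory, and the trailing addl recomputes old + i, so the function returns the value after the addition. One review note: the l-suffixed instructions operate on 32 bits, which fits OF_X86_ASM; for full 64-bit pointers under OF_AMD64_ASM, q-suffixed forms (xaddq/addq) would be needed unless that case is handled elsewhere.

A minimal sketch of the same post-add semantics without inline assembly, assuming GCC/Clang's __sync builtins; the name atomic_add_ptr_sketch and the intptr_t view of the pointer are illustrative, not part of this change:

#include <stdint.h>

/* Illustrative only: __sync_add_and_fetch returns the value *after* the
 * addition, matching the xadd + add sequence in the hunk above. The
 * pointer is accessed through an intptr_t view so the arithmetic is
 * well-defined. */
static void*
atomic_add_ptr_sketch(void* volatile *p, intptr_t i)
{
	return (void*)__sync_add_and_fetch((volatile intptr_t*)p, i);
}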
@@ -160,15 +160,15 @@
 #endif
 }
 
 static OF_INLINE void*
 of_atomic_sub_ptr(void* volatile *p, intptr_t i)
 {
 #if !defined(OF_THREADS)
-	return (*p -= i);
+	return (*(char* volatile*)p -= i);
 #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
 	__asm__ (
	    "negl %0\n\t"
 	    "lock\n\t"
 	    "xaddl %0, %2\n\t"
 	    "subl %1, %0"
 	    : "+&r"(i)
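The subtraction variant mirrors the addition: negl flips the sign of i so the same lock xadd performs an atomic subtract, and the trailing subl turns the fetched old value into old - i, i.e. the new value, which is what gets returned. A hypothetical usage sketch, assuming only the two functions from this diff; the buffer, top, reserve, and release names are invented for illustration:

#include <stddef.h>
#include <stdint.h>

static char buffer[1024];
static void* volatile top = buffer;	/* shared bump pointer */

/* Reserve size bytes: of_atomic_add_ptr returns the pointer *after*
 * the addition, so the start of the region is the return value minus
 * size. */
static void*
reserve(size_t size)
{
	return (char*)of_atomic_add_ptr(&top, (intptr_t)size) - size;
}

/* Give back the most recent reservation (LIFO use only in this sketch). */
static void
release(size_t size)
{
	of_atomic_sub_ptr(&top, (intptr_t)size);
}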