@@ -53,15 +53,15 @@
 	    : "+&r"(i)
 	    : "r"(i), "m"(*p)
 	);
 
 	return i;
 }
 
-static OF_INLINE void*
+static OF_INLINE void *_Nullable
 of_atomic_ptr_add(void *volatile _Nullable *_Nonnull p, intptr_t i)
 {
 #if defined(OF_X86_64_ASM)
 	__asm__ __volatile__ (
 	    "lock\n\t"
 	    "xaddq %0, %2\n\t"
 	    "addq %1, %0"
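The locked xaddq exchanges %0 with the memory operand and stores the sum, so
after it %0 holds the pointer's previous value; the trailing addq re-applies
the operand so the function returns the value *after* the addition. Below is a
minimal sketch of the same add-and-fetch contract, assuming the GCC/Clang
__atomic builtins instead of this assembly backend; the names
atomic_ptr_add_sketch, bump_alloc, pool and cursor are illustrative, not part
of ObjFW.

#include <stddef.h>
#include <stdint.h>

/* Sketch only: add-and-fetch on a pointer-sized slot via the __atomic
 * builtins. The cast treats the slot as an intptr_t, the usual
 * (implementation-defined) way to do arithmetic on a stored pointer. */
static inline void *
atomic_ptr_add_sketch(void *volatile *p, intptr_t i)
{
	return (void *)__atomic_add_fetch((volatile intptr_t *)p, i,
	    __ATOMIC_SEQ_CST);
}

/* Hypothetical usage: a lock-free bump-allocator cursor. */
static char pool[4096];
static void *volatile cursor = pool;

static void *
bump_alloc(size_t size)
{
	/* The *new* cursor comes back, so the block just reserved starts
	 * size bytes before it. (No exhaustion check in this sketch.) */
	char *end = atomic_ptr_add_sketch(&cursor, (intptr_t)size);
	return end - size;
}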
@@ -123,15 +123,15 @@
 	    : "+&r"(i)
 	    : "r"(i), "m"(*p)
 	);
 
 	return i;
 }
 
-static OF_INLINE void*
+static OF_INLINE void *_Nullable
 of_atomic_ptr_sub(void *volatile _Nullable *_Nonnull p, intptr_t i)
 {
 #if defined(OF_X86_64_ASM)
 	__asm__ __volatile__ (
 	    "negq %0\n\t"
 	    "lock\n\t"
 	    "xaddq %0, %2\n\t"
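x86-64 has no locked "exchange-and-subtract" instruction, so the operand is
negated first and the same locked xaddq is reused; as in the add variant, the
register then holds the pointer's previous value (the rest of this hunk is not
shown here). The following sketch mirrors that negate-then-xadd idea, again
assuming the __atomic builtins; atomic_ptr_sub_sketch is an illustrative name,
not ObjFW API.

#include <stdint.h>

/* Sketch only: subtract-and-fetch built from fetch_add of the negated
 * operand, mirroring the "negq; lock xaddq" sequence above. fetch_add
 * returns the old value, so old - i is the post-subtraction result. */
static inline void *
atomic_ptr_sub_sketch(void *volatile *p, intptr_t i)
{
	intptr_t old = __atomic_fetch_add((volatile intptr_t *)p, -i,
	    __ATOMIC_SEQ_CST);
	return (void *)(old - i);
}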