@@ -26,11 +26,11 @@
 		    "xaddl %0, %2\n\t"
 		    "addl %1, %0"
 		    : "+&r"(i)
 		    : "r"(i), "m"(*p)
 		);
-#ifdef OF_X86_64_ASM
+#ifdef OF_X86_64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "lock\n\t"
 		    "xaddq %0, %2\n\t"
 		    "addq %1, %0"
@@ -59,21 +59,21 @@
 }
 
 static OF_INLINE void *_Nullable
 of_atomic_ptr_add(void *volatile _Nullable *_Nonnull p, intptr_t i)
 {
-#if defined(OF_X86_64_ASM)
+#if defined(OF_X86_64)
 	__asm__ __volatile__ (
 	    "lock\n\t"
 	    "xaddq %0, %2\n\t"
 	    "addq %1, %0"
 	    : "+&r"(i)
 	    : "r"(i), "m"(*p)
 	);
 
 	return (void *)i;
-#elif defined(OF_X86_ASM)
+#elif defined(OF_X86)
 	__asm__ __volatile__ (
 	    "lock\n\t"
 	    "xaddl %0, %2\n\t"
 	    "addl %1, %0"
 	    : "+&r"(i)
@@ -94,11 +94,11 @@
 		    "xaddl %0, %2\n\t"
 		    "subl %1, %0"
 		    : "+&r"(i)
 		    : "r"(i), "m"(*p)
 		);
-#ifdef OF_X86_64_ASM
+#ifdef OF_X86_64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "negq %0\n\t"
 		    "lock\n\t"
 		    "xaddq %0, %2\n\t"
@@ -129,11 +129,11 @@
 }
 
 static OF_INLINE void *_Nullable
 of_atomic_ptr_sub(void *volatile _Nullable *_Nonnull p, intptr_t i)
 {
-#if defined(OF_X86_64_ASM)
+#if defined(OF_X86_64)
 	__asm__ __volatile__ (
 	    "negq %0\n\t"
 	    "lock\n\t"
 	    "xaddq %0, %2\n\t"
 	    "subq %1, %0"
@@ -140,11 +140,11 @@
 	    : "+&r"(i)
 	    : "r"(i), "m"(*p)
 	);
 
 	return (void *)i;
-#elif defined(OF_X86_ASM)
+#elif defined(OF_X86)
 	__asm__ __volatile__ (
 	    "negl %0\n\t"
 	    "lock\n\t"
 	    "xaddl %0, %2\n\t"
 	    "subl %1, %0"
@@ -169,11 +169,11 @@
 		    "xaddl %0, %1\n\t"
 		    "incl %0"
 		    : "=&r"(i)
 		    : "m"(*p)
 		);
-#ifdef OF_X86_64_ASM
+#ifdef OF_X86_64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "xorq %0, %0\n\t"
 		    "incq %0\n\t"
 		    "lock\n\t"
@@ -220,11 +220,11 @@
 		    "xaddl %0, %1\n\t"
 		    "decl %0"
 		    : "=&r"(i)
 		    : "m"(*p)
 		);
-#ifdef OF_X86_64_ASM
+#ifdef OF_X86_64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "xorq %0, %0\n\t"
 		    "decq %0\n\t"
 		    "lock\n\t"
@@ -272,11 +272,11 @@
 		    "jne 0b"
 		    : "=&r"(i)
 		    : "r"(i), "m"(*p)
 		    : "eax", "cc"
 		);
-#ifdef OF_X86_64_ASM
+#ifdef OF_X86_64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "0:\n\t"
 		    "movq %2, %0\n\t"
 		    "movq %0, %%rax\n\t"
@@ -328,11 +328,11 @@
 		    "jne 0b"
 		    : "=&r"(i)
 		    : "r"(i), "m"(*p)
 		    : "eax", "cc"
 		);
-#ifdef OF_X86_64_ASM
+#ifdef OF_X86_64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "0:\n\t"
 		    "movq %2, %0\n\t"
 		    "movq %0, %%rax\n\t"
@@ -384,11 +384,11 @@
 		    "jne 0b"
 		    : "=&r"(i)
 		    : "r"(i), "m"(*p)
 		    : "eax", "cc"
 		);
-#ifdef OF_X86_64_ASM
+#ifdef OF_X86_64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "0:\n\t"
 		    "movq %2, %0\n\t"
 		    "movq %0, %%rax\n\t"