@@ -24,11 +24,11 @@
 	    "xaddl %0, %2\n\t"
 	    "addl %1, %0"
 	    : "+&r"(i)
 	    : "r"(i), "m"(*p)
 	);
-#ifdef OF_X86_64
+#ifdef OF_AMD64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "lock\n\t"
 		    "xaddq %0, %2\n\t"
 		    "addq %1, %0"
@@ -57,11 +57,11 @@
 }
 
 static OF_INLINE void *_Nullable
 OFAtomicPointerAdd(void *volatile _Nullable *_Nonnull p, intptr_t i)
 {
-#if defined(OF_X86_64)
+#if defined(OF_AMD64)
 	__asm__ __volatile__ (
 	    "lock\n\t"
 	    "xaddq %0, %2\n\t"
 	    "addq %1, %0"
 	    : "+&r"(i)
@@ -92,11 +92,11 @@
 	    "xaddl %0, %2\n\t"
 	    "subl %1, %0"
 	    : "+&r"(i)
 	    : "r"(i), "m"(*p)
 	);
-#ifdef OF_X86_64
+#ifdef OF_AMD64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "negq %0\n\t"
 		    "lock\n\t"
 		    "xaddq %0, %2\n\t"
@@ -127,11 +127,11 @@
 }
 
 static OF_INLINE void *_Nullable
 OFAtomicPointerSubtract(void *volatile _Nullable *_Nonnull p, intptr_t i)
 {
-#if defined(OF_X86_64)
+#if defined(OF_AMD64)
 	__asm__ __volatile__ (
 	    "negq %0\n\t"
 	    "lock\n\t"
 	    "xaddq %0, %2\n\t"
 	    "subq %1, %0"
@@ -167,11 +167,11 @@
 	    "xaddl %0, %1\n\t"
 	    "incl %0"
 	    : "=&r"(i)
 	    : "m"(*p)
 	);
-#ifdef OF_X86_64
+#ifdef OF_AMD64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "xorq %0, %0\n\t"
 		    "incq %0\n\t"
 		    "lock\n\t"
@@ -218,11 +218,11 @@
 	    "xaddl %0, %1\n\t"
 	    "decl %0"
 	    : "=&r"(i)
 	    : "m"(*p)
 	);
-#ifdef OF_X86_64
+#ifdef OF_AMD64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "xorq %0, %0\n\t"
 		    "decq %0\n\t"
 		    "lock\n\t"
@@ -270,11 +270,11 @@
 	    "jne 0b"
 	    : "=&r"(i)
 	    : "r"(i), "m"(*p)
 	    : "eax", "cc"
 	);
-#ifdef OF_X86_64
+#ifdef OF_AMD64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "0:\n\t"
 		    "movq %2, %0\n\t"
 		    "movq %0, %%rax\n\t"
@@ -326,11 +326,11 @@
 	    "jne 0b"
 	    : "=&r"(i)
 	    : "r"(i), "m"(*p)
 	    : "eax", "cc"
 	);
-#ifdef OF_X86_64
+#ifdef OF_AMD64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "0:\n\t"
 		    "movq %2, %0\n\t"
 		    "movq %0, %%rax\n\t"
@@ -382,11 +382,11 @@
 	    "jne 0b"
 	    : "=&r"(i)
 	    : "r"(i), "m"(*p)
 	    : "eax", "cc"
 	);
-#ifdef OF_X86_64
+#ifdef OF_AMD64
 	else if (sizeof(int) == 8)
 		__asm__ __volatile__ (
 		    "0:\n\t"
 		    "movq %2, %0\n\t"
 		    "movq %0, %%rax\n\t"
@@ -480,11 +480,11 @@
 }
 
 static OF_INLINE void
 OFMemoryBarrier(void)
 {
-#ifdef OF_X86_64
+#ifdef OF_AMD64
 	__asm__ __volatile__ (
 	    "lock orq $0, (%%rsp)" ::: "memory", "cc"
 	);
 #else
 	__asm__ __volatile__ (
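
Every 64-bit path guarded by the renamed macro relies on the same "lock xadd" pattern: xaddq atomically exchanges the register with the memory operand while adding them, leaving the old value in the register, and the trailing addq recovers the new value. Below is a minimal, self-contained sketch of that pattern, assuming a GCC-compatible compiler targeting AMD64; the helper name atomicAdd64 and the demo main are illustrative only and not part of ObjFW. The operand constraints deliberately mirror the ones in the patch above.

/* Sketch of the lock-xadd pattern from the 64-bit paths above.
 * Build (hypothetical file name): cc -O2 xadd_demo.c -o xadd_demo */
#include <stdint.h>
#include <stdio.h>

static inline int64_t
atomicAdd64(volatile int64_t *p, int64_t i)
{
	/* xaddq swaps %0 with *p while adding: *p becomes old + i and %0
	 * receives the old value. addq then adds the original increment
	 * (kept in %1) so %0 holds the new value, matching the
	 * return-the-new-value contract of the functions above. Note that
	 * "m"(*p) is listed as an input yet updated by the instruction,
	 * exactly as in the patched code; volatile keeps accesses honest. */
	__asm__ __volatile__ (
	    "lock\n\t"
	    "xaddq %0, %2\n\t"
	    "addq %1, %0"
	    : "+&r"(i)
	    : "r"(i), "m"(*p)
	);

	return i;
}

int
main(void)
{
	volatile int64_t counter = 40;

	/* Prints 42: the value after the atomic addition. */
	printf("%lld\n", (long long)atomicAdd64(&counter, 2));

	return 0;
}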