Index: PLATFORMS.md
==================================================================
--- PLATFORMS.md
+++ PLATFORMS.md
@@ -151,13 +151,13 @@
 As forwarding needs hand-written assembly for each combination of CPU
 architecture, executable format and calling convention, it is only available
 for the following platforms (except resolveClassMethod: and
 resolveInstanceMethod:, which are always available):
 
- * AMD64 (SysV/ELF, Apple/Mach-O, Win64/PE)
  * ARM (EABI/ELF, Apple/Mach-O)
  * ARM64 (Apple/Mach-O)
  * MIPS (O32/ELF, EABI/ELF)
  * PPC (SysV/ELF, EABI/ELF, Apple/Mach-O)
  * x86 (SysV/ELF, Apple/Mach-O, Win32/PE)
+ * x86_64 (SysV/ELF, Apple/Mach-O, Win64/PE)
 
 Apple means both, the Apple ABI and runtime.

Index: configure.ac
==================================================================
--- configure.ac
+++ configure.ac
@@ -540,11 +540,11 @@
 atomic_ops="none"
 
 AC_MSG_CHECKING(whether we have an atomic ops assembly implementation)
 AC_EGREP_CPP(yes, [
 #if defined(__GNUC__) && (defined(__i386__) || \
- defined(__amd64__) || defined(__x86_64__))
+ defined(__x86_64__) || defined(__amd64__))
 yes
 #endif
 ], [
 AC_MSG_RESULT(yes)
 atomic_ops="assembly implementation"

Index: src/OFObject.h
==================================================================
--- src/OFObject.h
+++ src/OFObject.h
@@ -115,11 +115,11 @@
 # define OF_HAVE_FORWARDING_TARGET_FOR_SELECTOR
 # define OF_HAVE_FORWARDING_TARGET_FOR_SELECTOR_STRET
 # endif
 #else
 # if defined(__ELF__)
-# if defined(__amd64__) || defined(__x86_64__) || defined(__i386__) || \
+# if defined(__x86_64__) || defined(__amd64__) || defined(__i386__) || \
 defined(__arm__) || defined(__ARM__) || defined(__ppc__) || \
 defined(__PPC__)
 # define OF_HAVE_FORWARDING_TARGET_FOR_SELECTOR
 # if __OBJFW_RUNTIME_ABI__ >= 800
 # define OF_HAVE_FORWARDING_TARGET_FOR_SELECTOR_STRET

Index: src/atomic.h
==================================================================
--- src/atomic.h
+++ src/atomic.h
@@ -31,20 +31,20 @@
 static OF_INLINE int
 of_atomic_add_int(volatile int *p, int i)
 {
 #if !defined(OF_HAVE_THREADS)
 return (*p += i);
-#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
+#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
 if (sizeof(int) == 4)
 __asm__ __volatile__ (
 "lock\n\t"
 "xaddl %0, %2\n\t"
 "addl %1, %0"
 : "+&r"(i)
 : "r"(i), "m"(*p)
 );
-# ifdef OF_AMD64_ASM
+# ifdef OF_X86_64_ASM
 else if (sizeof(int) == 8)
 __asm__ __volatile__ (
 "lock\n\t"
 "xaddq %0, %2\n\t"
 "addq %1, %0"
@@ -68,11 +68,11 @@
 static OF_INLINE int32_t
 of_atomic_add_32(volatile int32_t *p, int32_t i)
 {
 #if !defined(OF_HAVE_THREADS)
 return (*p += i);
-#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
+#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM)
 __asm__ __volatile__ (
 "lock\n\t"
 "xaddl %0, %2\n\t"
 "addl %1, %0"
 : "+&r"(i)
@@ -92,25 +92,25 @@
 static OF_INLINE void*
 of_atomic_add_ptr(void* volatile *p, intptr_t i)
 {
 #if !defined(OF_HAVE_THREADS)
 return (*(char* volatile*)p += i);
-#elif defined(OF_X86_ASM)
+#elif defined(OF_X86_64_ASM)
 __asm__ __volatile__ (
 "lock\n\t"
- "xaddl %0, %2\n\t"
- "addl %1, %0"
+ "xaddq %0, %2\n\t"
+ "addq %1, %0"
 : "+&r"(i)
 : "r"(i), "m"(*p)
 );
 
 return (void*)i;
-#elif defined(OF_AMD64_ASM)
+#elif defined(OF_X86_ASM)
 __asm__ __volatile__ (
 "lock\n\t"
- "xaddq %0, %2\n\t"
- "addq %1, %0"
+ "xaddl %0, %2\n\t"
+ "addl %1, %0"
 : "+&r"(i)
 : "r"(i), "m"(*p)
 );
 
 return (void*)i;
@@ -130,21 +130,21 @@
 static OF_INLINE int
 of_atomic_sub_int(volatile int *p, int i)
 {
 #if !defined(OF_HAVE_THREADS)
 return (*p -= i);
-#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
+#elif 
defined(OF_X86_64_ASM) || defined(OF_X86_ASM) if (sizeof(int) == 4) __asm__ __volatile__ ( "negl %0\n\t" "lock\n\t" "xaddl %0, %2\n\t" "subl %1, %0" : "+&r"(i) : "r"(i), "m"(*p) ); -# ifdef OF_AMD64_ASM +# ifdef OF_X86_64_ASM else if (sizeof(int) == 8) __asm__ __volatile__ ( "negq %0\n\t" "lock\n\t" "xaddq %0, %2\n\t" @@ -169,11 +169,11 @@ static OF_INLINE int32_t of_atomic_sub_32(volatile int32_t *p, int32_t i) { #if !defined(OF_HAVE_THREADS) return (*p -= i); -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) __asm__ __volatile__ ( "negl %0\n\t" "lock\n\t" "xaddl %0, %2\n\t" "subl %1, %0" @@ -194,28 +194,28 @@ static OF_INLINE void* of_atomic_sub_ptr(void* volatile *p, intptr_t i) { #if !defined(OF_HAVE_THREADS) return (*(char* volatile*)p -= i); +#elif defined(OF_X86_64_ASM) + __asm__ __volatile__ ( + "negq %0\n\t" + "lock\n\t" + "xaddq %0, %2\n\t" + "subq %1, %0" + : "+&r"(i) + : "r"(i), "m"(*p) + ); + + return (void*)i; #elif defined(OF_X86_ASM) __asm__ __volatile__ ( "negl %0\n\t" "lock\n\t" "xaddl %0, %2\n\t" "subl %1, %0" : "+&r"(i) - : "r"(i), "m"(*p) - ); - - return (void*)i; -#elif defined(OF_AMD64_ASM) - __asm__ __volatile__ ( - "negq %0\n\t" - "lock\n\t" - "xaddq %0, %2\n\t" - "subq %1, %0" - : "+&r"(i) : "r"(i), "m"(*p) ); return (void*)i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) @@ -234,11 +234,11 @@ static OF_INLINE int of_atomic_inc_int(volatile int *p) { #if !defined(OF_HAVE_THREADS) return ++*p; -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) int i; if (sizeof(int) == 4) __asm__ __volatile__ ( "xorl %0, %0\n\t" @@ -247,11 +247,11 @@ "xaddl %0, %1\n\t" "incl %0" : "=&r"(i) : "m"(*p) ); -# ifdef OF_AMD64_ASM +# ifdef OF_X86_64_ASM else if (sizeof(int) == 8) __asm__ __volatile__ ( "xorq %0, %0\n\t" "incq %0\n\t" "lock\n\t" @@ -277,11 +277,11 @@ static OF_INLINE int32_t of_atomic_inc_32(volatile int32_t *p) { #if !defined(OF_HAVE_THREADS) return ++*p; -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) uint32_t i; __asm__ __volatile__ ( "xorl %0, %0\n\t" "incl %0\n\t" @@ -305,11 +305,11 @@ static OF_INLINE int of_atomic_dec_int(volatile int *p) { #if !defined(OF_HAVE_THREADS) return --*p; -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) int i; if (sizeof(int) == 4) __asm__ __volatile__ ( "xorl %0, %0\n\t" @@ -318,11 +318,11 @@ "xaddl %0, %1\n\t" "decl %0" : "=&r"(i) : "m"(*p) ); -# ifdef OF_AMD64_ASM +# ifdef OF_X86_64_ASM else if (sizeof(int) == 8) __asm__ __volatile__ ( "xorq %0, %0\n\t" "decq %0\n\t" "lock\n\t" @@ -348,11 +348,11 @@ static OF_INLINE int32_t of_atomic_dec_32(volatile int32_t *p) { #if !defined(OF_HAVE_THREADS) return --*p; -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) uint32_t i; __asm__ __volatile__ ( "xorl %0, %0\n\t" "decl %0\n\t" @@ -376,11 +376,11 @@ static OF_INLINE unsigned int of_atomic_or_int(volatile unsigned int *p, unsigned int i) { #if !defined(OF_HAVE_THREADS) return (*p |= i); -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) if (sizeof(int) == 4) __asm__ __volatile__ ( "0:\n\t" "movl %2, %0\n\t" "movl %0, %%eax\n\t" @@ -390,11 +390,11 @@ "jne 0\n\t" : "=&r"(i) : "r"(i), "m"(*p) : "eax", "cc" ); -# ifdef OF_AMD64_ASM +# ifdef OF_X86_64_ASM else if (sizeof(int) == 8) __asm__ __volatile__ ( "0:\n\t" "movq %2, %0\n\t" 
"movq %0, %%rax\n\t" @@ -423,11 +423,11 @@ static OF_INLINE uint32_t of_atomic_or_32(volatile uint32_t *p, uint32_t i) { #if !defined(OF_HAVE_THREADS) return (*p |= i); -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) __asm__ __volatile__ ( "0:\n\t" "movl %2, %0\n\t" "movl %0, %%eax\n\t" "orl %1, %0\n\t" @@ -452,11 +452,11 @@ static OF_INLINE unsigned int of_atomic_and_int(volatile unsigned int *p, unsigned int i) { #if !defined(OF_HAVE_THREADS) return (*p &= i); -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) if (sizeof(int) == 4) __asm__ __volatile__ ( "0:\n\t" "movl %2, %0\n\t" "movl %0, %%eax\n\t" @@ -466,11 +466,11 @@ "jne 0\n\t" : "=&r"(i) : "r"(i), "m"(*p) : "eax", "cc" ); -# ifdef OF_AMD64_ASM +# ifdef OF_X86_64_ASM else if (sizeof(int) == 8) __asm__ __volatile__ ( "0:\n\t" "movq %2, %0\n\t" "movq %0, %%rax\n\t" @@ -499,11 +499,11 @@ static OF_INLINE uint32_t of_atomic_and_32(volatile uint32_t *p, uint32_t i) { #if !defined(OF_HAVE_THREADS) return (*p &= i); -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) __asm__ __volatile__ ( "0:\n\t" "movl %2, %0\n\t" "movl %0, %%eax\n\t" "andl %1, %0\n\t" @@ -528,11 +528,11 @@ static OF_INLINE unsigned int of_atomic_xor_int(volatile unsigned int *p, unsigned int i) { #if !defined(OF_HAVE_THREADS) return (*p ^= i); -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) if (sizeof(int) == 4) __asm__ __volatile__ ( "0:\n\t" "movl %2, %0\n\t" "movl %0, %%eax\n\t" @@ -542,11 +542,11 @@ "jne 0\n\t" : "=&r"(i) : "r"(i), "m"(*p) : "eax", "cc" ); -# ifdef OF_AMD64_ASM +# ifdef OF_X86_64_ASM else if (sizeof(int) == 8) __asm__ __volatile__ ( "0:\n\t" "movq %2, %0\n\t" "movq %0, %%rax\n\t" @@ -575,11 +575,11 @@ static OF_INLINE uint32_t of_atomic_xor_32(volatile uint32_t *p, uint32_t i) { #if !defined(OF_HAVE_THREADS) return (*p ^= i); -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) __asm__ __volatile__ ( "0:\n\t" "movl %2, %0\n\t" "movl %0, %%eax\n\t" "xorl %1, %0\n\t" @@ -609,11 +609,11 @@ *p = n; return true; } return false; -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) int r; __asm__ __volatile__ ( "lock\n\t" "cmpxchg %2, %3\n\t" @@ -642,11 +642,11 @@ *p = n; return true; } return false; -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) int r; __asm__ __volatile__ ( "lock\n\t" "cmpxchg %2, %3\n\t" @@ -675,11 +675,11 @@ *p = n; return true; } return false; -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) int r; __asm__ __volatile__ ( "lock\n\t" "cmpxchg %2, %3\n\t" @@ -702,11 +702,11 @@ static OF_INLINE void of_memory_barrier(void) { #if !defined(OF_HAVE_THREADS) -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) __asm__ __volatile__ ( "mfence" ); #elif defined(OF_HAVE_GCC_ATOMIC_OPS) __sync_synchronize(); @@ -719,11 +719,11 @@ static OF_INLINE void of_memory_read_barrier(void) { #if !defined(OF_HAVE_THREADS) -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) __asm__ __volatile__ ( "lfence" ); #else of_memory_barrier(); @@ -732,13 +732,13 @@ static OF_INLINE void of_memory_write_barrier(void) { #if 
!defined(OF_HAVE_THREADS) -#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#elif defined(OF_X86_64_ASM) || defined(OF_X86_ASM) __asm__ __volatile__ ( "sfence" ); #else of_memory_barrier(); #endif } DELETED src/forwarding/forwarding-amd64-elf.S Index: src/forwarding/forwarding-amd64-elf.S ================================================================== --- src/forwarding/forwarding-amd64-elf.S +++ src/forwarding/forwarding-amd64-elf.S @@ -1,218 +0,0 @@ -/* - * Copyright (c) 2008, 2009, 2010, 2011, 2012, 2013, 2014 - * Jonathan Schleifer - * - * All rights reserved. - * - * This file is part of ObjFW. It may be distributed under the terms of the - * Q Public License 1.0, which can be found in the file LICENSE.QPL included in - * the packaging of this file. - * - * Alternatively, it may be distributed under the terms of the GNU General - * Public License, either version 2 or 3, which can be found in the file - * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this - * file. - */ - -.globl of_forward -.globl of_forward_stret - -.section .text -of_forward: - pushq %rbp - movq %rsp, %rbp - - /* Save all arguments */ - subq $0xC0, %rsp /* 16-byte alignment */ - movq %rax, -0x8(%rbp) - movq %rdi, -0x10(%rbp) - movq %rsi, -0x18(%rbp) - movq %rdx, -0x20(%rbp) - movq %rcx, -0x28(%rbp) - movq %r8, -0x30(%rbp) - movq %r9, -0x38(%rbp) - movdqa %xmm0, -0x50(%rbp) - movdqa %xmm1, -0x60(%rbp) - movdqa %xmm2, -0x70(%rbp) - movdqa %xmm3, -0x80(%rbp) - movdqa %xmm4, -0x90(%rbp) - movdqa %xmm5, -0xA0(%rbp) - movdqa %xmm6, -0xB0(%rbp) - movdqa %xmm7, -0xC0(%rbp) - - call object_getClass@PLT - - movq %rax, %rdi - leaq sel_forwardingTargetForSelector_(%rip), %rsi - call class_respondsToSelector@PLT - - testq %rax, %rax - jz 0f - - movq -0x10(%rbp), %rdi - leaq sel_forwardingTargetForSelector_(%rip), %rsi - call objc_msg_lookup@PLT - - movq -0x10(%rbp), %rdi - leaq sel_forwardingTargetForSelector_(%rip), %rsi - movq -0x18(%rbp), %rdx - call *%rax - - testq %rax, %rax - jz 0f - cmpq -0x10(%rbp), %rax - je 0f - - movq %rax, -0x10(%rbp) - - movq %rax, %rdi - movq -0x18(%rbp), %rsi - call objc_msg_lookup@PLT - movq %rax, %r11 - - /* Restore all arguments */ - movdqa -0xC0(%rbp), %xmm7 - movdqa -0xB0(%rbp), %xmm6 - movdqa -0xA0(%rbp), %xmm5 - movdqa -0x90(%rbp), %xmm4 - movdqa -0x80(%rbp), %xmm3 - movdqa -0x70(%rbp), %xmm2 - movdqa -0x60(%rbp), %xmm1 - movdqa -0x50(%rbp), %xmm0 - movq -0x38(%rbp), %r9 - movq -0x30(%rbp), %r8 - movq -0x28(%rbp), %rcx - movq -0x20(%rbp), %rdx - movq -0x18(%rbp), %rsi - movq -0x10(%rbp), %rdi - movq -0x8(%rbp), %rax - - movq %rbp, %rsp - popq %rbp - - jmpq *%r11 - -0: - movq -0x10(%rbp), %rdi - movq -0x18(%rbp), %rsi - - movq %rbp, %rsp - popq %rbp - - jmp of_method_not_found@PLT -.type of_forward, %function -.size of_forward, .-of_forward - -of_forward_stret: - pushq %rbp - movq %rsp, %rbp - - /* Save all arguments */ - subq $0xC0, %rsp /* 16-byte alignment */ - movq %rax, -0x8(%rbp) - movq %rdi, -0x10(%rbp) - movq %rsi, -0x18(%rbp) - movq %rdx, -0x20(%rbp) - movq %rcx, -0x28(%rbp) - movq %r8, -0x30(%rbp) - movq %r9, -0x38(%rbp) - movdqa %xmm0, -0x50(%rbp) - movdqa %xmm1, -0x60(%rbp) - movdqa %xmm2, -0x70(%rbp) - movdqa %xmm3, -0x80(%rbp) - movdqa %xmm4, -0x90(%rbp) - movdqa %xmm5, -0xA0(%rbp) - movdqa %xmm6, -0xB0(%rbp) - movdqa %xmm7, -0xC0(%rbp) - - movq %rsi, %rdi - call object_getClass@PLT - - movq %rax, %rdi - leaq sel_forwardingTargetForSelector_(%rip), %rsi - call class_respondsToSelector@PLT - - testq %rax, %rax - jz 0f - - movq -0x18(%rbp), %rdi - 
leaq sel_forwardingTargetForSelector_(%rip), %rsi - call objc_msg_lookup@PLT - - movq -0x18(%rbp), %rdi - leaq sel_forwardingTargetForSelector_(%rip), %rsi - movq -0x20(%rbp), %rdx - call *%rax - - testq %rax, %rax - jz 0f - cmpq -0x18(%rbp), %rax - je 0f - - movq %rax, -0x18(%rbp) - - movq %rax, %rdi - movq -0x20(%rbp), %rsi - call objc_msg_lookup_stret@PLT - movq %rax, %r11 - - /* Restore all arguments */ - movdqa -0xC0(%rbp), %xmm7 - movdqa -0xB0(%rbp), %xmm6 - movdqa -0xA0(%rbp), %xmm5 - movdqa -0x90(%rbp), %xmm4 - movdqa -0x80(%rbp), %xmm3 - movdqa -0x70(%rbp), %xmm2 - movdqa -0x60(%rbp), %xmm1 - movdqa -0x50(%rbp), %xmm0 - movq -0x38(%rbp), %r9 - movq -0x30(%rbp), %r8 - movq -0x28(%rbp), %rcx - movq -0x20(%rbp), %rdx - movq -0x18(%rbp), %rsi - movq -0x10(%rbp), %rdi - movq -0x8(%rbp), %rax - - movq %rbp, %rsp - popq %rbp - - jmpq *%r11 - -0: - movq -0x10(%rbp), %rdi - movq -0x18(%rbp), %rsi - movq -0x20(%rbp), %rdx - - movq %rbp, %rsp - popq %rbp - - jmp of_method_not_found_stret@PLT -.type of_forward_stret, %function -.size of_forward_stret, .-of_forward_stret - -init: - leaq module(%rip), %rdi - jmp __objc_exec_class@PLT - -.section .ctors, "aw", %progbits - .quad init - -.section .rodata -str_forwardingTargetForSelector_: - .asciz "forwardingTargetForSelector:" - -.section .data -sel_forwardingTargetForSelector_: - .quad str_forwardingTargetForSelector_, 0 - .quad 0, 0 -symtab: - .quad 0, sel_forwardingTargetForSelector_ - .short 0, 0 - .long 0 - .quad 0 -module: - .quad 8, 32, 0, symtab - -#ifdef __linux__ -.section .note.GNU-stack, "", %progbits -#endif DELETED src/forwarding/forwarding-amd64-win64.S Index: src/forwarding/forwarding-amd64-win64.S ================================================================== --- src/forwarding/forwarding-amd64-win64.S +++ src/forwarding/forwarding-amd64-win64.S @@ -1,188 +0,0 @@ -/* - * Copyright (c) 2008, 2009, 2010, 2011, 2012, 2013, 2014 - * Jonathan Schleifer - * - * All rights reserved. - * - * This file is part of ObjFW. It may be distributed under the terms of the - * Q Public License 1.0, which can be found in the file LICENSE.QPL included in - * the packaging of this file. - * - * Alternatively, it may be distributed under the terms of the GNU General - * Public License, either version 2 or 3, which can be found in the file - * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this - * file. 
- */ - -.globl of_forward -.globl of_forward_stret - -.section .text -of_forward: - pushq %rbp - movq %rsp, %rbp - - /* Save all arguments */ - subq $0x90, %rsp /* 16-byte alignment */ - movq %rax, -0x28(%rbp) - movq %rcx, -0x30(%rbp) - movq %rdx, -0x38(%rbp) - movq %r8, -0x40(%rbp) - movq %r9, -0x48(%rbp) - movdqa %xmm0, -0x60(%rbp) - movdqa %xmm1, -0x70(%rbp) - movdqa %xmm2, -0x80(%rbp) - movdqa %xmm3, -0x90(%rbp) - - call object_getClass - - movq %rax, %rcx - leaq sel_forwardingTargetForSelector_(%rip), %rdx - call class_respondsToSelector - - testq %rax, %rax - jz 0f - - movq -0x30(%rbp), %rcx - leaq sel_forwardingTargetForSelector_(%rip), %rdx - call objc_msg_lookup - - movq -0x30(%rbp), %rcx - leaq sel_forwardingTargetForSelector_(%rip), %rdx - movq -0x38(%rbp), %r8 - call *%rax - - testq %rax, %rax - jz 0f - cmpq -0x30(%rbp), %rax - je 0f - - movq %rax, -0x30(%rbp) - - movq %rax, %rcx - movq -0x38(%rbp), %rdx - call objc_msg_lookup - movq %rax, %r11 - - /* Restore all arguments */ - movdqa -0x90(%rbp), %xmm3 - movdqa -0x80(%rbp), %xmm2 - movdqa -0x70(%rbp), %xmm1 - movdqa -0x60(%rbp), %xmm0 - movq -0x48(%rbp), %r9 - movq -0x40(%rbp), %r8 - movq -0x38(%rbp), %rdx - movq -0x30(%rbp), %rcx - movq -0x28(%rbp), %rax - - movq %rbp, %rsp - popq %rbp - - jmpq *%r11 - -0: - movq -0x30(%rbp), %rcx - movq -0x38(%rbp), %rdx - - movq %rbp, %rsp - popq %rbp - - jmp of_method_not_found - -of_forward_stret: - pushq %rbp - movq %rsp, %rbp - - /* Save all arguments */ - subq $0x90, %rsp /* 16-byte alignment */ - movq %rax, -0x28(%rbp) - movq %rcx, -0x30(%rbp) - movq %rdx, -0x38(%rbp) - movq %r8, -0x40(%rbp) - movq %r9, -0x48(%rbp) - movdqa %xmm0, -0x60(%rbp) - movdqa %xmm1, -0x70(%rbp) - movdqa %xmm2, -0x80(%rbp) - movdqa %xmm3, -0x90(%rbp) - - movq %rdx, %rcx - call object_getClass - - movq %rax, %rcx - leaq sel_forwardingTargetForSelector_(%rip), %rdx - call class_respondsToSelector - - testq %rax, %rax - jz 0f - - movq -0x38(%rbp), %rcx - leaq sel_forwardingTargetForSelector_(%rip), %rdx - call objc_msg_lookup - - movq -0x38(%rbp), %rcx - leaq sel_forwardingTargetForSelector_(%rip), %rdx - movq -0x40(%rbp), %r8 - call *%rax - - testq %rax, %rax - jz 0f - cmpq -0x38(%rbp), %rax - je 0f - - movq %rax, -0x38(%rbp) - - movq %rax, %rcx - movq -0x40(%rbp), %rdx - call objc_msg_lookup_stret@PLT - movq %rax, %r11 - - /* Restore all arguments */ - movdqa -0x90(%rbp), %xmm3 - movdqa -0x80(%rbp), %xmm2 - movdqa -0x70(%rbp), %xmm1 - movdqa -0x60(%rbp), %xmm0 - movq -0x48(%rbp), %r9 - movq -0x40(%rbp), %r8 - movq -0x38(%rbp), %rdx - movq -0x30(%rbp), %rcx - movq -0x28(%rbp), %rax - - movq %rbp, %rsp - popq %rbp - - jmpq *%r11 - -0: - movq -0x30(%rbp), %rcx - movq -0x38(%rbp), %rdx - movq -0x40(%rbp), %r8 - - movq %rbp, %rsp - popq %rbp - - jmp of_method_not_found_stret - -init: - leaq module(%rip), %rcx - jmp __objc_exec_class - -.section .ctors, "aw" - .quad init - -.section .rodata -str_forwardingTargetForSelector_: - .asciz "forwardingTargetForSelector:" - -.section .data -sel_forwardingTargetForSelector_: - .quad str_forwardingTargetForSelector_, 0 - .quad 0, 0 -symtab: - .long 0, 0 - .quad sel_forwardingTargetForSelector_ - .short 0, 0 - .long 0 - .quad 0 -module: - .long 8, 32 - .quad 0, symtab ADDED src/forwarding/forwarding-x86_64-elf.S Index: src/forwarding/forwarding-x86_64-elf.S ================================================================== --- src/forwarding/forwarding-x86_64-elf.S +++ src/forwarding/forwarding-x86_64-elf.S @@ -0,0 +1,218 @@ +/* + * Copyright (c) 2008, 2009, 2010, 2011, 2012, 
2013, 2014 + * Jonathan Schleifer + * + * All rights reserved. + * + * This file is part of ObjFW. It may be distributed under the terms of the + * Q Public License 1.0, which can be found in the file LICENSE.QPL included in + * the packaging of this file. + * + * Alternatively, it may be distributed under the terms of the GNU General + * Public License, either version 2 or 3, which can be found in the file + * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this + * file. + */ + +.globl of_forward +.globl of_forward_stret + +.section .text +of_forward: + pushq %rbp + movq %rsp, %rbp + + /* Save all arguments */ + subq $0xC0, %rsp /* 16-byte alignment */ + movq %rax, -0x8(%rbp) + movq %rdi, -0x10(%rbp) + movq %rsi, -0x18(%rbp) + movq %rdx, -0x20(%rbp) + movq %rcx, -0x28(%rbp) + movq %r8, -0x30(%rbp) + movq %r9, -0x38(%rbp) + movdqa %xmm0, -0x50(%rbp) + movdqa %xmm1, -0x60(%rbp) + movdqa %xmm2, -0x70(%rbp) + movdqa %xmm3, -0x80(%rbp) + movdqa %xmm4, -0x90(%rbp) + movdqa %xmm5, -0xA0(%rbp) + movdqa %xmm6, -0xB0(%rbp) + movdqa %xmm7, -0xC0(%rbp) + + call object_getClass@PLT + + movq %rax, %rdi + leaq sel_forwardingTargetForSelector_(%rip), %rsi + call class_respondsToSelector@PLT + + testq %rax, %rax + jz 0f + + movq -0x10(%rbp), %rdi + leaq sel_forwardingTargetForSelector_(%rip), %rsi + call objc_msg_lookup@PLT + + movq -0x10(%rbp), %rdi + leaq sel_forwardingTargetForSelector_(%rip), %rsi + movq -0x18(%rbp), %rdx + call *%rax + + testq %rax, %rax + jz 0f + cmpq -0x10(%rbp), %rax + je 0f + + movq %rax, -0x10(%rbp) + + movq %rax, %rdi + movq -0x18(%rbp), %rsi + call objc_msg_lookup@PLT + movq %rax, %r11 + + /* Restore all arguments */ + movdqa -0xC0(%rbp), %xmm7 + movdqa -0xB0(%rbp), %xmm6 + movdqa -0xA0(%rbp), %xmm5 + movdqa -0x90(%rbp), %xmm4 + movdqa -0x80(%rbp), %xmm3 + movdqa -0x70(%rbp), %xmm2 + movdqa -0x60(%rbp), %xmm1 + movdqa -0x50(%rbp), %xmm0 + movq -0x38(%rbp), %r9 + movq -0x30(%rbp), %r8 + movq -0x28(%rbp), %rcx + movq -0x20(%rbp), %rdx + movq -0x18(%rbp), %rsi + movq -0x10(%rbp), %rdi + movq -0x8(%rbp), %rax + + movq %rbp, %rsp + popq %rbp + + jmpq *%r11 + +0: + movq -0x10(%rbp), %rdi + movq -0x18(%rbp), %rsi + + movq %rbp, %rsp + popq %rbp + + jmp of_method_not_found@PLT +.type of_forward, %function +.size of_forward, .-of_forward + +of_forward_stret: + pushq %rbp + movq %rsp, %rbp + + /* Save all arguments */ + subq $0xC0, %rsp /* 16-byte alignment */ + movq %rax, -0x8(%rbp) + movq %rdi, -0x10(%rbp) + movq %rsi, -0x18(%rbp) + movq %rdx, -0x20(%rbp) + movq %rcx, -0x28(%rbp) + movq %r8, -0x30(%rbp) + movq %r9, -0x38(%rbp) + movdqa %xmm0, -0x50(%rbp) + movdqa %xmm1, -0x60(%rbp) + movdqa %xmm2, -0x70(%rbp) + movdqa %xmm3, -0x80(%rbp) + movdqa %xmm4, -0x90(%rbp) + movdqa %xmm5, -0xA0(%rbp) + movdqa %xmm6, -0xB0(%rbp) + movdqa %xmm7, -0xC0(%rbp) + + movq %rsi, %rdi + call object_getClass@PLT + + movq %rax, %rdi + leaq sel_forwardingTargetForSelector_(%rip), %rsi + call class_respondsToSelector@PLT + + testq %rax, %rax + jz 0f + + movq -0x18(%rbp), %rdi + leaq sel_forwardingTargetForSelector_(%rip), %rsi + call objc_msg_lookup@PLT + + movq -0x18(%rbp), %rdi + leaq sel_forwardingTargetForSelector_(%rip), %rsi + movq -0x20(%rbp), %rdx + call *%rax + + testq %rax, %rax + jz 0f + cmpq -0x18(%rbp), %rax + je 0f + + movq %rax, -0x18(%rbp) + + movq %rax, %rdi + movq -0x20(%rbp), %rsi + call objc_msg_lookup_stret@PLT + movq %rax, %r11 + + /* Restore all arguments */ + movdqa -0xC0(%rbp), %xmm7 + movdqa -0xB0(%rbp), %xmm6 + movdqa -0xA0(%rbp), %xmm5 + movdqa 
-0x90(%rbp), %xmm4 + movdqa -0x80(%rbp), %xmm3 + movdqa -0x70(%rbp), %xmm2 + movdqa -0x60(%rbp), %xmm1 + movdqa -0x50(%rbp), %xmm0 + movq -0x38(%rbp), %r9 + movq -0x30(%rbp), %r8 + movq -0x28(%rbp), %rcx + movq -0x20(%rbp), %rdx + movq -0x18(%rbp), %rsi + movq -0x10(%rbp), %rdi + movq -0x8(%rbp), %rax + + movq %rbp, %rsp + popq %rbp + + jmpq *%r11 + +0: + movq -0x10(%rbp), %rdi + movq -0x18(%rbp), %rsi + movq -0x20(%rbp), %rdx + + movq %rbp, %rsp + popq %rbp + + jmp of_method_not_found_stret@PLT +.type of_forward_stret, %function +.size of_forward_stret, .-of_forward_stret + +init: + leaq module(%rip), %rdi + jmp __objc_exec_class@PLT + +.section .ctors, "aw", %progbits + .quad init + +.section .rodata +str_forwardingTargetForSelector_: + .asciz "forwardingTargetForSelector:" + +.section .data +sel_forwardingTargetForSelector_: + .quad str_forwardingTargetForSelector_, 0 + .quad 0, 0 +symtab: + .quad 0, sel_forwardingTargetForSelector_ + .short 0, 0 + .long 0 + .quad 0 +module: + .quad 8, 32, 0, symtab + +#ifdef __linux__ +.section .note.GNU-stack, "", %progbits +#endif ADDED src/forwarding/forwarding-x86_64-win64.S Index: src/forwarding/forwarding-x86_64-win64.S ================================================================== --- src/forwarding/forwarding-x86_64-win64.S +++ src/forwarding/forwarding-x86_64-win64.S @@ -0,0 +1,188 @@ +/* + * Copyright (c) 2008, 2009, 2010, 2011, 2012, 2013, 2014 + * Jonathan Schleifer + * + * All rights reserved. + * + * This file is part of ObjFW. It may be distributed under the terms of the + * Q Public License 1.0, which can be found in the file LICENSE.QPL included in + * the packaging of this file. + * + * Alternatively, it may be distributed under the terms of the GNU General + * Public License, either version 2 or 3, which can be found in the file + * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this + * file. 
+ */ + +.globl of_forward +.globl of_forward_stret + +.section .text +of_forward: + pushq %rbp + movq %rsp, %rbp + + /* Save all arguments */ + subq $0x90, %rsp /* 16-byte alignment */ + movq %rax, -0x28(%rbp) + movq %rcx, -0x30(%rbp) + movq %rdx, -0x38(%rbp) + movq %r8, -0x40(%rbp) + movq %r9, -0x48(%rbp) + movdqa %xmm0, -0x60(%rbp) + movdqa %xmm1, -0x70(%rbp) + movdqa %xmm2, -0x80(%rbp) + movdqa %xmm3, -0x90(%rbp) + + call object_getClass + + movq %rax, %rcx + leaq sel_forwardingTargetForSelector_(%rip), %rdx + call class_respondsToSelector + + testq %rax, %rax + jz 0f + + movq -0x30(%rbp), %rcx + leaq sel_forwardingTargetForSelector_(%rip), %rdx + call objc_msg_lookup + + movq -0x30(%rbp), %rcx + leaq sel_forwardingTargetForSelector_(%rip), %rdx + movq -0x38(%rbp), %r8 + call *%rax + + testq %rax, %rax + jz 0f + cmpq -0x30(%rbp), %rax + je 0f + + movq %rax, -0x30(%rbp) + + movq %rax, %rcx + movq -0x38(%rbp), %rdx + call objc_msg_lookup + movq %rax, %r11 + + /* Restore all arguments */ + movdqa -0x90(%rbp), %xmm3 + movdqa -0x80(%rbp), %xmm2 + movdqa -0x70(%rbp), %xmm1 + movdqa -0x60(%rbp), %xmm0 + movq -0x48(%rbp), %r9 + movq -0x40(%rbp), %r8 + movq -0x38(%rbp), %rdx + movq -0x30(%rbp), %rcx + movq -0x28(%rbp), %rax + + movq %rbp, %rsp + popq %rbp + + jmpq *%r11 + +0: + movq -0x30(%rbp), %rcx + movq -0x38(%rbp), %rdx + + movq %rbp, %rsp + popq %rbp + + jmp of_method_not_found + +of_forward_stret: + pushq %rbp + movq %rsp, %rbp + + /* Save all arguments */ + subq $0x90, %rsp /* 16-byte alignment */ + movq %rax, -0x28(%rbp) + movq %rcx, -0x30(%rbp) + movq %rdx, -0x38(%rbp) + movq %r8, -0x40(%rbp) + movq %r9, -0x48(%rbp) + movdqa %xmm0, -0x60(%rbp) + movdqa %xmm1, -0x70(%rbp) + movdqa %xmm2, -0x80(%rbp) + movdqa %xmm3, -0x90(%rbp) + + movq %rdx, %rcx + call object_getClass + + movq %rax, %rcx + leaq sel_forwardingTargetForSelector_(%rip), %rdx + call class_respondsToSelector + + testq %rax, %rax + jz 0f + + movq -0x38(%rbp), %rcx + leaq sel_forwardingTargetForSelector_(%rip), %rdx + call objc_msg_lookup + + movq -0x38(%rbp), %rcx + leaq sel_forwardingTargetForSelector_(%rip), %rdx + movq -0x40(%rbp), %r8 + call *%rax + + testq %rax, %rax + jz 0f + cmpq -0x38(%rbp), %rax + je 0f + + movq %rax, -0x38(%rbp) + + movq %rax, %rcx + movq -0x40(%rbp), %rdx + call objc_msg_lookup_stret@PLT + movq %rax, %r11 + + /* Restore all arguments */ + movdqa -0x90(%rbp), %xmm3 + movdqa -0x80(%rbp), %xmm2 + movdqa -0x70(%rbp), %xmm1 + movdqa -0x60(%rbp), %xmm0 + movq -0x48(%rbp), %r9 + movq -0x40(%rbp), %r8 + movq -0x38(%rbp), %rdx + movq -0x30(%rbp), %rcx + movq -0x28(%rbp), %rax + + movq %rbp, %rsp + popq %rbp + + jmpq *%r11 + +0: + movq -0x30(%rbp), %rcx + movq -0x38(%rbp), %rdx + movq -0x40(%rbp), %r8 + + movq %rbp, %rsp + popq %rbp + + jmp of_method_not_found_stret + +init: + leaq module(%rip), %rcx + jmp __objc_exec_class + +.section .ctors, "aw" + .quad init + +.section .rodata +str_forwardingTargetForSelector_: + .asciz "forwardingTargetForSelector:" + +.section .data +sel_forwardingTargetForSelector_: + .quad str_forwardingTargetForSelector_, 0 + .quad 0, 0 +symtab: + .long 0, 0 + .quad sel_forwardingTargetForSelector_ + .short 0, 0 + .long 0 + .quad 0 +module: + .long 8, 32 + .quad 0, symtab Index: src/forwarding/forwarding.S ================================================================== --- src/forwarding/forwarding.S +++ src/forwarding/forwarding.S @@ -28,12 +28,12 @@ # elif defined(__ppc__) # include "apple-forwarding-ppc.S" # endif #else # if defined(__ELF__) -# if defined(__amd64__) || 
defined(__x86_64__) -# include "forwarding-amd64-elf.S" +# if defined(__x86_64__) || defined(__amd64__) +# include "forwarding-x86_64-elf.S" # elif defined(__i386__) # include "forwarding-x86-elf.S" # elif defined(__arm__) || defined(__ARM__) # include "forwarding-arm-elf.S" # elif defined(__ppc__) || defined(__PPC__) @@ -42,11 +42,11 @@ (defined(__mips_eabi) && _MIPS_SZPTR == 32) # include "forwarding-mips-elf.S" # endif # elif defined(_WIN32) # if defined(__x86_64__) -# include "forwarding-amd64-win64.S" +# include "forwarding-x86_64-win64.S" # elif defined(__i386__) # include "forwarding-x86-win32.S" # endif # endif #endif Index: src/macros.h ================================================================== --- src/macros.h +++ src/macros.h @@ -64,12 +64,12 @@ # define OF_BIGGEST_ALIGNMENT 16 # endif #endif #ifdef __GNUC__ -# if defined(__amd64__) || defined(__x86_64__) -# define OF_AMD64_ASM +# if defined(__x86_64__) || defined(__amd64__) +# define OF_X86_64_ASM # elif defined(__i386__) # define OF_X86_ASM # elif defined(__ppc__) || defined(__PPC__) # define OF_PPC_ASM # elif defined(__arm__) || defined(__ARM__) @@ -201,11 +201,11 @@ } static OF_INLINE uint16_t OF_CONST_FUNC OF_BSWAP16_NONCONST(uint16_t i) { -#if defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#if defined(OF_X86_64_ASM) || defined(OF_X86_ASM) __asm__ ( "xchgb %h0, %b0" : "=Q"(i) : "0"(i) ); @@ -229,11 +229,11 @@ } static OF_INLINE uint32_t OF_CONST_FUNC OF_BSWAP32_NONCONST(uint32_t i) { -#if defined(OF_X86_ASM) || defined(OF_AMD64_ASM) +#if defined(OF_X86_64_ASM) || defined(OF_X86_ASM) __asm__ ( "bswap %0" : "=q"(i) : "0"(i) ); @@ -259,11 +259,11 @@ } static OF_INLINE uint64_t OF_CONST_FUNC OF_BSWAP64_NONCONST(uint64_t i) { -#if defined(OF_AMD64_ASM) +#if defined(OF_X86_64_ASM) __asm__ ( "bswap %0" : "=r"(i) : "0"(i) ); DELETED src/runtime/lookup-asm/lookup-asm-amd64-elf.S Index: src/runtime/lookup-asm/lookup-asm-amd64-elf.S ================================================================== --- src/runtime/lookup-asm/lookup-asm-amd64-elf.S +++ src/runtime/lookup-asm/lookup-asm-amd64-elf.S @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2008, 2009, 2010, 2011, 2012, 2013, 2014 - * Jonathan Schleifer - * - * All rights reserved. - * - * This file is part of ObjFW. It may be distributed under the terms of the - * Q Public License 1.0, which can be found in the file LICENSE.QPL included in - * the packaging of this file. - * - * Alternatively, it may be distributed under the terms of the GNU General - * Public License, either version 2 or 3, which can be found in the file - * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this - * file. 
- */ - -#include "config.h" - -.globl objc_msg_lookup -.globl objc_msg_lookup_stret -.globl objc_msg_lookup_super -.globl objc_msg_lookup_super_stret - -.section .text -.macro generate_lookup name not_found -\name: - testq %rdi, %rdi - jz ret_nil - - movq (%rdi), %r8 - movq 64(%r8), %r8 - -.Lmain_\name: - movq (%rsi), %rax - movzbl %ah, %ecx - movzbl %al, %edx -#ifdef OF_SELUID24 - shrl $16, %eax - - movq (%r8,%rax,8), %r8 -#endif - movq (%r8,%rcx,8), %r8 - movq (%r8,%rdx,8), %rax - - testq %rax, %rax - jz \not_found@PLT - - ret -.type \name, %function -.size \name, .-\name -.endm - -.macro generate_lookup_super name lookup -\name: - movq (%rdi), %rax - testq %rax, %rax - jz ret_nil - - movq 8(%rdi), %r8 - movq 64(%r8), %r8 - jmp .Lmain_\lookup -.type \name, %function -.size \name, .-\name -.endm - -generate_lookup objc_msg_lookup objc_method_not_found -generate_lookup objc_msg_lookup_stret objc_method_not_found_stret -generate_lookup_super objc_msg_lookup_super objc_msg_lookup -generate_lookup_super objc_msg_lookup_super_stret objc_msg_lookup_stret - -ret_nil: - leaq nil_method(%rip), %rax - ret - -nil_method: - xorq %rax, %rax - ret - -#ifdef __linux__ -.section .note.GNU-stack, "", %progbits -#endif DELETED src/runtime/lookup-asm/lookup-asm-amd64-macho.S Index: src/runtime/lookup-asm/lookup-asm-amd64-macho.S ================================================================== --- src/runtime/lookup-asm/lookup-asm-amd64-macho.S +++ src/runtime/lookup-asm/lookup-asm-amd64-macho.S @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2008, 2009, 2010, 2011, 2012, 2013, 2014 - * Jonathan Schleifer - * - * All rights reserved. - * - * This file is part of ObjFW. It may be distributed under the terms of the - * Q Public License 1.0, which can be found in the file LICENSE.QPL included in - * the packaging of this file. - * - * Alternatively, it may be distributed under the terms of the GNU General - * Public License, either version 2 or 3, which can be found in the file - * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this - * file. 
- */ - -#include "config.h" - -.globl _objc_msg_lookup -.globl _objc_msg_lookup_stret -.globl _objc_msg_lookup_super -.globl _objc_msg_lookup_super_stret - -.section __TEXT, __text, regular, pure_instructions -.macro generate_lookup -$0: - testq %rdi, %rdi - jz ret_nil - - movq (%rdi), %r8 - movq 64(%r8), %r8 - -Lmain_$0: - movq (%rsi), %rax - movzbl %ah, %ecx - movzbl %al, %edx -#ifdef OF_SELUID24 - shrl $16, %eax - - movq (%r8,%rax,8), %r8 -#endif - movq (%r8,%rcx,8), %r8 - movq (%r8,%rdx,8), %rax - - testq %rax, %rax - jz $1 - - ret -.endmacro - -.macro generate_lookup_super -$0: - movq (%rdi), %rax - testq %rax, %rax - jz ret_nil - - movq 8(%rdi), %r8 - movq 64(%r8), %r8 - jmp Lmain_$1 -.endmacro - -generate_lookup _objc_msg_lookup, _objc_method_not_found -generate_lookup _objc_msg_lookup_stret, _objc_method_not_found_stret -generate_lookup_super _objc_msg_lookup_super, _objc_msg_lookup -generate_lookup_super _objc_msg_lookup_super_stret, _objc_msg_lookup_stret - -ret_nil: - leaq nil_method(%rip), %rax - ret - -nil_method: - movq %rdi, %rax - ret DELETED src/runtime/lookup-asm/lookup-asm-amd64-win64.S Index: src/runtime/lookup-asm/lookup-asm-amd64-win64.S ================================================================== --- src/runtime/lookup-asm/lookup-asm-amd64-win64.S +++ src/runtime/lookup-asm/lookup-asm-amd64-win64.S @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2008, 2009, 2010, 2011, 2012, 2013, 2014 - * Jonathan Schleifer - * - * All rights reserved. - * - * This file is part of ObjFW. It may be distributed under the terms of the - * Q Public License 1.0, which can be found in the file LICENSE.QPL included in - * the packaging of this file. - * - * Alternatively, it may be distributed under the terms of the GNU General - * Public License, either version 2 or 3, which can be found in the file - * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this - * file. - */ - -#include "config.h" - -.globl objc_msg_lookup -.globl objc_msg_lookup_stret -.globl objc_msg_lookup_super -.globl objc_msg_lookup_super_stret - -.section .text -.macro generate_lookup name not_found -\name: - testq %rcx, %rcx - jz ret_nil - - movq (%rcx), %r8 - movq 56(%r8), %r8 - -.Lmain_\name: - movq %rcx, %r10 - movq %rdx, %r11 - - movq (%rdx), %rax - movzbl %ah, %ecx - movzbl %al, %edx -#ifdef OF_SELUID24 - shrl $16, %eax - - movq (%r8,%rax,8), %r8 -#endif - movq (%r8,%rcx,8), %r8 - movq (%r8,%rdx,8), %rax - - testq %rax, %rax - jz 0f - - ret - -0: - movq %r10, %rcx - movq %r11, %rdx - jmp \not_found -.endm - -.macro generate_lookup_super name lookup -\name: - movq (%rcx), %rax - testq %rax, %rax - jz ret_nil - - movq 8(%rcx), %r8 - movq 56(%r8), %r8 - jmp .Lmain_\lookup -.endm - -generate_lookup objc_msg_lookup objc_method_not_found -generate_lookup objc_msg_lookup_stret objc_method_not_found_stret -generate_lookup_super objc_msg_lookup_super objc_msg_lookup -generate_lookup_super objc_msg_lookup_super_stret objc_msg_lookup_stret - -ret_nil: - leaq nil_method(%rip), %rax - ret - -nil_method: - xorq %rax, %rax - ret ADDED src/runtime/lookup-asm/lookup-asm-x86_64-elf.S Index: src/runtime/lookup-asm/lookup-asm-x86_64-elf.S ================================================================== --- src/runtime/lookup-asm/lookup-asm-x86_64-elf.S +++ src/runtime/lookup-asm/lookup-asm-x86_64-elf.S @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2008, 2009, 2010, 2011, 2012, 2013, 2014 + * Jonathan Schleifer + * + * All rights reserved. + * + * This file is part of ObjFW. 
It may be distributed under the terms of the + * Q Public License 1.0, which can be found in the file LICENSE.QPL included in + * the packaging of this file. + * + * Alternatively, it may be distributed under the terms of the GNU General + * Public License, either version 2 or 3, which can be found in the file + * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this + * file. + */ + +#include "config.h" + +.globl objc_msg_lookup +.globl objc_msg_lookup_stret +.globl objc_msg_lookup_super +.globl objc_msg_lookup_super_stret + +.section .text +.macro generate_lookup name not_found +\name: + testq %rdi, %rdi + jz ret_nil + + movq (%rdi), %r8 + movq 64(%r8), %r8 + +.Lmain_\name: + movq (%rsi), %rax + movzbl %ah, %ecx + movzbl %al, %edx +#ifdef OF_SELUID24 + shrl $16, %eax + + movq (%r8,%rax,8), %r8 +#endif + movq (%r8,%rcx,8), %r8 + movq (%r8,%rdx,8), %rax + + testq %rax, %rax + jz \not_found@PLT + + ret +.type \name, %function +.size \name, .-\name +.endm + +.macro generate_lookup_super name lookup +\name: + movq (%rdi), %rax + testq %rax, %rax + jz ret_nil + + movq 8(%rdi), %r8 + movq 64(%r8), %r8 + jmp .Lmain_\lookup +.type \name, %function +.size \name, .-\name +.endm + +generate_lookup objc_msg_lookup objc_method_not_found +generate_lookup objc_msg_lookup_stret objc_method_not_found_stret +generate_lookup_super objc_msg_lookup_super objc_msg_lookup +generate_lookup_super objc_msg_lookup_super_stret objc_msg_lookup_stret + +ret_nil: + leaq nil_method(%rip), %rax + ret + +nil_method: + xorq %rax, %rax + ret + +#ifdef __linux__ +.section .note.GNU-stack, "", %progbits +#endif ADDED src/runtime/lookup-asm/lookup-asm-x86_64-macho.S Index: src/runtime/lookup-asm/lookup-asm-x86_64-macho.S ================================================================== --- src/runtime/lookup-asm/lookup-asm-x86_64-macho.S +++ src/runtime/lookup-asm/lookup-asm-x86_64-macho.S @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2008, 2009, 2010, 2011, 2012, 2013, 2014 + * Jonathan Schleifer + * + * All rights reserved. + * + * This file is part of ObjFW. It may be distributed under the terms of the + * Q Public License 1.0, which can be found in the file LICENSE.QPL included in + * the packaging of this file. + * + * Alternatively, it may be distributed under the terms of the GNU General + * Public License, either version 2 or 3, which can be found in the file + * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this + * file. 
+ */ + +#include "config.h" + +.globl _objc_msg_lookup +.globl _objc_msg_lookup_stret +.globl _objc_msg_lookup_super +.globl _objc_msg_lookup_super_stret + +.section __TEXT, __text, regular, pure_instructions +.macro generate_lookup +$0: + testq %rdi, %rdi + jz ret_nil + + movq (%rdi), %r8 + movq 64(%r8), %r8 + +Lmain_$0: + movq (%rsi), %rax + movzbl %ah, %ecx + movzbl %al, %edx +#ifdef OF_SELUID24 + shrl $16, %eax + + movq (%r8,%rax,8), %r8 +#endif + movq (%r8,%rcx,8), %r8 + movq (%r8,%rdx,8), %rax + + testq %rax, %rax + jz $1 + + ret +.endmacro + +.macro generate_lookup_super +$0: + movq (%rdi), %rax + testq %rax, %rax + jz ret_nil + + movq 8(%rdi), %r8 + movq 64(%r8), %r8 + jmp Lmain_$1 +.endmacro + +generate_lookup _objc_msg_lookup, _objc_method_not_found +generate_lookup _objc_msg_lookup_stret, _objc_method_not_found_stret +generate_lookup_super _objc_msg_lookup_super, _objc_msg_lookup +generate_lookup_super _objc_msg_lookup_super_stret, _objc_msg_lookup_stret + +ret_nil: + leaq nil_method(%rip), %rax + ret + +nil_method: + movq %rdi, %rax + ret ADDED src/runtime/lookup-asm/lookup-asm-x86_64-win64.S Index: src/runtime/lookup-asm/lookup-asm-x86_64-win64.S ================================================================== --- src/runtime/lookup-asm/lookup-asm-x86_64-win64.S +++ src/runtime/lookup-asm/lookup-asm-x86_64-win64.S @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2008, 2009, 2010, 2011, 2012, 2013, 2014 + * Jonathan Schleifer + * + * All rights reserved. + * + * This file is part of ObjFW. It may be distributed under the terms of the + * Q Public License 1.0, which can be found in the file LICENSE.QPL included in + * the packaging of this file. + * + * Alternatively, it may be distributed under the terms of the GNU General + * Public License, either version 2 or 3, which can be found in the file + * LICENSE.GPLv2 or LICENSE.GPLv3 respectively included in the packaging of this + * file. 
+ */ + +#include "config.h" + +.globl objc_msg_lookup +.globl objc_msg_lookup_stret +.globl objc_msg_lookup_super +.globl objc_msg_lookup_super_stret + +.section .text +.macro generate_lookup name not_found +\name: + testq %rcx, %rcx + jz ret_nil + + movq (%rcx), %r8 + movq 56(%r8), %r8 + +.Lmain_\name: + movq %rcx, %r10 + movq %rdx, %r11 + + movq (%rdx), %rax + movzbl %ah, %ecx + movzbl %al, %edx +#ifdef OF_SELUID24 + shrl $16, %eax + + movq (%r8,%rax,8), %r8 +#endif + movq (%r8,%rcx,8), %r8 + movq (%r8,%rdx,8), %rax + + testq %rax, %rax + jz 0f + + ret + +0: + movq %r10, %rcx + movq %r11, %rdx + jmp \not_found +.endm + +.macro generate_lookup_super name lookup +\name: + movq (%rcx), %rax + testq %rax, %rax + jz ret_nil + + movq 8(%rcx), %r8 + movq 56(%r8), %r8 + jmp .Lmain_\lookup +.endm + +generate_lookup objc_msg_lookup objc_method_not_found +generate_lookup objc_msg_lookup_stret objc_method_not_found_stret +generate_lookup_super objc_msg_lookup_super objc_msg_lookup +generate_lookup_super objc_msg_lookup_super_stret objc_msg_lookup_stret + +ret_nil: + leaq nil_method(%rip), %rax + ret + +nil_method: + xorq %rax, %rax + ret Index: src/runtime/lookup-asm/lookup-asm.S ================================================================== --- src/runtime/lookup-asm/lookup-asm.S +++ src/runtime/lookup-asm/lookup-asm.S @@ -15,12 +15,12 @@ */ #include "config.h" #if defined(__ELF__) -# if defined(__amd64__) || defined(__x86_64__) -# include "lookup-asm-amd64-elf.S" +# if defined(__x86_64__) || defined(__amd64__) +# include "lookup-asm-x86_64-elf.S" # elif defined(__i386__) # include "lookup-asm-x86-elf.S" # elif defined(__arm__) || defined(__ARM__) # include "lookup-asm-arm-elf.S" # elif defined(__ppc__) || defined(__PPC__) @@ -29,16 +29,16 @@ (defined(__mips_eabi) && _MIPS_SZPTR == 32) # include "lookup-asm-mips-elf.S" # endif #elif defined(__MACH__) # if defined(__x86_64__) -# include "lookup-asm-amd64-macho.S" +# include "lookup-asm-x86_64-macho.S" # elif defined(__ppc__) # include "lookup-asm-ppc-macho.S" # endif #elif defined(_WIN32) # if defined(__x86_64__) -# include "lookup-asm-amd64-win64.S" +# include "lookup-asm-x86_64-win64.S" # elif defined(__i386__) # include "lookup-asm-x86-win32.S" # endif #endif Index: src/runtime/runtime-private.h ================================================================== --- src/runtime/runtime-private.h +++ src/runtime/runtime-private.h @@ -174,11 +174,11 @@ return (void*)s->buckets[i]->buckets[j]; #endif } #if defined(__ELF__) -# if defined(__amd64__) || defined(__x86_64__) || defined(__i386__) || \ +# if defined(__x86_64__) || defined(__amd64__) || defined(__i386__) || \ defined(__ppc__) || defined(__PPC__) || defined(__arm__) || \ defined(__ARM__) # define OF_ASM_LOOKUP # endif # if (defined(_MIPS_SIM) && _MIPS_SIM == _ABIO32) || \