Overview
Comment: | Fix of_atomic_{add,sub}_ptr on AMD64. |
---|---|
Downloads: | Tarball | ZIP archive | SQL archive |
Timelines: | family | ancestors | descendants | both | 0.6 |
Files: | files | file ages | folders |
SHA3-256: |
10b7f839c58c364e1d8a42b049932fb2 (NOTE: truncated in extraction — a SHA3-256 digest is 64 hex digits; only 32 are shown)
User & Date: | js on 2012-03-21 09:23:54 |
Other Links: | branch diff | manifest | tags |
Context
2020-05-23
| ||
16:47 | Close 0.6 branch Closed-Leaf check-in: 503226703e user: js tags: 0.6 | |
2012-03-21
| ||
09:23 | Fix of_atomic_{add,sub}_ptr on AMD64. check-in: 10b7f839c5 user: js tags: 0.6 | |
2012-03-16
| ||
17:14 | Fix memory wasting in OFBigDataArray. check-in: d31c0b6fc8 user: js tags: 0.6 | |
Changes
Modified src/atomic.h from [1848f8ec6b] to [6e23ade0bb].
︙ | ︙ | |||
Hunk 1 — old lines 79-85 become new lines 79-107. `of_atomic_add_ptr` gains explicit x86 and AMD64 inline-assembly branches (the AMD64 branch uses the q-suffixed `xaddq`/`addq` forms, which is the fix). New-side code:

    }

    static OF_INLINE void*
    of_atomic_add_ptr(void* volatile *p, intptr_t i)
    {
    #if !defined(OF_THREADS)
    	return (*(char* volatile*)p += i);
    #elif defined(OF_X86_ASM)
    	__asm__ (
    	    "lock\n\t"
    	    "xaddl	%0, %2\n\t"
    	    "addl	%1, %0"
    	    : "+&r"(i)
    	    : "r"(i), "m"(*p)
    	);

    	return (void*)i;
    #elif defined(OF_AMD64_ASM)
    	__asm__ (
    	    "lock\n\t"
    	    "xaddq	%0, %2\n\t"
    	    "addq	%1, %0"
    	    : "+&r"(i)
    	    : "r"(i), "m"(*p)
    	);

    	return (void*)i;
    #elif defined(OF_HAVE_GCC_ATOMIC_OPS)
    	return __sync_add_and_fetch(p, i);

(hunk continues past the visible excerpt)
︙ | ︙ | |||
Hunk 2 — old lines 161-167 become new lines 171-201. `of_atomic_sub_ptr` likewise gains x86 and AMD64 branches (subtraction implemented by negating `i` before the atomic `xadd`; the AMD64 branch uses `negq`/`xaddq`/`subq`). New-side code:

    }

    static OF_INLINE void*
    of_atomic_sub_ptr(void* volatile *p, intptr_t i)
    {
    #if !defined(OF_THREADS)
    	return (*(char* volatile*)p -= i);
    #elif defined(OF_X86_ASM)
    	__asm__ (
    	    "negl	%0\n\t"
    	    "lock\n\t"
    	    "xaddl	%0, %2\n\t"
    	    "subl	%1, %0"
    	    : "+&r"(i)
    	    : "r"(i), "m"(*p)
    	);

    	return (void*)i;
    #elif defined(OF_AMD64_ASM)
    	__asm__ (
    	    "negq	%0\n\t"
    	    "lock\n\t"
    	    "xaddq	%0, %2\n\t"
    	    "subq	%1, %0"
    	    : "+&r"(i)
    	    : "r"(i), "m"(*p)
    	);

    	return (void*)i;
    #elif defined(OF_HAVE_GCC_ATOMIC_OPS)
    	return __sync_sub_and_fetch(p, i);

(hunk continues past the visible excerpt)
︙ | ︙ |