Differences From Artifact [23c4a049f4]:
- File
src/atomic.h
— part of check-in
[9feaa90358]
at
2016-07-30 21:46:24
on branch trunk
— of_memory_barrier(): Only use mfence on x86_64
This is only available on x86 with SSE2, while it's always available on
x86_64. However, checking if SSE2 is available here would be too slow,
therefore let the compiler decide what to do instead (which will depend
on the selected target CPU). (user: js, size: 20199) [annotate] [blame] [check-ins using]
To Artifact [50acc02363]:
- File
src/atomic.h
— part of check-in
[0c3e3da576]
at
2016-07-30 23:29:38
on branch trunk
— atomic.h: Use the OSAtomic variant without barrier
This matches what the assembly versions do. However, the __sync_*
versions still use the barrier, but unfortunately, no version without it
is provided. The only way around this would be to use the new __atomic_*
that has been added in GCC 4.7. (user: js, size: 20052) [annotate] [blame] [check-ins using]
︙ | ︙ | |||
67 68 69 70 71 72 73 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAdd32(i, p); #else # error of_atomic_int_add not implemented! #endif } static OF_INLINE int32_t of_atomic_int32_add(volatile int32_t *_Nonnull p, int32_t i) |
︙ | ︙ | |||
103 104 105 106 107 108 109 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAdd32(i, p); #else # error of_atomic_int32_add not implemented! #endif } static OF_INLINE void* of_atomic_ptr_add(void *volatile _Nullable *_Nonnull p, intptr_t i) |
︙ | ︙ | |||
150 151 152 153 154 155 156 | : "=&r"(i) : "r"(i), "r"(p) ); return (void*)i; #elif defined(OF_HAVE_OSATOMIC) # ifdef __LP64__ | | | | 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 | : "=&r"(i) : "r"(i), "r"(p) ); return (void*)i; #elif defined(OF_HAVE_OSATOMIC) # ifdef __LP64__ return (void*)OSAtomicAdd64(i, (int64_t*)p); # else return (void*)OSAtomicAdd32(i, (int32_t*)p); # endif #else # error of_atomic_ptr_add not implemented! #endif } static OF_INLINE int |
︙ | ︙ | |||
204 205 206 207 208 209 210 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAdd32(-i, p); #else # error of_atomic_int_sub not implemented! #endif } static OF_INLINE int32_t of_atomic_int32_sub(volatile int32_t *_Nonnull p, int32_t i) |
︙ | ︙ | |||
241 242 243 244 245 246 247 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAdd32(-i, p); #else # error of_atomic_int32_sub not implemented! #endif } static OF_INLINE void* of_atomic_ptr_sub(void *volatile _Nullable *_Nonnull p, intptr_t i) |
︙ | ︙ | |||
290 291 292 293 294 295 296 | : "=&r"(i) : "r"(i), "r"(p) ); return (void*)i; #elif defined(OF_HAVE_OSATOMIC) # ifdef __LP64__ | | | | 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 | : "=&r"(i) : "r"(i), "r"(p) ); return (void*)i; #elif defined(OF_HAVE_OSATOMIC) # ifdef __LP64__ return (void*)OSAtomicAdd64(-i, (int64_t*)p); # else return (void*)OSAtomicAdd32(-i, (int32_t*)p); # endif #else # error of_atomic_ptr_sub not implemented! #endif } static OF_INLINE int |
︙ | ︙ | |||
350 351 352 353 354 355 356 | "bne- 0b" : "=&r"(i) : "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 | "bne- 0b" : "=&r"(i) : "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicIncrement32(p); #else # error of_atomic_int_inc not implemented! #endif } static OF_INLINE int32_t of_atomic_int32_inc(volatile int32_t *_Nonnull p) |
︙ | ︙ | |||
392 393 394 395 396 397 398 | "bne- 0b" : "=&r"(i) : "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 | "bne- 0b" : "=&r"(i) : "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicIncrement32(p); #else # error of_atomic_int32_inc not implemented! #endif } static OF_INLINE int of_atomic_int_dec(volatile int *_Nonnull p) |
︙ | ︙ | |||
449 450 451 452 453 454 455 | "bne- 0b" : "=&r"(i) : "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 | "bne- 0b" : "=&r"(i) : "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicDecrement32(p); #else # error of_atomic_int_dec not implemented! #endif } static OF_INLINE int32_t of_atomic_int32_dec(volatile int32_t *_Nonnull p) |
︙ | ︙ | |||
491 492 493 494 495 496 497 | "bne- 0b" : "=&r"(i) : "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 | "bne- 0b" : "=&r"(i) : "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicDecrement32(p); #else # error of_atomic_int32_dec not implemented! #endif } static OF_INLINE unsigned int of_atomic_int_or(volatile unsigned int *_Nonnull p, unsigned int i) |
︙ | ︙ | |||
550 551 552 553 554 555 556 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicOr32(i, p); #else # error of_atomic_int_or not implemented! #endif } static OF_INLINE uint32_t of_atomic_int32_or(volatile uint32_t *_Nonnull p, uint32_t i) |
︙ | ︙ | |||
591 592 593 594 595 596 597 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 591 592 593 594 595 596 597 598 599 600 601 602 603 604 605 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicOr32(i, p); #else # error of_atomic_int32_or not implemented! #endif } static OF_INLINE unsigned int of_atomic_int_and(volatile unsigned int *_Nonnull p, unsigned int i) |
︙ | ︙ | |||
650 651 652 653 654 655 656 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAnd32(i, p); #else # error of_atomic_int_and not implemented! #endif } static OF_INLINE uint32_t of_atomic_int32_and(volatile uint32_t *_Nonnull p, uint32_t i) |
︙ | ︙ | |||
691 692 693 694 695 696 697 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAnd32(i, p); #else # error of_atomic_int32_and not implemented! #endif } static OF_INLINE unsigned int of_atomic_int_xor(volatile unsigned int *_Nonnull p, unsigned int i) |
︙ | ︙ | |||
750 751 752 753 754 755 756 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicXor32(i, p); #else # error of_atomic_int_xor not implemented! #endif } static OF_INLINE uint32_t of_atomic_int32_xor(volatile uint32_t *_Nonnull p, uint32_t i) |
︙ | ︙ | |||
791 792 793 794 795 796 797 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) | | | 791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 | "bne- 0b" : "=&r"(i) : "r"(i), "r"(p) ); return i; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicXor32(i, p); #else # error of_atomic_int32_xor not implemented! #endif } static OF_INLINE bool of_atomic_int_cmpswap(volatile int *_Nonnull p, int o, int n) |
︙ | ︙ | |||
846 847 848 849 850 851 852 | : "=&r"(r) : "r"(o), "r"(n), "r"(p) : "cc" ); return r; #elif defined(OF_HAVE_OSATOMIC) | | | 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 | : "=&r"(r) : "r"(o), "r"(n), "r"(p) : "cc" ); return r; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicCompareAndSwapInt(o, n, p); #else # error of_atomic_int_cmpswap not implemented! #endif } static OF_INLINE bool of_atomic_int32_cmpswap(volatile int32_t *_Nonnull p, int32_t o, int32_t n) |
︙ | ︙ | |||
901 902 903 904 905 906 907 | : "=&r"(r) : "r"(o), "r"(n), "r"(p) : "cc" ); return r; #elif defined(OF_HAVE_OSATOMIC) | | | 901 902 903 904 905 906 907 908 909 910 911 912 913 914 915 | : "=&r"(r) : "r"(o), "r"(n), "r"(p) : "cc" ); return r; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicCompareAndSwap32(o, n, p); #else # error of_atomic_int32_cmpswap not implemented! #endif } static OF_INLINE bool of_atomic_ptr_cmpswap(void *volatile _Nullable *_Nonnull p, |
︙ | ︙ | |||
957 958 959 960 961 962 963 | : "=&r"(r) : "r"(o), "r"(n), "r"(p) : "cc" ); return r; #elif defined(OF_HAVE_OSATOMIC) | | | 957 958 959 960 961 962 963 964 965 966 967 968 969 970 971 | : "=&r"(r) : "r"(o), "r"(n), "r"(p) : "cc" ); return r; #elif defined(OF_HAVE_OSATOMIC) return OSAtomicCompareAndSwapPtr(o, n, p); #else # error of_atomic_ptr_cmpswap not implemented! #endif } static OF_INLINE void of_memory_barrier(void) |
︙ | ︙ |