Overview
Comment: | Error out if an atomic operation is missing. |
---|---|
Downloads: | Tarball | ZIP archive | SQL archive |
Timelines: | family | ancestors | descendants | both | trunk |
Files: | files | file ages | folders |
SHA3-256: | 196aa3f6fe0a228a7c1c3c8944c5b4f3 |
User & Date: | js on 2012-07-21 11:42:16 |
Other Links: | manifest | tags |
Context
2012-07-21
| ||
21:16 | configure.ac: Remove old check. check-in: 27c863fb99 user: js tags: trunk | |
11:42 | Error out if an atomic operation is missing. check-in: 196aa3f6fe user: js tags: trunk | |
2012-07-20
| ||
23:46 | Fix the ARM architecture detection mess. check-in: 7e7d14b815 user: js tags: trunk | |
Changes
Modified src/atomic.h from [cfff404a91] to [fe64838cdd].
︙ | ︙ | |||
14 15 16 17 18 19 20 | * file. */ #include <stdlib.h> #import "macros.h" | < < < < < | 14 15 16 17 18 19 20 21 22 23 24 25 26 27 | * file. */ #include <stdlib.h> #import "macros.h" #ifdef OF_HAVE_OSATOMIC # include <libkern/OSAtomic.h> #endif static OF_INLINE int of_atomic_add_int(volatile int *p, int i) { |
︙ | ︙ | |||
62 63 64 65 66 67 68 69 70 71 72 73 74 75 | return OSAtomicAdd32Barrier(i, p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicAdd64Barrier(i, p); # endif else abort(); #endif } static OF_INLINE int32_t of_atomic_add_32(volatile int32_t *p, int32_t i) { #if !defined(OF_THREADS) | > > | 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 | return OSAtomicAdd32Barrier(i, p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicAdd64Barrier(i, p); # endif else abort(); #else # error No atomic operations available! #endif } static OF_INLINE int32_t of_atomic_add_32(volatile int32_t *p, int32_t i) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
84 85 86 87 88 89 90 91 92 93 94 95 96 97 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_add_and_fetch(p, i); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAdd32Barrier(i, p); #endif } static OF_INLINE void* of_atomic_add_ptr(void* volatile *p, intptr_t i) { #if !defined(OF_THREADS) | > > | 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_add_and_fetch(p, i); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAdd32Barrier(i, p); #else # error No atomic operations available! #endif } static OF_INLINE void* of_atomic_add_ptr(void* volatile *p, intptr_t i) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
123 124 125 126 127 128 129 130 131 132 133 134 135 136 | return (void*)OSAtomicAdd32Barrier(i, (int32_t*)p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(void*) == 8) return (void*)OSAtomicAdd64Barrier(i, (int64_t*)p); # endif else abort(); #endif } static OF_INLINE int of_atomic_sub_int(volatile int *p, int i) { #if !defined(OF_THREADS) | > > | 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 | return (void*)OSAtomicAdd32Barrier(i, (int32_t*)p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(void*) == 8) return (void*)OSAtomicAdd64Barrier(i, (int64_t*)p); # endif else abort(); #else # error No atomic operations available! #endif } static OF_INLINE int of_atomic_sub_int(volatile int *p, int i) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
167 168 169 170 171 172 173 174 175 176 177 178 179 180 | return OSAtomicAdd32Barrier(-i, p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicAdd64Barrier(-i, p); # endif else abort(); #endif } static OF_INLINE int32_t of_atomic_sub_32(volatile int32_t *p, int32_t i) { #if !defined(OF_THREADS) | > > | 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 | return OSAtomicAdd32Barrier(-i, p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicAdd64Barrier(-i, p); # endif else abort(); #else # error No atomic operations available! #endif } static OF_INLINE int32_t of_atomic_sub_32(volatile int32_t *p, int32_t i) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
190 191 192 193 194 195 196 197 198 199 200 201 202 203 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_sub_and_fetch(p, i); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAdd32Barrier(-i, p); #endif } static OF_INLINE void* of_atomic_sub_ptr(void* volatile *p, intptr_t i) { #if !defined(OF_THREADS) | > > | 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_sub_and_fetch(p, i); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAdd32Barrier(-i, p); #else # error No atomic operations available! #endif } static OF_INLINE void* of_atomic_sub_ptr(void* volatile *p, intptr_t i) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 | return (void*)OSAtomicAdd32Barrier(-i, (int32_t*)p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(void*) == 8) return (void*)OSAtomicAdd64Barrier(-i, (int64_t*)p); # endif else abort(); #endif } static OF_INLINE int of_atomic_inc_int(volatile int *p) { #if !defined(OF_THREADS) return ++*p; #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) | > > | | 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 | return (void*)OSAtomicAdd32Barrier(-i, (int32_t*)p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(void*) == 8) return (void*)OSAtomicAdd64Barrier(-i, (int64_t*)p); # endif else abort(); #else # error No atomic operations available! #endif } static OF_INLINE int of_atomic_inc_int(volatile int *p) { #if !defined(OF_THREADS) return ++*p; #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) int i; if (sizeof(int) == 4) __asm__ ( "xorl %0, %0\n\t" "incl %0\n\t" "lock\n\t" "xaddl %0, %1\n\t" |
︙ | ︙ | |||
279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 | return OSAtomicIncrement32Barrier(p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicDecrement64Barrier(p); # endif else abort(); #endif } static OF_INLINE int32_t of_atomic_inc_32(volatile int32_t *p) { #if !defined(OF_THREADS) return ++*p; #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) uint32_t i; | > > > | 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 | return OSAtomicIncrement32Barrier(p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicDecrement64Barrier(p); # endif else abort(); #else # error No atomic operations available! #endif } static OF_INLINE int32_t of_atomic_inc_32(volatile int32_t *p) { #if !defined(OF_THREADS) return ++*p; #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) uint32_t i; |
︙ | ︙ | |||
304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_add_and_fetch(p, 1); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicIncrement32Barrier(p); #endif } static OF_INLINE int of_atomic_dec_int(volatile int *p) { #if !defined(OF_THREADS) return --*p; #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) | > > | | 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_add_and_fetch(p, 1); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicIncrement32Barrier(p); #else # error No atomic operations available! #endif } static OF_INLINE int of_atomic_dec_int(volatile int *p) { #if !defined(OF_THREADS) return --*p; #elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM) int i; if (sizeof(int) == 4) __asm__ ( "xorl %0, %0\n\t" "decl %0\n\t" "lock\n\t" "xaddl %0, %1\n\t" |
︙ | ︙ | |||
352 353 354 355 356 357 358 359 360 361 362 363 364 365 | return OSAtomicDecrement32Barrier(p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicDecrement64Barrier(p); # endif else abort(); #endif } static OF_INLINE int32_t of_atomic_dec_32(volatile int32_t *p) { #if !defined(OF_THREADS) | > > | 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 | return OSAtomicDecrement32Barrier(p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicDecrement64Barrier(p); # endif else abort(); #else # error No atomic operations available! #endif } static OF_INLINE int32_t of_atomic_dec_32(volatile int32_t *p) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
378 379 380 381 382 383 384 385 386 387 388 389 390 391 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_sub_and_fetch(p, 1); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicDecrement32Barrier(p); #endif } static OF_INLINE unsigned int of_atomic_or_int(volatile unsigned int *p, unsigned int i) { #if !defined(OF_THREADS) | > > | 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_sub_and_fetch(p, 1); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicDecrement32Barrier(p); #else # error No atomic operations available! #endif } static OF_INLINE unsigned int of_atomic_or_int(volatile unsigned int *p, unsigned int i) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
430 431 432 433 434 435 436 437 438 439 440 441 442 443 | return OSAtomicOr32Barrier(i, p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicOr64Barrier(i, p); # endif else abort(); #endif } static OF_INLINE uint32_t of_atomic_or_32(volatile uint32_t *p, uint32_t i) { #if !defined(OF_THREADS) | > > | 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 | return OSAtomicOr32Barrier(i, p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicOr64Barrier(i, p); # endif else abort(); #else # error No atomic operations available! #endif } static OF_INLINE uint32_t of_atomic_or_32(volatile uint32_t *p, uint32_t i) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
457 458 459 460 461 462 463 464 465 466 467 468 469 470 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_or_and_fetch(p, i); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicOr32Barrier(i, p); #endif } static OF_INLINE unsigned int of_atomic_and_int(volatile unsigned int *p, unsigned int i) { #if !defined(OF_THREADS) | > > | 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_or_and_fetch(p, i); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicOr32Barrier(i, p); #else # error No atomic operations available! #endif } static OF_INLINE unsigned int of_atomic_and_int(volatile unsigned int *p, unsigned int i) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
509 510 511 512 513 514 515 516 517 518 519 520 521 522 | return OSAtomicAnd32Barrier(i, p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicAnd64Barrier(i, p); # endif else abort(); #endif } static OF_INLINE uint32_t of_atomic_and_32(volatile uint32_t *p, uint32_t i) { #if !defined(OF_THREADS) | > > | 529 530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 | return OSAtomicAnd32Barrier(i, p); # ifdef OF_HAVE_OSATOMIC_64 else if (sizeof(int) == 8) return OSAtomicAnd64Barrier(i, p); # endif else abort(); #else # error No atomic operations available! #endif } static OF_INLINE uint32_t of_atomic_and_32(volatile uint32_t *p, uint32_t i) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
536 537 538 539 540 541 542 543 544 545 546 547 548 549 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_and_and_fetch(p, i); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAnd32Barrier(i, p); #endif } static OF_INLINE unsigned int of_atomic_xor_int(volatile unsigned int *p, unsigned int i) { #if !defined(OF_THREADS) | > > | 558 559 560 561 562 563 564 565 566 567 568 569 570 571 572 573 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_and_and_fetch(p, i); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicAnd32Barrier(i, p); #else # error No atomic operations available! #endif } static OF_INLINE unsigned int of_atomic_xor_int(volatile unsigned int *p, unsigned int i) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
588 589 590 591 592 593 594 595 596 597 598 599 600 601 | return OSAtomicXor32Barrier(i, p); # ifdef OF_HAVE_OSATOMIC_64 else (sizeof(int) == 8) return OSAtomicXor64Barrier(i, p); # endif else abort(); #endif } static OF_INLINE uint32_t of_atomic_xor_32(volatile uint32_t *p, uint32_t i) { #if !defined(OF_THREADS) | > > | 612 613 614 615 616 617 618 619 620 621 622 623 624 625 626 627 | return OSAtomicXor32Barrier(i, p); # ifdef OF_HAVE_OSATOMIC_64 else (sizeof(int) == 8) return OSAtomicXor64Barrier(i, p); # endif else abort(); #else # error No atomic operations available! #endif } static OF_INLINE uint32_t of_atomic_xor_32(volatile uint32_t *p, uint32_t i) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
615 616 617 618 619 620 621 622 623 624 625 626 627 628 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_xor_and_fetch(p, i); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicXor32Barrier(i, p); #endif } static OF_INLINE BOOL of_atomic_cmpswap_int(volatile int *p, int o, int n) { #if !defined(OF_THREADS) | > > | 641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 | ); return i; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_xor_and_fetch(p, i); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicXor32Barrier(i, p); #else # error No atomic operations available! #endif } static OF_INLINE BOOL of_atomic_cmpswap_int(volatile int *p, int o, int n) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
646 647 648 649 650 651 652 653 654 655 656 657 658 659 | ); return r; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_bool_compare_and_swap(p, o, n); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicCompareAndSwapIntBarrier(o, n, p); #endif } static OF_INLINE BOOL of_atomic_cmpswap_32(volatile int32_t *p, int32_t o, int32_t n) { #if !defined(OF_THREADS) | > > | 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 | ); return r; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_bool_compare_and_swap(p, o, n); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicCompareAndSwapIntBarrier(o, n, p); #else # error No atomic operations available! #endif } static OF_INLINE BOOL of_atomic_cmpswap_32(volatile int32_t *p, int32_t o, int32_t n) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
677 678 679 680 681 682 683 684 685 686 687 688 689 690 | ); return r; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_bool_compare_and_swap(p, o, n); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicCompareAndSwap32Barrier(o, n, p); #endif } static OF_INLINE BOOL of_atomic_cmpswap_ptr(void* volatile *p, void *o, void *n) { #if !defined(OF_THREADS) | > > | 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 | ); return r; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_bool_compare_and_swap(p, o, n); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicCompareAndSwap32Barrier(o, n, p); #else # error No atomic operations available! #endif } static OF_INLINE BOOL of_atomic_cmpswap_ptr(void* volatile *p, void *o, void *n) { #if !defined(OF_THREADS) |
︙ | ︙ | |||
708 709 710 711 712 713 714 715 716 | ); return r; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_bool_compare_and_swap(p, o, n); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicCompareAndSwapPtrBarrier(o, n, p); #endif } | > > | 740 741 742 743 744 745 746 747 748 749 750 | ); return r; #elif defined(OF_HAVE_GCC_ATOMIC_OPS) return __sync_bool_compare_and_swap(p, o, n); #elif defined(OF_HAVE_OSATOMIC) return OSAtomicCompareAndSwapPtrBarrier(o, n, p); #else # error No atomic operations available! #endif } |