Overview
Comment:     Make atomic ops work on an ILP64 AMD64 system.
Downloads:   Tarball | ZIP archive | SQL archive
Timelines:   family | ancestors | descendants | both | trunk
Files:       files | file ages | folders
SHA3-256:    448d8b46f9ac52f5539a8443c90b33cd
User & Date: js on 2012-03-21 09:46:59
Other Links: manifest | tags
Context
2012-03-21
10:45  Add OFThreadPool. check-in: 420e3c09c6 user: js tags: trunk
09:46  Make atomic ops work on an ILP64 AMD64 system. check-in: 448d8b46f9 user: js tags: trunk
09:22  Fix of_atomic_{add,sub}_ptr on AMD64. check-in: 19421c0b6b user: js tags: trunk
Changes
Modified src/atomic.h from [6e23ade0bb] to [0e0a7c1a99].
︙
static OF_INLINE int
of_atomic_add_int(volatile int *p, int i)
{
#if !defined(OF_THREADS)
    return (*p += i);
#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
    /* lock xadd leaves the old value in the register; adding the
     * original addend again turns it into the new value. */
    if (sizeof(int) == 4)
        __asm__ (
            "lock\n\t"
            "xaddl  %0, %2\n\t"
            "addl   %1, %0"
            : "+&r"(i)
            : "r"(i), "m"(*p)
        );
# ifdef OF_AMD64_ASM
    /* On ILP64, int is 8 bytes wide and needs the q-suffixed ops. */
    else if (sizeof(int) == 8)
        __asm__ (
            "lock\n\t"
            "xaddq  %0, %2\n\t"
            "addq   %1, %0"
            : "+&r"(i)
            : "r"(i), "m"(*p)
        );
# endif
    else
        abort();

    return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
    return __sync_add_and_fetch(p, i);
#elif defined(OF_HAVE_OSATOMIC)
    if (sizeof(int) == 4)
        return OSAtomicAdd32Barrier(i, p);
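This hunk is the core of the change: on an ILP64 system, int is 8 bytes wide, so the unconditional 32-bit xaddl the file used before would only operate on half of the operand. The new code dispatches on sizeof(int); since the condition is a compile-time constant, the compiler folds it and only one asm block remains. A minimal sketch of the same dispatch shape, using GCC's type-generic __sync builtins in place of the inline assembly (the helper name is illustrative, not part of atomic.h):

#include <stdint.h>
#include <stdlib.h>

/* Sketch: the sizeof(int)-dispatch pattern used throughout this check-in.
 * The __sync builtins are type-generic, so the branches differ only in the
 * width they force; the hand-written asm instead picks l- vs. q-suffixed
 * instructions per branch. */
static inline int
atomic_add_int_sketch(volatile int *p, int i)
{
    if (sizeof(int) == 4)       /* ILP32/LP64: 32-bit operation */
        return (int)__sync_add_and_fetch((volatile int32_t *)p, (int32_t)i);
    else if (sizeof(int) == 8)  /* ILP64: 64-bit operation */
        return (int)__sync_add_and_fetch((volatile int64_t *)p, (int64_t)i);
    else
        abort();                /* no implementation for this int width */
}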
︙
static OF_INLINE int
of_atomic_sub_int(volatile int *p, int i)
{
#if !defined(OF_THREADS)
    return (*p -= i);
#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
    if (sizeof(int) == 4)
        __asm__ (
            "negl   %0\n\t"
            "lock\n\t"
            "xaddl  %0, %2\n\t"
            "subl   %1, %0"
            : "+&r"(i)
            : "r"(i), "m"(*p)
        );
# ifdef OF_AMD64_ASM
    else if (sizeof(int) == 8)
        __asm__ (
            "negq   %0\n\t"
            "lock\n\t"
            "xaddq  %0, %2\n\t"
            "subq   %1, %0"
            : "+&r"(i)
            : "r"(i), "m"(*p)
        );
# endif
    else
        abort();

    return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
    return __sync_sub_and_fetch(p, i);
#elif defined(OF_HAVE_OSATOMIC)
    if (sizeof(int) == 4)
        return OSAtomicAdd32Barrier(-i, p);
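The subtraction variant reuses xadd by negating the addend first: lock xadd with -i leaves the old value in the register and stores old - i, and subtracting the caller's original i from that old value yields the new value the function returns. The same identity in portable C, assuming GCC's __sync_fetch_and_add (the helper name is illustrative):

/* Sketch: the "neg; lock xadd; sub" strategy of of_atomic_sub_int. */
static inline int
atomic_sub_via_xadd(volatile int *p, int i)
{
    int old = __sync_fetch_and_add(p, -i);  /* *p becomes old - i */

    return old - i;                         /* the new value */
}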
︙
of_atomic_inc_int(volatile int *p)
{
#if !defined(OF_THREADS)
    return ++*p;
#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
    int i;

    if (sizeof(int) == 4)
        __asm__ (
            "xorl   %0, %0\n\t"
            "incl   %0\n\t"
            "lock\n\t"
            "xaddl  %0, %1\n\t"
            "incl   %0"
            : "=&r"(i)
            : "m"(*p)
        );
# ifdef OF_AMD64_ASM
    else if (sizeof(int) == 8)
        __asm__ (
            "xorq   %0, %0\n\t"
            "incq   %0\n\t"
            "lock\n\t"
            "xaddq  %0, %1\n\t"
            "incq   %0"
            : "=&r"(i)
            : "m"(*p)
        );
# endif
    else
        abort();

    return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
    return __sync_add_and_fetch(p, 1);
#elif defined(OF_HAVE_OSATOMIC)
    if (sizeof(int) == 4)
        return OSAtomicIncrement32Barrier(p);
︙
of_atomic_dec_int(volatile int *p)
{
#if !defined(OF_THREADS)
    return --*p;
#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
    int i;

    if (sizeof(int) == 4)
        __asm__ (
            "xorl   %0, %0\n\t"
            "decl   %0\n\t"
            "lock\n\t"
            "xaddl  %0, %1\n\t"
            "decl   %0"
            : "=&r"(i)
            : "m"(*p)
        );
# ifdef OF_AMD64_ASM
    else if (sizeof(int) == 8)
        __asm__ (
            "xorq   %0, %0\n\t"
            "decq   %0\n\t"
            "lock\n\t"
            "xaddq  %0, %1\n\t"
            "decq   %0"
            : "=&r"(i)
            : "m"(*p)
        );
# endif
    else
        abort();

    return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
    return __sync_sub_and_fetch(p, 1);
#elif defined(OF_HAVE_OSATOMIC)
    if (sizeof(int) == 4)
        return OSAtomicDecrement32Barrier(p);
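Like their GCC __sync_*_and_fetch fallbacks, all four arithmetic ops return the new value, not the old one. A small self-check of that contract, assuming this header is included (the test values are arbitrary):

#include <assert.h>

/* Sketch: exercise the new-value semantics of the four arithmetic ops. */
static void
test_atomic_arithmetic(void)
{
    volatile int c = 10;

    assert(of_atomic_add_int(&c, 5) == 15); /* returns the new value */
    assert(of_atomic_sub_int(&c, 3) == 12);
    assert(of_atomic_inc_int(&c) == 13);
    assert(of_atomic_dec_int(&c) == 12);
    assert(c == 12);
}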
︙
static OF_INLINE unsigned int
of_atomic_or_int(volatile unsigned int *p, unsigned int i)
{
#if !defined(OF_THREADS)
    return (*p |= i);
#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
    if (sizeof(int) == 4)
        __asm__ (
            "0:\n\t"
            "movl   %2, %0\n\t"
            "movl   %2, %%eax\n\t"
            "orl    %1, %0\n\t"
            "lock\n\t"
            "cmpxchg %0, %2\n\t"
            "jne    0b"
            : "=&r"(i)
            : "r"(i), "m"(*p)
            : "eax"
        );
# ifdef OF_AMD64_ASM
    else if (sizeof(int) == 8)
        __asm__ (
            "0:\n\t"
            "movq   %2, %0\n\t"
            "movq   %2, %%rax\n\t"
            "orq    %1, %0\n\t"
            "lock\n\t"
            "cmpxchg %0, %2\n\t"
            "jne    0b"
            : "=&r"(i)
            : "r"(i), "m"(*p)
            : "rax"
        );
# endif
    else
        abort();

    return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
    return __sync_or_and_fetch(p, i);
#elif defined(OF_HAVE_OSATOMIC)
    if (sizeof(int) == 4)
        return OSAtomicOr32Barrier(i, p);
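There is no xadd equivalent for bitwise operations, so or/and/xor spin on lock cmpxchg: load *p into both a scratch register and eax/rax, compute the new value, and attempt to publish it; if another thread modified *p in the meantime, cmpxchg fails and the code jumps back to retry. The loop corresponds to this portable C, assuming GCC's __sync_val_compare_and_swap (the helper name is illustrative):

/* Sketch: the cmpxchg retry loop of of_atomic_or_int in portable C. */
static inline unsigned int
atomic_or_via_cas(volatile unsigned int *p, unsigned int i)
{
    unsigned int old, val;

    do {
        old = *p;      /* movl %2, %0 / movl %2, %%eax */
        val = old | i; /* orl  %1, %0 */
    } while (__sync_val_compare_and_swap(p, old, val) != old);
                       /* lock cmpxchg %0, %2; jne 0b */

    return val;
}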
︙
static OF_INLINE unsigned int
of_atomic_and_int(volatile unsigned int *p, unsigned int i)
{
#if !defined(OF_THREADS)
    return (*p &= i);
#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
    if (sizeof(int) == 4)
        __asm__ (
            "0:\n\t"
            "movl   %2, %0\n\t"
            "movl   %2, %%eax\n\t"
            "andl   %1, %0\n\t"
            "lock\n\t"
            "cmpxchg %0, %2\n\t"
            "jne    0b"
            : "=&r"(i)
            : "r"(i), "m"(*p)
            : "eax"
        );
# ifdef OF_AMD64_ASM
    else if (sizeof(int) == 8)
        __asm__ (
            "0:\n\t"
            "movq   %2, %0\n\t"
            "movq   %2, %%rax\n\t"
            "andq   %1, %0\n\t"
            "lock\n\t"
            "cmpxchg %0, %2\n\t"
            "jne    0b"
            : "=&r"(i)
            : "r"(i), "m"(*p)
            : "rax"
        );
# endif
    else
        abort();

    return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
    return __sync_and_and_fetch(p, i);
#elif defined(OF_HAVE_OSATOMIC)
    if (sizeof(int) == 4)
        return OSAtomicAnd32Barrier(i, p);
︙
static OF_INLINE unsigned int
of_atomic_xor_int(volatile unsigned int *p, unsigned int i)
{
#if !defined(OF_THREADS)
    return (*p ^= i);
#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
    if (sizeof(int) == 4)
        __asm__ (
            "0:\n\t"
            "movl   %2, %0\n\t"
            "movl   %2, %%eax\n\t"
            "xorl   %1, %0\n\t"
            "lock\n\t"
            "cmpxchg %0, %2\n\t"
            "jne    0b"
            : "=&r"(i)
            : "r"(i), "m"(*p)
            : "eax"
        );
# ifdef OF_AMD64_ASM
    else if (sizeof(int) == 8)
        __asm__ (
            "0:\n\t"
            "movq   %2, %0\n\t"
            "movq   %2, %%rax\n\t"
            "xorq   %1, %0\n\t"
            "lock\n\t"
            "cmpxchg %0, %2\n\t"
            "jne    0b"
            : "=&r"(i)
            : "r"(i), "m"(*p)
            : "rax"
        );
# endif
    else
        abort();

    return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
    return __sync_xor_and_fetch(p, i);
#elif defined(OF_HAVE_OSATOMIC)
    if (sizeof(int) == 4)
        return OSAtomicXor32Barrier(i, p);
︙
    if (*p == o) {
        *p = n;

        return YES;
    }

    return NO;
#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
    int32_t r;

    __asm__ (
        "xorl   %0, %0\n\t"
        "lock\n\t"
        "cmpxchg %2, %3\n\t"
        "jne    0f\n\t"
        "incl   %0\n"
︙
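The final hunk is cut off by the diff view, but the non-threaded fallback shows the contract of the compare-and-swap: if *p equals o, store n and return YES, otherwise return NO. Assuming the function is of_atomic_cmpswap_int (its name lies above the visible hunk), a typical use is a test-and-set spinlock:

/* Sketch: a spinlock on top of the YES/NO compare-and-swap shown above.
 * The function name is assumed; it is not visible in this hunk. */
static volatile int lock = 0;

static void
spin_lock(void)
{
    /* Move the lock from 0 (free) to 1 (held); retry until it succeeds. */
    while (!of_atomic_cmpswap_int(&lock, 0, 1))
        ;
}

static void
spin_unlock(void)
{
    lock = 0; /* a real unlock would also need a release barrier */
}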