︙
Lines 414-442 (old):
"movl %2, %%eax\n\t"
"orl %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax"
);
# ifdef OF_AMD64_ASM
if (sizeof(int) == 8)
__asm__ (
"0:\n\t"
"movq %2, %0\n\t"
"movq %2, %%rax\n\t"
"orq %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "rax"
);
# endif
else
abort();
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)

Lines 414-442 (new):
"movl %2, %%eax\n\t"
"orl %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax", "cc"
);
# ifdef OF_AMD64_ASM
if (sizeof(int) == 8)
__asm__ (
"0:\n\t"
"movq %2, %0\n\t"
"movq %2, %%rax\n\t"
"orq %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "rax", "cc"
);
# endif
else
abort();
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
|
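The change running through these hunks is the addition of "cc" to the clobber lists: the orl/andl/xorl step and cmpxchg itself both rewrite EFLAGS, so the compiler has to be told that the flags do not survive the asm statement. As a point of reference, here is a minimal standalone sketch of the same compare-and-swap retry loop with the flags clobber declared; the function name and the read-write "+m" memory operand are illustrative choices, not part of the diff.

#include <stdint.h>

/* Illustrative sketch, not ObjFW code: an atomic OR built from the same
 * cmpxchg retry loop as above, declaring that eax and EFLAGS are clobbered. */
static inline uint32_t
atomic_or_32_sketch(volatile uint32_t *p, uint32_t mask)
{
    uint32_t desired;

    __asm__ __volatile__ (
        "0:\n\t"
        "movl %1, %0\n\t"       /* desired = *p                  */
        "movl %1, %%eax\n\t"    /* eax = expected old value      */
        "orl %2, %0\n\t"        /* desired |= mask               */
        "lock\n\t"
        "cmpxchg %0, %1\n\t"    /* if (*p == eax) *p = desired   */
        "jne 0b\n\t"            /* lost a race: reload and retry */
        : "=&r"(desired), "+m"(*p)
        : "r"(mask)
        : "eax", "cc"           /* eax and EFLAGS are rewritten  */
    );

    return desired;
}

Without the "cc" clobber, the compiler is in principle free to assume that a comparison result computed before the asm is still sitting in EFLAGS afterwards; declaring the clobber, as the new side of each hunk does, rules that out.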
︙
Lines 467-481 (old):
"movl %2, %%eax\n\t"
"orl %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax"
);
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
return __sync_or_and_fetch(p, i);
#elif defined(OF_HAVE_OSATOMIC)
return OSAtomicOr32Barrier(i, p);

Lines 467-481 (new):
"movl %2, %%eax\n\t"
"orl %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax", "cc"
);
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
return __sync_or_and_fetch(p, i);
#elif defined(OF_HAVE_OSATOMIC)
return OSAtomicOr32Barrier(i, p);
|
︙
Lines 497-525 (old):
"movl %2, %%eax\n\t"
"andl %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax"
);
# ifdef OF_AMD64_ASM
if (sizeof(int) == 8)
__asm__ (
"0:\n\t"
"movq %2, %0\n\t"
"movq %2, %%rax\n\t"
"andq %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "rax"
);
# endif
else
abort();
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)

Lines 497-525 (new):
"movl %2, %%eax\n\t"
"andl %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax", "cc"
);
# ifdef OF_AMD64_ASM
if (sizeof(int) == 8)
__asm__ (
"0:\n\t"
"movq %2, %0\n\t"
"movq %2, %%rax\n\t"
"andq %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "rax", "cc"
);
# endif
else
abort();
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
|
︙
Lines 550-564 (old):
"movl %2, %%eax\n\t"
"andl %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax"
);
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
return __sync_and_and_fetch(p, i);
#elif defined(OF_HAVE_OSATOMIC)
return OSAtomicAnd32Barrier(i, p);

Lines 550-564 (new):
"movl %2, %%eax\n\t"
"andl %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax", "cc"
);
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
return __sync_and_and_fetch(p, i);
#elif defined(OF_HAVE_OSATOMIC)
return OSAtomicAnd32Barrier(i, p);
|
︙
Lines 580-608 (old):
"movl %2, %%eax\n\t"
"xorl %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax"
);
# ifdef OF_AMD64_ASM
if (sizeof(int) == 8)
__asm__ (
"0:\n\t"
"movq %2, %0\n\t"
"movq %2, %%rax\n\t"
"xorq %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "rax"
);
# endif
else
abort();
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)

Lines 580-608 (new):
"movl %2, %%eax\n\t"
"xorl %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax", "cc"
);
# ifdef OF_AMD64_ASM
if (sizeof(int) == 8)
__asm__ (
"0:\n\t"
"movq %2, %0\n\t"
"movq %2, %%rax\n\t"
"xorq %1, %0\n\t"
"lock\n\t"
"cmpxchg %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "rax", "cc"
);
# endif
else
abort();
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
|
︙
Lines 633-647 (old):
"movl %2, %%eax\n\t"
"xorl %1, %0\n\t"
"lock\n\t"
"cmpxchgl %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax"
);
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
return __sync_xor_and_fetch(p, i);
#elif defined(OF_HAVE_OSATOMIC)
return OSAtomicXor32Barrier(i, p);

Lines 633-647 (new):
"movl %2, %%eax\n\t"
"xorl %1, %0\n\t"
"lock\n\t"
"cmpxchgl %0, %2\n\t"
"jne 0\n\t"
: "=&r"(i)
: "r"(i), "m"(*p)
: "eax", "cc"
);
return i;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
return __sync_xor_and_fetch(p, i);
#elif defined(OF_HAVE_OSATOMIC)
return OSAtomicXor32Barrier(i, p);
|
︙
Lines 657-680 (old):
    if (*p == o) {
        *p = n;
        return YES;
    }

    return NO;
#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
    int32_t r;

    __asm__ (
        "xorl %0, %0\n\t"
        "lock\n\t"
        "cmpxchg %2, %3\n\t"
        "sete %b0\n\t"
        "movzbl %b0, %0"
        : "=&d"(r) /* use d instead of r due to gcc bug */
        : "a"(o), "r"(n), "m"(*p)
    );

    return r;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
    return __sync_bool_compare_and_swap(p, o, n);
#elif defined(OF_HAVE_OSATOMIC)
    return OSAtomicCompareAndSwapIntBarrier(o, n, p);

Lines 657-681 (new):
    if (*p == o) {
        *p = n;
        return YES;
    }

    return NO;
#elif defined(OF_X86_ASM) || defined(OF_AMD64_ASM)
    int r;

    __asm__ (
        "xorl %0, %0\n\t"
        "lock\n\t"
        "cmpxchg %2, %3\n\t"
        "sete %b0\n\t"
        "movzbl %b0, %0"
        : "=&d"(r) /* use d instead of r due to gcc bug */
        : "a"(o), "r"(n), "m"(*p)
        : "cc"
    );

    return r;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
    return __sync_bool_compare_and_swap(p, o, n);
#elif defined(OF_HAVE_OSATOMIC)
    return OSAtomicCompareAndSwapIntBarrier(o, n, p);
︙
Lines 700-713 (old):
"xorl %0, %0\n\t"
"lock\n\t"
"cmpxchg %2, %3\n\t"
"sete %b0\n\t"
"movzbl %b0, %0"
: "=&d"(r) /* use d instead of r due to gcc bug */
: "a"(o), "r"(n), "m"(*p)
);
return r;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
return __sync_bool_compare_and_swap(p, o, n);
#elif defined(OF_HAVE_OSATOMIC)
return OSAtomicCompareAndSwap32Barrier(o, n, p);

Lines 701-715 (new):
"xorl %0, %0\n\t"
"lock\n\t"
"cmpxchg %2, %3\n\t"
"sete %b0\n\t"
"movzbl %b0, %0"
: "=&d"(r) /* use d instead of r due to gcc bug */
: "a"(o), "r"(n), "m"(*p)
: "cc"
);
return r;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
return __sync_bool_compare_and_swap(p, o, n);
#elif defined(OF_HAVE_OSATOMIC)
return OSAtomicCompareAndSwap32Barrier(o, n, p);
|
︙
Lines 733-750 (old):
"xorl %0, %0\n\t"
"lock\n\t"
"cmpxchg %2, %3\n\t"
"sete %b0\n\t"
"movzbl %b0, %0"
: "=&d"(r) /* use d instead of r due to gcc bug */
: "a"(o), "r"(n), "m"(*p)
);
return r;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
return __sync_bool_compare_and_swap(p, o, n);
#elif defined(OF_HAVE_OSATOMIC)
return OSAtomicCompareAndSwapPtrBarrier(o, n, p);
#else
# error No atomic operations available!
#endif
}

Lines 735-753 (new):
"xorl %0, %0\n\t"
"lock\n\t"
"cmpxchg %2, %3\n\t"
"sete %b0\n\t"
"movzbl %b0, %0"
: "=&d"(r) /* use d instead of r due to gcc bug */
: "a"(o), "r"(n), "m"(*p)
: "cc"
);
return r;
#elif defined(OF_HAVE_GCC_ATOMIC_OPS)
return __sync_bool_compare_and_swap(p, o, n);
#elif defined(OF_HAVE_OSATOMIC)
return OSAtomicCompareAndSwapPtrBarrier(o, n, p);
#else
# error No atomic operations available!
#endif
}
|
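The last three hunks all touch the same boolean compare-and-swap pattern: lock cmpxchg performs the conditional store, sete captures the resulting ZF, and movzbl widens it into the return value; the new side again records the EFLAGS clobber. Below is a minimal standalone sketch of that pattern; the names are illustrative, and it passes the expected value and the memory operand as read-write operands where the quoted code uses plain inputs.

#include <stdbool.h>

/* Illustrative sketch, not ObjFW code: boolean compare-and-swap that
 * returns whether the store actually happened. */
static inline bool
cmpswap_int_sketch(volatile int *p, int expected, int desired)
{
    int r;

    __asm__ __volatile__ (
        "lock\n\t"
        "cmpxchg %3, %1\n\t"    /* if (*p == eax) { *p = desired; ZF = 1 } */
        "sete %b0\n\t"          /* low byte of r = ZF                      */
        "movzbl %b0, %0"        /* zero-extend it to the full register     */
        : "=&d"(r), "+m"(*p), "+a"(expected)
        : "r"(desired)
        : "cc"                  /* cmpxchg rewrites EFLAGS                 */
    );

    return r;
}

Passing the expected value as a read-write "a" operand also tells the compiler that cmpxchg loads the current value into eax when the swap fails, which the quoted code leaves implicit.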