#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;
  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  inline void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  inline void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
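  // Illustrative sketch (not part of this header; __flag and __data are
  // hypothetical shared objects): a release fence paired with an acquire
  // fence synchronizes even when the atomic accesses themselves are relaxed.
  //
  //   // Thread 1:
  //   __data = 42;                                        // plain store
  //   std::atomic_thread_fence(std::memory_order_release);
  //   __flag.store(true, std::memory_order_relaxed);
  //
  //   // Thread 2:
  //   while (!__flag.load(std::memory_order_relaxed)) { }
  //   std::atomic_thread_fence(std::memory_order_acquire);
  //   assert(__data == 42);                               // visible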
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }

  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

#define ATOMIC_VAR_INIT(_VI) { _VI }
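  // Usage sketch (illustrative, not part of this header): ATOMIC_VAR_INIT
  // provides constant initialization compatible with the C interface, e.g.
  //
  //   std::atomic<int> __counter = ATOMIC_VAR_INIT(0);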
  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;
  _GLIBCXX_BEGIN_EXTERN_C

  // Base type for atomic_flag: a POD struct, allowing atomic_flag to
  // derive from it, remain standard-layout, and stay compatible with the
  // C interface.
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    { return __atomic_test_and_set (&_M_i, __m); }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    { return __atomic_test_and_set (&_M_i, __m); }

    void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      // A clear is a store; acquire-flavoured orderings are not permitted.
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  };
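  // Illustrative sketch (not part of this header): atomic_flag as a minimal
  // spinlock.  test_and_set(acquire) spins until the previous value was
  // clear; clear(release) publishes writes made in the critical section.
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;   // hypothetical user code
  //
  //   void __critical_section()
  //   {
  //     while (__lock.test_and_set(std::memory_order_acquire))
  //       { /* spin */ }
  //     // ... exclusive access here ...
  //     __lock.clear(std::memory_order_release);
  //   }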
  /// Base class for atomic integrals.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }
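      // Note: lock-freedom can depend on the object's actual address and
      // alignment, which is why the address is passed to the builtin.
      // Illustrative use (names hypothetical):
      //   __atomic_base<long long> __x(0);
      //   if (__x.is_lock_free()) { /* operations use no hidden lock */ }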
      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        // Stores may not use acquire, acq_rel, or consume orderings.
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        // Loads may not use release or acq_rel orderings.
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
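      // Illustrative sketch (not part of this header; __ready and __payload
      // are hypothetical shared objects): a release store paired with an
      // acquire load forms the usual message-passing idiom.
      //
      //   // Producer:
      //   __payload = 42;
      //   __ready.store(1, memory_order_release);
      //
      //   // Consumer:
      //   while (!__ready.load(memory_order_acquire)) { }
      //   assert(__payload == 42);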
      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_exchange_n(&_M_i, __i, __m); }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_exchange_n(&_M_i, __i, __m); }
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        // The failure order may not contain a release component and may
        // not be stronger than the success order.
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst)
      volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
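      // Illustrative sketch (not part of this header, names hypothetical):
      // compare_exchange_weak may fail spuriously even when *this equals
      // __i1, so it is normally driven from a retry loop; on failure the
      // expected value is refreshed with the value actually observed.
      //
      //   __atomic_base<int> __a(1);
      //   int __expected = __a.load(memory_order_relaxed);
      //   while (!__a.compare_exchange_weak(__expected, __expected * 2,
      //                                     memory_order_acq_rel,
      //                                     memory_order_relaxed))
      //     { } // __expected was reloaded; recompute and retry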
      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst)
      volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
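  // Illustrative sketch (not part of this header; __hits and __flags are
  // hypothetical objects of an integral atomic type): the fetch_* operations
  // return the *old* value, so counters and bit sets can be kept lock-free.
  //
  //   __hits.fetch_add(1, std::memory_order_relaxed);  // count an event
  //   unsigned __prev = __flags.fetch_or(0x4);         // set bit 2
  //   bool __was_set = __prev & 0x4;                   // previous state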
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }
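      // Note: all pointer arithmetic below is scaled by the element size,
      // so fetch_add(1) advances by one _PTp object, not one byte; e.g.
      // for a 4-byte _PTp, _M_type_size(3) == 12.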
    public:
      __atomic_base() noexcept = default;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(_M_type_size(1), &_M_p); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(_M_type_size(1), &_M_p); }
      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_exchange_n(&_M_p, __p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_exchange_n(&_M_p, __p, __m); }
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
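  // Illustrative sketch (not part of this header; __cursor is a hypothetical
  // atomic pointer of this type ranging over a _PTp array): a lock-free bump
  // cursor, where fetch_add returns the previously held pointer.
  //
  //   _PTp* __slot = __cursor.fetch_add(1, std::memory_order_relaxed);
  //   // __slot points at the claimed element; the cursor advanced by one
  //   // element, i.e. sizeof(_PTp) bytes.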
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_BASE_H