#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /// atomic_bool
  // NB: No operators or fetch-operations for this type.
  struct atomic_bool
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() noexcept = default;
    ~atomic_bool() noexcept = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) volatile = delete;

    constexpr atomic_bool(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept
    { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept
    { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
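  /** Illustrative usage sketch (assumed client code, not part of the
   *  header): an atomic boolean as a one-shot "ready" flag between two
   *  threads, using release/acquire ordering.
   *  @code
   *    std::atomic<bool> ready(false);
   *    // producer thread:
   *    //   ready.store(true, std::memory_order_release);
   *    // consumer thread:
   *    //   while (!ready.load(std::memory_order_acquire)) { }
   *  @endcode
   */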
  /**
   *  @brief Generic atomic type, primary class template.
   *
   *  @tparam _Tp  Type to be made atomic; must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      void
      store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, _m); }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      load(memory_order _m = memory_order_seq_cst) const volatile noexcept
      {
        _Tp tmp;
        __atomic_load(&_M_i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      _Tp
      exchange(_Tp __i,
               memory_order _m = memory_order_seq_cst) volatile noexcept
      {
        _Tp tmp;
        __atomic_exchange(&_M_i, &__i, &tmp, _m);
        return tmp;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
    };
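  /** Illustrative usage sketch (assumed client code): the primary template
   *  works for any trivially copyable _Tp; a compare_exchange_weak loop
   *  publishes an updated value.
   *  @code
   *    struct Point { int x, y; };
   *    std::atomic<Point> p(Point{0, 0});
   *    Point expected = p.load();
   *    Point desired{expected.x + 1, expected.y};
   *    while (!p.compare_exchange_weak(expected, desired))
   *      desired = Point{expected.x + 1, expected.y};
   *  @endcode
   */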
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp*                   __pointer_type;
      typedef __atomic_base<_Tp*>    __base_type;
      __base_type                    _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };
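  /** Illustrative usage sketch (assumed client code): atomic<T*> does
   *  pointer arithmetic in whole elements; fetch_add returns the old
   *  pointer, so concurrent callers each claim a distinct slot.
   *  @code
   *    int buffer[64];
   *    std::atomic<int*> cursor(buffer);
   *    int* slot = cursor.fetch_add(1);   // this caller's private element
   *    *slot = 42;
   *  @endcode
   */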
  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool              __integral_type;
      typedef atomic_bool       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char              __integral_type;
      typedef atomic_char       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char       __integral_type;
      typedef atomic_schar      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char     __integral_type;
      typedef atomic_uchar      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short             __integral_type;
      typedef atomic_short      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short    __integral_type;
      typedef atomic_ushort     __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : public atomic_int
    {
      typedef int               __integral_type;
      typedef atomic_int        __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int      __integral_type;
      typedef atomic_uint       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long              __integral_type;
      typedef atomic_long       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long     __integral_type;
      typedef atomic_ulong      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long         __integral_type;
      typedef atomic_llong      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long  __integral_type;
      typedef atomic_ullong       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t           __integral_type;
      typedef atomic_wchar_t    __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t          __integral_type;
      typedef atomic_char16_t   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t          __integral_type;
      typedef atomic_char32_t   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
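  /** Illustrative usage sketch (assumed client code): the integral
   *  specializations inherit the arithmetic and bitwise operations of
   *  their __atomic_base counterparts, so a shared counter needs no lock.
   *  @code
   *    std::atomic<unsigned> hits(0);
   *    ++hits;                                    // seq_cst read-modify-write
   *    hits.fetch_add(10, std::memory_order_relaxed);
   *  @endcode
   */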
  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
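  /** Illustrative usage sketch (assumed client code): the free functions
   *  mirror atomic_flag's members, e.g. a minimal spin lock.
   *  @code
   *    std::atomic_flag lock = ATOMIC_FLAG_INIT;
   *    while (std::atomic_flag_test_and_set_explicit(&lock,
   *                                         std::memory_order_acquire))
   *      ;                                       // spin until cleared
   *    // ... critical section ...
   *    std::atomic_flag_clear_explicit(&lock, std::memory_order_release);
   *  @endcode
   */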
  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
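  /** Illustrative usage sketch (assumed client code): the nonmember
   *  functions mirror the C-style <stdatomic.h> interface; note that the
   *  expected value is passed by pointer rather than by reference.
   *  @code
   *    std::atomic<int> value(0);
   *    std::atomic_store(&value, 5);
   *    int expected = 5;
   *    bool ok = std::atomic_compare_exchange_strong(&value, &expected, 7);
   *    int now = std::atomic_load(&value);        // 7 if ok
   *  @endcode
   */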
  // Function templates for atomic_integral operations only, using
  // __atomic_base.  The template argument should be restricted to
  // integral types as specified in the standard, excluding address
  // types.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
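  /** Illustrative usage sketch (assumed client code): the _explicit forms
   *  take an explicit memory_order; the plain forms default to seq_cst.
   *  @code
   *    std::atomic<unsigned> mask(0);
   *    std::atomic_fetch_or_explicit(&mask, 1u << 3,
   *                                  std::memory_order_relaxed);
   *    unsigned prev = std::atomic_fetch_and(&mask, ~(1u << 3));
   *  @endcode
   */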
  // Partial specializations for pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
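  /** Illustrative usage sketch (assumed client code): the pointer overloads
   *  advance by whole elements, exactly like the fetch_add/fetch_sub members.
   *  @code
   *    double samples[16];
   *    std::atomic<double*> tail(samples);
   *    double* prev = std::atomic_fetch_add(&tail, 4);  // prev == samples
   *    // tail now points at samples + 4
   *  @endcode
   */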
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // C++11

#endif // _GLIBCXX_ATOMIC