#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  template<typename _Tp>
    struct atomic;

  /// atomic<bool>
  template<>
  struct atomic<bool>
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic() noexcept = default;
    ~atomic() noexcept = default;

    constexpr atomic(bool __i) noexcept : _M_base(__i) { }
    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept
    { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept
    { return _M_base.is_lock_free(); }
    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }
    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
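  // Illustrative sketch, not part of this header: how the atomic<bool>
  // specialization above is typically used for a release/acquire handshake
  // between two threads.  The names `ready', `producer' and `consumer'
  // are hypothetical.
  //
  //   std::atomic<bool> ready(false);
  //
  //   void producer()                  // runs in one thread
  //   { ready.store(true, std::memory_order_release); }
  //
  //   void consumer()                  // runs in another thread
  //   {
  //     while (!ready.load(std::memory_order_acquire))
  //       ;                            // spin until the store is visible
  //   }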
  /// Generic atomic type, primary class template.
  template<typename _Tp>
    struct atomic
    {
    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i;
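      // Worked example of the alignment rule above (illustrative comment,
      // not in the original source): for sizeof(_Tp) == 8, 8 & 7 == 0 and
      // 8 <= 16, so _S_min_alignment is 8 and _M_i is aligned to at least
      // 8 bytes, which lets the compiler use a single lock-free access.
      // For sizeof(_Tp) == 12, 12 & 11 != 0, so _S_min_alignment is 0 and
      // _M_i keeps the natural alignof(_Tp).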
      static_assert(__is_trivially_copyable(_Tp),
                    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
                    "Incomplete or zero-sized types are not supported");

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-__alignof(_M_i)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-__alignof(_M_i)));
      }
      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      { __atomic_store(&_M_i, &__i, __m); }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(&_M_i, &__i, __m); }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { _Tp tmp; __atomic_load(&_M_i, &tmp, __m); return tmp; }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { _Tp tmp; __atomic_load(&_M_i, &tmp, __m); return tmp; }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      { _Tp tmp; __atomic_exchange(&_M_i, &__i, &tmp, __m); return tmp; }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { _Tp tmp; __atomic_exchange(&_M_i, &__i, &tmp, __m); return tmp; }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f); }
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      { return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
    };
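  // Illustrative sketch, not part of this header: a compare-exchange loop
  // on the primary template above, using a hypothetical trivially copyable
  // type.  The names `Point', `p' and `shift_x' are assumptions.
  //
  //   struct Point { int x; int y; };            // trivially copyable
  //   std::atomic<Point> p{Point{0, 0}};
  //
  //   void shift_x(int dx)
  //   {
  //     Point expected = p.load();
  //     Point desired;
  //     do {
  //       desired = expected;
  //       desired.x += dx;
  //       // On failure, `expected' is reloaded with the current value.
  //     } while (!p.compare_exchange_weak(expected, desired));
  //   }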
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp*                  __pointer_type;
      typedef __atomic_base<_Tp*>   __base_type;
      __base_type                   _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }
      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }
      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m)); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };
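  // Illustrative sketch, not part of this header: the pointer partial
  // specialization above supports pointer arithmetic; fetch_add advances
  // by whole elements, not bytes.  `buffer', `cursor' and `claim_slot'
  // are hypothetical names.
  //
  //   int buffer[64];
  //   std::atomic<int*> cursor(buffer);
  //
  //   int* claim_slot()
  //   { return cursor.fetch_add(1); }   // each caller gets a distinct slot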
  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char __integral_type;
      typedef __atomic_base<char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char __integral_type;
      typedef __atomic_base<signed char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char __integral_type;
      typedef __atomic_base<unsigned char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short __integral_type;
      typedef __atomic_base<short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short __integral_type;
      typedef __atomic_base<unsigned short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int __integral_type;
      typedef __atomic_base<int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int __integral_type;
      typedef __atomic_base<unsigned int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long __integral_type;
      typedef __atomic_base<long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long __integral_type;
      typedef __atomic_base<unsigned long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long __integral_type;
      typedef __atomic_base<long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long __integral_type;
      typedef __atomic_base<unsigned long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t __integral_type;
      typedef __atomic_base<wchar_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t __integral_type;
      typedef __atomic_base<char16_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t __integral_type;
      typedef __atomic_base<char32_t> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };
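  // Illustrative sketch, not part of this header: the integral
  // specializations above inherit their operations from __atomic_base, so
  // an atomic counter needs no explicit locking.  The names `hits',
  // `record_hit' and `snapshot' are hypothetical.
  //
  //   std::atomic<int> hits(0);
  //
  //   void record_hit()
  //   { ++hits; }                                // atomic read-modify-write
  //
  //   int snapshot()
  //   { return hits.load(std::memory_order_relaxed); }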
  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
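  // Illustrative sketch, not part of this header: a minimal spin lock built
  // on the atomic_flag free functions above.  `lock_flag', `lock' and
  // `unlock' are hypothetical; ATOMIC_FLAG_INIT comes from the base header.
  //
  //   std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;
  //
  //   void lock()
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(&lock_flag,
  //                                                   std::memory_order_acquire))
  //       ;                                     // spin while already set
  //   }
  //
  //   void unlock()
  //   { std::atomic_flag_clear_explicit(&lock_flag, std::memory_order_release); }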
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }
  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
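  // Illustrative sketch, not part of this header: the C-style nonmember
  // interface above mirrors the member functions; note that the expected
  // value is passed by pointer.  `counter' and `bump_if_zero' are
  // hypothetical names.
  //
  //   std::atomic<int> counter(0);
  //
  //   bool bump_if_zero()
  //   {
  //     int expected = 0;
  //     // Replaces 0 with 1; on failure, `expected' holds the current value.
  //     return std::atomic_compare_exchange_strong(&counter, &expected, 1);
  //   }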
  // Function templates for the fetch operations on integral atomics,
  // taking __atomic_base so the pointer specializations do not match.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
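  // Illustrative sketch, not part of this header: the fetch_* nonmembers
  // above return the value the object held before the operation.  `total'
  // and `add_sample' are hypothetical names.
  //
  //   std::atomic<unsigned> total(0);
  //
  //   unsigned add_sample(unsigned v)
  //   { return std::atomic_fetch_add(&total, v); }   // old value of `total'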
  // Partial specializations for pointers.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
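  // Illustrative sketch, not part of this header: the pointer overloads
  // above take a ptrdiff_t element count.  `Node', `nodes', `head' and
  // `take_two' are hypothetical names.
  //
  //   struct Node { int value; };
  //   Node nodes[16];
  //   std::atomic<Node*> head(nodes);
  //
  //   Node* take_two()
  //   { return std::atomic_fetch_add(&head, ptrdiff_t(2)); }  // old pointer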
  /// Convenience typedefs for the standard atomic types.
  typedef atomic<bool>                  atomic_bool;
  typedef atomic<char>                  atomic_char;
  typedef atomic<signed char>           atomic_schar;
  typedef atomic<unsigned char>         atomic_uchar;
  typedef atomic<short>                 atomic_short;
  typedef atomic<unsigned short>        atomic_ushort;
  typedef atomic<int>                   atomic_int;
  typedef atomic<unsigned int>          atomic_uint;
  typedef atomic<long>                  atomic_long;
  typedef atomic<unsigned long>         atomic_ulong;
  typedef atomic<long long>             atomic_llong;
  typedef atomic<unsigned long long>    atomic_ullong;
  typedef atomic<wchar_t>               atomic_wchar_t;
  typedef atomic<char16_t>              atomic_char16_t;
  typedef atomic<char32_t>              atomic_char32_t;

  typedef atomic<int_least8_t>          atomic_int_least8_t;
  typedef atomic<uint_least8_t>         atomic_uint_least8_t;
  typedef atomic<int_least16_t>         atomic_int_least16_t;
  typedef atomic<uint_least16_t>        atomic_uint_least16_t;
  typedef atomic<int_least32_t>         atomic_int_least32_t;
  typedef atomic<uint_least32_t>        atomic_uint_least32_t;
  typedef atomic<int_least64_t>         atomic_int_least64_t;
  typedef atomic<uint_least64_t>        atomic_uint_least64_t;

  typedef atomic<int_fast8_t>           atomic_int_fast8_t;
  typedef atomic<uint_fast8_t>          atomic_uint_fast8_t;
  typedef atomic<int_fast16_t>          atomic_int_fast16_t;
  typedef atomic<uint_fast16_t>         atomic_uint_fast16_t;
  typedef atomic<int_fast32_t>          atomic_int_fast32_t;
  typedef atomic<uint_fast32_t>         atomic_uint_fast32_t;
  typedef atomic<int_fast64_t>          atomic_int_fast64_t;
  typedef atomic<uint_fast64_t>         atomic_uint_fast64_t;

  typedef atomic<intptr_t>              atomic_intptr_t;
  typedef atomic<uintptr_t>             atomic_uintptr_t;
  typedef atomic<size_t>                atomic_size_t;
  typedef atomic<ptrdiff_t>             atomic_ptrdiff_t;
  typedef atomic<intmax_t>              atomic_intmax_t;
  typedef atomic<uintmax_t>             atomic_uintmax_t;

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // C++11

#endif // _GLIBCXX_ATOMIC