#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>
#include <bits/move.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

#if __cplusplus >= 201703L
# define __cpp_lib_atomic_is_always_lock_free 201603
#endif
  template<typename _Tp>
    struct atomic;

  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  template<>
    struct atomic<bool>
    {
      using value_type = bool;

    private:
      __atomic_base<bool> _M_base;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(bool __i) noexcept : _M_base(__i) { }

      bool operator=(bool __i) noexcept
      { return _M_base.operator=(__i); }

      bool operator=(bool __i) volatile noexcept
      { return _M_base.operator=(__i); }

      operator bool() const noexcept
      { return _M_base.load(); }

      operator bool() const volatile noexcept
      { return _M_base.load(); }

      bool is_lock_free() const noexcept
      { return _M_base.is_lock_free(); }

      bool is_lock_free() const volatile noexcept
      { return _M_base.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

      void store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { _M_base.store(__i, __m); }

      void store(bool __i,
		 memory_order __m = memory_order_seq_cst) volatile noexcept
      { _M_base.store(__i, __m); }

      bool load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_base.load(__m); }

      bool load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_base.load(__m); }

      bool exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.exchange(__i, __m); }

      bool exchange(bool __i,
		    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.exchange(__i, __m); }

      bool compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
				 memory_order __m2) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
				 memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool compare_exchange_weak(bool& __i1, bool __i2,
				 memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool compare_exchange_weak(bool& __i1, bool __i2,
				 memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
				   memory_order __m2) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
				   memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool compare_exchange_strong(bool& __i1, bool __i2,
				   memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

      bool compare_exchange_strong(bool& __i1, bool __i2,
				   memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
    };
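  // Example (illustrative only; not part of this header, and the names
  // `ready', `payload', `produce' and `consume' are hypothetical): a
  // typical use of atomic<bool> as a release/acquire handshake flag.
  //
  //   #include <atomic>
  //
  //   std::atomic<bool> ready{false};
  //   int payload = 0;
  //
  //   void produce()
  //   { payload = 42; ready.store(true, std::memory_order_release); }
  //
  //   void consume()
  //   { while (!ready.load(std::memory_order_acquire)) { } /* payload is 42 */ }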
  /**
   *  @brief Generic atomic type, primary class template.
   *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      static constexpr int _S_min_alignment
	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
	  ? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
	= _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i;

      static_assert(__is_trivially_copyable(_Tp),
		    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
		    "Incomplete or zero-sized types are not supported");

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool is_lock_free() const noexcept
      {
	// Produce a fake, minimally aligned pointer.
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-_S_alignment));
      }

      bool is_lock_free() const volatile noexcept
      {
	// Produce a fake, minimally aligned pointer.
	return __atomic_is_lock_free(sizeof(_M_i),
	    reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
	= __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	__atomic_store(std::__addressof(_M_i), std::__addressof(__i),
		       int(__m));
      }

      void store(_Tp __i,
		 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	__atomic_store(std::__addressof(_M_i), std::__addressof(__i),
		       int(__m));
      }

      _Tp load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      _Tp load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      _Tp exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      _Tp exchange(_Tp __i,
		   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      bool compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
				 memory_order __f) noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 true, int(__s), int(__f));
      }

      bool compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
				 memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 true, int(__s), int(__f));
      }

      bool compare_exchange_weak(_Tp& __e, _Tp __i,
				 memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      bool compare_exchange_weak(_Tp& __e, _Tp __i,
				 memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      bool compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
				   memory_order __f) noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 false, int(__s), int(__f));
      }

      bool compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
				   memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 false, int(__s), int(__f));
      }

      bool compare_exchange_strong(_Tp& __e, _Tp __i,
				   memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }

      bool compare_exchange_strong(_Tp& __e, _Tp __i,
				   memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }
    };
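  // Example (illustrative only; the type `Point' and the names below are
  // hypothetical): the primary template accepts any trivially copyable
  // type, and compare_exchange_weak is the usual building block for a
  // read-modify-write retry loop.
  //
  //   #include <atomic>
  //
  //   struct Point { int x, y; };              // trivially copyable
  //   std::atomic<Point> origin{Point{0, 0}};
  //
  //   void shift_x(std::atomic<Point>& ap, int dx)
  //   {
  //     Point expected = ap.load();
  //     Point desired;
  //     do {
  //       desired = expected;
  //       desired.x += dx;
  //     } while (!ap.compare_exchange_weak(expected, desired));
  //   }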
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp*			__pointer_type;
      typedef __atomic_base<_Tp*>	__base_type;
      __base_type			_M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type operator++(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      __pointer_type operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      __pointer_type operator--(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      __pointer_type operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      __pointer_type operator++() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      __pointer_type operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      __pointer_type operator--() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      __pointer_type operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      __pointer_type operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      __pointer_type operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      __pointer_type operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      __pointer_type operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      bool is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
	= ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void store(__pointer_type __p,
		 memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void store(__pointer_type __p,
		 memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type exchange(__pointer_type __p,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type exchange(__pointer_type __p,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
				 memory_order __m1,
				 memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
				 memory_order __m1,
				 memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
				 memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
				 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
				   memory_order __m1,
				   memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
				   memory_order __m1,
				   memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
				   memory_order __m = memory_order_seq_cst) noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      bool compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
				   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      __pointer_type fetch_add(ptrdiff_t __d,
			       memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      __pointer_type fetch_add(ptrdiff_t __d,
			       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      __pointer_type fetch_sub(ptrdiff_t __d,
			       memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type fetch_sub(ptrdiff_t __d,
			       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }
    };
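  // Example (illustrative only; the names `buf', `cursor' and `claim_slot'
  // are hypothetical): atomic<_Tp*> supports fetch_add/fetch_sub and the
  // +=/-= operators, which step the pointer in units of sizeof(_Tp).
  //
  //   #include <atomic>
  //
  //   int buf[64];
  //   std::atomic<int*> cursor{buf};
  //
  //   int* claim_slot()                  // each caller gets a distinct slot
  //   { return cursor.fetch_add(1); }    // advance by one int, return old value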
  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char			__integral_type;
      typedef __atomic_base<char>	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char		__integral_type;
      typedef __atomic_base<signed char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char		__integral_type;
      typedef __atomic_base<unsigned char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short			__integral_type;
      typedef __atomic_base<short>	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short		__integral_type;
      typedef __atomic_base<unsigned short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int			__integral_type;
      typedef __atomic_base<int>	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int		__integral_type;
      typedef __atomic_base<unsigned int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long			__integral_type;
      typedef __atomic_base<long>	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long		__integral_type;
      typedef __atomic_base<unsigned long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long			__integral_type;
      typedef __atomic_base<long long>	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long	__integral_type;
      typedef __atomic_base<unsigned long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t			__integral_type;
      typedef __atomic_base<wchar_t>	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };

#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      typedef char8_t			__integral_type;
      typedef __atomic_base<char8_t>	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t			__integral_type;
      typedef __atomic_base<char16_t>	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t			__integral_type;
      typedef __atomic_base<char32_t>	__base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
#endif
    };
#ifdef _GLIBCXX_USE_CHAR8_T
  /// atomic_char8_t
  typedef atomic<char8_t>		atomic_char8_t;
#endif

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // Typedefs for the atomic counterparts of the <cstdint> exact-width,
  // least-width and fast-width integer types (atomic_int8_t,
  // atomic_int_least8_t, atomic_int_fast8_t, ... and unsigned variants).
#endif

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // Typedefs for atomic_intptr_t, atomic_uintptr_t, atomic_intmax_t and
  // atomic_uintmax_t.
#endif
  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
			     memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
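  // Example (illustrative only; the names `lock_flag', `lock' and `unlock'
  // are hypothetical): atomic_flag with the test_and_set/clear free
  // functions is the classic minimal spinlock.
  //
  //   #include <atomic>
  //
  //   std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;
  //
  //   void lock()
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(&lock_flag,
  //                                                   std::memory_order_acquire))
  //       { /* spin */ }
  //   }
  //
  //   void unlock()
  //   { std::atomic_flag_clear_explicit(&lock_flag, std::memory_order_release); }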
  template<typename _Tp>
    using __atomic_val_t = typename atomic<_Tp>::value_type;

  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  // Generic atomic free functions.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
			 memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
				 __atomic_val_t<_ITp>* __i1,
				 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
				 __atomic_val_t<_ITp>* __i1,
				 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
				   __atomic_val_t<_ITp>* __i1,
				   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
				   __atomic_val_t<_ITp>* __i1,
				   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
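  // Example (illustrative only; the names `hits', `record' and `snapshot'
  // are hypothetical): the C-compatible free functions mirror the member
  // functions, so the same counter can be manipulated either way.
  //
  //   #include <atomic>
  //
  //   std::atomic<long> hits{0};
  //
  //   void record()   { std::atomic_fetch_add(&hits, 1); }  // same as hits.fetch_add(1)
  //   long snapshot() { return std::atomic_load(&hits); }   // same as hits.load()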
#if __cplusplus > 201703L
  // [atomics.types.float] Specializations for floating-point types.
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<float>::operator=;
    };

  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };

  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };
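  // Example (illustrative only; the names `total' and `add_sample' are
  // hypothetical): in C++20 the floating-point specializations gain
  // fetch_add/fetch_sub, so sums can be accumulated without a CAS loop.
  //
  //   #include <atomic>
  //
  //   std::atomic<double> total{0.0};
  //
  //   void add_sample(double x)
  //   { total.fetch_add(x, std::memory_order_relaxed); }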
#define __cpp_lib_atomic_ref 201806L

  /// Class template to provide atomic operations on a non-atomic variable.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      atomic_ref& operator=(const atomic_ref&) = delete;

      atomic_ref(const atomic_ref&) = default;

      using __atomic_ref<_Tp>::operator=;
    };

#endif // C++2a
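  // Example (illustrative only; the names `counts', `n' and `bump_all' are
  // hypothetical): atomic_ref (C++20) applies atomic operations to an
  // ordinary object for the lifetime of the reference; the object must be
  // suitably aligned and must not be accessed non-atomically while any
  // atomic_ref to it exists.
  //
  //   #include <atomic>
  //   #include <cstddef>
  //
  //   void bump_all(int* counts, std::size_t n)
  //   {
  //     for (std::size_t i = 0; i < n; ++i)
  //       std::atomic_ref<int>(counts[i]).fetch_add(1, std::memory_order_relaxed);
  //   }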
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // C++11

#endif // _GLIBCXX_ATOMIC