#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @addtogroup atomics
   * @{
   */

#if __cplusplus >= 201703L
# define __cpp_lib_atomic_is_always_lock_free 201603
#endif
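  // Illustrative usage sketch (editorial addition, not part of the header):
  // under C++17 the feature-test macro above advertises the
  // is_always_lock_free constant, which can then be checked at compile time.
  //
  //   #include <atomic>
  //   #ifdef __cpp_lib_atomic_is_always_lock_free
  //   static_assert(std::atomic<int>::is_always_lock_free,
  //                 "this example assumes lock-free std::atomic<int>");
  //   #endif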
  template<typename _Tp>
    struct atomic;

  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  template<>
  struct atomic<bool>
  {
    using value_type = bool;

  private:
    __atomic_base<bool> _M_base;

  public:
    atomic() noexcept = default;
    ~atomic() noexcept = default;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    constexpr atomic(bool __i) noexcept : _M_base(__i) { }

    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus >= 201703L
    static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
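  // Illustrative usage sketch (editorial addition, not part of the header):
  // a one-shot "ready" flag published from one thread and observed by
  // another, using only the members shown above.
  //
  //   #include <atomic>
  //   #include <thread>
  //
  //   int payload = 0;
  //   std::atomic<bool> ready(false);
  //
  //   std::thread producer([&] {
  //     payload = 42;                                    // ordinary write
  //     ready.store(true, std::memory_order_release);    // publish it
  //   });
  //   while (!ready.load(std::memory_order_acquire)) { } // spin until seen
  //   // the acquire load that observes 'true' makes payload == 42 visible
  //   producer.join();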
#if __cplusplus <= 201703L
# define _GLIBCXX20_INIT(I)
#else
# define _GLIBCXX20_INIT(I) = I
#endif
  /**
   *  @brief Generic atomic type, primary class template.
   *
   *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());

      static_assert(__is_trivially_copyable(_Tp),
                    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
                    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
      atomic() = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __atomic_store(std::__addressof(_M_i), std::__addressof(__i),
                       int(__m));
      }

      void
      store(_Tp __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __atomic_store(std::__addressof(_M_i), std::__addressof(__i),
                       int(__m));
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
    };
#undef _GLIBCXX20_INIT
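  // Illustrative usage sketch (editorial addition, not part of the header):
  // the primary template applied to a small trivially copyable struct,
  // together with the classic compare_exchange_weak retry loop.
  //
  //   struct Point { int x, y; };          // trivially copyable, 8 bytes
  //   std::atomic<Point> p{Point{0, 0}};
  //
  //   Point expected = p.load();
  //   Point desired;
  //   do {
  //     desired = expected;
  //     ++desired.x;                       // compute the next value
  //   } while (!p.compare_exchange_weak(expected, desired));
  //   // on failure 'expected' is refreshed, so the loop re-reads for free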
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp*                  __pointer_type;
      typedef __atomic_base<_Tp*>   __base_type;
      __base_type                   _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }
    };
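  // Illustrative usage sketch (editorial addition, not part of the header):
  // fetch_add on an atomic pointer advances by whole elements (the ptrdiff_t
  // argument counts objects, not bytes), which makes a tiny bump allocator
  // straightforward.
  //
  //   char buffer[1024];
  //   std::atomic<char*> bump(buffer);
  //
  //   // claim a 16-byte chunk: fetch_add returns the *old* pointer, so each
  //   // caller receives a distinct region (no overflow check shown)
  //   char* mine = bump.fetch_add(16);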
  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char                  __integral_type;
      typedef __atomic_base<char>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char                 __integral_type;
      typedef __atomic_base<signed char>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char                 __integral_type;
      typedef __atomic_base<unsigned char>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short                 __integral_type;
      typedef __atomic_base<short>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short                 __integral_type;
      typedef __atomic_base<unsigned short>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int                 __integral_type;
      typedef __atomic_base<int>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int                 __integral_type;
      typedef __atomic_base<unsigned int>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long                 __integral_type;
      typedef __atomic_base<long>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long                 __integral_type;
      typedef __atomic_base<unsigned long>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long                 __integral_type;
      typedef __atomic_base<long long>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long                 __integral_type;
      typedef __atomic_base<unsigned long long>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t                 __integral_type;
      typedef __atomic_base<wchar_t>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };

#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      typedef char8_t                 __integral_type;
      typedef __atomic_base<char8_t>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t                 __integral_type;
      typedef __atomic_base<char16_t>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t                 __integral_type;
      typedef __atomic_base<char32_t>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
#endif
    };
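  // Illustrative usage sketch (editorial addition, not part of the header):
  // the integral specializations inherit fetch_add and the arithmetic
  // operators from __atomic_base, so a shared counter needs no mutex.
  //
  //   std::atomic<unsigned int> hits(0);
  //   // from any number of threads:
  //   hits.fetch_add(1, std::memory_order_relaxed);   // or simply ++hits
  //   // one final seq_cst read when reporting:
  //   unsigned int total = hits.load();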
  // Typedefs for atomics of the explicit specializations above.
  typedef atomic<bool>                  atomic_bool;
  typedef atomic<char>                  atomic_char;
  typedef atomic<signed char>           atomic_schar;
  typedef atomic<unsigned char>         atomic_uchar;
  typedef atomic<short>                 atomic_short;
  typedef atomic<unsigned short>        atomic_ushort;
  typedef atomic<int>                   atomic_int;
  typedef atomic<unsigned int>          atomic_uint;
  typedef atomic<long>                  atomic_long;
  typedef atomic<unsigned long>         atomic_ulong;
  typedef atomic<long long>             atomic_llong;
  typedef atomic<unsigned long long>    atomic_ullong;
  typedef atomic<wchar_t>               atomic_wchar_t;
  typedef atomic<char16_t>              atomic_char16_t;
  typedef atomic<char32_t>              atomic_char32_t;

#ifdef _GLIBCXX_USE_CHAR8_T
  typedef atomic<char8_t>               atomic_char8_t;
#endif

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // Typedefs for the atomic versions of the <cstdint> integer types.
  typedef atomic<int8_t>                atomic_int8_t;
  typedef atomic<uint8_t>               atomic_uint8_t;
  typedef atomic<int16_t>               atomic_int16_t;
  typedef atomic<uint16_t>              atomic_uint16_t;
  typedef atomic<int32_t>               atomic_int32_t;
  typedef atomic<uint32_t>              atomic_uint32_t;
  typedef atomic<int64_t>               atomic_int64_t;
  typedef atomic<uint64_t>              atomic_uint64_t;

  typedef atomic<int_least8_t>          atomic_int_least8_t;
  typedef atomic<uint_least8_t>         atomic_uint_least8_t;
  typedef atomic<int_least16_t>         atomic_int_least16_t;
  typedef atomic<uint_least16_t>        atomic_uint_least16_t;
  typedef atomic<int_least32_t>         atomic_int_least32_t;
  typedef atomic<uint_least32_t>        atomic_uint_least32_t;
  typedef atomic<int_least64_t>         atomic_int_least64_t;
  typedef atomic<uint_least64_t>        atomic_uint_least64_t;

  typedef atomic<int_fast8_t>           atomic_int_fast8_t;
  typedef atomic<uint_fast8_t>          atomic_uint_fast8_t;
  typedef atomic<int_fast16_t>          atomic_int_fast16_t;
  typedef atomic<uint_fast16_t>         atomic_uint_fast16_t;
  typedef atomic<int_fast32_t>          atomic_int_fast32_t;
  typedef atomic<uint_fast32_t>         atomic_uint_fast32_t;
  typedef atomic<int_fast64_t>          atomic_int_fast64_t;
  typedef atomic<uint_fast64_t>         atomic_uint_fast64_t;
#endif

  typedef atomic<intptr_t>              atomic_intptr_t;
  typedef atomic<uintptr_t>             atomic_uintptr_t;
  typedef atomic<size_t>                atomic_size_t;
  typedef atomic<ptrdiff_t>             atomic_ptrdiff_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  typedef atomic<intmax_t>              atomic_intmax_t;
  typedef atomic<uintmax_t>             atomic_uintmax_t;
#endif
  // Function definitions, atomic_flag operations.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
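  // Illustrative usage sketch (editorial addition, not part of the header):
  // a minimal test-and-set spinlock built on the free functions above
  // (atomic_flag itself is defined in <bits/atomic_base.h>).
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //
  //   void critical_section() {
  //     while (std::atomic_flag_test_and_set_explicit(
  //              &lock, std::memory_order_acquire))
  //       ;                                   // busy-wait until acquired
  //     /* ... exclusive work ... */
  //     std::atomic_flag_clear_explicit(&lock, std::memory_order_release);
  //   }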
  template<typename _Tp>
    using __atomic_val_t = typename atomic<_Tp>::value_type;
  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;

  // [atomics.nonmembers] Non-member functions.
  // Function templates generally applicable to atomic types.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a,
                __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }
  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a,
                          __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
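  // Illustrative usage sketch (editorial addition, not part of the header):
  // the *_explicit free functions mirror the C11 <stdatomic.h> spelling,
  // which is convenient when a code base is shared between C and C++.
  //
  //   std::atomic<int> level(0);
  //   std::atomic_store_explicit(&level, 3, std::memory_order_release);
  //   int seen = std::atomic_load_explicit(&level, std::memory_order_acquire);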
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a,
                 __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
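  // Illustrative usage sketch (editorial addition, not part of the header):
  // in the free-function form the expected value is passed by pointer and is
  // updated in place on failure, so the usual retry loop still works.
  //
  //   std::atomic<int> maximum(0);
  //
  //   void record(int sample) {
  //     int prev = std::atomic_load(&maximum);
  //     while (sample > prev
  //            && !std::atomic_compare_exchange_weak(&maximum, &prev, sample))
  //       ;   // 'prev' was refreshed with the current maximum; try again
  //   }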
  // Function templates for atomic_integral and atomic_pointer operations
  // only. Some operations (and, or, xor) are only available for atomic
  // integrals, which is implemented by taking a parameter of
  // __atomic_base<_ITp>*.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a, __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a, __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
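  // Illustrative usage sketch (editorial addition, not part of the header):
  // fetch_or/fetch_and make a set of status bits updatable without a mutex;
  // the returned value is the state *before* the update.
  //
  //   enum : unsigned { READY = 1u << 0, ERROR = 1u << 1 };
  //   std::atomic<unsigned> flags(0);
  //
  //   unsigned before = std::atomic_fetch_or(&flags, READY);
  //   bool already_ready = (before & READY) != 0;   // true only on a repeat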
#if __cplusplus > 201703L
#define __cpp_lib_atomic_float 201711L

  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<float>::operator=;
    };

  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };

  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };
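  // Illustrative usage sketch (editorial addition, not part of the header):
  // since C++20 the floating-point specializations provide fetch_add and
  // fetch_sub, so partial sums can be accumulated from several threads.
  //
  //   std::atomic<double> total(0.0);
  //
  //   void accumulate(double partial_sum) {   // name is illustrative only
  //     total.fetch_add(partial_sum, std::memory_order_relaxed);
  //   }
  //   // note: the summation order differs between runs, so rounding may too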
#define __cpp_lib_atomic_ref 201806L

  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      atomic_ref& operator=(const atomic_ref&) = delete;

      atomic_ref(const atomic_ref&) = default;

      using __atomic_ref<_Tp>::operator=;
    };
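  // Illustrative usage sketch (editorial addition, not part of the header):
  // atomic_ref applies atomic operations to an ordinary object for the
  // lifetime of the reference; the object must not be accessed
  // non-atomically while any atomic_ref to it exists.
  //
  //   int counters[8] = {};
  //   // worker threads update the plain ints through temporary atomic_refs:
  //   std::atomic_ref<int>(counters[3]).fetch_add(1, std::memory_order_relaxed);
  //   // after all threads join, counters[] can be read directly again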
#endif // C++2a

  /// @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace

#endif // C++11

#endif // _GLIBCXX_ATOMIC