#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1

#pragma GCC system_header

#if __cplusplus < 201103L
# include <bits/c++0x_warning.h>
#else

#include <bits/atomic_base.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

#if __cplusplus >= 201703L
# define __cpp_lib_atomic_is_always_lock_free 201603L
#endif
  template<typename _Tp>
    struct atomic;

  /// atomic<bool>
  template<>
    struct atomic<bool>
    {
      using value_type = bool;

    private:
      __atomic_base<bool> _M_base;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(bool __i) noexcept : _M_base(__i) { }

      bool
      operator=(bool __i) noexcept
      { return _M_base.operator=(__i); }

      bool
      operator=(bool __i) volatile noexcept
      { return _M_base.operator=(__i); }

      operator bool() const noexcept
      { return _M_base.load(); }

      operator bool() const volatile noexcept
      { return _M_base.load(); }

      bool
      is_lock_free() const noexcept { return _M_base.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { _M_base.store(__i, __m); }

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { _M_base.store(__i, __m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_base.load(__m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_base.load(__m); }

      bool
      exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      exchange(bool __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

#if __cpp_lib_atomic_wait
      void
      wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_base.wait(__old, __m); }

      void
      notify_one() noexcept
      { _M_base.notify_one(); }

      void
      notify_all() noexcept
      { _M_base.notify_all(); }
#endif // __cpp_lib_atomic_wait
    };
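  // Usage sketch (not part of the header): the atomic<bool> specialization
  // above forwards every operation to its __atomic_base<bool> member.
  // A minimal ready-flag handshake, with hypothetical names `ready` and
  // `worker` (assumed to live inside a function such as main):
  //
  //   #include <atomic>
  //   #include <thread>
  //
  //   std::atomic<bool> ready{false};
  //   std::thread worker([&ready] {
  //     while (!ready.load(std::memory_order_acquire))
  //       ;                                  // spin until the flag is set
  //   });
  //   ready.store(true, std::memory_order_release);
  //   worker.join();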
#if __cpp_lib_atomic_value_initialization
# define _GLIBCXX20_INIT(I) = I
#else
# define _GLIBCXX20_INIT(I)
#endif
  /**
   *  @brief Generic atomic type, primary class template.
   *
   *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());

      static_assert(__is_trivially_copyable(_Tp),
                    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
                    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
      atomic() = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(_Tp __i) noexcept : _M_i(__i)
      {
#if __cplusplus >= 201402L && __has_builtin(__builtin_clear_padding)
        if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
          __builtin_clear_padding(std::__addressof(_M_i));
#endif
      }

      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __atomic_store(std::__addressof(_M_i),
                       __atomic_impl::__clear_padding(__i),
                       int(__m));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __atomic_store(std::__addressof(_M_i),
                       __atomic_impl::__clear_padding(__i),
                       int(__m));
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i),
                          __atomic_impl::__clear_padding(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i),
                          __atomic_impl::__clear_padding(__i),
                          __ptr, int(__m));
        return *__ptr;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
                                                 __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
                                                 __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
                                                 __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
                                                 __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

#if __cpp_lib_atomic_wait
      void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      {
        std::__atomic_wait_address_v(&_M_i, __old,
                                     [__m, this] { return this->load(__m); });
      }

      void
      notify_one() noexcept
      { std::__atomic_notify_address(&_M_i, false); }

      void
      notify_all() noexcept
      { std::__atomic_notify_address(&_M_i, true); }
#endif // __cpp_lib_atomic_wait
    };
#undef _GLIBCXX20_INIT
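  // Usage sketch (not part of the header): the primary template above
  // requires a complete, trivially copyable type and stores it suitably
  // aligned so the __atomic built-ins can operate on the whole object.
  // The struct `Point` and variable names below are hypothetical:
  //
  //   #include <atomic>
  //
  //   struct Point { int x; int y; };        // trivially copyable
  //   std::atomic<Point> p{Point{0, 0}};
  //
  //   Point expected = p.load();
  //   Point desired{1, 2};
  //   while (!p.compare_exchange_weak(expected, desired))
  //     ;                                    // retry until the CAS succeeds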
  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp*                      __pointer_type;
      typedef __atomic_base<_Tp*>       __base_type;
      __base_type                       _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b++;
      }

      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b--;
      }

      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return ++_M_b;
      }

      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return --_M_b;
      }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator+=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.operator-=(__d);
      }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

#if __cpp_lib_atomic_wait
      void
      wait(__pointer_type __old,
           memory_order __m = memory_order_seq_cst) const noexcept
      { _M_b.wait(__old, __m); }

      void
      notify_one() noexcept
      { _M_b.notify_one(); }

      void
      notify_all() noexcept
      { _M_b.notify_all(); }
#endif // __cpp_lib_atomic_wait

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
        static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
        return _M_b.fetch_sub(__d, __m);
      }
    };
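  // Usage sketch (not part of the header): the pointer partial
  // specialization above provides fetch_add/fetch_sub and the pointer
  // arithmetic operators in units of whole objects. The names below are
  // hypothetical:
  //
  //   #include <atomic>
  //
  //   int data[4] = {10, 20, 30, 40};
  //   std::atomic<int*> cursor{data};
  //
  //   int* first  = cursor.fetch_add(1);     // returns data + 0
  //   int* second = cursor++;                // returns data + 1
  //   cursor -= 2;                           // back to data + 0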
  /// Explicit specialization for char.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char                      __integral_type;
      typedef __atomic_base<char>       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : __atomic_base<signed char>
    {
      typedef signed char               __integral_type;
      typedef __atomic_base<signed char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : __atomic_base<unsigned char>
    {
      typedef unsigned char             __integral_type;
      typedef __atomic_base<unsigned char> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short                     __integral_type;
      typedef __atomic_base<short>      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short            __integral_type;
      typedef __atomic_base<unsigned short> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int                       __integral_type;
      typedef __atomic_base<int>        __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int              __integral_type;
      typedef __atomic_base<unsigned int> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long                      __integral_type;
      typedef __atomic_base<long>       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long             __integral_type;
      typedef __atomic_base<unsigned long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long                 __integral_type;
      typedef __atomic_base<long long>  __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long        __integral_type;
      typedef __atomic_base<unsigned long long> __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t                   __integral_type;
      typedef __atomic_base<wchar_t>    __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };
#ifdef _GLIBCXX_USE_CHAR8_T
  /// Explicit specialization for char8_t.
  template<>
    struct atomic<char8_t> : __atomic_base<char8_t>
    {
      typedef char8_t                   __integral_type;
      typedef __atomic_base<char8_t>    __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus > 201402L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR8_T_LOCK_FREE == 2;
#endif
    };
#endif

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t                  __integral_type;
      typedef __atomic_base<char16_t>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : __atomic_base<char32_t>
    {
      typedef char32_t                  __integral_type;
      typedef __atomic_base<char32_t>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free
        = ATOMIC_CHAR32_T_LOCK_FREE == 2;
#endif
    };
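  // Usage sketch (not part of the header): the integral specializations
  // above inherit fetch_add, fetch_or, etc. from __atomic_base, and since
  // C++17 expose is_always_lock_free as a compile-time constant. The
  // variable names below are hypothetical:
  //
  //   #include <atomic>
  //
  //   constexpr bool always_lf = std::atomic<int>::is_always_lock_free;
  //   // true on platforms where ATOMIC_INT_LOCK_FREE == 2
  //
  //   std::atomic<unsigned> flags{0};
  //   flags.fetch_or(0x4, std::memory_order_relaxed);  // set one bit
  //   unsigned old = flags.fetch_add(1);               // returns the old value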
#ifdef _GLIBCXX_USE_CHAR8_T
  // typedef atomic<char8_t> atomic_char8_t; is defined here.
#endif
#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // <cstdint>-based atomic_* typedefs (atomic_int8_t, atomic_uint_least32_t, ...).
#endif
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
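  // Usage sketch (not part of the header): the atomic_flag free functions
  // above mirror the member functions. A tiny test-and-set spinlock using a
  // hypothetical `lock` flag:
  //
  //   #include <atomic>
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //
  //   void critical_section()
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(&lock,
  //                                                   std::memory_order_acquire))
  //       ;                                  // spin while already set
  //     // ... exclusive work ...
  //     std::atomic_flag_clear_explicit(&lock, std::memory_order_release);
  //   }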
  template<typename _Tp>
    using __atomic_val_t = __type_identity_t<_Tp>;
  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  // [atomics.nonmembers] non-member functions
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          __atomic_val_t<_ITp>* __i1,
                                          __atomic_val_t<_ITp> __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            __atomic_val_t<_ITp>* __i1,
                                            __atomic_val_t<_ITp> __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 __atomic_val_t<_ITp>* __i1,
                                 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   __atomic_val_t<_ITp>* __i1,
                                   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
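  // Usage sketch (not part of the header): the non-member
  // atomic_compare_exchange_* functions above take the expected value by
  // pointer, matching the C API. A hypothetical lock-free maximum update:
  //
  //   #include <atomic>
  //
  //   void update_max(std::atomic<int>* target, int value)
  //   {
  //     int current = std::atomic_load(target);
  //     while (value > current
  //            && !std::atomic_compare_exchange_weak(target, &current, value))
  //       ;   // on failure `current` is refreshed with the observed value
  //   }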
#if __cpp_lib_atomic_wait
  template<typename _Tp>
    inline void
    atomic_wait(const atomic<_Tp>* __a,
                typename std::atomic<_Tp>::value_type __old) noexcept
    { __a->wait(__old); }

  template<typename _Tp>
    inline void
    atomic_wait_explicit(const atomic<_Tp>* __a,
                         typename std::atomic<_Tp>::value_type __old,
                         std::memory_order __m) noexcept
    { __a->wait(__old, __m); }

  template<typename _Tp>
    inline void
    atomic_notify_one(atomic<_Tp>* __a) noexcept
    { __a->notify_one(); }

  template<typename _Tp>
    inline void
    atomic_notify_all(atomic<_Tp>* __a) noexcept
    { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
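  // Usage sketch (not part of the header): the C++20 wait/notify free
  // functions above block instead of spinning. The names below are
  // hypothetical:
  //
  //   #include <atomic>
  //   #include <thread>
  //
  //   std::atomic<int> stage{0};
  //   std::thread t([&stage] {
  //     std::atomic_wait(&stage, 0);         // sleep while stage == 0
  //   });
  //   stage.store(1);
  //   std::atomic_notify_one(&stage);
  //   t.join();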
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
                              __atomic_diff_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
                             __atomic_val_t<_ITp> __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
                              __atomic_val_t<_ITp> __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a, __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a, __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
                     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
                    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
                     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
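  // Usage sketch (not part of the header): the non-member fetch_* functions
  // above forward to the corresponding members and return the old value.
  // A hypothetical shared counter:
  //
  //   #include <atomic>
  //
  //   std::atomic<long> hits{0};
  //
  //   long before = std::atomic_fetch_add(&hits, 1L);             // seq_cst
  //   std::atomic_fetch_add_explicit(&hits, 1L,
  //                                  std::memory_order_relaxed);  // relaxed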
#if __cplusplus > 201703L
#define __cpp_lib_atomic_float 201711L
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      atomic() noexcept = default;

      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<float>::operator=;
    };

  template<>
    struct atomic<double> : __atomic_float<double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<double>::operator=;
    };

  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      atomic() noexcept = default;

      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      using __atomic_float<long double>::operator=;
    };
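  // Usage sketch (not part of the header): since C++20 the floating-point
  // specializations above add fetch_add/fetch_sub, so a shared accumulator
  // no longer needs a CAS loop. The name `total` is hypothetical:
  //
  //   #include <atomic>
  //
  //   std::atomic<double> total{0.0};
  //   total.fetch_add(0.5);                  // atomic floating-point add
  //   total.fetch_sub(0.25, std::memory_order_relaxed);
  //   double snapshot = total.load();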
#define __cpp_lib_atomic_ref 201806L

  /// Class template to provide atomic operations on a non-atomic variable.
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      atomic_ref& operator=(const atomic_ref&) = delete;

      using __atomic_ref<_Tp>::operator=;
    };
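  // Usage sketch (not part of the header): std::atomic_ref (C++20) applies
  // atomic operations to an ordinary object for the lifetime of the
  // reference. The variable names below are hypothetical:
  //
  //   #include <atomic>
  //
  //   int counter = 0;                       // plain, non-atomic object
  //   {
  //     std::atomic_ref<int> ref{counter};
  //     ref.fetch_add(1, std::memory_order_relaxed);
  //   }
  //   // `counter` may be used non-atomically again once no atomic_ref
  //   // to it is outstanding.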
#endif // C++2a

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace
#endif // C++11
#endif // _GLIBCXX_ATOMIC
Cross-references generated for this file:

  std::__addressof(_Tp& __r)    constexpr; same as C++11 std::addressof.
  std::exchange(_Tp& __obj, _Up&& __new_val)
                                assigns __new_val to __obj and returns its previous value.
  std::memory_order             enumeration for memory ordering constraints.
  ATOMIC_BOOL_LOCK_FREE         lock-free property macro for atomic<bool>.
  namespace std                 top-level namespace for ISO C++ entities.

  Class templates and specializations defined in this file:
    atomic<_Tp>       generic atomic type, primary class template.
    atomic<_Tp*>      partial specialization for pointer types.
    atomic<char>, atomic<signed char>, atomic<unsigned char>, atomic<short>,
    atomic<unsigned short>, atomic<int>, atomic<unsigned int>, atomic<long>,
    atomic<unsigned long>, atomic<long long>, atomic<unsigned long long>,
    atomic<wchar_t>, atomic<char16_t>, atomic<char32_t>
                      explicit specializations for the integral types.
    atomic_ref<_Tp>   class template to provide atomic operations on a
                      non-atomic variable.

  Typedefs for atomic types:
    atomic_bool           atomic<bool>           atomic_char            atomic<char>
    atomic_schar          atomic<signed char>    atomic_uchar           atomic<unsigned char>
    atomic_short          atomic<short>          atomic_ushort          atomic<unsigned short>
    atomic_int            atomic<int>            atomic_uint            atomic<unsigned int>
    atomic_long           atomic<long>           atomic_ulong           atomic<unsigned long>
    atomic_llong          atomic<long long>      atomic_ullong          atomic<unsigned long long>
    atomic_wchar_t        atomic<wchar_t>        atomic_char16_t        atomic<char16_t>
    atomic_char32_t       atomic<char32_t>
    atomic_int8_t         atomic<int8_t>         atomic_uint8_t         atomic<uint8_t>
    atomic_int16_t        atomic<int16_t>        atomic_uint16_t        atomic<uint16_t>
    atomic_int32_t        atomic<int32_t>        atomic_uint32_t        atomic<uint32_t>
    atomic_int64_t        atomic<int64_t>        atomic_uint64_t        atomic<uint64_t>
    atomic_int_least8_t   atomic<int_least8_t>   atomic_uint_least8_t   atomic<uint_least8_t>
    atomic_int_least16_t  atomic<int_least16_t>  atomic_uint_least16_t  atomic<uint_least16_t>
    atomic_int_least32_t  atomic<int_least32_t>  atomic_uint_least32_t  atomic<uint_least32_t>
    atomic_int_least64_t  atomic<int_least64_t>  atomic_uint_least64_t  atomic<uint_least64_t>
    atomic_int_fast8_t    atomic<int_fast8_t>    atomic_uint_fast8_t    atomic<uint_fast8_t>
    atomic_int_fast16_t   atomic<int_fast16_t>   atomic_uint_fast16_t   atomic<uint_fast16_t>
    atomic_int_fast32_t   atomic<int_fast32_t>   atomic_uint_fast32_t   atomic<uint_fast32_t>
    atomic_int_fast64_t   atomic<int_fast64_t>   atomic_uint_fast64_t   atomic<uint_fast64_t>
    atomic_intptr_t       atomic<intptr_t>       atomic_uintptr_t       atomic<uintptr_t>
    atomic_size_t         atomic<size_t>         atomic_ptrdiff_t       atomic<ptrdiff_t>
    atomic_intmax_t       atomic<intmax_t>       atomic_uintmax_t       atomic<uintmax_t>