30#ifndef _SHARED_PTR_ATOMIC_H
31#define _SHARED_PTR_ATOMIC_H 1
34#include <bits/shared_ptr.h>
37#if defined _GLIBCXX_TSAN && __has_include(<sanitizer/tsan_interface.h>)
38#include <sanitizer/tsan_interface.h>
39#define _GLIBCXX_TSAN_MUTEX_DESTROY(X) \
40 __tsan_mutex_destroy(X, __tsan_mutex_not_static)
41#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X) \
42 __tsan_mutex_pre_lock(X, __tsan_mutex_not_static|__tsan_mutex_try_lock)
43#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X) __tsan_mutex_post_lock(X, \
44 __tsan_mutex_not_static|__tsan_mutex_try_lock_failed, 0)
45#define _GLIBCXX_TSAN_MUTEX_LOCKED(X) \
46 __tsan_mutex_post_lock(X, __tsan_mutex_not_static, 0)
47#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X) __tsan_mutex_pre_unlock(X, 0)
48#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X) __tsan_mutex_post_unlock(X, 0)
49#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X) __tsan_mutex_pre_signal(X, 0)
50#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X) __tsan_mutex_post_signal(X, 0)
52#define _GLIBCXX_TSAN_MUTEX_DESTROY(X)
53#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X)
54#define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X)
55#define _GLIBCXX_TSAN_MUTEX_LOCKED(X)
56#define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X)
57#define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X)
58#define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X)
59#define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X)
62namespace std _GLIBCXX_VISIBILITY(default)
64_GLIBCXX_BEGIN_NAMESPACE_VERSION
76 _Sp_locker(
const _Sp_locker&) =
delete;
77 _Sp_locker& operator=(
const _Sp_locker&) =
delete;
81 _Sp_locker(
const void*)
noexcept;
82 _Sp_locker(
const void*,
const void*)
noexcept;
86 unsigned char _M_key1;
87 unsigned char _M_key2;
89 explicit _Sp_locker(
const void*,
const void* =
nullptr) { }
101 template<
typename _Tp, _Lock_policy _Lp>
102 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
104 atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>*)
107 return __gthread_active_p() == 0;
113 template<
typename _Tp>
114 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
117 {
return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }
130 template<
typename _Tp>
131 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
132 inline shared_ptr<_Tp>
135 _Sp_locker __lock{__p};
139 template<
typename _Tp>
140 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
141 inline shared_ptr<_Tp>
143 {
return std::atomic_load_explicit(__p, memory_order_seq_cst); }
145 template<
typename _Tp, _Lock_policy _Lp>
146 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
147 inline __shared_ptr<_Tp, _Lp>
150 _Sp_locker __lock{__p};
154 template<
typename _Tp, _Lock_policy _Lp>
155 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
156 inline __shared_ptr<_Tp, _Lp>
157 atomic_load(const __shared_ptr<_Tp, _Lp>* __p)
158 {
return std::atomic_load_explicit(__p, memory_order_seq_cst); }
170 template<
typename _Tp>
171 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
176 _Sp_locker __lock{__p};
180 template<
typename _Tp>
181 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
184 { std::atomic_store_explicit(__p,
std::move(__r), memory_order_seq_cst); }
186 template<
typename _Tp, _Lock_policy _Lp>
187 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
189 atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
190 __shared_ptr<_Tp, _Lp> __r,
193 _Sp_locker __lock{__p};
197 template<
typename _Tp, _Lock_policy _Lp>
198 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
200 atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
201 { std::atomic_store_explicit(__p,
std::move(__r), memory_order_seq_cst); }
211 template<
typename _Tp>
212 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
213 inline shared_ptr<_Tp>
217 _Sp_locker __lock{__p};
222 template<
typename _Tp>
223 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
224 inline shared_ptr<_Tp>
227 return std::atomic_exchange_explicit(__p,
std::move(__r),
228 memory_order_seq_cst);
231 template<
typename _Tp, _Lock_policy _Lp>
232 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
233 inline __shared_ptr<_Tp, _Lp>
234 atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
235 __shared_ptr<_Tp, _Lp> __r,
238 _Sp_locker __lock{__p};
243 template<
typename _Tp, _Lock_policy _Lp>
244 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
245 inline __shared_ptr<_Tp, _Lp>
246 atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
248 return std::atomic_exchange_explicit(__p,
std::move(__r),
249 memory_order_seq_cst);
264 template<
typename _Tp>
265 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
267 atomic_compare_exchange_strong_explicit(
shared_ptr<_Tp>* __p,
274 _Sp_locker __lock{__p, __v};
276 if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
287 template<
typename _Tp>
288 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
293 return std::atomic_compare_exchange_strong_explicit(__p, __v,
294 std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
297 template<
typename _Tp>
298 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
306 return std::atomic_compare_exchange_strong_explicit(__p, __v,
310 template<
typename _Tp>
311 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
316 return std::atomic_compare_exchange_weak_explicit(__p, __v,
317 std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
320 template<
typename _Tp, _Lock_policy _Lp>
321 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
323 atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
324 __shared_ptr<_Tp, _Lp>* __v,
325 __shared_ptr<_Tp, _Lp> __w,
329 __shared_ptr<_Tp, _Lp> __x;
330 _Sp_locker __lock{__p, __v};
332 if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
343 template<
typename _Tp, _Lock_policy _Lp>
344 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
346 atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
347 __shared_ptr<_Tp, _Lp>* __v,
348 __shared_ptr<_Tp, _Lp> __w)
350 return std::atomic_compare_exchange_strong_explicit(__p, __v,
351 std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
354 template<
typename _Tp, _Lock_policy _Lp>
355 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
357 atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
358 __shared_ptr<_Tp, _Lp>* __v,
359 __shared_ptr<_Tp, _Lp> __w,
363 return std::atomic_compare_exchange_strong_explicit(__p, __v,
367 template<
typename _Tp, _Lock_policy _Lp>
368 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
370 atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
371 __shared_ptr<_Tp, _Lp>* __v,
372 __shared_ptr<_Tp, _Lp> __w)
374 return std::atomic_compare_exchange_weak_explicit(__p, __v,
375 std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
381#ifdef __glibcxx_atomic_shared_ptr
382 template<
typename _Tp>
391 template<
typename _Tp>
394 using value_type = _Tp;
396 friend struct atomic<_Tp>;
403 using __count_type =
decltype(_Tp::_M_refcount);
404 using uintptr_t = __UINTPTR_TYPE__;
407 using pointer =
decltype(__count_type::_M_pi);
410 static_assert(
alignof(remove_pointer_t<pointer>) > 1);
412 constexpr _Atomic_count() noexcept = default;
415 _Atomic_count(__count_type&& __c) noexcept
416 : _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
423 auto __val = _M_val.load(memory_order_relaxed);
424 _GLIBCXX_TSAN_MUTEX_DESTROY(&_M_val);
425 __glibcxx_assert(!(__val & _S_lock_bit));
426 if (
auto __pi =
reinterpret_cast<pointer
>(__val))
428 if constexpr (__is_shared_ptr<_Tp>)
431 __pi->_M_weak_release();
435 _Atomic_count(
const _Atomic_count&) =
delete;
436 _Atomic_count& operator=(
const _Atomic_count&) =
delete;
445 auto __current = _M_val.load(memory_order_relaxed);
446 while (__current & _S_lock_bit)
448#if __glibcxx_atomic_wait
449 __detail::__thread_relax();
451 __current = _M_val.load(memory_order_relaxed);
454 _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
456 while (!_M_val.compare_exchange_strong(__current,
457 __current | _S_lock_bit,
459 memory_order_relaxed))
461 _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(&_M_val);
462#if __glibcxx_atomic_wait
463 __detail::__thread_relax();
465 __current = __current & ~_S_lock_bit;
466 _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
468 _GLIBCXX_TSAN_MUTEX_LOCKED(&_M_val);
469 return reinterpret_cast<pointer
>(__current);
476 _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
477 _M_val.fetch_sub(1, __o);
478 _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
484 _M_swap_unlock(__count_type& __c,
memory_order __o)
noexcept
486 if (__o != memory_order_seq_cst)
487 __o = memory_order_release;
488 auto __x =
reinterpret_cast<uintptr_t
>(__c._M_pi);
489 _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
490 __x = _M_val.exchange(__x, __o);
491 _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
492 __c._M_pi =
reinterpret_cast<pointer
>(__x & ~_S_lock_bit);
495#if __glibcxx_atomic_wait
500 _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
501 auto __v = _M_val.fetch_sub(1, memory_order_relaxed);
502 _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
503 _M_val.wait(__v & ~_S_lock_bit, __o);
507 notify_one() noexcept
509 _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
511 _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
515 notify_all() noexcept
517 _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
519 _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
524 mutable __atomic_base<uintptr_t> _M_val{0};
525 static constexpr uintptr_t _S_lock_bit{1};
528 typename _Tp::element_type* _M_ptr =
nullptr;
529 _Atomic_count _M_refcount;
531 static typename _Atomic_count::pointer
532 _S_add_ref(
typename _Atomic_count::pointer __p)
536 if constexpr (__is_shared_ptr<_Tp>)
537 __p->_M_add_ref_copy();
539 __p->_M_weak_add_ref();
544 constexpr _Sp_atomic() noexcept = default;
547 _Sp_atomic(value_type __r) noexcept
548 : _M_ptr(__r._M_ptr), _M_refcount(
std::
move(__r._M_refcount))
551 ~_Sp_atomic() =
default;
553 _Sp_atomic(
const _Sp_atomic&) =
delete;
554 void operator=(
const _Sp_atomic&) =
delete;
559 __glibcxx_assert(__o != memory_order_release
560 && __o != memory_order_acq_rel);
563 if (__o != memory_order_seq_cst)
564 __o = memory_order_acquire;
567 auto __pi = _M_refcount.lock(__o);
568 __ret._M_ptr = _M_ptr;
569 __ret._M_refcount._M_pi = _S_add_ref(__pi);
570 _M_refcount.unlock(memory_order_relaxed);
577 _M_refcount.lock(memory_order_acquire);
578 std::swap(_M_ptr, __r._M_ptr);
579 _M_refcount._M_swap_unlock(__r._M_refcount, __o);
583 compare_exchange_strong(value_type& __expected, value_type __desired,
586 bool __result =
true;
587 auto __pi = _M_refcount.lock(memory_order_acquire);
588 if (_M_ptr == __expected._M_ptr
589 && __pi == __expected._M_refcount._M_pi)
591 _M_ptr = __desired._M_ptr;
592 _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
597 __expected._M_ptr = _M_ptr;
598 __expected._M_refcount._M_pi = _S_add_ref(__pi);
599 _M_refcount.unlock(__o2);
605#if __glibcxx_atomic_wait
609 auto __pi = _M_refcount.lock(memory_order_acquire);
610 if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
611 _M_refcount._M_wait_unlock(__o);
613 _M_refcount.unlock(memory_order_relaxed);
617 notify_one() noexcept
619 _M_refcount.notify_one();
623 notify_all() noexcept
625 _M_refcount.notify_all();
630 template<
typename _Tp>
631 struct atomic<shared_ptr<_Tp>>
634 using value_type = shared_ptr<_Tp>;
636 static constexpr bool is_always_lock_free =
false;
639 is_lock_free() const noexcept
642 constexpr atomic() noexcept = default;
646 constexpr atomic(nullptr_t) noexcept : atomic() { }
648 atomic(shared_ptr<_Tp> __r) noexcept
652 atomic(
const atomic&) =
delete;
653 void operator=(
const atomic&) =
delete;
656 load(
memory_order __o = memory_order_seq_cst)
const noexcept
657 {
return _M_impl.load(__o); }
659 operator shared_ptr<_Tp>() const noexcept
660 {
return _M_impl.load(memory_order_seq_cst); }
663 store(shared_ptr<_Tp> __desired,
665 { _M_impl.swap(__desired, __o); }
668 operator=(shared_ptr<_Tp> __desired)
noexcept
669 { _M_impl.swap(__desired, memory_order_seq_cst); }
674 operator=(nullptr_t)
noexcept
678 exchange(shared_ptr<_Tp> __desired,
681 _M_impl.swap(__desired, __o);
686 compare_exchange_strong(shared_ptr<_Tp>& __expected,
687 shared_ptr<_Tp> __desired,
690 return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
694 compare_exchange_strong(value_type& __expected, value_type __desired,
700 case memory_order_acq_rel:
701 __o2 = memory_order_acquire;
703 case memory_order_release:
704 __o2 = memory_order_relaxed;
709 return compare_exchange_strong(__expected,
std::move(__desired),
714 compare_exchange_weak(value_type& __expected, value_type __desired,
717 return compare_exchange_strong(__expected,
std::move(__desired),
722 compare_exchange_weak(value_type& __expected, value_type __desired,
725 return compare_exchange_strong(__expected,
std::move(__desired), __o);
728#if __glibcxx_atomic_wait
730 wait(value_type __old,
737 notify_one() noexcept
739 _M_impl.notify_one();
743 notify_all() noexcept
745 _M_impl.notify_all();
750 _Sp_atomic<shared_ptr<_Tp>> _M_impl;
753 template<
typename _Tp>
754 struct atomic<weak_ptr<_Tp>>
757 using value_type = weak_ptr<_Tp>;
759 static constexpr bool is_always_lock_free =
false;
762 is_lock_free() const noexcept
765 constexpr atomic() noexcept = default;
767 atomic(weak_ptr<_Tp> __r) noexcept
771 atomic(
const atomic&) =
delete;
772 void operator=(
const atomic&) =
delete;
775 load(
memory_order __o = memory_order_seq_cst)
const noexcept
776 {
return _M_impl.load(__o); }
778 operator weak_ptr<_Tp>() const noexcept
779 {
return _M_impl.load(memory_order_seq_cst); }
782 store(weak_ptr<_Tp> __desired,
784 { _M_impl.swap(__desired, __o); }
787 operator=(weak_ptr<_Tp> __desired)
noexcept
788 { _M_impl.swap(__desired, memory_order_seq_cst); }
791 exchange(weak_ptr<_Tp> __desired,
794 _M_impl.swap(__desired, __o);
799 compare_exchange_strong(weak_ptr<_Tp>& __expected,
800 weak_ptr<_Tp> __desired,
803 return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
807 compare_exchange_strong(value_type& __expected, value_type __desired,
813 case memory_order_acq_rel:
814 __o2 = memory_order_acquire;
816 case memory_order_release:
817 __o2 = memory_order_relaxed;
822 return compare_exchange_strong(__expected,
std::move(__desired),
827 compare_exchange_weak(value_type& __expected, value_type __desired,
830 return compare_exchange_strong(__expected,
std::move(__desired),
835 compare_exchange_weak(value_type& __expected, value_type __desired,
838 return compare_exchange_strong(__expected,
std::move(__desired), __o);
841#if __glibcxx_atomic_wait
843 wait(value_type __old,
850 notify_one() noexcept
852 _M_impl.notify_one();
856 notify_all() noexcept
858 _M_impl.notify_all();
863 _Sp_atomic<weak_ptr<_Tp>> _M_impl;
868_GLIBCXX_END_NAMESPACE_VERSION
// NOTE(review): the following lines are tooltip residue from a documentation
// generator, not part of the header; kept as comments for reference.
// - std::move(_Tp&& __t): converts a value to an rvalue.
// - std::memory_order: enumeration of the memory-ordering constraints.
// - std::lock(_L1&, _L2&, _L3&...): generic deadlock-avoiding locking.
// - namespace std: the top-level namespace for ISO C++ entities.
// - std::shared_ptr: a smart pointer with reference-counted copy semantics.
// - std::owner_less: primary template for ownership-based ordering.