30 #ifndef _SHARED_PTR_ATOMIC_H 31 #define _SHARED_PTR_ATOMIC_H 1 34 #include <bits/shared_ptr.h> 37 #if defined _GLIBCXX_TSAN && __has_include(<sanitizer/tsan_interface.h>) 38 #include <sanitizer/tsan_interface.h> 39 #define _GLIBCXX_TSAN_MUTEX_DESTROY(X) \ 40 __tsan_mutex_destroy(X, __tsan_mutex_not_static) 41 #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X) \ 42 __tsan_mutex_pre_lock(X, __tsan_mutex_not_static|__tsan_mutex_try_lock) 43 #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X) __tsan_mutex_post_lock(X, \ 44 __tsan_mutex_not_static|__tsan_mutex_try_lock_failed, 0) 45 #define _GLIBCXX_TSAN_MUTEX_LOCKED(X) \ 46 __tsan_mutex_post_lock(X, __tsan_mutex_not_static, 0) 47 #define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X) __tsan_mutex_pre_unlock(X, 0) 48 #define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X) __tsan_mutex_post_unlock(X, 0) 49 #define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X) __tsan_mutex_pre_signal(X, 0) 50 #define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X) __tsan_mutex_post_signal(X, 0) 52 #define _GLIBCXX_TSAN_MUTEX_DESTROY(X) 53 #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK(X) 54 #define _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(X) 55 #define _GLIBCXX_TSAN_MUTEX_LOCKED(X) 56 #define _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(X) 57 #define _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(X) 58 #define _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(X) 59 #define _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(X) 62 namespace std _GLIBCXX_VISIBILITY(default)
64 _GLIBCXX_BEGIN_NAMESPACE_VERSION
// _Sp_locker: scoped lock type used by the deprecated free-function atomic
// shared_ptr operations below to serialize access to a shared_ptr object,
// keyed on the object's address (const void*).
// NOTE(review): the enclosing class head and surrounding #if branches are not
// visible in this chunk (extraction truncated the header) — confirm layout
// against upstream <bits/shared_ptr_atomic.h>.
// Non-copyable: the locker owns its lock(s) for its whole lifetime.
76 _Sp_locker(
const _Sp_locker&) =
delete;
77 _Sp_locker& operator=(
const _Sp_locker&) =
delete;
// Acquire the lock(s) guarding one, or two, pointer addresses.
81 _Sp_locker(
const void*) noexcept;
82 _Sp_locker(
const void*,
const void*) noexcept;
// _M_key1/_M_key2: presumably indices identifying the locked mutex buckets so
// the destructor can release them — TODO confirm against upstream.
86 unsigned char _M_key1;
87 unsigned char _M_key2;
// Alternative (presumably single-threaded) build: constructor is a no-op, so
// "locking" costs nothing.
89 explicit _Sp_locker(
const void*,
const void* =
nullptr) { }
// atomic_is_lock_free for __shared_ptr: deprecated since C++20 in favour of
// std::atomic<std::shared_ptr<T>> (see _GLIBCXX20_DEPRECATED_SUGGEST).
// Reports lock-free only when no threads are active (__gthread_active_p()==0),
// i.e. the lock-based implementation degenerates to no locking.
101 template<
typename _Tp, _Lock_policy _Lp>
102 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
104 atomic_is_lock_free(const __shared_ptr<_Tp, _Lp>*)
107 return __gthread_active_p() == 0;
// shared_ptr overload: forwards to the __shared_ptr overload using the
// default lock policy.
// NOTE(review): the signature line for this overload is missing from this
// chunk — truncated by extraction.
113 template<
typename _Tp>
114 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
117 {
return std::atomic_is_lock_free<_Tp, __default_lock_policy>(__p); }
// Deprecated atomic_load family: each overload takes an address-keyed
// _Sp_locker on __p and returns a copy of *__p.
// NOTE(review): the bodies here are truncated by extraction — only the
// _Sp_locker acquisition lines survive; confirm the return statements against
// upstream <bits/shared_ptr_atomic.h>.
// atomic_load_explicit(const shared_ptr<_Tp>*, memory_order)
130 template<
typename _Tp>
131 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
135 _Sp_locker __lock{__p};
// atomic_load(const shared_ptr<_Tp>*) — presumably delegates with
// memory_order_seq_cst; body not visible here.
139 template<
typename _Tp>
140 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
// atomic_load_explicit for the internal __shared_ptr template.
145 template<
typename _Tp, _Lock_policy _Lp>
146 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
147 inline __shared_ptr<_Tp, _Lp>
150 _Sp_locker __lock{__p};
// atomic_load for __shared_ptr.
154 template<
typename _Tp, _Lock_policy _Lp>
155 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
156 inline __shared_ptr<_Tp, _Lp>
157 atomic_load(
const __shared_ptr<_Tp, _Lp>* __p)
// Deprecated atomic_store family: locks __p's bucket via _Sp_locker, then
// (per the visible __shared_ptr overload) replaces *__p with __r.
// NOTE(review): bodies are truncated by extraction — the swap/store
// statements are not visible; confirm against upstream.
// atomic_store_explicit(shared_ptr<_Tp>*, shared_ptr<_Tp>, memory_order)
170 template<
typename _Tp>
171 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
176 _Sp_locker __lock{__p};
// atomic_store(shared_ptr<_Tp>*, shared_ptr<_Tp>) — body not visible here.
180 template<
typename _Tp>
181 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
// atomic_store_explicit for the internal __shared_ptr template; note __r is
// taken by value so the old value can be released outside the lock.
186 template<
typename _Tp, _Lock_policy _Lp>
187 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
189 atomic_store_explicit(__shared_ptr<_Tp, _Lp>* __p,
190 __shared_ptr<_Tp, _Lp> __r,
193 _Sp_locker __lock{__p};
// atomic_store for __shared_ptr.
197 template<
typename _Tp, _Lock_policy _Lp>
198 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
200 atomic_store(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
// Deprecated atomic_exchange family: under an _Sp_locker on __p, swaps *__p
// with __r and returns the previous value (per the visible fragments, the
// non-explicit forms delegate with memory_order_seq_cst).
// NOTE(review): bodies truncated by extraction; confirm against upstream.
// atomic_exchange_explicit(shared_ptr<_Tp>*, shared_ptr<_Tp>, memory_order)
211 template<
typename _Tp>
212 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
217 _Sp_locker __lock{__p};
// atomic_exchange(shared_ptr) — delegates with memory_order_seq_cst (the
// trailing argument of the call survives below).
222 template<
typename _Tp>
223 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
228 memory_order_seq_cst);
// atomic_exchange_explicit for the internal __shared_ptr template.
231 template<
typename _Tp, _Lock_policy _Lp>
232 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
233 inline __shared_ptr<_Tp, _Lp>
234 atomic_exchange_explicit(__shared_ptr<_Tp, _Lp>* __p,
235 __shared_ptr<_Tp, _Lp> __r,
238 _Sp_locker __lock{__p};
// atomic_exchange for __shared_ptr — delegates with memory_order_seq_cst.
243 template<
typename _Tp, _Lock_policy _Lp>
244 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
245 inline __shared_ptr<_Tp, _Lp>
246 atomic_exchange(__shared_ptr<_Tp, _Lp>* __p, __shared_ptr<_Tp, _Lp> __r)
249 memory_order_seq_cst);
// Deprecated atomic compare-exchange family. The visible pattern: lock BOTH
// __p and __v with one _Sp_locker (two-address overload), then compare
// *__p against *__v by ownership — equality of the stored pointers plus
// neither owner-before the other (owner_less-style __less) — i.e. they must
// share the same control block, not merely compare equal.
// NOTE(review): success/failure branches are truncated by extraction;
// confirm against upstream <bits/shared_ptr_atomic.h>.
// strong-explicit for shared_ptr
264 template<
typename _Tp>
265 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
274 _Sp_locker __lock{__p, __v};
276 if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
// strong for shared_ptr — delegates with seq_cst/seq_cst orders.
287 template<
typename _Tp>
288 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
294 std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
// weak-explicit for shared_ptr — signature/body not visible here.
297 template<
typename _Tp>
298 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
// weak for shared_ptr — delegates with seq_cst/seq_cst orders.
310 template<
typename _Tp>
311 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
317 std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
// strong-explicit for the internal __shared_ptr template; __x presumably
// holds the displaced value so it is released after unlocking — confirm.
320 template<
typename _Tp, _Lock_policy _Lp>
321 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
323 atomic_compare_exchange_strong_explicit(__shared_ptr<_Tp, _Lp>* __p,
324 __shared_ptr<_Tp, _Lp>* __v,
325 __shared_ptr<_Tp, _Lp> __w,
329 __shared_ptr<_Tp, _Lp> __x;
330 _Sp_locker __lock{__p, __v};
332 if (*__p == *__v && !__less(*__p, *__v) && !__less(*__v, *__p))
// strong for __shared_ptr — delegates with seq_cst/seq_cst orders.
343 template<
typename _Tp, _Lock_policy _Lp>
344 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
346 atomic_compare_exchange_strong(__shared_ptr<_Tp, _Lp>* __p,
347 __shared_ptr<_Tp, _Lp>* __v,
348 __shared_ptr<_Tp, _Lp> __w)
351 std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
// weak-explicit for __shared_ptr — lock-based implementation, so "weak"
// presumably forwards to strong; body not visible here.
354 template<
typename _Tp, _Lock_policy _Lp>
355 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
357 atomic_compare_exchange_weak_explicit(__shared_ptr<_Tp, _Lp>* __p,
358 __shared_ptr<_Tp, _Lp>* __v,
359 __shared_ptr<_Tp, _Lp> __w,
// weak for __shared_ptr — delegates with seq_cst/seq_cst orders.
367 template<
typename _Tp, _Lock_policy _Lp>
368 _GLIBCXX20_DEPRECATED_SUGGEST(
"std::atomic<std::shared_ptr<T>>")
370 atomic_compare_exchange_weak(__shared_ptr<_Tp, _Lp>* __p,
371 __shared_ptr<_Tp, _Lp>* __v,
372 __shared_ptr<_Tp, _Lp> __w)
375 std::move(__w), memory_order_seq_cst, memory_order_seq_cst);
// C++20 lock-bit implementation used by atomic<shared_ptr<T>> and
// atomic<weak_ptr<T>>: _Sp_atomic<_Tp> stores the element pointer plus an
// _Atomic_count whose uintptr_t value is the control-block pointer with its
// low bit (_S_lock_bit) used as a spin lock.
// NOTE(review): class heads and many braces are truncated by extraction —
// confirm structure against upstream <bits/shared_ptr_atomic.h>.
381 #ifdef __glibcxx_atomic_shared_ptr // C++ >= 20 && HOSTED 382 template<
typename _Tp>
391 template<
typename _Tp>
// _Tp is shared_ptr<T> or weak_ptr<T>; element_type comes from it.
394 using value_type = _Tp;
395 using element_type =
typename _Tp::element_type;
397 friend struct atomic<_Tp>;
// __count_type is the smart pointer's refcount member type; pointer is the
// control-block pointer type stored inside it (_M_pi).
404 using __count_type = decltype(_Tp::_M_refcount);
405 using uintptr_t = __UINTPTR_TYPE__;
408 using pointer = decltype(__count_type::_M_pi);
413 constexpr _Atomic_count() noexcept =
default;
// Take ownership of the control-block pointer by stealing __c._M_pi and
// storing it as an integer (lock bit clear).
416 _Atomic_count(__count_type&& __c) noexcept
417 : _M_val(reinterpret_cast<uintptr_t>(__c._M_pi))
// Destructor fragment: asserts the lock bit is not held, then releases the
// reference — shared (strong) release for shared_ptr, weak release otherwise.
424 auto __val = _AtomicRef(&_M_val).load(memory_order_relaxed);
425 _GLIBCXX_TSAN_MUTEX_DESTROY(&_M_val);
426 __glibcxx_assert(!(__val & _S_lock_bit));
427 if (
auto __pi = reinterpret_cast<pointer>(__val))
429 if constexpr (__is_shared_ptr<_Tp>)
432 __pi->_M_weak_release();
// Non-copyable: _M_val is a lock word plus an owning pointer.
436 _Atomic_count(
const _Atomic_count&) =
delete;
437 _Atomic_count& operator=(
const _Atomic_count&) =
delete;
// lock(): spin until the low bit of _M_val can be set, then return the
// control-block pointer (lock bit masked off). First spins on a relaxed
// load while the bit is set (cheap wait), then attempts the CAS; the
// TSAN mutex annotations model the lock word as a mutex for ThreadSanitizer.
// NOTE(review): the #if __glibcxx_atomic_wait blocks are truncated by
// extraction (the #else/#endif lines are missing) — confirm upstream.
446 _AtomicRef __aref(&_M_val);
447 auto __current = __aref.load(memory_order_relaxed);
448 while (__current & _S_lock_bit)
450 #if __glibcxx_atomic_wait 451 __detail::__thread_relax();
453 __current = __aref.load(memory_order_relaxed);
456 _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
458 while (!__aref.compare_exchange_strong(__current,
459 __current | _S_lock_bit,
461 memory_order_relaxed))
463 _GLIBCXX_TSAN_MUTEX_TRY_LOCK_FAILED(&_M_val);
464 #if __glibcxx_atomic_wait 465 __detail::__thread_relax();
// Retry with the lock bit cleared from the freshly observed value.
467 __current = __current & ~_S_lock_bit;
468 _GLIBCXX_TSAN_MUTEX_TRY_LOCK(&_M_val);
470 _GLIBCXX_TSAN_MUTEX_LOCKED(&_M_val);
471 return reinterpret_cast<pointer
>(__current);
// unlock(__o): clear the lock bit by subtracting 1 (the bit is known set).
478 _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
479 _AtomicRef(&_M_val).fetch_sub(1, __o);
480 _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
// _M_swap_unlock: atomically install __c's control-block pointer (which has
// the lock bit clear, so this also unlocks) and hand the previous pointer
// back to __c. Orders weaker than seq_cst are strengthened to release.
486 _M_swap_unlock(__count_type& __c,
memory_order __o) noexcept
488 if (__o != memory_order_seq_cst)
489 __o = memory_order_release;
490 auto __x =
reinterpret_cast<uintptr_t
>(__c._M_pi);
491 _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
492 __x = _AtomicRef(&_M_val).exchange(__x, __o);
493 _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
494 __c._M_pi =
reinterpret_cast<pointer
>(__x & ~_S_lock_bit);
// _M_wait_unlock: unlock (fetch_sub of the lock bit), then block via
// __atomic_wait_address until either the stored control-block pointer or
// the observed element pointer differs from the snapshot taken here. The
// predicate re-locks briefly to re-read __ptr consistently. Only compiled
// when __glibcxx_atomic_wait is available.
// NOTE(review): several declaration lines (e.g. the variable receiving the
// fetch_sub result, __lo's declaration) are truncated by extraction —
// confirm against upstream.
497 #if __glibcxx_atomic_wait 500 _M_wait_unlock(
const element_type*
const& __ptr,
memory_order __o)
const noexcept
502 auto __old_ptr = __ptr;
503 _GLIBCXX_TSAN_MUTEX_PRE_UNLOCK(&_M_val);
505 = _AtomicRef(&_M_val).fetch_sub(1, memory_order_relaxed) - 1u;
506 _GLIBCXX_TSAN_MUTEX_POST_UNLOCK(&_M_val);
// Weaken anything below seq_cst to acquire for the re-lock inside the
// wait predicate.
511 if (__o != memory_order_seq_cst)
512 __lo = memory_order_acquire;
514 std::__atomic_wait_address(
// Predicate: wake if the control-block pointer changed, else re-lock and
// compare the element pointer too (ABA on the low bit alone isn't enough).
516 [=, &__ptr,
this](uintptr_t __new_pi)
518 if (__old_pi != (__new_pi & ~_S_lock_bit))
525 __new_pi =
reinterpret_cast<uintptr_t
>(this->
lock(__lo));
526 auto __new_ptr = __ptr;
527 this->unlock(memory_order_relaxed);
529 return __new_pi != __old_pi || __new_ptr != __old_ptr;
// Value function: reload the lock word with the caller's memory order.
531 [__o,
this] {
return _AtomicRef(&_M_val).load(__o); });
// notify_one/notify_all: wake waiters blocked in _M_wait_unlock; the TSAN
// signal annotations mark this as a condition-variable-style signal.
535 notify_one() noexcept
537 _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
538 _AtomicRef(&_M_val).notify_one();
539 _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
543 notify_all() noexcept
545 _GLIBCXX_TSAN_MUTEX_PRE_SIGNAL(&_M_val);
546 _AtomicRef(&_M_val).notify_all();
547 _GLIBCXX_TSAN_MUTEX_POST_SIGNAL(&_M_val);
// _Atomic_count data: the lock word itself, aligned for __atomic_ref, with
// the low bit reserved as the spin-lock bit.
552 using _AtomicRef = __atomic_ref<uintptr_t>;
553 alignas(_AtomicRef::required_alignment)
mutable uintptr_t _M_val{0};
554 static constexpr uintptr_t _S_lock_bit{1};
// _Sp_atomic state: the element pointer is stored separately from the
// (lock-word-encoded) control-block pointer.
557 element_type* _M_ptr =
nullptr;
558 _Atomic_count _M_refcount;
// _S_add_ref: bump the refcount on a (non-null, per the truncated guard)
// control block — strong ref for shared_ptr, weak ref for weak_ptr — and
// return it, so a loaded value owns its own reference.
560 static typename _Atomic_count::pointer
561 _S_add_ref(
typename _Atomic_count::pointer __p)
565 if constexpr (__is_shared_ptr<_Tp>)
566 __p->_M_add_ref_copy();
568 __p->_M_weak_add_ref();
573 constexpr _Sp_atomic() noexcept =
default;
// Take ownership of __r's pointer and refcount (moved in by value).
576 _Sp_atomic(value_type __r) noexcept
577 : _M_ptr(__r._M_ptr), _M_refcount(
std::move(__r._M_refcount))
580 ~_Sp_atomic() =
default;
582 _Sp_atomic(
const _Sp_atomic&) =
delete;
583 void operator=(
const _Sp_atomic&) =
delete;
// load(__o): __o must be a valid load order (not release/acq_rel); orders
// below seq_cst are strengthened to acquire. Locks, copies both pointer and
// (via _S_add_ref) a new reference, unlocks relaxed (the lock gave the
// ordering). NOTE(review): __ret's declaration is truncated by extraction.
588 __glibcxx_assert(__o != memory_order_release
589 && __o != memory_order_acq_rel);
592 if (__o != memory_order_seq_cst)
593 __o = memory_order_acquire;
596 auto __pi = _M_refcount.lock(__o);
597 __ret._M_ptr = _M_ptr;
598 __ret._M_refcount._M_pi = _S_add_ref(__pi);
599 _M_refcount.unlock(memory_order_relaxed);
// swap fragment (store/exchange path): lock, swap element pointers, then
// swap-and-unlock the control-block pointers in one atomic exchange.
606 _M_refcount.lock(memory_order_acquire);
607 std::swap(_M_ptr, __r._M_ptr);
608 _M_refcount._M_swap_unlock(__r._M_refcount, __o);
// compare_exchange_strong: success iff BOTH the element pointer and the
// control-block pointer match __expected (identity, not equivalence).
// On success install __desired; on failure copy the current value (with a
// fresh reference) into __expected and unlock with the failure order __o2.
612 compare_exchange_strong(value_type& __expected, value_type __desired,
615 bool __result =
true;
616 auto __pi = _M_refcount.lock(memory_order_acquire);
617 if (_M_ptr == __expected._M_ptr
618 && __pi == __expected._M_refcount._M_pi)
620 _M_ptr = __desired._M_ptr;
621 _M_refcount._M_swap_unlock(__desired._M_refcount, __o);
626 __expected._M_ptr = _M_ptr;
627 __expected._M_refcount._M_pi = _S_add_ref(__pi);
628 _M_refcount.unlock(__o2);
// wait fragment: if the stored value still equals __old, block in
// _M_wait_unlock; otherwise just unlock. Requires __glibcxx_atomic_wait.
634 #if __glibcxx_atomic_wait 638 auto __pi = _M_refcount.lock(memory_order_acquire);
639 if (_M_ptr == __old._M_ptr && __pi == __old._M_refcount._M_pi)
640 _M_refcount._M_wait_unlock(_M_ptr, __o);
642 _M_refcount.unlock(memory_order_relaxed);
// Forward notifications to the lock word's waiters.
646 notify_one() noexcept
648 _M_refcount.notify_one();
652 notify_all() noexcept
654 _M_refcount.notify_all();
// atomic<shared_ptr<_Tp>> partial specialization (C++20): a thin wrapper
// delegating every operation to the _Sp_atomic engine above. Never
// lock-free (is_always_lock_free = false) — it uses the embedded lock bit.
// NOTE(review): many member signatures and the enum/switch scaffolding are
// truncated by extraction — confirm against upstream.
659 template<
typename _Tp>
660 struct atomic<shared_ptr<_Tp>>
665 static constexpr
bool is_always_lock_free =
false;
668 is_lock_free()
const noexcept
671 constexpr
atomic() noexcept =
default;
// Non-copyable, like all std::atomic specializations.
682 void operator=(
const atomic&) =
delete;
// load: delegate to the impl; plain load / conversion uses seq_cst.
685 load(
memory_order __o = memory_order_seq_cst)
const noexcept
686 {
return _M_impl.load(__o); }
689 {
return _M_impl.load(memory_order_seq_cst); }
// store: implemented as swap (the displaced value is destroyed by the
// temporary __desired on return).
694 { _M_impl.
swap(__desired, __o); }
698 { _M_impl.swap(__desired, memory_order_seq_cst); }
// assignment from nullptr stores an empty shared_ptr — fragment only.
703 operator=(nullptr_t) noexcept
// exchange fragment: swap leaves the old value in __desired to return it.
710 _M_impl.
swap(__desired, __o);
719 return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
// Single-order CAS: derive the failure order __o2 from __o per the
// standard's rules (acq_rel -> acquire, release -> relaxed, ...).
723 compare_exchange_strong(value_type& __expected, value_type __desired,
729 case memory_order_acq_rel:
730 __o2 = memory_order_acquire;
732 case memory_order_release:
733 __o2 = memory_order_relaxed;
738 return compare_exchange_strong(__expected,
std::move(__desired),
// weak CAS forwards to strong — the lock-based impl has no spurious failure.
743 compare_exchange_weak(value_type& __expected, value_type __desired,
746 return compare_exchange_strong(__expected,
std::move(__desired),
751 compare_exchange_weak(value_type& __expected, value_type __desired,
754 return compare_exchange_strong(__expected,
std::move(__desired), __o);
// wait/notify (only with __glibcxx_atomic_wait support).
757 #if __glibcxx_atomic_wait 759 wait(value_type __old,
766 notify_one() noexcept
768 _M_impl.notify_one();
772 notify_all() noexcept
774 _M_impl.notify_all();
779 _Sp_atomic<shared_ptr<_Tp>> _M_impl;
// atomic<weak_ptr<_Tp>> partial specialization: mirrors the shared_ptr
// specialization above, delegating to _Sp_atomic<weak_ptr<_Tp>> (which
// takes weak rather than strong references). Never lock-free.
// NOTE(review): member signatures truncated by extraction — confirm
// against upstream.
782 template<
typename _Tp>
783 struct atomic<weak_ptr<_Tp>>
788 static constexpr
bool is_always_lock_free =
false;
791 is_lock_free()
const noexcept
794 constexpr
atomic() noexcept =
default;
// Non-copyable, like all std::atomic specializations.
801 void operator=(
const atomic&) =
delete;
// load: delegate to the impl; plain load / conversion uses seq_cst.
804 load(
memory_order __o = memory_order_seq_cst)
const noexcept
805 {
return _M_impl.load(__o); }
808 {
return _M_impl.load(memory_order_seq_cst); }
// store via swap (old value dies with the __desired temporary).
813 { _M_impl.
swap(__desired, __o); }
817 { _M_impl.swap(__desired, memory_order_seq_cst); }
// exchange fragment: swap leaves the old value in __desired to return it.
823 _M_impl.
swap(__desired, __o);
832 return _M_impl.compare_exchange_strong(__expected, __desired, __o, __o2);
// Single-order CAS: derive failure order from __o (acq_rel -> acquire,
// release -> relaxed, ...).
836 compare_exchange_strong(value_type& __expected, value_type __desired,
842 case memory_order_acq_rel:
843 __o2 = memory_order_acquire;
845 case memory_order_release:
846 __o2 = memory_order_relaxed;
851 return compare_exchange_strong(__expected,
std::move(__desired),
// weak CAS forwards to strong — no spurious failure in the lock-based impl.
856 compare_exchange_weak(value_type& __expected, value_type __desired,
859 return compare_exchange_strong(__expected,
std::move(__desired),
864 compare_exchange_weak(value_type& __expected, value_type __desired,
867 return compare_exchange_strong(__expected,
std::move(__desired), __o);
// wait/notify (only with __glibcxx_atomic_wait support).
870 #if __glibcxx_atomic_wait 872 wait(value_type __old,
879 notify_one() noexcept
881 _M_impl.notify_one();
885 notify_all() noexcept
887 _M_impl.notify_all();
892 _Sp_atomic<weak_ptr<_Tp>> _M_impl;
897 _GLIBCXX_END_NAMESPACE_VERSION
900 #endif // _SHARED_PTR_ATOMIC_H void swap(shared_ptr< _Tp > &__a, shared_ptr< _Tp > &__b) noexcept
Swap overload for shared_ptr.
Generic atomic type, primary class template.
A non-owning observer for a pointer owned by a shared_ptr.
void swap(weak_ptr< _Tp > &__a, weak_ptr< _Tp > &__b) noexcept
Swap overload for weak_ptr.
__shared_ptr< _Tp, _Lp > atomic_exchange_explicit(__shared_ptr< _Tp, _Lp > *__p, __shared_ptr< _Tp, _Lp > __r, memory_order)
Atomic exchange for shared_ptr objects.
A smart pointer with reference-counted copy semantics.
ISO C++ entities toplevel namespace is std.
__shared_ptr< _Tp, _Lp > atomic_load_explicit(const __shared_ptr< _Tp, _Lp > *__p, memory_order)
Atomic load for shared_ptr objects.
Primary template owner_less.
void atomic_store_explicit(__shared_ptr< _Tp, _Lp > *__p, __shared_ptr< _Tp, _Lp > __r, memory_order)
Atomic store for shared_ptr objects.
bool atomic_compare_exchange_strong_explicit(__shared_ptr< _Tp, _Lp > *__p, __shared_ptr< _Tp, _Lp > *__v, __shared_ptr< _Tp, _Lp > __w, memory_order, memory_order)
Atomic compare-and-swap for shared_ptr objects.
constexpr std::remove_reference< _Tp >::type && move(_Tp &&__t) noexcept
Convert a value to an rvalue.
typename remove_pointer< _Tp >::type remove_pointer_t
Alias template for remove_pointer.
void lock(_L1 &__l1, _L2 &__l2, _L3 &... __l3)
Generic lock.
bool atomic_compare_exchange_weak_explicit(__shared_ptr< _Tp, _Lp > *__p, __shared_ptr< _Tp, _Lp > *__v, __shared_ptr< _Tp, _Lp > __w, memory_order __success, memory_order __failure)
Atomic compare-and-swap for shared_ptr objects.
memory_order
Enumeration for memory_order.