3 // Copyright (C) 2008-2023 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
25 /** @file include/atomic
26 * This is a Standard C++ Library header.
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #pragma GCC system_header
37 #if __cplusplus < 201103L
38 # include <bits/c++0x_warning.h>
41 #include <bits/atomic_base.h>
43 namespace std _GLIBCXX_VISIBILITY(default)
45 _GLIBCXX_BEGIN_NAMESPACE_VERSION
// NOTE(review): this excerpt's embedded original line numbers are
// non-contiguous — the `template<> struct atomic<bool>` header, braces and
// return-type lines are missing from this view. Restore against upstream
// libstdc++ before compiling. Comments below describe only what is visible.
52 #if __cplusplus >= 201703L
53 # define __cpp_lib_atomic_is_always_lock_free 201603L
56 template<typename _Tp>
// atomic<bool>: thin wrapper that forwards every operation to a
// __atomic_base<bool> member.
60 // NB: No operators or fetch-operations for this type.
64 using value_type = bool;
67 __atomic_base<bool> _M_base;
// Non-copyable; default-constructible and trivially destructible.
70 atomic() noexcept = default;
71 ~atomic() noexcept = default;
72 atomic(const atomic&) = delete;
73 atomic& operator=(const atomic&) = delete;
74 atomic& operator=(const atomic&) volatile = delete;
76 constexpr atomic(bool __i) noexcept : _M_base(__i) { }
// Assignment and conversion delegate to the base object.
79 operator=(bool __i) noexcept
80 { return _M_base.operator=(__i); }
83 operator=(bool __i) volatile noexcept
84 { return _M_base.operator=(__i); }
86 operator bool() const noexcept
87 { return _M_base.load(); }
89 operator bool() const volatile noexcept
90 { return _M_base.load(); }
93 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
96 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
98 #if __cplusplus >= 201703L
99 static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
// store/load/exchange forward to __atomic_base<bool>, defaulting to
// memory_order_seq_cst.
103 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
104 { _M_base.store(__i, __m); }
107 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
108 { _M_base.store(__i, __m); }
111 load(memory_order __m = memory_order_seq_cst) const noexcept
112 { return _M_base.load(__m); }
115 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
116 { return _M_base.load(__m); }
119 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
120 { return _M_base.exchange(__i, __m); }
124 memory_order __m = memory_order_seq_cst) volatile noexcept
125 { return _M_base.exchange(__i, __m); }
// Compare-and-swap overloads: explicit success/failure orders, plus
// single-order convenience forms; all forward to _M_base.
128 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
129 memory_order __m2) noexcept
130 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
133 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
134 memory_order __m2) volatile noexcept
135 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
138 compare_exchange_weak(bool& __i1, bool __i2,
139 memory_order __m = memory_order_seq_cst) noexcept
140 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
143 compare_exchange_weak(bool& __i1, bool __i2,
144 memory_order __m = memory_order_seq_cst) volatile noexcept
145 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
148 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
149 memory_order __m2) noexcept
150 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
153 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
154 memory_order __m2) volatile noexcept
155 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
158 compare_exchange_strong(bool& __i1, bool __i2,
159 memory_order __m = memory_order_seq_cst) noexcept
160 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
163 compare_exchange_strong(bool& __i1, bool __i2,
164 memory_order __m = memory_order_seq_cst) volatile noexcept
165 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
// C++20 wait/notify support, guarded by the feature-test macro.
167 #if __cpp_lib_atomic_wait
169 wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
170 { _M_base.wait(__old, __m); }
172 // TODO add const volatile overload
175 notify_one() noexcept
176 { _M_base.notify_one(); }
179 notify_all() noexcept
180 { _M_base.notify_all(); }
181 #endif // __cpp_lib_atomic_wait
// NOTE(review): primary template std::atomic<_Tp>. The struct header, many
// braces and return-type lines are missing from this excerpt (the embedded
// original numbering is non-contiguous) — confirm against upstream before use.
184 /// @cond undocumented
// _GLIBCXX20_INIT: value-initializes _M_i only when P0883 (atomic value
// initialization) is active; expands to nothing otherwise.
185 #if __cpp_lib_atomic_value_initialization
186 # define _GLIBCXX20_INIT(I) = I
188 # define _GLIBCXX20_INIT(I)
193 * @brief Generic atomic type, primary class template.
195 * @tparam _Tp Type to be made atomic, must be trivially copyable.
197 template<typename _Tp>
200 using value_type = _Tp;
203 // Align 1/2/4/8/16-byte types to at least their size.
204 static constexpr int _S_min_alignment
205 = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
208 static constexpr int _S_alignment
209 = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
211 alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());
// Compile-time requirements on _Tp (the C++20 block adds the
// copy/move-constructible and -assignable checks).
213 static_assert(__is_trivially_copyable(_Tp),
214 "std::atomic requires a trivially copyable type");
216 static_assert(sizeof(_Tp) > 0,
217 "Incomplete or zero-sized types are not supported");
219 #if __cplusplus > 201703L
220 static_assert(is_copy_constructible_v<_Tp>);
221 static_assert(is_move_constructible_v<_Tp>);
222 static_assert(is_copy_assignable_v<_Tp>);
223 static_assert(is_move_assignable_v<_Tp>);
228 ~atomic() noexcept = default;
229 atomic(const atomic&) = delete;
230 atomic& operator=(const atomic&) = delete;
231 atomic& operator=(const atomic&) volatile = delete;
// The converting constructor clears any padding bits so that
// compare_exchange on padded types compares only value bits.
233 constexpr atomic(_Tp __i) noexcept : _M_i(__i)
235 #if __cplusplus >= 201402L && __has_builtin(__builtin_clear_padding)
236 if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
237 __builtin_clear_padding(std::__addressof(_M_i));
241 operator _Tp() const noexcept
244 operator _Tp() const volatile noexcept
248 operator=(_Tp __i) noexcept
249 { store(__i); return __i; }
252 operator=(_Tp __i) volatile noexcept
253 { store(__i); return __i; }
256 is_lock_free() const noexcept
258 // Produce a fake, minimally aligned pointer.
259 return __atomic_is_lock_free(sizeof(_M_i),
260 reinterpret_cast<void *>(-_S_alignment))
264 is_lock_free() const volatile noexcept
266 // Produce a fake, minimally aligned pointer.
267 return __atomic_is_lock_free(sizeof(_M_i),
268 reinterpret_cast<void *>(-_S_alignment))
271 #if __cplusplus >= 201703L
272 static constexpr bool is_always_lock_free
273 = __atomic_always_lock_free(sizeof(_M_i), 0);
// store/load/exchange use the compiler's generic __atomic_* builtins;
// load/exchange build the result in a suitably-aligned local buffer.
277 store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
279 __atomic_store(std::__addressof(_M_i),
280 __atomic_impl::__clear_padding(__i),
285 store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
287 __atomic_store(std::__addressof(_M_i),
288 __atomic_impl::__clear_padding(__i),
293 load(memory_order __m = memory_order_seq_cst) const noexcept
295 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
296 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
297 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
302 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
304 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
305 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
306 __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
311 exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
313 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
314 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
315 __atomic_exchange(std::__addressof(_M_i),
316 __atomic_impl::__clear_padding(__i),
323 memory_order __m = memory_order_seq_cst) volatile noexcept
325 alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
326 _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
327 __atomic_exchange(std::__addressof(_M_i),
328 __atomic_impl::__clear_padding(__i),
// CAS family: the bool argument to __compare_exchange selects weak (true)
// vs strong (false); single-order forms derive the failure order via
// __cmpexch_failure_order.
334 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
335 memory_order __f) noexcept
337 return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
342 compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
343 memory_order __f) volatile noexcept
345 return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
350 compare_exchange_weak(_Tp& __e, _Tp __i,
351 memory_order __m = memory_order_seq_cst) noexcept
352 { return compare_exchange_weak(__e, __i, __m,
353 __cmpexch_failure_order(__m)); }
356 compare_exchange_weak(_Tp& __e, _Tp __i,
357 memory_order __m = memory_order_seq_cst) volatile noexcept
358 { return compare_exchange_weak(__e, __i, __m,
359 __cmpexch_failure_order(__m)); }
362 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
363 memory_order __f) noexcept
365 return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
370 compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
371 memory_order __f) volatile noexcept
373 return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
378 compare_exchange_strong(_Tp& __e, _Tp __i,
379 memory_order __m = memory_order_seq_cst) noexcept
380 { return compare_exchange_strong(__e, __i, __m,
381 __cmpexch_failure_order(__m)); }
384 compare_exchange_strong(_Tp& __e, _Tp __i,
385 memory_order __m = memory_order_seq_cst) volatile noexcept
386 { return compare_exchange_strong(__e, __i, __m,
387 __cmpexch_failure_order(__m)); }
// C++20 wait/notify: wait polls via a lambda capturing the given order.
389 #if __cpp_lib_atomic_wait
391 wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
393 std::__atomic_wait_address_v(std::addressof(_M_i), __old,
394 [__m, this] { return this->load(__m); });
397 // TODO add const volatile overload
400 notify_one() noexcept
401 { std::__atomic_notify_address(std::addressof(_M_i), false); }
404 notify_all() noexcept
405 { std::__atomic_notify_address(std::addressof(_M_i), true); }
406 #endif // __cpp_lib_atomic_wait
408 #undef _GLIBCXX20_INIT
// NOTE(review): partial specialization std::atomic<_Tp*>; forwards to a
// __atomic_base<_Tp*> member (_M_b). Struct header, braces, return-type
// lines and the _M_b declaration itself are missing from this excerpt
// (non-contiguous embedded numbering) — confirm against upstream.
410 /// Partial specialization for pointer types.
411 template<typename _Tp>
414 using value_type = _Tp*;
415 using difference_type = ptrdiff_t;
417 typedef _Tp* __pointer_type;
418 typedef __atomic_base<_Tp*> __base_type;
421 atomic() noexcept = default;
422 ~atomic() noexcept = default;
423 atomic(const atomic&) = delete;
424 atomic& operator=(const atomic&) = delete;
425 atomic& operator=(const atomic&) volatile = delete;
427 constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
429 operator __pointer_type() const noexcept
430 { return __pointer_type(_M_b); }
432 operator __pointer_type() const volatile noexcept
433 { return __pointer_type(_M_b); }
436 operator=(__pointer_type __p) noexcept
437 { return _M_b.operator=(__p); }
440 operator=(__pointer_type __p) volatile noexcept
441 { return _M_b.operator=(__p); }
// Increment/decrement operators: each static_assert rejects pointers to
// non-object types (e.g. void*, function pointers), for which arithmetic
// is ill-formed.
444 operator++(int) noexcept
446 #if __cplusplus >= 201703L
447 static_assert( is_object<_Tp>::value, "pointer to object type" );
453 operator++(int) volatile noexcept
455 #if __cplusplus >= 201703L
456 static_assert( is_object<_Tp>::value, "pointer to object type" );
462 operator--(int) noexcept
464 #if __cplusplus >= 201703L
465 static_assert( is_object<_Tp>::value, "pointer to object type" );
471 operator--(int) volatile noexcept
473 #if __cplusplus >= 201703L
474 static_assert( is_object<_Tp>::value, "pointer to object type" );
480 operator++() noexcept
482 #if __cplusplus >= 201703L
483 static_assert( is_object<_Tp>::value, "pointer to object type" );
489 operator++() volatile noexcept
491 #if __cplusplus >= 201703L
492 static_assert( is_object<_Tp>::value, "pointer to object type" );
498 operator--() noexcept
500 #if __cplusplus >= 201703L
501 static_assert( is_object<_Tp>::value, "pointer to object type" );
507 operator--() volatile noexcept
509 #if __cplusplus >= 201703L
510 static_assert( is_object<_Tp>::value, "pointer to object type" );
516 operator+=(ptrdiff_t __d) noexcept
518 #if __cplusplus >= 201703L
519 static_assert( is_object<_Tp>::value, "pointer to object type" );
521 return _M_b.operator+=(__d);
525 operator+=(ptrdiff_t __d) volatile noexcept
527 #if __cplusplus >= 201703L
528 static_assert( is_object<_Tp>::value, "pointer to object type" );
530 return _M_b.operator+=(__d);
534 operator-=(ptrdiff_t __d) noexcept
536 #if __cplusplus >= 201703L
537 static_assert( is_object<_Tp>::value, "pointer to object type" );
539 return _M_b.operator-=(__d);
543 operator-=(ptrdiff_t __d) volatile noexcept
545 #if __cplusplus >= 201703L
546 static_assert( is_object<_Tp>::value, "pointer to object type" );
548 return _M_b.operator-=(__d);
552 is_lock_free() const noexcept
553 { return _M_b.is_lock_free(); }
556 is_lock_free() const volatile noexcept
557 { return _M_b.is_lock_free(); }
559 #if __cplusplus >= 201703L
560 static constexpr bool is_always_lock_free
561 = ATOMIC_POINTER_LOCK_FREE == 2;
// store/load/exchange/CAS forward directly to the base object.
565 store(__pointer_type __p,
566 memory_order __m = memory_order_seq_cst) noexcept
567 { return _M_b.store(__p, __m); }
570 store(__pointer_type __p,
571 memory_order __m = memory_order_seq_cst) volatile noexcept
572 { return _M_b.store(__p, __m); }
575 load(memory_order __m = memory_order_seq_cst) const noexcept
576 { return _M_b.load(__m); }
579 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
580 { return _M_b.load(__m); }
583 exchange(__pointer_type __p,
584 memory_order __m = memory_order_seq_cst) noexcept
585 { return _M_b.exchange(__p, __m); }
588 exchange(__pointer_type __p,
589 memory_order __m = memory_order_seq_cst) volatile noexcept
590 { return _M_b.exchange(__p, __m); }
593 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
594 memory_order __m1, memory_order __m2) noexcept
595 { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
598 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
600 memory_order __m2) volatile noexcept
601 { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }
604 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
605 memory_order __m = memory_order_seq_cst) noexcept
607 return compare_exchange_weak(__p1, __p2, __m,
608 __cmpexch_failure_order(__m));
612 compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
613 memory_order __m = memory_order_seq_cst) volatile noexcept
615 return compare_exchange_weak(__p1, __p2, __m,
616 __cmpexch_failure_order(__m));
620 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
621 memory_order __m1, memory_order __m2) noexcept
622 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
625 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
627 memory_order __m2) volatile noexcept
628 { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
631 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
632 memory_order __m = memory_order_seq_cst) noexcept
634 return _M_b.compare_exchange_strong(__p1, __p2, __m,
635 __cmpexch_failure_order(__m));
639 compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
640 memory_order __m = memory_order_seq_cst) volatile noexcept
642 return _M_b.compare_exchange_strong(__p1, __p2, __m,
643 __cmpexch_failure_order(__m));
646 #if __cpp_lib_atomic_wait
648 wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
649 { _M_b.wait(__old, __m); }
651 // TODO add const volatile overload
654 notify_one() noexcept
655 { _M_b.notify_one(); }
658 notify_all() noexcept
659 { _M_b.notify_all(); }
660 #endif // __cpp_lib_atomic_wait
// fetch_add/fetch_sub are only valid for pointers to object types.
663 fetch_add(ptrdiff_t __d,
664 memory_order __m = memory_order_seq_cst) noexcept
666 #if __cplusplus >= 201703L
667 static_assert( is_object<_Tp>::value, "pointer to object type" );
669 return _M_b.fetch_add(__d, __m);
673 fetch_add(ptrdiff_t __d,
674 memory_order __m = memory_order_seq_cst) volatile noexcept
676 #if __cplusplus >= 201703L
677 static_assert( is_object<_Tp>::value, "pointer to object type" );
679 return _M_b.fetch_add(__d, __m);
683 fetch_sub(ptrdiff_t __d,
684 memory_order __m = memory_order_seq_cst) noexcept
686 #if __cplusplus >= 201703L
687 static_assert( is_object<_Tp>::value, "pointer to object type" );
689 return _M_b.fetch_sub(__d, __m);
693 fetch_sub(ptrdiff_t __d,
694 memory_order __m = memory_order_seq_cst) volatile noexcept
696 #if __cplusplus >= 201703L
697 static_assert( is_object<_Tp>::value, "pointer to object type" );
699 return _M_b.fetch_sub(__d, __m);
// NOTE(review): explicit specializations of std::atomic for the standard
// integral and character types. Each derives from __atomic_base<T> and
// inherits its conversion and assignment operators. In this excerpt every
// specialization is missing its `template<>`, braces and closing `#endif`/`};`
// lines (the embedded original numbering is non-contiguous) — confirm against
// upstream. Note the `noexcept= default` spacing on some default constructors
// is present in the upstream source as well; kept byte-identical here.
704 /// Explicit specialization for char.
706 struct atomic<char> : __atomic_base<char>
708 typedef char __integral_type;
709 typedef __atomic_base<char> __base_type;
711 atomic() noexcept = default;
712 ~atomic() noexcept = default;
713 atomic(const atomic&) = delete;
714 atomic& operator=(const atomic&) = delete;
715 atomic& operator=(const atomic&) volatile = delete;
717 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
719 using __base_type::operator __integral_type;
720 using __base_type::operator=;
722 #if __cplusplus >= 201703L
723 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
727 /// Explicit specialization for signed char.
729 struct atomic<signed char> : __atomic_base<signed char>
731 typedef signed char __integral_type;
732 typedef __atomic_base<signed char> __base_type;
734 atomic() noexcept= default;
735 ~atomic() noexcept = default;
736 atomic(const atomic&) = delete;
737 atomic& operator=(const atomic&) = delete;
738 atomic& operator=(const atomic&) volatile = delete;
740 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
742 using __base_type::operator __integral_type;
743 using __base_type::operator=;
745 #if __cplusplus >= 201703L
746 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
750 /// Explicit specialization for unsigned char.
752 struct atomic<unsigned char> : __atomic_base<unsigned char>
754 typedef unsigned char __integral_type;
755 typedef __atomic_base<unsigned char> __base_type;
757 atomic() noexcept= default;
758 ~atomic() noexcept = default;
759 atomic(const atomic&) = delete;
760 atomic& operator=(const atomic&) = delete;
761 atomic& operator=(const atomic&) volatile = delete;
763 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
765 using __base_type::operator __integral_type;
766 using __base_type::operator=;
768 #if __cplusplus >= 201703L
769 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
773 /// Explicit specialization for short.
775 struct atomic<short> : __atomic_base<short>
777 typedef short __integral_type;
778 typedef __atomic_base<short> __base_type;
780 atomic() noexcept = default;
781 ~atomic() noexcept = default;
782 atomic(const atomic&) = delete;
783 atomic& operator=(const atomic&) = delete;
784 atomic& operator=(const atomic&) volatile = delete;
786 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
788 using __base_type::operator __integral_type;
789 using __base_type::operator=;
791 #if __cplusplus >= 201703L
792 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
796 /// Explicit specialization for unsigned short.
798 struct atomic<unsigned short> : __atomic_base<unsigned short>
800 typedef unsigned short __integral_type;
801 typedef __atomic_base<unsigned short> __base_type;
803 atomic() noexcept = default;
804 ~atomic() noexcept = default;
805 atomic(const atomic&) = delete;
806 atomic& operator=(const atomic&) = delete;
807 atomic& operator=(const atomic&) volatile = delete;
809 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
811 using __base_type::operator __integral_type;
812 using __base_type::operator=;
814 #if __cplusplus >= 201703L
815 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
819 /// Explicit specialization for int.
821 struct atomic<int> : __atomic_base<int>
823 typedef int __integral_type;
824 typedef __atomic_base<int> __base_type;
826 atomic() noexcept = default;
827 ~atomic() noexcept = default;
828 atomic(const atomic&) = delete;
829 atomic& operator=(const atomic&) = delete;
830 atomic& operator=(const atomic&) volatile = delete;
832 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
834 using __base_type::operator __integral_type;
835 using __base_type::operator=;
837 #if __cplusplus >= 201703L
838 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
842 /// Explicit specialization for unsigned int.
844 struct atomic<unsigned int> : __atomic_base<unsigned int>
846 typedef unsigned int __integral_type;
847 typedef __atomic_base<unsigned int> __base_type;
849 atomic() noexcept = default;
850 ~atomic() noexcept = default;
851 atomic(const atomic&) = delete;
852 atomic& operator=(const atomic&) = delete;
853 atomic& operator=(const atomic&) volatile = delete;
855 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
857 using __base_type::operator __integral_type;
858 using __base_type::operator=;
860 #if __cplusplus >= 201703L
861 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
865 /// Explicit specialization for long.
867 struct atomic<long> : __atomic_base<long>
869 typedef long __integral_type;
870 typedef __atomic_base<long> __base_type;
872 atomic() noexcept = default;
873 ~atomic() noexcept = default;
874 atomic(const atomic&) = delete;
875 atomic& operator=(const atomic&) = delete;
876 atomic& operator=(const atomic&) volatile = delete;
878 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
880 using __base_type::operator __integral_type;
881 using __base_type::operator=;
883 #if __cplusplus >= 201703L
884 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
888 /// Explicit specialization for unsigned long.
890 struct atomic<unsigned long> : __atomic_base<unsigned long>
892 typedef unsigned long __integral_type;
893 typedef __atomic_base<unsigned long> __base_type;
895 atomic() noexcept = default;
896 ~atomic() noexcept = default;
897 atomic(const atomic&) = delete;
898 atomic& operator=(const atomic&) = delete;
899 atomic& operator=(const atomic&) volatile = delete;
901 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
903 using __base_type::operator __integral_type;
904 using __base_type::operator=;
906 #if __cplusplus >= 201703L
907 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
911 /// Explicit specialization for long long.
913 struct atomic<long long> : __atomic_base<long long>
915 typedef long long __integral_type;
916 typedef __atomic_base<long long> __base_type;
918 atomic() noexcept = default;
919 ~atomic() noexcept = default;
920 atomic(const atomic&) = delete;
921 atomic& operator=(const atomic&) = delete;
922 atomic& operator=(const atomic&) volatile = delete;
924 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
926 using __base_type::operator __integral_type;
927 using __base_type::operator=;
929 #if __cplusplus >= 201703L
930 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
934 /// Explicit specialization for unsigned long long.
936 struct atomic<unsigned long long> : __atomic_base<unsigned long long>
938 typedef unsigned long long __integral_type;
939 typedef __atomic_base<unsigned long long> __base_type;
941 atomic() noexcept = default;
942 ~atomic() noexcept = default;
943 atomic(const atomic&) = delete;
944 atomic& operator=(const atomic&) = delete;
945 atomic& operator=(const atomic&) volatile = delete;
947 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
949 using __base_type::operator __integral_type;
950 using __base_type::operator=;
952 #if __cplusplus >= 201703L
953 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
957 /// Explicit specialization for wchar_t.
959 struct atomic<wchar_t> : __atomic_base<wchar_t>
961 typedef wchar_t __integral_type;
962 typedef __atomic_base<wchar_t> __base_type;
964 atomic() noexcept = default;
965 ~atomic() noexcept = default;
966 atomic(const atomic&) = delete;
967 atomic& operator=(const atomic&) = delete;
968 atomic& operator=(const atomic&) volatile = delete;
970 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
972 using __base_type::operator __integral_type;
973 using __base_type::operator=;
975 #if __cplusplus >= 201703L
976 static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
// char8_t support is conditional on the configure-time macro.
980 #ifdef _GLIBCXX_USE_CHAR8_T
981 /// Explicit specialization for char8_t.
983 struct atomic<char8_t> : __atomic_base<char8_t>
985 typedef char8_t __integral_type;
986 typedef __atomic_base<char8_t> __base_type;
988 atomic() noexcept = default;
989 ~atomic() noexcept = default;
990 atomic(const atomic&) = delete;
991 atomic& operator=(const atomic&) = delete;
992 atomic& operator=(const atomic&) volatile = delete;
994 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
996 using __base_type::operator __integral_type;
997 using __base_type::operator=;
// NOTE(review): this one is guarded by `> 201402L` rather than the
// `>= 201703L` used by the siblings — presumably equivalent in practice,
// but worth confirming against upstream.
999 #if __cplusplus > 201402L
1000 static constexpr bool is_always_lock_free
1001 = ATOMIC_CHAR8_T_LOCK_FREE == 2;
1006 /// Explicit specialization for char16_t.
1008 struct atomic<char16_t> : __atomic_base<char16_t>
1010 typedef char16_t __integral_type;
1011 typedef __atomic_base<char16_t> __base_type;
1013 atomic() noexcept = default;
1014 ~atomic() noexcept = default;
1015 atomic(const atomic&) = delete;
1016 atomic& operator=(const atomic&) = delete;
1017 atomic& operator=(const atomic&) volatile = delete;
1019 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1021 using __base_type::operator __integral_type;
1022 using __base_type::operator=;
1024 #if __cplusplus >= 201703L
1025 static constexpr bool is_always_lock_free
1026 = ATOMIC_CHAR16_T_LOCK_FREE == 2;
1030 /// Explicit specialization for char32_t.
1032 struct atomic<char32_t> : __atomic_base<char32_t>
1034 typedef char32_t __integral_type;
1035 typedef __atomic_base<char32_t> __base_type;
1037 atomic() noexcept = default;
1038 ~atomic() noexcept = default;
1039 atomic(const atomic&) = delete;
1040 atomic& operator=(const atomic&) = delete;
1041 atomic& operator=(const atomic&) volatile = delete;
1043 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1045 using __base_type::operator __integral_type;
1046 using __base_type::operator=;
1048 #if __cplusplus >= 201703L
1049 static constexpr bool is_always_lock_free
1050 = ATOMIC_CHAR32_T_LOCK_FREE == 2;
// Standard convenience typedefs for the atomic specializations
// ([atomics.types.generic] / [atomics.syn]).
/// atomic_bool
1056 typedef atomic<bool> atomic_bool;
/// atomic_char
1059 typedef atomic<char> atomic_char;
/// atomic_schar
1062 typedef atomic<signed char> atomic_schar;
/// atomic_uchar
1065 typedef atomic<unsigned char> atomic_uchar;
/// atomic_short
1068 typedef atomic<short> atomic_short;
/// atomic_ushort
1071 typedef atomic<unsigned short> atomic_ushort;
/// atomic_int
1074 typedef atomic<int> atomic_int;
/// atomic_uint
1077 typedef atomic<unsigned int> atomic_uint;
/// atomic_long
1080 typedef atomic<long> atomic_long;
/// atomic_ulong
1083 typedef atomic<unsigned long> atomic_ulong;
/// atomic_llong
1086 typedef atomic<long long> atomic_llong;
/// atomic_ullong
1089 typedef atomic<unsigned long long> atomic_ullong;
/// atomic_wchar_t
1092 typedef atomic<wchar_t> atomic_wchar_t;
1094 #ifdef _GLIBCXX_USE_CHAR8_T
/// atomic_char8_t
1096 typedef atomic<char8_t> atomic_char8_t;
/// atomic_char16_t
1100 typedef atomic<char16_t> atomic_char16_t;
/// atomic_char32_t
1103 typedef atomic<char32_t> atomic_char32_t;
1105 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1106 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1107 // 2441. Exact-width atomic typedefs should be provided
/// atomic_int8_t
1110 typedef atomic<int8_t> atomic_int8_t;
/// atomic_uint8_t
1113 typedef atomic<uint8_t> atomic_uint8_t;
/// atomic_int16_t
1116 typedef atomic<int16_t> atomic_int16_t;
/// atomic_uint16_t
1119 typedef atomic<uint16_t> atomic_uint16_t;
/// atomic_int32_t
1122 typedef atomic<int32_t> atomic_int32_t;
/// atomic_uint32_t
1125 typedef atomic<uint32_t> atomic_uint32_t;
/// atomic_int64_t
1128 typedef atomic<int64_t> atomic_int64_t;
/// atomic_uint64_t
1131 typedef atomic<uint64_t> atomic_uint64_t;
1134 /// atomic_int_least8_t
1135 typedef atomic<int_least8_t> atomic_int_least8_t;
1137 /// atomic_uint_least8_t
1138 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1140 /// atomic_int_least16_t
1141 typedef atomic<int_least16_t> atomic_int_least16_t;
1143 /// atomic_uint_least16_t
1144 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1146 /// atomic_int_least32_t
1147 typedef atomic<int_least32_t> atomic_int_least32_t;
1149 /// atomic_uint_least32_t
1150 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1152 /// atomic_int_least64_t
1153 typedef atomic<int_least64_t> atomic_int_least64_t;
1155 /// atomic_uint_least64_t
1156 typedef atomic<uint_least64_t> atomic_uint_least64_t;
1159 /// atomic_int_fast8_t
1160 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1162 /// atomic_uint_fast8_t
1163 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1165 /// atomic_int_fast16_t
1166 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1168 /// atomic_uint_fast16_t
1169 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1171 /// atomic_int_fast32_t
1172 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1174 /// atomic_uint_fast32_t
1175 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1177 /// atomic_int_fast64_t
1178 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1180 /// atomic_uint_fast64_t
1181 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
/// atomic_intptr_t
1186 typedef atomic<intptr_t> atomic_intptr_t;
1188 /// atomic_uintptr_t
1189 typedef atomic<uintptr_t> atomic_uintptr_t;
/// atomic_size_t
1192 typedef atomic<size_t> atomic_size_t;
1194 /// atomic_ptrdiff_t
1195 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1197 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
/// atomic_intmax_t
1199 typedef atomic<intmax_t> atomic_intmax_t;
1201 /// atomic_uintmax_t
1202 typedef atomic<uintmax_t> atomic_uintmax_t;
// NOTE(review): C-compatible non-member functions for std::atomic_flag.
// The `inline bool` / `inline void` return-type lines are missing from this
// excerpt (non-contiguous embedded numbering) — confirm against upstream.
// Each function simply forwards to the corresponding member; the _explicit
// forms take a memory_order, the plain forms default to seq_cst.
1205 // Function definitions, atomic_flag operations.
1207 atomic_flag_test_and_set_explicit(atomic_flag* __a,
1208 memory_order __m) noexcept
1209 { return __a->test_and_set(__m); }
1212 atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
1213 memory_order __m) noexcept
1214 { return __a->test_and_set(__m); }
// C++20: non-modifying test of the flag (P1135).
1216 #if __cpp_lib_atomic_flag_test
1218 atomic_flag_test(const atomic_flag* __a) noexcept
1219 { return __a->test(); }
1222 atomic_flag_test(const volatile atomic_flag* __a) noexcept
1223 { return __a->test(); }
1226 atomic_flag_test_explicit(const atomic_flag* __a,
1227 memory_order __m) noexcept
1228 { return __a->test(__m); }
1231 atomic_flag_test_explicit(const volatile atomic_flag* __a,
1232 memory_order __m) noexcept
1233 { return __a->test(__m); }
1237 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
1238 { __a->clear(__m); }
1241 atomic_flag_clear_explicit(volatile atomic_flag* __a,
1242 memory_order __m) noexcept
1243 { __a->clear(__m); }
1246 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1247 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1250 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1251 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1254 atomic_flag_clear(atomic_flag* __a) noexcept
1255 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1258 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1259 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
// C++20 wait/notify for atomic_flag.
1261 #if __cpp_lib_atomic_wait
1263 atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
1264 { __a->wait(__old); }
1267 atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
1268 memory_order __m) noexcept
1269 { __a->wait(__old, __m); }
1272 atomic_flag_notify_one(atomic_flag* __a) noexcept
1273 { __a->notify_one(); }
1276 atomic_flag_notify_all(atomic_flag* __a) noexcept
1277 { __a->notify_all(); }
1278 #endif // __cpp_lib_atomic_wait
1280 /// @cond undocumented
1281 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1282 // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
// __atomic_val_t<_Tp> is a non-deduced context (via __type_identity_t), so
// the value argument of the non-member functions below does not participate
// in template argument deduction — only the atomic<_Tp>* argument does.
1283 template<typename _Tp>
1284 using __atomic_val_t = __type_identity_t<_Tp>;
// Difference type for fetch_add/fetch_sub: taken from atomic<_Tp> itself
// (ptrdiff_t for pointers, the value type for integers).
1285 template<typename _Tp>
1286 using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1289 // [atomics.nonmembers] Non-member functions.
1290 // Function templates generally applicable to atomic types.
// Query whether operations on *__a are lock-free (forwards to the member).
1291 template<typename _ITp>
1293 atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
1294 { return __a->is_lock_free(); }
1296 template<typename _ITp>
1298 atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
1299 { return __a->is_lock_free(); }
// Non-atomic initialization helper: implemented here as a relaxed store.
// Must only be called on an object not yet observed by other threads.
1301 template<typename _ITp>
1303 atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1304 { __a->store(__i, memory_order_relaxed); }
1306 template<typename _ITp>
1308 atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1309 { __a->store(__i, memory_order_relaxed); }
// Explicit-memory-order store/load/exchange: each forwards to the
// corresponding member of atomic<_ITp> with the caller's order __m.
// The value parameter is __atomic_val_t<_ITp> so it is non-deduced.
1311 template<typename _ITp>
1313 atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1314 memory_order __m) noexcept
1315 { __a->store(__i, __m); }
1317 template<typename _ITp>
1319 atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1320 memory_order __m) noexcept
1321 { __a->store(__i, __m); }
1323 template<typename _ITp>
1325 atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
1326 { return __a->load(__m); }
1328 template<typename _ITp>
1330 atomic_load_explicit(const volatile atomic<_ITp>* __a,
1331 memory_order __m) noexcept
1332 { return __a->load(__m); }
// Atomically replace the value with __i, returning the previous value.
1334 template<typename _ITp>
1336 atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
1337 memory_order __m) noexcept
1338 { return __a->exchange(__i, __m); }
1340 template<typename _ITp>
1342 atomic_exchange_explicit(volatile atomic<_ITp>* __a,
1343 __atomic_val_t<_ITp> __i,
1344 memory_order __m) noexcept
1345 { return __a->exchange(__i, __m); }
// Explicit-order compare-exchange: *__i1 is the expected value (updated on
// failure), __i2 the desired value. Forwards to the member functions; the
// success order parameter (__m1) line is not visible in this extract —
// NOTE(review): confirm the signature against the full header.
1347 template<typename _ITp>
1349 atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
1350 __atomic_val_t<_ITp>* __i1,
1351 __atomic_val_t<_ITp> __i2,
1353 memory_order __m2) noexcept
1354 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
1356 template<typename _ITp>
1358 atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
1359 __atomic_val_t<_ITp>* __i1,
1360 __atomic_val_t<_ITp> __i2,
1362 memory_order __m2) noexcept
1363 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
// Strong variant: does not fail spuriously.
1365 template<typename _ITp>
1367 atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
1368 __atomic_val_t<_ITp>* __i1,
1369 __atomic_val_t<_ITp> __i2,
1371 memory_order __m2) noexcept
1372 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1374 template<typename _ITp>
1376 atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
1377 __atomic_val_t<_ITp>* __i1,
1378 __atomic_val_t<_ITp> __i2,
1380 memory_order __m2) noexcept
1381 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
// Sequentially-consistent convenience forms: each delegates to the
// corresponding *_explicit function with memory_order_seq_cst.
1384 template<typename _ITp>
1386 atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1387 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1389 template<typename _ITp>
1391 atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1392 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1394 template<typename _ITp>
1396 atomic_load(const atomic<_ITp>* __a) noexcept
1397 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1399 template<typename _ITp>
1401 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1402 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1404 template<typename _ITp>
1406 atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1407 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1409 template<typename _ITp>
1411 atomic_exchange(volatile atomic<_ITp>* __a,
1412 __atomic_val_t<_ITp> __i) noexcept
1413 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
// Seq_cst compare-exchange wrappers: both the success and failure orders
// are memory_order_seq_cst; *__i1 receives the observed value on failure.
1415 template<typename _ITp>
1417 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1418 __atomic_val_t<_ITp>* __i1,
1419 __atomic_val_t<_ITp> __i2) noexcept
1421 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1422 memory_order_seq_cst,
1423 memory_order_seq_cst);
1426 template<typename _ITp>
1428 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1429 __atomic_val_t<_ITp>* __i1,
1430 __atomic_val_t<_ITp> __i2) noexcept
1432 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1433 memory_order_seq_cst,
1434 memory_order_seq_cst);
// Strong variant: no spurious failures.
1437 template<typename _ITp>
1439 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1440 __atomic_val_t<_ITp>* __i1,
1441 __atomic_val_t<_ITp> __i2) noexcept
1443 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1444 memory_order_seq_cst,
1445 memory_order_seq_cst);
1448 template<typename _ITp>
1450 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1451 __atomic_val_t<_ITp>* __i1,
1452 __atomic_val_t<_ITp> __i2) noexcept
1454 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1455 memory_order_seq_cst,
1456 memory_order_seq_cst);
// Non-member wait/notify for atomic<_Tp> (C++20 P1135R6), gated on
// platform support. value_type is used so __old is non-deduced.
1460 #if __cpp_lib_atomic_wait
// Block until the atomic's value differs from __old.
1461 template<typename _Tp>
1463 atomic_wait(const atomic<_Tp>* __a,
1464 typename std::atomic<_Tp>::value_type __old) noexcept
1465 { __a->wait(__old); }
// As above, with an explicit memory order for the comparison load.
1467 template<typename _Tp>
1469 atomic_wait_explicit(const atomic<_Tp>* __a,
1470 typename std::atomic<_Tp>::value_type __old,
1471 std::memory_order __m) noexcept
1472 { __a->wait(__old, __m); }
// Wake at least one waiter blocked on *__a.
1474 template<typename _Tp>
1476 atomic_notify_one(atomic<_Tp>* __a) noexcept
1477 { __a->notify_one(); }
// Wake all waiters blocked on *__a.
1479 template<typename _Tp>
1481 atomic_notify_all(atomic<_Tp>* __a) noexcept
1482 { __a->notify_all(); }
1483 #endif // __cpp_lib_atomic_wait
1485 // Function templates for atomic_integral and atomic_pointer operations only.
1486 // Some operations (and, or, xor) are only available for atomic integrals,
1487 // which is implemented by taking a parameter of type __atomic_base<_ITp>*.
// add/sub take __atomic_diff_t (ptrdiff_t for atomic pointers) and forward
// to the member fetch_* with the explicit order __m; each returns the value
// held before the operation.
1489 template<typename _ITp>
1491 atomic_fetch_add_explicit(atomic<_ITp>* __a,
1492 __atomic_diff_t<_ITp> __i,
1493 memory_order __m) noexcept
1494 { return __a->fetch_add(__i, __m); }
1496 template<typename _ITp>
1498 atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
1499 __atomic_diff_t<_ITp> __i,
1500 memory_order __m) noexcept
1501 { return __a->fetch_add(__i, __m); }
1503 template<typename _ITp>
1505 atomic_fetch_sub_explicit(atomic<_ITp>* __a,
1506 __atomic_diff_t<_ITp> __i,
1507 memory_order __m) noexcept
1508 { return __a->fetch_sub(__i, __m); }
1510 template<typename _ITp>
1512 atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
1513 __atomic_diff_t<_ITp> __i,
1514 memory_order __m) noexcept
1515 { return __a->fetch_sub(__i, __m); }
// Bitwise ops: restricted to integral atomics by accepting __atomic_base*.
1517 template<typename _ITp>
1519 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
1520 __atomic_val_t<_ITp> __i,
1521 memory_order __m) noexcept
1522 { return __a->fetch_and(__i, __m); }
1524 template<typename _ITp>
1526 atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
1527 __atomic_val_t<_ITp> __i,
1528 memory_order __m) noexcept
1529 { return __a->fetch_and(__i, __m); }
1531 template<typename _ITp>
1533 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
1534 __atomic_val_t<_ITp> __i,
1535 memory_order __m) noexcept
1536 { return __a->fetch_or(__i, __m); }
1538 template<typename _ITp>
1540 atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
1541 __atomic_val_t<_ITp> __i,
1542 memory_order __m) noexcept
1543 { return __a->fetch_or(__i, __m); }
1545 template<typename _ITp>
1547 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
1548 __atomic_val_t<_ITp> __i,
1549 memory_order __m) noexcept
1550 { return __a->fetch_xor(__i, __m); }
1552 template<typename _ITp>
1554 atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
1555 __atomic_val_t<_ITp> __i,
1556 memory_order __m) noexcept
1557 { return __a->fetch_xor(__i, __m); }
// Seq_cst convenience forms of the fetch operations: each delegates to the
// *_explicit function with memory_order_seq_cst and returns the old value.
1559 template<typename _ITp>
1561 atomic_fetch_add(atomic<_ITp>* __a,
1562 __atomic_diff_t<_ITp> __i) noexcept
1563 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1565 template<typename _ITp>
1567 atomic_fetch_add(volatile atomic<_ITp>* __a,
1568 __atomic_diff_t<_ITp> __i) noexcept
1569 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1571 template<typename _ITp>
1573 atomic_fetch_sub(atomic<_ITp>* __a,
1574 __atomic_diff_t<_ITp> __i) noexcept
1575 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1577 template<typename _ITp>
1579 atomic_fetch_sub(volatile atomic<_ITp>* __a,
1580 __atomic_diff_t<_ITp> __i) noexcept
1581 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
// Bitwise forms accept __atomic_base<_ITp>* — integral atomics only.
1583 template<typename _ITp>
1585 atomic_fetch_and(__atomic_base<_ITp>* __a,
1586 __atomic_val_t<_ITp> __i) noexcept
1587 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1589 template<typename _ITp>
1591 atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1592 __atomic_val_t<_ITp> __i) noexcept
1593 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1595 template<typename _ITp>
1597 atomic_fetch_or(__atomic_base<_ITp>* __a,
1598 __atomic_val_t<_ITp> __i) noexcept
1599 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1601 template<typename _ITp>
1603 atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1604 __atomic_val_t<_ITp> __i) noexcept
1605 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1607 template<typename _ITp>
1609 atomic_fetch_xor(__atomic_base<_ITp>* __a,
1610 __atomic_val_t<_ITp> __i) noexcept
1611 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1613 template<typename _ITp>
1615 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1616 __atomic_val_t<_ITp> __i) noexcept
1617 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
// Floating-point specializations of std::atomic (C++20, feature-test macro
// __cpp_lib_atomic_float). Each derives its operations from __atomic_float,
// deletes copy assignment, and pulls in the base's assignment from the
// value type. Extended types (_FloatNN, bfloat16) are provided only when
// the corresponding __STDCPP_*_T__ macro says the type exists.
// NOTE(review): the `template<>` markers, braces and per-#ifdef #endif
// lines are not visible in this extract — compare with the full header.
1619 #if __cplusplus > 201703L
1620 #define __cpp_lib_atomic_float 201711L
1622 struct atomic<float> : __atomic_float<float>
1624 atomic() noexcept = default;
// Implicit conversion from float is intentional here (matches atomic<T>).
1627 atomic(float __fp) noexcept : __atomic_float<float>(__fp)
1630 atomic& operator=(const atomic&) volatile = delete;
1631 atomic& operator=(const atomic&) = delete;
1633 using __atomic_float<float>::operator=;
1637 struct atomic<double> : __atomic_float<double>
1639 atomic() noexcept = default;
1642 atomic(double __fp) noexcept : __atomic_float<double>(__fp)
1645 atomic& operator=(const atomic&) volatile = delete;
1646 atomic& operator=(const atomic&) = delete;
1648 using __atomic_float<double>::operator=;
1652 struct atomic<long double> : __atomic_float<long double>
1654 atomic() noexcept = default;
1657 atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
1660 atomic& operator=(const atomic&) volatile = delete;
1661 atomic& operator=(const atomic&) = delete;
1663 using __atomic_float<long double>::operator=;
// Extended floating-point types (C++23 <stdfloat> interop).
1666 #ifdef __STDCPP_FLOAT16_T__
1668 struct atomic<_Float16> : __atomic_float<_Float16>
1670 atomic() noexcept = default;
1673 atomic(_Float16 __fp) noexcept : __atomic_float<_Float16>(__fp)
1676 atomic& operator=(const atomic&) volatile = delete;
1677 atomic& operator=(const atomic&) = delete;
1679 using __atomic_float<_Float16>::operator=;
1683 #ifdef __STDCPP_FLOAT32_T__
1685 struct atomic<_Float32> : __atomic_float<_Float32>
1687 atomic() noexcept = default;
1690 atomic(_Float32 __fp) noexcept : __atomic_float<_Float32>(__fp)
1693 atomic& operator=(const atomic&) volatile = delete;
1694 atomic& operator=(const atomic&) = delete;
1696 using __atomic_float<_Float32>::operator=;
1700 #ifdef __STDCPP_FLOAT64_T__
1702 struct atomic<_Float64> : __atomic_float<_Float64>
1704 atomic() noexcept = default;
1707 atomic(_Float64 __fp) noexcept : __atomic_float<_Float64>(__fp)
1710 atomic& operator=(const atomic&) volatile = delete;
1711 atomic& operator=(const atomic&) = delete;
1713 using __atomic_float<_Float64>::operator=;
1717 #ifdef __STDCPP_FLOAT128_T__
1719 struct atomic<_Float128> : __atomic_float<_Float128>
1721 atomic() noexcept = default;
1724 atomic(_Float128 __fp) noexcept : __atomic_float<_Float128>(__fp)
1727 atomic& operator=(const atomic&) volatile = delete;
1728 atomic& operator=(const atomic&) = delete;
1730 using __atomic_float<_Float128>::operator=;
1734 #ifdef __STDCPP_BFLOAT16_T__
1736 struct atomic<__gnu_cxx::__bfloat16_t> : __atomic_float<__gnu_cxx::__bfloat16_t>
1738 atomic() noexcept = default;
1741 atomic(__gnu_cxx::__bfloat16_t __fp) noexcept : __atomic_float<__gnu_cxx::__bfloat16_t>(__fp)
1744 atomic& operator=(const atomic&) volatile = delete;
1745 atomic& operator=(const atomic&) = delete;
1747 using __atomic_float<__gnu_cxx::__bfloat16_t>::operator=;
1751 #define __cpp_lib_atomic_ref 201806L
1753 /// Class template to provide atomic operations on a non-atomic variable.
// atomic_ref (C++20 P0019): wraps a reference to __t and performs atomic
// operations on it via the __atomic_ref base. The referenced object must
// outlive every atomic_ref bound to it. Copying the ref is allowed (both
// copies refer to the same object); assigning to the ref itself is not —
// the inherited operator= assigns through to the referenced value.
1754 template<typename _Tp>
1755 struct atomic_ref : __atomic_ref<_Tp>
1758 atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
1761 atomic_ref& operator=(const atomic_ref&) = delete;
1763 atomic_ref(const atomic_ref&) = default;
1765 using __atomic_ref<_Tp>::operator=;
1768 #define __cpp_lib_atomic_lock_free_type_aliases 201907L
// atomic_signed_lock_free / atomic_unsigned_lock_free (C++20): pick the
// integer type most likely to be both lock-free and efficiently waitable.
// Preference order: the platform's native wait type if futex-style waiting
// exists, otherwise int, then long, then char, based on the ATOMIC_*_LOCK_FREE
// macros. If none of the #elif branches match, the aliases are not defined.
1769 #ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
1770 using atomic_signed_lock_free
1771 = atomic<make_signed_t<__detail::__platform_wait_t>>;
1772 using atomic_unsigned_lock_free
1773 = atomic<make_unsigned_t<__detail::__platform_wait_t>>;
1774 #elif ATOMIC_INT_LOCK_FREE || !(ATOMIC_LONG_LOCK_FREE || ATOMIC_CHAR_LOCK_FREE)
1775 using atomic_signed_lock_free = atomic<signed int>;
1776 using atomic_unsigned_lock_free = atomic<unsigned int>;
1777 #elif ATOMIC_LONG_LOCK_FREE
1778 using atomic_signed_lock_free = atomic<signed long>;
1779 using atomic_unsigned_lock_free = atomic<unsigned long>;
1780 #elif ATOMIC_CHAR_LOCK_FREE
1781 using atomic_signed_lock_free = atomic<signed char>;
1782 using atomic_unsigned_lock_free = atomic<unsigned char>;
1787 /// @} group atomics
1789 _GLIBCXX_END_NAMESPACE_VERSION
1794 #endif // _GLIBCXX_ATOMIC