libstdc++
atomic
Go to the documentation of this file.
1 // -*- C++ -*- header.
2 
3 // Copyright (C) 2008-2023 Free Software Foundation, Inc.
4 //
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
9 // any later version.
10 
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
15 
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
19 
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
24 
25 /** @file include/atomic
26  * This is a Standard C++ Library header.
27  */
28 
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
31 
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
34 
35 #pragma GCC system_header
36 
37 #if __cplusplus < 201103L
38 # include <bits/c++0x_warning.h>
39 #else
40 
41 #include <bits/atomic_base.h>
42 
43 namespace std _GLIBCXX_VISIBILITY(default)
44 {
45 _GLIBCXX_BEGIN_NAMESPACE_VERSION
46 
47  /**
48  * @addtogroup atomics
49  * @{
50  */
51 
52 #if __cplusplus >= 201703L
53 # define __cpp_lib_atomic_is_always_lock_free 201603L
54 #endif
55 
  /**
   * @brief  Primary template declaration for std::atomic.
   *
   * Defined later in this header; an explicit specialization for bool,
   * explicit specializations for the integral types, and a partial
   * specialization for pointer types follow below.
   */
  template<typename _Tp>
    struct atomic;
58 
  /**
   * @brief  Explicit specialization of std::atomic for bool.
   *
   * NB: No operators or fetch-operations for this type.  Every member
   * simply forwards to the contained __atomic_base<bool> object.
   */
  template<>
    struct atomic<bool>
    {
      using value_type = bool;

    private:
      // Underlying implementation object; all operations delegate to it.
      __atomic_base<bool> _M_base;

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      // Atomic objects are neither copyable nor copy-assignable.
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Non-atomic initialization to the given value.
      constexpr atomic(bool __i) noexcept : _M_base(__i) { }

      /// Atomically store @a __i (seq_cst) and return it.
      bool
      operator=(bool __i) noexcept
      { return _M_base.operator=(__i); }

      bool
      operator=(bool __i) volatile noexcept
      { return _M_base.operator=(__i); }

      /// Atomically load the current value (seq_cst).
      operator bool() const noexcept
      { return _M_base.load(); }

      operator bool() const volatile noexcept
      { return _M_base.load(); }

      /// True if operations on this object are implemented lock-free.
      bool
      is_lock_free() const noexcept { return _M_base.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus >= 201703L
      // True when the C-compatible macro reports "always lock-free" (2).
      static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

      /// Atomically replace the value with @a __i, using ordering @a __m.
      void
      store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { _M_base.store(__i, __m); }

      void
      store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { _M_base.store(__i, __m); }

      /// Atomically read the value, using ordering @a __m.
      bool
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_base.load(__m); }

      bool
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_base.load(__m); }

      /// Atomically replace the value with @a __i and return the old value.
      bool
      exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.exchange(__i, __m); }

      bool
      exchange(bool __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.exchange(__i, __m); }

      /// Weak compare-and-swap (may fail spuriously); @a __m1 and @a __m2
      /// are the success and failure memory orders respectively.
      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

      /// Weak CAS with a single order; the failure order is derived from it.
      bool
      compare_exchange_weak(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      bool
      compare_exchange_weak(bool& __i1, bool __i2,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

      /// Strong compare-and-swap (no spurious failures).
      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			      memory_order __m2) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

      /// Strong CAS with a single order; the failure order is derived from it.
      bool
      compare_exchange_strong(bool& __i1, bool __i2,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

      bool
      compare_exchange_strong(bool& __i1, bool __i2,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

#if __cpp_lib_atomic_wait
      /// Block until the value changes from @a __old (C++20 atomic waiting).
      void
      wait(bool __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_base.wait(__old, __m); }

      // TODO add const volatile overload

      /// Wake at least one thread blocked in wait() on this object.
      void
      notify_one() noexcept
      { _M_base.notify_one(); }

      /// Wake all threads blocked in wait() on this object.
      void
      notify_all() noexcept
      { _M_base.notify_all(); }
#endif // __cpp_lib_atomic_wait
    };
183 
184 /// @cond undocumented
185 #if __cpp_lib_atomic_value_initialization
186 # define _GLIBCXX20_INIT(I) = I
187 #else
188 # define _GLIBCXX20_INIT(I)
189 #endif
190 /// @endcond
191 
  /**
   * @brief Generic atomic type, primary class template.
   *
   * @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      // (sizeof & (sizeof - 1)) is non-zero exactly when sizeof(_Tp) is
      // not a power of two; in that case, or when the object is larger
      // than 16 bytes, no size-based alignment is requested.
      static constexpr int _S_min_alignment
	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
	? 0 : sizeof(_Tp);

      // Use the stricter of the type's natural alignment and the
      // size-based minimum computed above.
      static constexpr int _S_alignment
	= _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      // The stored value.  Value-initialized in C++20 and later (P0883),
      // uninitialized before that.
      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());

      static_assert(__is_trivially_copyable(_Tp),
		    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
		    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      // C++20 additionally requires the value type to be copy/move
      // constructible and assignable.
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
      atomic() = default;
      ~atomic() noexcept = default;
      // Atomic objects are neither copyable nor copy-assignable.
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Non-atomic initialization.  Where the compiler provides
      // __builtin_clear_padding, any padding bits in _M_i are zeroed so
      // that byte-wise comparisons (as done by compare-exchange) see a
      // canonical object representation.
      constexpr atomic(_Tp __i) noexcept : _M_i(__i)
      {
#if __cplusplus >= 201402L && __has_builtin(__builtin_clear_padding)
	if _GLIBCXX17_CONSTEXPR (__atomic_impl::__maybe_has_padding<_Tp>())
	  __builtin_clear_padding(std::__addressof(_M_i));
#endif
      }

      /// Atomically load the current value (seq_cst).
      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      /// Atomically store @a __i (seq_cst) and return it.
      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      /// True if operations on this object are implemented lock-free.
      bool
      is_lock_free() const noexcept
      {
	// Produce a fake, minimally aligned pointer.  Its low bits carry
	// exactly the alignment guaranteed for _M_i, so the builtin
	// answers for the worst-case placement of the object.
	return __atomic_is_lock_free(sizeof(_M_i),
				     reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
	// Produce a fake, minimally aligned pointer.
	return __atomic_is_lock_free(sizeof(_M_i),
				     reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
      // Compile-time answer: lock-free for every object of this size.
      static constexpr bool is_always_lock_free
	= __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      /// Atomically replace the value with @a __i, using ordering @a __m.
      /// Padding bits of the argument are cleared before the store.
      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	__atomic_store(std::__addressof(_M_i),
		       __atomic_impl::__clear_padding(__i),
		       int(__m));
      }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	__atomic_store(std::__addressof(_M_i),
		       __atomic_impl::__clear_padding(__i),
		       int(__m));
      }

      /// Atomically read the value, using ordering @a __m.
      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	// __atomic_load writes the raw bytes of the value, so load into a
	// suitably aligned local buffer and return a copy from there.
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      /// Atomically replace the value with @a __i and return the old value.
      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	// The old value is written into an aligned local buffer, as in load().
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i),
			  __atomic_impl::__clear_padding(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i),
			  __atomic_impl::__clear_padding(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      /// Weak compare-and-swap; the `true` argument selects the weak form,
      /// which may fail spuriously.  @a __s / @a __f are the success and
      /// failure memory orders.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
						 __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) volatile noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, true,
						 __s, __f);
      }

      /// Weak CAS with a single order; the failure order is derived via
      /// __cmpexch_failure_order.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      /// Strong compare-and-swap; the `false` argument selects the strong
      /// form, which never fails spuriously.
      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
						 __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) volatile noexcept
      {
	return __atomic_impl::__compare_exchange(_M_i, __e, __i, false,
						 __s, __f);
      }

      /// Strong CAS with a single order; the failure order is derived via
      /// __cmpexch_failure_order.
      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }

#if __cpp_lib_atomic_wait
      /// Block until the value changes from @a __old (C++20 atomic waiting).
      void
      wait(_Tp __old, memory_order __m = memory_order_seq_cst) const noexcept
      {
	std::__atomic_wait_address_v(std::addressof(_M_i), __old,
				     [__m, this] { return this->load(__m); });
      }

      // TODO add const volatile overload

      /// Wake at least one thread blocked in wait() on this object.
      void
      notify_one() noexcept
      { std::__atomic_notify_address(std::addressof(_M_i), false); }

      /// Wake all threads blocked in wait() on this object.
      void
      notify_all() noexcept
      { std::__atomic_notify_address(std::addressof(_M_i), true); }
#endif // __cpp_lib_atomic_wait
    };
408 #undef _GLIBCXX20_INIT
409 
  /// Partial specialization for pointer types.
  // All operations forward to the contained __atomic_base<_Tp*>.
  // Arithmetic operations additionally require _Tp to be an object type
  // (diagnosed via static_assert since C++17), because pointer arithmetic
  // is meaningless for pointers to void or to functions.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      // Underlying implementation object (public, as in the upstream header).
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      // Atomic objects are neither copyable nor copy-assignable.
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Non-atomic initialization to the given pointer value.
      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      /// Atomically load the current value (seq_cst).
      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      /// Atomically store @a __p (seq_cst) and return it.
      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      /// Atomic post-increment; returns the old value.
      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      /// Atomic post-decrement; returns the old value.
      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      /// Atomic pre-increment; returns the new value.
      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      /// Atomic pre-decrement; returns the new value.
      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      /// Atomically advance by @a __d elements; returns the new value.
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      /// Atomically retreat by @a __d elements; returns the new value.
      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      /// True if operations on this object are implemented lock-free.
      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      // True when the C-compatible macro reports "always lock-free" (2).
      static constexpr bool is_always_lock_free
	= ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      /// Atomically replace the value with @a __p, using ordering @a __m.
      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      /// Atomically read the value, using ordering @a __m.
      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      /// Atomically replace the value with @a __p and return the old value.
      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      /// Weak compare-and-swap (may fail spuriously); @a __m1 and @a __m2
      /// are the success and failure memory orders respectively.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_weak(__p1, __p2, __m1, __m2); }

      /// Weak CAS with a single order; the failure order is derived via
      /// __cmpexch_failure_order.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      /// Strong compare-and-swap (no spurious failures).
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      /// Strong CAS with a single order; the failure order is derived via
      /// __cmpexch_failure_order.  (Forwards directly to _M_b.)
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

#if __cpp_lib_atomic_wait
      /// Block until the value changes from @a __old (C++20 atomic waiting).
      void
      wait(__pointer_type __old, memory_order __m = memory_order_seq_cst) const noexcept
      { _M_b.wait(__old, __m); }

      // TODO add const volatile overload

      /// Wake at least one thread blocked in wait() on this object.
      void
      notify_one() noexcept
      { _M_b.notify_one(); }

      /// Wake all threads blocked in wait() on this object.
      void
      notify_all() noexcept
      { _M_b.notify_all(); }
#endif // __cpp_lib_atomic_wait

      /// Atomically add @a __d elements and return the old value.
      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      /// Atomically subtract @a __d elements and return the old value.
      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }
    };
702 
703 
704  /// Explicit specialization for char.
705  template<>
706  struct atomic<char> : __atomic_base<char>
707  {
708  typedef char __integral_type;
709  typedef __atomic_base<char> __base_type;
710 
711  atomic() noexcept = default;
712  ~atomic() noexcept = default;
713  atomic(const atomic&) = delete;
714  atomic& operator=(const atomic&) = delete;
715  atomic& operator=(const atomic&) volatile = delete;
716 
717  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
718 
719  using __base_type::operator __integral_type;
720  using __base_type::operator=;
721 
722 #if __cplusplus >= 201703L
723  static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
724 #endif
725  };
726 
727  /// Explicit specialization for signed char.
728  template<>
729  struct atomic<signed char> : __atomic_base<signed char>
730  {
731  typedef signed char __integral_type;
732  typedef __atomic_base<signed char> __base_type;
733 
734  atomic() noexcept= default;
735  ~atomic() noexcept = default;
736  atomic(const atomic&) = delete;
737  atomic& operator=(const atomic&) = delete;
738  atomic& operator=(const atomic&) volatile = delete;
739 
740  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
741 
742  using __base_type::operator __integral_type;
743  using __base_type::operator=;
744 
745 #if __cplusplus >= 201703L
746  static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
747 #endif
748  };
749 
750  /// Explicit specialization for unsigned char.
751  template<>
752  struct atomic<unsigned char> : __atomic_base<unsigned char>
753  {
754  typedef unsigned char __integral_type;
755  typedef __atomic_base<unsigned char> __base_type;
756 
757  atomic() noexcept= default;
758  ~atomic() noexcept = default;
759  atomic(const atomic&) = delete;
760  atomic& operator=(const atomic&) = delete;
761  atomic& operator=(const atomic&) volatile = delete;
762 
763  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
764 
765  using __base_type::operator __integral_type;
766  using __base_type::operator=;
767 
768 #if __cplusplus >= 201703L
769  static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
770 #endif
771  };
772 
773  /// Explicit specialization for short.
774  template<>
775  struct atomic<short> : __atomic_base<short>
776  {
777  typedef short __integral_type;
778  typedef __atomic_base<short> __base_type;
779 
780  atomic() noexcept = default;
781  ~atomic() noexcept = default;
782  atomic(const atomic&) = delete;
783  atomic& operator=(const atomic&) = delete;
784  atomic& operator=(const atomic&) volatile = delete;
785 
786  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
787 
788  using __base_type::operator __integral_type;
789  using __base_type::operator=;
790 
791 #if __cplusplus >= 201703L
792  static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
793 #endif
794  };
795 
796  /// Explicit specialization for unsigned short.
797  template<>
798  struct atomic<unsigned short> : __atomic_base<unsigned short>
799  {
800  typedef unsigned short __integral_type;
801  typedef __atomic_base<unsigned short> __base_type;
802 
803  atomic() noexcept = default;
804  ~atomic() noexcept = default;
805  atomic(const atomic&) = delete;
806  atomic& operator=(const atomic&) = delete;
807  atomic& operator=(const atomic&) volatile = delete;
808 
809  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
810 
811  using __base_type::operator __integral_type;
812  using __base_type::operator=;
813 
814 #if __cplusplus >= 201703L
815  static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
816 #endif
817  };
818 
819  /// Explicit specialization for int.
820  template<>
821  struct atomic<int> : __atomic_base<int>
822  {
823  typedef int __integral_type;
824  typedef __atomic_base<int> __base_type;
825 
826  atomic() noexcept = default;
827  ~atomic() noexcept = default;
828  atomic(const atomic&) = delete;
829  atomic& operator=(const atomic&) = delete;
830  atomic& operator=(const atomic&) volatile = delete;
831 
832  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
833 
834  using __base_type::operator __integral_type;
835  using __base_type::operator=;
836 
837 #if __cplusplus >= 201703L
838  static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
839 #endif
840  };
841 
842  /// Explicit specialization for unsigned int.
843  template<>
844  struct atomic<unsigned int> : __atomic_base<unsigned int>
845  {
846  typedef unsigned int __integral_type;
847  typedef __atomic_base<unsigned int> __base_type;
848 
849  atomic() noexcept = default;
850  ~atomic() noexcept = default;
851  atomic(const atomic&) = delete;
852  atomic& operator=(const atomic&) = delete;
853  atomic& operator=(const atomic&) volatile = delete;
854 
855  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
856 
857  using __base_type::operator __integral_type;
858  using __base_type::operator=;
859 
860 #if __cplusplus >= 201703L
861  static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
862 #endif
863  };
864 
865  /// Explicit specialization for long.
866  template<>
867  struct atomic<long> : __atomic_base<long>
868  {
869  typedef long __integral_type;
870  typedef __atomic_base<long> __base_type;
871 
872  atomic() noexcept = default;
873  ~atomic() noexcept = default;
874  atomic(const atomic&) = delete;
875  atomic& operator=(const atomic&) = delete;
876  atomic& operator=(const atomic&) volatile = delete;
877 
878  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
879 
880  using __base_type::operator __integral_type;
881  using __base_type::operator=;
882 
883 #if __cplusplus >= 201703L
884  static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
885 #endif
886  };
887 
888  /// Explicit specialization for unsigned long.
889  template<>
890  struct atomic<unsigned long> : __atomic_base<unsigned long>
891  {
892  typedef unsigned long __integral_type;
893  typedef __atomic_base<unsigned long> __base_type;
894 
895  atomic() noexcept = default;
896  ~atomic() noexcept = default;
897  atomic(const atomic&) = delete;
898  atomic& operator=(const atomic&) = delete;
899  atomic& operator=(const atomic&) volatile = delete;
900 
901  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
902 
903  using __base_type::operator __integral_type;
904  using __base_type::operator=;
905 
906 #if __cplusplus >= 201703L
907  static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
908 #endif
909  };
910 
911  /// Explicit specialization for long long.
912  template<>
913  struct atomic<long long> : __atomic_base<long long>
914  {
915  typedef long long __integral_type;
916  typedef __atomic_base<long long> __base_type;
917 
918  atomic() noexcept = default;
919  ~atomic() noexcept = default;
920  atomic(const atomic&) = delete;
921  atomic& operator=(const atomic&) = delete;
922  atomic& operator=(const atomic&) volatile = delete;
923 
924  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
925 
926  using __base_type::operator __integral_type;
927  using __base_type::operator=;
928 
929 #if __cplusplus >= 201703L
930  static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
931 #endif
932  };
933 
934  /// Explicit specialization for unsigned long long.
935  template<>
936  struct atomic<unsigned long long> : __atomic_base<unsigned long long>
937  {
938  typedef unsigned long long __integral_type;
939  typedef __atomic_base<unsigned long long> __base_type;
940 
941  atomic() noexcept = default;
942  ~atomic() noexcept = default;
943  atomic(const atomic&) = delete;
944  atomic& operator=(const atomic&) = delete;
945  atomic& operator=(const atomic&) volatile = delete;
946 
947  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
948 
949  using __base_type::operator __integral_type;
950  using __base_type::operator=;
951 
952 #if __cplusplus >= 201703L
953  static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
954 #endif
955  };
956 
957  /// Explicit specialization for wchar_t.
958  template<>
959  struct atomic<wchar_t> : __atomic_base<wchar_t>
960  {
961  typedef wchar_t __integral_type;
962  typedef __atomic_base<wchar_t> __base_type;
963 
964  atomic() noexcept = default;
965  ~atomic() noexcept = default;
966  atomic(const atomic&) = delete;
967  atomic& operator=(const atomic&) = delete;
968  atomic& operator=(const atomic&) volatile = delete;
969 
970  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
971 
972  using __base_type::operator __integral_type;
973  using __base_type::operator=;
974 
975 #if __cplusplus >= 201703L
976  static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
977 #endif
978  };
979 
980 #ifdef _GLIBCXX_USE_CHAR8_T
981  /// Explicit specialization for char8_t.
982  template<>
983  struct atomic<char8_t> : __atomic_base<char8_t>
984  {
985  typedef char8_t __integral_type;
986  typedef __atomic_base<char8_t> __base_type;
987 
988  atomic() noexcept = default;
989  ~atomic() noexcept = default;
990  atomic(const atomic&) = delete;
991  atomic& operator=(const atomic&) = delete;
992  atomic& operator=(const atomic&) volatile = delete;
993 
994  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
995 
996  using __base_type::operator __integral_type;
997  using __base_type::operator=;
998 
999 #if __cplusplus > 201402L
1000  static constexpr bool is_always_lock_free
1001  = ATOMIC_CHAR8_T_LOCK_FREE == 2;
1002 #endif
1003  };
1004 #endif
1005 
1006  /// Explicit specialization for char16_t.
1007  template<>
1008  struct atomic<char16_t> : __atomic_base<char16_t>
1009  {
1010  typedef char16_t __integral_type;
1011  typedef __atomic_base<char16_t> __base_type;
1012 
1013  atomic() noexcept = default;
1014  ~atomic() noexcept = default;
1015  atomic(const atomic&) = delete;
1016  atomic& operator=(const atomic&) = delete;
1017  atomic& operator=(const atomic&) volatile = delete;
1018 
1019  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1020 
1021  using __base_type::operator __integral_type;
1022  using __base_type::operator=;
1023 
1024 #if __cplusplus >= 201703L
1025  static constexpr bool is_always_lock_free
1026  = ATOMIC_CHAR16_T_LOCK_FREE == 2;
1027 #endif
1028  };
1029 
1030  /// Explicit specialization for char32_t.
1031  template<>
1032  struct atomic<char32_t> : __atomic_base<char32_t>
1033  {
1034  typedef char32_t __integral_type;
1035  typedef __atomic_base<char32_t> __base_type;
1036 
1037  atomic() noexcept = default;
1038  ~atomic() noexcept = default;
1039  atomic(const atomic&) = delete;
1040  atomic& operator=(const atomic&) = delete;
1041  atomic& operator=(const atomic&) volatile = delete;
1042 
1043  constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
1044 
1045  using __base_type::operator __integral_type;
1046  using __base_type::operator=;
1047 
1048 #if __cplusplus >= 201703L
1049  static constexpr bool is_always_lock_free
1050  = ATOMIC_CHAR32_T_LOCK_FREE == 2;
1051 #endif
1052  };
1053 
1054 
1055  /// atomic_bool
1056  typedef atomic<bool> atomic_bool;
1057 
1058  /// atomic_char
1059  typedef atomic<char> atomic_char;
1060 
1061  /// atomic_schar
1062  typedef atomic<signed char> atomic_schar;
1063 
1064  /// atomic_uchar
1065  typedef atomic<unsigned char> atomic_uchar;
1066 
1067  /// atomic_short
1068  typedef atomic<short> atomic_short;
1069 
1070  /// atomic_ushort
1071  typedef atomic<unsigned short> atomic_ushort;
1072 
1073  /// atomic_int
1074  typedef atomic<int> atomic_int;
1075 
1076  /// atomic_uint
1077  typedef atomic<unsigned int> atomic_uint;
1078 
1079  /// atomic_long
1080  typedef atomic<long> atomic_long;
1081 
1082  /// atomic_ulong
1083  typedef atomic<unsigned long> atomic_ulong;
1084 
1085  /// atomic_llong
1086  typedef atomic<long long> atomic_llong;
1087 
1088  /// atomic_ullong
1089  typedef atomic<unsigned long long> atomic_ullong;
1090 
1091  /// atomic_wchar_t
1092  typedef atomic<wchar_t> atomic_wchar_t;
1093 
1094 #ifdef _GLIBCXX_USE_CHAR8_T
1095  /// atomic_char8_t
1096  typedef atomic<char8_t> atomic_char8_t;
1097 #endif
1098 
1099  /// atomic_char16_t
1100  typedef atomic<char16_t> atomic_char16_t;
1101 
1102  /// atomic_char32_t
1103  typedef atomic<char32_t> atomic_char32_t;
1104 
#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 2441. Exact-width atomic typedefs should be provided

  // Aliases for atomics of the <cstdint> exact-width types.

  /// atomic_int8_t
  using atomic_int8_t = atomic<int8_t>;

  /// atomic_uint8_t
  using atomic_uint8_t = atomic<uint8_t>;

  /// atomic_int16_t
  using atomic_int16_t = atomic<int16_t>;

  /// atomic_uint16_t
  using atomic_uint16_t = atomic<uint16_t>;

  /// atomic_int32_t
  using atomic_int32_t = atomic<int32_t>;

  /// atomic_uint32_t
  using atomic_uint32_t = atomic<uint32_t>;

  /// atomic_int64_t
  using atomic_int64_t = atomic<int64_t>;

  /// atomic_uint64_t
  using atomic_uint64_t = atomic<uint64_t>;


  // Aliases for atomics of the <cstdint> least-width types.

  /// atomic_int_least8_t
  using atomic_int_least8_t = atomic<int_least8_t>;

  /// atomic_uint_least8_t
  using atomic_uint_least8_t = atomic<uint_least8_t>;

  /// atomic_int_least16_t
  using atomic_int_least16_t = atomic<int_least16_t>;

  /// atomic_uint_least16_t
  using atomic_uint_least16_t = atomic<uint_least16_t>;

  /// atomic_int_least32_t
  using atomic_int_least32_t = atomic<int_least32_t>;

  /// atomic_uint_least32_t
  using atomic_uint_least32_t = atomic<uint_least32_t>;

  /// atomic_int_least64_t
  using atomic_int_least64_t = atomic<int_least64_t>;

  /// atomic_uint_least64_t
  using atomic_uint_least64_t = atomic<uint_least64_t>;


  // Aliases for atomics of the <cstdint> fast-width types.

  /// atomic_int_fast8_t
  using atomic_int_fast8_t = atomic<int_fast8_t>;

  /// atomic_uint_fast8_t
  using atomic_uint_fast8_t = atomic<uint_fast8_t>;

  /// atomic_int_fast16_t
  using atomic_int_fast16_t = atomic<int_fast16_t>;

  /// atomic_uint_fast16_t
  using atomic_uint_fast16_t = atomic<uint_fast16_t>;

  /// atomic_int_fast32_t
  using atomic_int_fast32_t = atomic<int_fast32_t>;

  /// atomic_uint_fast32_t
  using atomic_uint_fast32_t = atomic<uint_fast32_t>;

  /// atomic_int_fast64_t
  using atomic_int_fast64_t = atomic<int_fast64_t>;

  /// atomic_uint_fast64_t
  using atomic_uint_fast64_t = atomic<uint_fast64_t>;
#endif
1183 
1184 
1185  /// atomic_intptr_t
1186  typedef atomic<intptr_t> atomic_intptr_t;
1187 
1188  /// atomic_uintptr_t
1189  typedef atomic<uintptr_t> atomic_uintptr_t;
1190 
1191  /// atomic_size_t
1192  typedef atomic<size_t> atomic_size_t;
1193 
1194  /// atomic_ptrdiff_t
1195  typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1196 
1197 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
1198  /// atomic_intmax_t
1199  typedef atomic<intmax_t> atomic_intmax_t;
1200 
1201  /// atomic_uintmax_t
1202  typedef atomic<uintmax_t> atomic_uintmax_t;
1203 #endif
1204 
  // Function definitions, atomic_flag operations.

  /// Atomically set the flag and return its previous value (order __m).
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  /// @copydoc atomic_flag_test_and_set_explicit
  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

#if __cpp_lib_atomic_flag_test
  /// Atomically read the flag's value without modifying it.
  inline bool
  atomic_flag_test(const atomic_flag* __a) noexcept
  { return __a->test(); }

  /// @copydoc atomic_flag_test
  inline bool
  atomic_flag_test(const volatile atomic_flag* __a) noexcept
  { return __a->test(); }

  /// Atomically read the flag's value with memory order __m.
  inline bool
  atomic_flag_test_explicit(const atomic_flag* __a,
			    memory_order __m) noexcept
  { return __a->test(__m); }

  /// @copydoc atomic_flag_test_explicit
  inline bool
  atomic_flag_test_explicit(const volatile atomic_flag* __a,
			    memory_order __m) noexcept
  { return __a->test(__m); }
#endif

  /// Atomically clear the flag with memory order __m.
  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  /// @copydoc atomic_flag_clear_explicit
  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
			     memory_order __m) noexcept
  { __a->clear(__m); }

  /// Atomically set the flag and return its previous value (seq_cst order).
  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  /// @copydoc atomic_flag_test_and_set
  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  /// Atomically clear the flag (seq_cst order).
  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  /// @copydoc atomic_flag_clear
  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1260 
#if __cpp_lib_atomic_wait
  /// Block until the flag's value is observed to differ from __old.
  inline void
  atomic_flag_wait(atomic_flag* __a, bool __old) noexcept
  { __a->wait(__old); }

  /// As atomic_flag_wait, but loads with memory order __m.
  inline void
  atomic_flag_wait_explicit(atomic_flag* __a, bool __old,
			    memory_order __m) noexcept
  { __a->wait(__old, __m); }

  /// Wake at least one thread blocked waiting on this flag, if any.
  inline void
  atomic_flag_notify_one(atomic_flag* __a) noexcept
  { __a->notify_one(); }

  /// Wake all threads blocked waiting on this flag, if any.
  inline void
  atomic_flag_notify_all(atomic_flag* __a) noexcept
  { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
1279 
  /// @cond undocumented
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
  // Identity alias used for value parameters below so that the value
  // argument does not participate in deduction of _Tp (only the
  // atomic<_Tp>* argument does).
  template<typename _Tp>
    using __atomic_val_t = __type_identity_t<_Tp>;
  // The difference_type of atomic<_Tp>, used by the fetch_add/fetch_sub
  // non-member overloads (differs from _Tp for pointer specializations).
  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;
  /// @endcond
1288 
  // [atomics.nonmembers] Non-member functions.
  // Function templates generally applicable to atomic types.

  /// Return true if operations on *__a are lock-free.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  /// @copydoc atomic_is_lock_free
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  /// Initialize *__a to __i, using a relaxed store.
  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  /// @copydoc atomic_init
  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }
1310 
  /// Store __i into *__a with memory order __m.
  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  /// @copydoc atomic_store_explicit
  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  /// Return the value of *__a, loaded with memory order __m.
  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  /// @copydoc atomic_load_explicit
  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
			 memory_order __m) noexcept
    { return __a->load(__m); }

  /// Atomically replace *__a with __i and return the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  /// @copydoc atomic_exchange_explicit
  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }
1346 
  /// Weak compare-and-exchange: if *__a equals *__i1, store __i2 using
  /// order __m1 and return true; otherwise load the observed value into
  /// *__i1 using order __m2 and return false.  The weak form may fail
  /// spuriously (see atomic::compare_exchange_weak).
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  /// @copydoc atomic_compare_exchange_weak_explicit
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  /// Strong compare-and-exchange: as the weak form, but never fails
  /// spuriously.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  /// @copydoc atomic_compare_exchange_strong_explicit
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1383 
  // Sequentially-consistent convenience wrappers for the _explicit forms.

  /// Store __i into *__a (seq_cst order).
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  /// @copydoc atomic_store
  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  /// Return the value of *__a (seq_cst order).
  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  /// @copydoc atomic_load
  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  /// Atomically replace *__a with __i and return the previous value
  /// (seq_cst order).
  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  /// @copydoc atomic_exchange
  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1414 
  /// Weak compare-and-exchange with seq_cst ordering for both success
  /// and failure (see atomic_compare_exchange_weak_explicit).
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
				 __atomic_val_t<_ITp>* __i1,
				 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  /// @copydoc atomic_compare_exchange_weak
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
				 __atomic_val_t<_ITp>* __i1,
				 __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
						   memory_order_seq_cst,
						   memory_order_seq_cst);
    }

  /// Strong compare-and-exchange with seq_cst ordering for both success
  /// and failure (see atomic_compare_exchange_strong_explicit).
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
				   __atomic_val_t<_ITp>* __i1,
				   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }

  /// @copydoc atomic_compare_exchange_strong
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
				   __atomic_val_t<_ITp>* __i1,
				   __atomic_val_t<_ITp> __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
						     memory_order_seq_cst,
						     memory_order_seq_cst);
    }
1459 
#if __cpp_lib_atomic_wait
  /// Block until the value of *__a is observed to differ from __old.
  template<typename _Tp>
    inline void
    atomic_wait(const atomic<_Tp>* __a,
		typename std::atomic<_Tp>::value_type __old) noexcept
    { __a->wait(__old); }

  /// As atomic_wait, but loads with memory order __m.
  template<typename _Tp>
    inline void
    atomic_wait_explicit(const atomic<_Tp>* __a,
			 typename std::atomic<_Tp>::value_type __old,
			 std::memory_order __m) noexcept
    { __a->wait(__old, __m); }

  /// Wake at least one thread blocked in an atomic_wait on *__a, if any.
  template<typename _Tp>
    inline void
    atomic_notify_one(atomic<_Tp>* __a) noexcept
    { __a->notify_one(); }

  /// Wake all threads blocked in an atomic_wait on *__a, if any.
  template<typename _Tp>
    inline void
    atomic_notify_all(atomic<_Tp>* __a) noexcept
    { __a->notify_all(); }
#endif // __cpp_lib_atomic_wait
1484 
  // Function templates for atomic_integral and atomic_pointer operations only.
  // Some operations (and, or, xor) are only available for atomic integrals,
  // which is implemented by taking a parameter of type __atomic_base<_ITp>*.

  /// Atomically add __i to *__a and return the previous value (order __m).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  /// @copydoc atomic_fetch_add_explicit
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  /// Atomically subtract __i from *__a and return the previous value
  /// (order __m).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  /// @copydoc atomic_fetch_sub_explicit
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  /// Atomically AND __i into *__a and return the previous value (order __m).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  /// @copydoc atomic_fetch_and_explicit
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  /// Atomically OR __i into *__a and return the previous value (order __m).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  /// @copydoc atomic_fetch_or_explicit
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  /// Atomically XOR __i into *__a and return the previous value (order __m).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  /// @copydoc atomic_fetch_xor_explicit
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }
1558 
  // Sequentially-consistent convenience wrappers for the fetch operations.

  /// Atomically add __i to *__a and return the previous value (seq_cst).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  /// @copydoc atomic_fetch_add
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically subtract __i from *__a and return the previous value
  /// (seq_cst).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  /// @copydoc atomic_fetch_sub
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile atomic<_ITp>* __a,
		     __atomic_diff_t<_ITp> __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically AND __i into *__a and return the previous value (seq_cst).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  /// @copydoc atomic_fetch_and
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically OR __i into *__a and return the previous value (seq_cst).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  /// @copydoc atomic_fetch_or
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
		    __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically XOR __i into *__a and return the previous value (seq_cst).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  /// @copydoc atomic_fetch_xor
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
		     __atomic_val_t<_ITp> __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1618 
1619 #if __cplusplus > 201703L
1620 #define __cpp_lib_atomic_float 201711L
1621  template<>
1622  struct atomic<float> : __atomic_float<float>
1623  {
1624  atomic() noexcept = default;
1625 
1626  constexpr
1627  atomic(float __fp) noexcept : __atomic_float<float>(__fp)
1628  { }
1629 
1630  atomic& operator=(const atomic&) volatile = delete;
1631  atomic& operator=(const atomic&) = delete;
1632 
1633  using __atomic_float<float>::operator=;
1634  };
1635 
1636  template<>
1637  struct atomic<double> : __atomic_float<double>
1638  {
1639  atomic() noexcept = default;
1640 
1641  constexpr
1642  atomic(double __fp) noexcept : __atomic_float<double>(__fp)
1643  { }
1644 
1645  atomic& operator=(const atomic&) volatile = delete;
1646  atomic& operator=(const atomic&) = delete;
1647 
1648  using __atomic_float<double>::operator=;
1649  };
1650 
1651  template<>
1652  struct atomic<long double> : __atomic_float<long double>
1653  {
1654  atomic() noexcept = default;
1655 
1656  constexpr
1657  atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
1658  { }
1659 
1660  atomic& operator=(const atomic&) volatile = delete;
1661  atomic& operator=(const atomic&) = delete;
1662 
1663  using __atomic_float<long double>::operator=;
1664  };
1665 
#ifdef __STDCPP_FLOAT16_T__
  /// Explicit specialization for _Float16.
  template<>
    struct atomic<_Float16> : __atomic_float<_Float16>
    {
      atomic() noexcept = default;

      // Converting construction from the underlying floating-point type.
      constexpr
      atomic(_Float16 __fp) noexcept : __atomic_float<_Float16>(__fp)
      { }

      // Atomic objects are not copy-assignable.
      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Inherit value-assignment from the base class.
      using __atomic_float<_Float16>::operator=;
    };
#endif
1682 
#ifdef __STDCPP_FLOAT32_T__
  /// Explicit specialization for _Float32.
  template<>
    struct atomic<_Float32> : __atomic_float<_Float32>
    {
      atomic() noexcept = default;

      // Converting construction from the underlying floating-point type.
      constexpr
      atomic(_Float32 __fp) noexcept : __atomic_float<_Float32>(__fp)
      { }

      // Atomic objects are not copy-assignable.
      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Inherit value-assignment from the base class.
      using __atomic_float<_Float32>::operator=;
    };
#endif
1699 
#ifdef __STDCPP_FLOAT64_T__
  /// Explicit specialization for _Float64.
  template<>
    struct atomic<_Float64> : __atomic_float<_Float64>
    {
      atomic() noexcept = default;

      // Converting construction from the underlying floating-point type.
      constexpr
      atomic(_Float64 __fp) noexcept : __atomic_float<_Float64>(__fp)
      { }

      // Atomic objects are not copy-assignable.
      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Inherit value-assignment from the base class.
      using __atomic_float<_Float64>::operator=;
    };
#endif
1716 
#ifdef __STDCPP_FLOAT128_T__
  /// Explicit specialization for _Float128.
  template<>
    struct atomic<_Float128> : __atomic_float<_Float128>
    {
      atomic() noexcept = default;

      // Converting construction from the underlying floating-point type.
      constexpr
      atomic(_Float128 __fp) noexcept : __atomic_float<_Float128>(__fp)
      { }

      // Atomic objects are not copy-assignable.
      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Inherit value-assignment from the base class.
      using __atomic_float<_Float128>::operator=;
    };
#endif
1733 
#ifdef __STDCPP_BFLOAT16_T__
  /// Explicit specialization for the bfloat16 extended floating-point type.
  template<>
    struct atomic<__gnu_cxx::__bfloat16_t> : __atomic_float<__gnu_cxx::__bfloat16_t>
    {
      atomic() noexcept = default;

      // Converting construction from the underlying floating-point type.
      constexpr
      atomic(__gnu_cxx::__bfloat16_t __fp) noexcept : __atomic_float<__gnu_cxx::__bfloat16_t>(__fp)
      { }

      // Atomic objects are not copy-assignable.
      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Inherit value-assignment from the base class.
      using __atomic_float<__gnu_cxx::__bfloat16_t>::operator=;
    };
#endif
1750 
1751 #define __cpp_lib_atomic_ref 201806L
1752 
1753  /// Class template to provide atomic operations on a non-atomic variable.
1754  template<typename _Tp>
1755  struct atomic_ref : __atomic_ref<_Tp>
1756  {
1757  explicit
1758  atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
1759  { }
1760 
1761  atomic_ref& operator=(const atomic_ref&) = delete;
1762 
1763  atomic_ref(const atomic_ref&) = default;
1764 
1765  using __atomic_ref<_Tp>::operator=;
1766  };
1767 
#define __cpp_lib_atomic_lock_free_type_aliases 201907L
#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
  // Aliases for signed/unsigned lock-free atomic types.  When the platform
  // has a native wait primitive, use its wait type so waiting is efficient.
  using atomic_signed_lock_free
    = atomic<make_signed_t<__detail::__platform_wait_t>>;
  using atomic_unsigned_lock_free
    = atomic<make_unsigned_t<__detail::__platform_wait_t>>;
#elif ATOMIC_INT_LOCK_FREE || !(ATOMIC_LONG_LOCK_FREE || ATOMIC_CHAR_LOCK_FREE)
  // Otherwise pick int, then long, then char, by which is always lock-free.
  using atomic_signed_lock_free = atomic<signed int>;
  using atomic_unsigned_lock_free = atomic<unsigned int>;
#elif ATOMIC_LONG_LOCK_FREE
  using atomic_signed_lock_free = atomic<signed long>;
  using atomic_unsigned_lock_free = atomic<unsigned long>;
#elif ATOMIC_CHAR_LOCK_FREE
  using atomic_signed_lock_free = atomic<signed char>;
  using atomic_unsigned_lock_free = atomic<unsigned char>;
#endif
1784 
1785 #endif // C++2a
1786 
1787  /// @} group atomics
1788 
1789 _GLIBCXX_END_NAMESPACE_VERSION
1790 } // namespace
1791 
1792 #endif // C++11
1793 
1794 #endif // _GLIBCXX_ATOMIC