libstdc++: bits/atomic_base.h
// -*- C++ -*- header.

// Copyright (C) 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdbool.h>
#include <stdint.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

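  // Illustrative note (not part of the original header): a minimal sketch of
  // how the single-order compare_exchange overloads below use
  // __cmpexch_failure_order to derive the failure ordering from the success
  // ordering.  The names are hypothetical user code, shown here as a comment:
  //
  //   std::atomic<int> value{0};
  //   int expected = 0;
  //   // Success order acq_rel; on failure the load uses memory_order_acquire
  //   // (the release part is dropped, exactly as computed above).
  //   value.compare_exchange_strong(expected, 1, std::memory_order_acq_rel);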
  inline void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  inline void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }

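  // Illustrative note (not part of the original header): a hedged sketch of
  // pairing relaxed atomic accesses with explicit fences; the names are
  // hypothetical user code, shown here as a comment:
  //
  //   std::atomic<bool> ready{false};
  //   int payload;
  //
  //   void producer()
  //   {
  //     payload = 42;
  //     std::atomic_thread_fence(std::memory_order_release);
  //     ready.store(true, std::memory_order_relaxed);
  //   }
  //
  //   void consumer()
  //   {
  //     while (!ready.load(std::memory_order_relaxed))
  //       ;                                            // spin until published
  //     std::atomic_thread_fence(std::memory_order_acquire);
  //     // payload is now guaranteed to read as 42.
  //   }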
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }


  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

  /// atomic_char
  typedef __atomic_base<char> atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char> atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char> atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short> atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short> atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int> atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int> atomic_uint;

  /// atomic_long
  typedef __atomic_base<long> atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long> atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long> atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long> atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t> atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t> atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t> atomic_char32_t;


  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t> atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t> atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t> atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t> atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t> atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t> atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t> atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t> atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t> atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t> atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t> atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t> atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t> atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t> atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t> atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t> atomic_uint_fast64_t;


  /// atomic_intptr_t
  typedef __atomic_base<intptr_t> atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t> atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t> atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t> atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t> atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t> atomic_ptrdiff_t;


#define ATOMIC_VAR_INIT(_VI) { _VI }

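  // Illustrative note (not part of the original header): ATOMIC_VAR_INIT gives
  // C-compatible static initialization of an atomic object, e.g. (user code
  // shown as a comment):
  //
  //   std::atomic<int> counter = ATOMIC_VAR_INIT(0);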
  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;


  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
   */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
    bool _M_i;
#else
    unsigned char _M_i;
#endif
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base({ __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0 })
    { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }
  };

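  // Illustrative note (not part of the original header): atomic_flag is the
  // minimal lock-free primitive; a common use is a simple spin lock.  A hedged
  // sketch with hypothetical names, shown as a comment:
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //
  //   void with_lock()
  //   {
  //     while (lock.test_and_set(std::memory_order_acquire))
  //       ;                                     // spin until previously clear
  //     // ... critical section ...
  //     lock.clear(std::memory_order_release);  // publish writes and unlock
  //   }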

  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }


      __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };

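  // Illustrative note (not part of the original header): the integral base
  // class provides the load/store/exchange/fetch_* operations behind the
  // atomic integral typedefs above.  A hedged sketch with hypothetical names,
  // shown as a comment:
  //
  //   std::atomic<int> counter{0};
  //
  //   void work()
  //   {
  //     counter.fetch_add(1, std::memory_order_relaxed); // statistics counter
  //
  //     int expected = 0;
  //     // Retry loop: the weak compare-exchange may fail spuriously; on
  //     // failure, expected is updated to the value actually observed.
  //     while (!counter.compare_exchange_weak(expected, expected + 1))
  //       ;
  //   }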

  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(_M_type_size(1), &_M_p); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(_M_type_size(1), &_M_p); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }


      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };

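  // Illustrative note (not part of the original header): for atomic pointers,
  // fetch_add and fetch_sub step by whole elements, since the offset is scaled
  // by sizeof(_PTp) via _M_type_size.  A hedged sketch with hypothetical
  // names, shown as a comment (bounds checking omitted):
  //
  //   int buffer[8] = { };
  //   std::atomic<int*> cursor{buffer};
  //
  //   int* claim_slot()
  //   {
  //     // Returns the previous pointer and advances by one int,
  //     // i.e. by sizeof(int) bytes, not by one byte.
  //     return cursor.fetch_add(1, std::memory_order_relaxed);
  //   }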
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif