libstdc++
atomic_0.h
Go to the documentation of this file.
00001 // -*- C++ -*- header.
00002 
00003 // Copyright (C) 2008, 2009, 2010, 2011
00004 // Free Software Foundation, Inc.
00005 //
00006 // This file is part of the GNU ISO C++ Library.  This library is free
00007 // software; you can redistribute it and/or modify it under the
00008 // terms of the GNU General Public License as published by the
00009 // Free Software Foundation; either version 3, or (at your option)
00010 // any later version.
00011 
00012 // This library is distributed in the hope that it will be useful,
00013 // but WITHOUT ANY WARRANTY; without even the implied warranty of
00014 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
00015 // GNU General Public License for more details.
00016 
00017 // Under Section 7 of GPL version 3, you are granted additional
00018 // permissions described in the GCC Runtime Library Exception, version
00019 // 3.1, as published by the Free Software Foundation.
00020 
00021 // You should have received a copy of the GNU General Public License and
00022 // a copy of the GCC Runtime Library Exception along with this program;
00023 // see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
00024 // <http://www.gnu.org/licenses/>.
00025 
00026 /** @file bits/atomic_0.h
00027  *  This is an internal header file, included by other library headers.
00028  *  Do not attempt to use it directly. @headername{atomic}
00029  */
00030 
00031 #ifndef _GLIBCXX_ATOMIC_0_H
00032 #define _GLIBCXX_ATOMIC_0_H 1
00033 
00034 #pragma GCC system_header
00035 
00036 namespace std _GLIBCXX_VISIBILITY(default)
00037 {
00038 _GLIBCXX_BEGIN_NAMESPACE_VERSION
00039 
00040 // 0 == __atomic0 == Never lock-free
00041 namespace __atomic0
00042 {
  _GLIBCXX_BEGIN_EXTERN_C

  // Release the flag ("unlock"): clears it with the given memory_order
  // semantics.  Defined out-of-line in the support library.
  void
  atomic_flag_clear_explicit(__atomic_flag_base*, memory_order)
  _GLIBCXX_NOTHROW;

  // Acquire the flag ("lock"): waits until the flag can be taken, with the
  // given memory_order semantics.  Pairs with atomic_flag_clear_explicit.
  void
  __atomic_flag_wait_explicit(__atomic_flag_base*, memory_order)
  _GLIBCXX_NOTHROW;

  // Map an object address to the flag that guards it.  _GLIBCXX_CONST:
  // the result depends only on the address.  (Presumably a fixed table of
  // flags indexed by a hash of the address — see the library definition.)
  _GLIBCXX_CONST __atomic_flag_base*
  __atomic_flag_for_address(const volatile void* __z) _GLIBCXX_NOTHROW;

  _GLIBCXX_END_EXTERN_C
00057 
  // Name of the data member the _ATOMIC_* macros below operate on.  The
  // macros must therefore be expanded inside a member function of a class
  // that declares an _M_i member.
#define _ATOMIC_MEMBER_ _M_i

  // Each helper below is a GNU statement-expression implementing one
  // atomic primitive under an external lock: it looks up the flag guarding
  // the object's address, spins until the flag is acquired, performs a
  // plain memory access, and releases the flag.  The first argument __a
  // (the atomic object) is unused; the data is reached via _ATOMIC_MEMBER_.
  // __x is the memory_order forwarded to the lock/unlock routines.

  // Locked load: yields the current value of _M_i.
#define _ATOMIC_LOAD_(__a, __x)                        \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                          \
    __i_type* __p = &_ATOMIC_MEMBER_;                      \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);          \
    __atomic_flag_wait_explicit(__g, __x);                 \
    __i_type __r = *__p;                           \
    atomic_flag_clear_explicit(__g, __x);                      \
    __r; })

  // Locked store: writes __n into _M_i and yields the stored value.
#define _ATOMIC_STORE_(__a, __n, __x)                      \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                          \
    __i_type* __p = &_ATOMIC_MEMBER_;                      \
    __typeof__(__n) __w = (__n);                           \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);          \
    __atomic_flag_wait_explicit(__g, __x);                 \
    *__p = __w;                                \
    atomic_flag_clear_explicit(__g, __x);                      \
    __w; })

  // Locked read-modify-write: applies "_M_i __o __n" (__o is a compound
  // assignment operator such as += or plain =) and yields the value _M_i
  // held before the modification.
#define _ATOMIC_MODIFY_(__a, __o, __n, __x)                \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                          \
    __i_type* __p = &_ATOMIC_MEMBER_;                      \
    __typeof__(__n) __w = (__n);                           \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);          \
    __atomic_flag_wait_explicit(__g, __x);                 \
    __i_type __r = *__p;                               \
    *__p __o __w;                                  \
    atomic_flag_clear_explicit(__g, __x);                      \
    __r; })

  // Locked compare-and-exchange: if *__e equals _M_i, stores __n and
  // yields true; otherwise copies the observed value back into *__e and
  // yields false.  Cannot fail spuriously, since the lock is held for the
  // whole compare-and-store.
#define _ATOMIC_CMPEXCHNG_(__a, __e, __n, __x)                 \
  ({typedef __typeof__(_ATOMIC_MEMBER_) __i_type;                          \
    __i_type* __p = &_ATOMIC_MEMBER_;                      \
    __typeof__(__e) __q = (__e);                           \
    __typeof__(__n) __w = (__n);                           \
    bool __r;                                      \
    __atomic_flag_base* __g = __atomic_flag_for_address(__p);          \
    __atomic_flag_wait_explicit(__g, __x);                 \
    __i_type __t = *__p;                               \
    if (*__q == __t)                               \
      {                                    \
    *__p = (__i_type)__w;                          \
    __r = true;                            \
      }                                    \
    else { *__q = __t; __r = false; }                          \
    atomic_flag_clear_explicit(__g, __x);                      \
    __r; })
00109 
00110 
00111   /// atomic_flag
00112   struct atomic_flag : public __atomic_flag_base
00113   {
00114     atomic_flag() = default;
00115     ~atomic_flag() = default;
00116     atomic_flag(const atomic_flag&) = delete;
00117     atomic_flag& operator=(const atomic_flag&) = delete;
00118     atomic_flag& operator=(const atomic_flag&) volatile = delete;
00119 
00120     // Conversion to ATOMIC_FLAG_INIT.
00121     atomic_flag(bool __i): __atomic_flag_base({ __i }) { }
00122 
00123     bool
00124     test_and_set(memory_order __m = memory_order_seq_cst);
00125 
00126     bool
00127     test_and_set(memory_order __m = memory_order_seq_cst) volatile;
00128 
00129     void
00130     clear(memory_order __m = memory_order_seq_cst);
00131 
00132     void
00133     clear(memory_order __m = memory_order_seq_cst) volatile;
00134   };
00135 
00136 
00137   /// Base class for atomic integrals.
00138   //
00139   // For each of the integral types, define atomic_[integral type] struct
00140   //
00141   // atomic_bool     bool
00142   // atomic_char     char
00143   // atomic_schar    signed char
00144   // atomic_uchar    unsigned char
00145   // atomic_short    short
00146   // atomic_ushort   unsigned short
00147   // atomic_int      int
00148   // atomic_uint     unsigned int
00149   // atomic_long     long
00150   // atomic_ulong    unsigned long
00151   // atomic_llong    long long
00152   // atomic_ullong   unsigned long long
00153   // atomic_char16_t char16_t
00154   // atomic_char32_t char32_t
00155   // atomic_wchar_t  wchar_t
00156 
00157   // Base type.
00158   // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or 8 bytes,
00159   // since that is what GCC built-in functions for atomic memory access work on.
  /// Lock-based ("never lock-free") implementation of an atomic integral.
  ///
  /// Every operation goes through the _ATOMIC_* macros above, which
  /// acquire the flag guarding this object's address, perform a plain
  /// read/write of _M_i, and release the flag.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp  __int_type;

      // The guarded value.  The name matters: _ATOMIC_MEMBER_ expands to
      // _M_i inside the _ATOMIC_* macros.
      __int_type    _M_i;

    public:
      __atomic_base() = default;
      ~__atomic_base() = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_base._M_i.
      constexpr __atomic_base(__int_type __i): _M_i (__i) { }

      // Implicit conversion performs a seq_cst load().
      operator __int_type() const
      { return load(); }

      operator __int_type() const volatile
      { return load(); }

      // Assignment performs a seq_cst store() and returns the stored value.
      __int_type
      operator=(__int_type __i)
      {
    store(__i);
    return __i;
      }

      __int_type
      operator=(__int_type __i) volatile
      {
    store(__i);
    return __i;
      }

      // Post-increment/decrement: return the previous value.
      __int_type
      operator++(int)
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile
      { return fetch_add(1); }

      __int_type
      operator--(int)
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile
      { return fetch_sub(1); }

      // Pre-increment/decrement: return the updated value.
      __int_type
      operator++()
      { return fetch_add(1) + 1; }

      __int_type
      operator++() volatile
      { return fetch_add(1) + 1; }

      __int_type
      operator--()
      { return fetch_sub(1) - 1; }

      __int_type
      operator--() volatile
      { return fetch_sub(1) - 1; }

      // Compound assignment: atomic fetch-and-op, then the same operation
      // applied locally to the fetched value, so the new value is returned.
      __int_type
      operator+=(__int_type __i)
      { return fetch_add(__i) + __i; }

      __int_type
      operator+=(__int_type __i) volatile
      { return fetch_add(__i) + __i; }

      __int_type
      operator-=(__int_type __i)
      { return fetch_sub(__i) - __i; }

      __int_type
      operator-=(__int_type __i) volatile
      { return fetch_sub(__i) - __i; }

      __int_type
      operator&=(__int_type __i)
      { return fetch_and(__i) & __i; }

      __int_type
      operator&=(__int_type __i) volatile
      { return fetch_and(__i) & __i; }

      __int_type
      operator|=(__int_type __i)
      { return fetch_or(__i) | __i; }

      __int_type
      operator|=(__int_type __i) volatile
      { return fetch_or(__i) | __i; }

      __int_type
      operator^=(__int_type __i)
      { return fetch_xor(__i) ^ __i; }

      __int_type
      operator^=(__int_type __i) volatile
      { return fetch_xor(__i) ^ __i; }

      // This implementation always takes the external lock.
      bool
      is_lock_free() const
      { return false; }

      bool
      is_lock_free() const volatile
      { return false; }

      // Store __i under the lock.  Acquire, acq_rel and consume are not
      // valid orders for a store, hence the assertions.
      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst)
      {
    __glibcxx_assert(__m != memory_order_acquire);
    __glibcxx_assert(__m != memory_order_acq_rel);
    __glibcxx_assert(__m != memory_order_consume);
    _ATOMIC_STORE_(this, __i, __m);
      }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      {
    __glibcxx_assert(__m != memory_order_acquire);
    __glibcxx_assert(__m != memory_order_acq_rel);
    __glibcxx_assert(__m != memory_order_consume);
    _ATOMIC_STORE_(this, __i, __m);
      }

      // Locked load.  Release and acq_rel are not valid orders for a load.
      __int_type
      load(memory_order __m = memory_order_seq_cst) const
      {
    __glibcxx_assert(__m != memory_order_release);
    __glibcxx_assert(__m != memory_order_acq_rel);
    return _ATOMIC_LOAD_(this, __m);
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
    __glibcxx_assert(__m != memory_order_release);
    __glibcxx_assert(__m != memory_order_acq_rel);
    return _ATOMIC_LOAD_(this, __m);
      }

      // Replace _M_i with __i, returning the previous value.
      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, =, __i, __m); }

      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, =, __i, __m); }

      // Compare-and-exchange; on failure the observed value is written
      // back into __i1.  Under the lock the operation cannot fail
      // spuriously, so the "weak" and "strong" forms share one
      // implementation (_ATOMIC_CMPEXCHNG_).  The failure order __m2 may
      // not be release/acq_rel, nor stronger than the success order __m1.
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                memory_order __m1, memory_order __m2)
      {
    __glibcxx_assert(__m2 != memory_order_release);
    __glibcxx_assert(__m2 != memory_order_acq_rel);
    __glibcxx_assert(__m2 <= __m1);
    return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                memory_order __m1, memory_order __m2) volatile
      {
    __glibcxx_assert(__m2 != memory_order_release);
    __glibcxx_assert(__m2 != memory_order_acq_rel);
    __glibcxx_assert(__m2 <= __m1);
    return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      // Single-order forms: the failure order is derived from the success
      // order via __calculate_memory_order (defined elsewhere in <atomic>).
      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                memory_order __m = memory_order_seq_cst)
      {
    return compare_exchange_weak(__i1, __i2, __m,
                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                memory_order __m = memory_order_seq_cst) volatile
      {
    return compare_exchange_weak(__i1, __i2, __m,
                     __calculate_memory_order(__m));
      }

      // Strong form: identical to the weak form in this lock-based model.
      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                  memory_order __m1, memory_order __m2)
      {
    __glibcxx_assert(__m2 != memory_order_release);
    __glibcxx_assert(__m2 != memory_order_acq_rel);
    __glibcxx_assert(__m2 <= __m1);
    return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                  memory_order __m1, memory_order __m2) volatile
      {
    __glibcxx_assert(__m2 != memory_order_release);
    __glibcxx_assert(__m2 != memory_order_acq_rel);
    __glibcxx_assert(__m2 <= __m1);
    return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                  memory_order __m = memory_order_seq_cst)
      {
    return compare_exchange_strong(__i1, __i2, __m,
                       __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                  memory_order __m = memory_order_seq_cst) volatile
      {
    return compare_exchange_strong(__i1, __i2, __m,
                       __calculate_memory_order(__m));
      }

      // Read-modify-write helpers: each returns the value _M_i held
      // before the operation.
      __int_type
      fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, +=, __i, __m); }

      __int_type
      fetch_add(__int_type __i,
        memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, +=, __i, __m); }

      __int_type
      fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, -=, __i, __m); }

      __int_type
      fetch_sub(__int_type __i,
        memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, -=, __i, __m); }

      __int_type
      fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, &=, __i, __m); }

      __int_type
      fetch_and(__int_type __i,
        memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, &=, __i, __m); }

      __int_type
      fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, |=, __i, __m); }

      __int_type
      fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, |=, __i, __m); }

      __int_type
      fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }

      __int_type
      fetch_xor(__int_type __i,
        memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }
    };
00436 
00437 
00438   /// Partial specialization for pointer types.
00439   template<typename _PTp>
00440     struct __atomic_base<_PTp*>
00441     {
00442     private:
00443       typedef _PTp*     __return_pointer_type;
00444       typedef void*     __pointer_type;
00445       __pointer_type    _M_i;
00446 
00447     public:
00448       __atomic_base() = default;
00449       ~__atomic_base() = default;
00450       __atomic_base(const __atomic_base&) = delete;
00451       __atomic_base& operator=(const __atomic_base&) = delete;
00452       __atomic_base& operator=(const __atomic_base&) volatile = delete;
00453 
00454       // Requires __pointer_type convertible to _M_i.
00455       constexpr __atomic_base(__return_pointer_type __p): _M_i (__p) { }
00456 
00457       operator __return_pointer_type() const
00458       { return reinterpret_cast<__return_pointer_type>(load()); }
00459 
00460       operator __return_pointer_type() const volatile
00461       { return reinterpret_cast<__return_pointer_type>(load()); }
00462 
00463       __return_pointer_type
00464       operator=(__pointer_type __p)
00465       {
00466     store(__p);
00467     return reinterpret_cast<__return_pointer_type>(__p);
00468       }
00469 
00470       __return_pointer_type
00471       operator=(__pointer_type __p) volatile
00472       {
00473     store(__p);
00474     return reinterpret_cast<__return_pointer_type>(__p);
00475       }
00476 
00477       __return_pointer_type
00478       operator++(int)
00479       { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); }
00480 
00481       __return_pointer_type
00482       operator++(int) volatile
00483       { return reinterpret_cast<__return_pointer_type>(fetch_add(1)); }
00484 
00485       __return_pointer_type
00486       operator--(int)
00487       { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); }
00488 
00489       __return_pointer_type
00490       operator--(int) volatile
00491       { return reinterpret_cast<__return_pointer_type>(fetch_sub(1)); }
00492 
00493       __return_pointer_type
00494       operator++()
00495       { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); }
00496 
00497       __return_pointer_type
00498       operator++() volatile
00499       { return reinterpret_cast<__return_pointer_type>(fetch_add(1) + 1); }
00500 
00501       __return_pointer_type
00502       operator--()
00503       { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); }
00504 
00505       __return_pointer_type
00506       operator--() volatile
00507       { return reinterpret_cast<__return_pointer_type>(fetch_sub(1) - 1); }
00508 
00509       __return_pointer_type
00510       operator+=(ptrdiff_t __d)
00511       { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); }
00512 
00513       __return_pointer_type
00514       operator+=(ptrdiff_t __d) volatile
00515       { return reinterpret_cast<__return_pointer_type>(fetch_add(__d) + __d); }
00516 
00517       __return_pointer_type
00518       operator-=(ptrdiff_t __d)
00519       { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); }
00520 
00521       __return_pointer_type
00522       operator-=(ptrdiff_t __d) volatile
00523       { return reinterpret_cast<__return_pointer_type>(fetch_sub(__d) - __d); }
00524 
00525       bool
00526       is_lock_free() const
00527       { return true; }
00528 
00529       bool
00530       is_lock_free() const volatile
00531       { return true; }
00532 
00533       void
00534       store(__pointer_type __p, memory_order __m = memory_order_seq_cst)
00535       {
00536     __glibcxx_assert(__m != memory_order_acquire);
00537     __glibcxx_assert(__m != memory_order_acq_rel);
00538     __glibcxx_assert(__m != memory_order_consume);
00539     _ATOMIC_STORE_(this, __p, __m);
00540       }
00541 
00542       void
00543       store(__pointer_type __p,
00544         memory_order __m = memory_order_seq_cst) volatile
00545       {
00546     __glibcxx_assert(__m != memory_order_acquire);
00547     __glibcxx_assert(__m != memory_order_acq_rel);
00548     __glibcxx_assert(__m != memory_order_consume);
00549     volatile __pointer_type* __p2 = &_M_i;
00550     __typeof__(__p) __w = (__p);
00551     __atomic_flag_base* __g = __atomic_flag_for_address(__p2);
00552     __atomic_flag_wait_explicit(__g, __m);
00553     *__p2 = reinterpret_cast<__pointer_type>(__w);
00554     atomic_flag_clear_explicit(__g, __m);
00555     __w;
00556       }
00557 
00558       __return_pointer_type
00559       load(memory_order __m = memory_order_seq_cst) const
00560       {
00561     __glibcxx_assert(__m != memory_order_release);
00562     __glibcxx_assert(__m != memory_order_acq_rel);
00563     void* __v = _ATOMIC_LOAD_(this, __m);
00564     return reinterpret_cast<__return_pointer_type>(__v);
00565       }
00566 
00567       __return_pointer_type
00568       load(memory_order __m = memory_order_seq_cst) const volatile
00569       {
00570     __glibcxx_assert(__m != memory_order_release);
00571     __glibcxx_assert(__m != memory_order_acq_rel);
00572     void* __v = _ATOMIC_LOAD_(this, __m);
00573     return reinterpret_cast<__return_pointer_type>(__v);
00574       }
00575 
00576       __return_pointer_type
00577       exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst)
00578       {
00579     void* __v = _ATOMIC_MODIFY_(this, =, __p, __m);
00580     return reinterpret_cast<__return_pointer_type>(__v);
00581       }
00582 
00583       __return_pointer_type
00584       exchange(__pointer_type __p,
00585            memory_order __m = memory_order_seq_cst) volatile
00586       {
00587     volatile __pointer_type* __p2 = &_M_i;
00588     __typeof__(__p) __w = (__p);
00589     __atomic_flag_base* __g = __atomic_flag_for_address(__p2);
00590     __atomic_flag_wait_explicit(__g, __m);
00591     __pointer_type __r = *__p2;
00592     *__p2 = __w;
00593     atomic_flag_clear_explicit(__g, __m);
00594     __r;
00595     return reinterpret_cast<__return_pointer_type>(_M_i);
00596       }
00597 
00598       bool
00599       compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2,
00600                   memory_order __m1, memory_order __m2)
00601       {
00602     __glibcxx_assert(__m2 != memory_order_release);
00603     __glibcxx_assert(__m2 != memory_order_acq_rel);
00604     __glibcxx_assert(__m2 <= __m1);
00605     __pointer_type& __p1 = reinterpret_cast<void*&>(__rp1);
00606     return _ATOMIC_CMPEXCHNG_(this, &__p1, __p2, __m1);
00607       }
00608 
00609       bool
00610       compare_exchange_strong(__return_pointer_type& __rp1, __pointer_type __p2,
00611                   memory_order __m1, memory_order __m2) volatile
00612       {
00613     __glibcxx_assert(__m2 != memory_order_release);
00614     __glibcxx_assert(__m2 != memory_order_acq_rel);
00615     __glibcxx_assert(__m2 <= __m1);
00616     __pointer_type& __p1 = reinterpret_cast<void*&>(__rp1);
00617     return _ATOMIC_CMPEXCHNG_(this, &__p1, __p2, __m1);
00618       }
00619 
00620       __return_pointer_type
00621       fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
00622       {
00623     void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m);
00624     return reinterpret_cast<__return_pointer_type>(__v);
00625       }
00626 
00627       __return_pointer_type
00628       fetch_add(ptrdiff_t __d,
00629         memory_order __m = memory_order_seq_cst) volatile
00630       {
00631     void* __v = _ATOMIC_MODIFY_(this, +=, __d, __m);
00632     return reinterpret_cast<__return_pointer_type>(__v);
00633       }
00634 
00635       __return_pointer_type
00636       fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
00637       {
00638     void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m);
00639     return reinterpret_cast<__return_pointer_type>(__v);
00640       }
00641 
00642       __return_pointer_type
00643       fetch_sub(ptrdiff_t __d,
00644         memory_order __m = memory_order_seq_cst) volatile
00645       {
00646     void* __v = _ATOMIC_MODIFY_(this, -=, __d, __m);
00647     return reinterpret_cast<__return_pointer_type>(__v);
00648       }
00649     };
00650 
// The _ATOMIC_* helpers are implementation details of this header; remove
// them so they do not leak into user translation units.
#undef _ATOMIC_LOAD_
#undef _ATOMIC_STORE_
#undef _ATOMIC_MODIFY_
#undef _ATOMIC_CMPEXCHNG_
00655 } // namespace __atomic0
00656 
00657 _GLIBCXX_END_NAMESPACE_VERSION
00658 } // namespace std
00659 
00660 #endif