atomic_2.h

// -*- C++ -*- header.

// Copyright (C) 2008, 2009, 2010, 2011
// Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_2.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_2_H
#define _GLIBCXX_ATOMIC_2_H 1

#pragma GCC system_header

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

// 2 == __atomic2 == Always lock-free
// Assumed:
// _GLIBCXX_ATOMIC_BUILTINS_1
// _GLIBCXX_ATOMIC_BUILTINS_2
// _GLIBCXX_ATOMIC_BUILTINS_4
// _GLIBCXX_ATOMIC_BUILTINS_8
namespace __atomic2
{
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() = default;
    ~atomic_flag() = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    atomic_flag(bool __i): __atomic_flag_base({ __i }) { }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst)
    {
      // Redundant synchronize if built-in for lock is a full barrier.
      if (__m != memory_order_acquire && __m != memory_order_acq_rel)
        __sync_synchronize();
      return __sync_lock_test_and_set(&_M_i, 1);
    }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile
    {
      // Redundant synchronize if built-in for lock is a full barrier.
      if (__m != memory_order_acquire && __m != memory_order_acq_rel)
        __sync_synchronize();
      return __sync_lock_test_and_set(&_M_i, 1);
    }

    void
    clear(memory_order __m = memory_order_seq_cst)
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __sync_lock_release(&_M_i);
      if (__m != memory_order_acquire && __m != memory_order_acq_rel)
        __sync_synchronize();
    }

    void
    clear(memory_order __m = memory_order_seq_cst) volatile
    {
      __glibcxx_assert(__m != memory_order_consume);
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);

      __sync_lock_release(&_M_i);
      if (__m != memory_order_acquire && __m != memory_order_acq_rel)
        __sync_synchronize();
    }
  };
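
  // Illustrative sketch (not part of the original header): a minimal
  // spin lock built from atomic_flag.  test_and_set() returns the
  // previous value, so the loop spins until it observes the flag clear;
  // clear() releases the lock.  The names __lock and __critical_section
  // are hypothetical.
  //
  // @code
  //   __atomic2::atomic_flag __lock(false);
  //
  //   void
  //   __critical_section()
  //   {
  //     while (__lock.test_and_set(memory_order_acquire))
  //       { }   // spin while the previous value was still set
  //     // ... exclusive work ...
  //     __lock.clear(memory_order_release);
  //   }
  // @endcode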


  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp  __int_type;

      __int_type    _M_i;

    public:
      __atomic_base() = default;
      ~__atomic_base() = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i): _M_i (__i) { }

      operator __int_type() const
      { return load(); }

      operator __int_type() const volatile
      { return load(); }

      __int_type
      operator=(__int_type __i)
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int)
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile
      { return fetch_add(1); }

      __int_type
      operator--(int)
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile
      { return fetch_sub(1); }

      __int_type
      operator++()
      { return __sync_add_and_fetch(&_M_i, 1); }

      __int_type
      operator++() volatile
      { return __sync_add_and_fetch(&_M_i, 1); }

      __int_type
      operator--()
      { return __sync_sub_and_fetch(&_M_i, 1); }

      __int_type
      operator--() volatile
      { return __sync_sub_and_fetch(&_M_i, 1); }

      __int_type
      operator+=(__int_type __i)
      { return __sync_add_and_fetch(&_M_i, __i); }

      __int_type
      operator+=(__int_type __i) volatile
      { return __sync_add_and_fetch(&_M_i, __i); }

      __int_type
      operator-=(__int_type __i)
      { return __sync_sub_and_fetch(&_M_i, __i); }

      __int_type
      operator-=(__int_type __i) volatile
      { return __sync_sub_and_fetch(&_M_i, __i); }

      __int_type
      operator&=(__int_type __i)
      { return __sync_and_and_fetch(&_M_i, __i); }

      __int_type
      operator&=(__int_type __i) volatile
      { return __sync_and_and_fetch(&_M_i, __i); }

      __int_type
      operator|=(__int_type __i)
      { return __sync_or_and_fetch(&_M_i, __i); }

      __int_type
      operator|=(__int_type __i) volatile
      { return __sync_or_and_fetch(&_M_i, __i); }

      __int_type
      operator^=(__int_type __i)
      { return __sync_xor_and_fetch(&_M_i, __i); }

      __int_type
      operator^=(__int_type __i) volatile
      { return __sync_xor_and_fetch(&_M_i, __i); }

      bool
      is_lock_free() const
      { return true; }

      bool
      is_lock_free() const volatile
      { return true; }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst)
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        if (__m == memory_order_relaxed)
          _M_i = __i;
        else
          {
            // write_mem_barrier();
            _M_i = __i;
            if (__m == memory_order_seq_cst)
              __sync_synchronize();
          }
      }

      void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        if (__m == memory_order_relaxed)
          _M_i = __i;
        else
          {
            // write_mem_barrier();
            _M_i = __i;
            if (__m == memory_order_seq_cst)
              __sync_synchronize();
          }
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        __sync_synchronize();
        __int_type __ret = _M_i;
        __sync_synchronize();
        return __ret;
      }

      __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        __sync_synchronize();
        __int_type __ret = _M_i;
        __sync_synchronize();
        return __ret;
      }

      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst)
      {
        // XXX built-in assumes memory_order_acquire.
        return __sync_lock_test_and_set(&_M_i, __i);
      }

      __int_type
      exchange(__int_type __i, memory_order __m = memory_order_seq_cst) volatile
      {
        // XXX built-in assumes memory_order_acquire.
        return __sync_lock_test_and_set(&_M_i, __i);
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2)
      { return compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) volatile
      { return compare_exchange_strong(__i1, __i2, __m1, __m2); }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst)
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2)
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        __int_type __i1o = __i1;
        __int_type __i1n = __sync_val_compare_and_swap(&_M_i, __i1o, __i2);

        // Assume extra stores (of same value) allowed in true case.
        __i1 = __i1n;
        return __i1o == __i1n;
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        __int_type __i1o = __i1;
        __int_type __i1n = __sync_val_compare_and_swap(&_M_i, __i1o, __i2);

        // Assume extra stores (of same value) allowed in true case.
        __i1 = __i1n;
        return __i1o == __i1n;
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst)
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      __int_type
      fetch_add(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_add(&_M_i, __i); }

      __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_add(&_M_i, __i); }

      __int_type
      fetch_sub(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_sub(&_M_i, __i); }

      __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_sub(&_M_i, __i); }

      __int_type
      fetch_and(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_and(&_M_i, __i); }

      __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_and(&_M_i, __i); }

      __int_type
      fetch_or(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_or(&_M_i, __i); }

      __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_or(&_M_i, __i); }

      __int_type
      fetch_xor(__int_type __i, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_xor(&_M_i, __i); }

      __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_xor(&_M_i, __i); }
    };
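
  // Illustrative sketch (not part of the original header): typical use
  // of the integral base as a counter, plus a compare-exchange retry
  // loop.  On failure compare_exchange_strong writes the observed value
  // back into the expected argument, so the loop reloads automatically.
  // The names __counter and __expected are hypothetical.
  //
  // @code
  //   __atomic2::__atomic_base<int> __counter(0);
  //
  //   __counter.fetch_add(1);   // atomic increment; returns old value
  //
  //   int __expected = __counter.load();
  //   while (!__counter.compare_exchange_strong(__expected,
  //                                             __expected * 2))
  //     { }   // __expected now holds the value the CAS observed
  // @endcode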


  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp*     __pointer_type;

      __pointer_type    _M_p;

    public:
      __atomic_base() = default;
      ~__atomic_base() = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p): _M_p (__p) { }

      operator __pointer_type() const
      { return load(); }

      operator __pointer_type() const volatile
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p)
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int)
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile
      { return fetch_add(1); }

      __pointer_type
      operator--(int)
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile
      { return fetch_sub(1); }

      __pointer_type
      operator++()
      { return fetch_add(1) + 1; }

      __pointer_type
      operator++() volatile
      { return fetch_add(1) + 1; }

      __pointer_type
      operator--()
      { return fetch_sub(1) - 1; }

      __pointer_type
      operator--() volatile
      { return fetch_sub(1) - 1; }

      __pointer_type
      operator+=(ptrdiff_t __d)
      { return fetch_add(__d) + __d; }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile
      { return fetch_add(__d) + __d; }

      __pointer_type
      operator-=(ptrdiff_t __d)
      { return fetch_sub(__d) - __d; }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile
      { return fetch_sub(__d) - __d; }

      bool
      is_lock_free() const
      { return true; }

      bool
      is_lock_free() const volatile
      { return true; }

      void
      store(__pointer_type __p, memory_order __m = memory_order_seq_cst)
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        if (__m == memory_order_relaxed)
          _M_p = __p;
        else
          {
            // write_mem_barrier();
            _M_p = __p;
            if (__m == memory_order_seq_cst)
              __sync_synchronize();
          }
      }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);

        if (__m == memory_order_relaxed)
          _M_p = __p;
        else
          {
            // write_mem_barrier();
            _M_p = __p;
            if (__m == memory_order_seq_cst)
              __sync_synchronize();
          }
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        __sync_synchronize();
        __pointer_type __ret = _M_p;
        __sync_synchronize();
        return __ret;
      }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);

        __sync_synchronize();
        __pointer_type __ret = _M_p;
        __sync_synchronize();
        return __ret;
      }

      __pointer_type
      exchange(__pointer_type __p, memory_order __m = memory_order_seq_cst)
      {
        // XXX built-in assumes memory_order_acquire.
        return __sync_lock_test_and_set(&_M_p, __p);
      }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile
      {
        // XXX built-in assumes memory_order_acquire.
        return __sync_lock_test_and_set(&_M_p, __p);
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2)
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        __pointer_type __p1o = __p1;
        __pointer_type __p1n = __sync_val_compare_and_swap(&_M_p, __p1o, __p2);

        // Assume extra stores (of same value) allowed in true case.
        __p1 = __p1n;
        return __p1o == __p1n;
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);

        __pointer_type __p1o = __p1;
        __pointer_type __p1n = __sync_val_compare_and_swap(&_M_p, __p1o, __p2);

        // Assume extra stores (of same value) allowed in true case.
        __p1 = __p1n;
        return __p1o == __p1n;
      }

      __pointer_type
      fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_add(&_M_p, __d); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_add(&_M_p, __d); }

      __pointer_type
      fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst)
      { return __sync_fetch_and_sub(&_M_p, __d); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile
      { return __sync_fetch_and_sub(&_M_p, __d); }
    };
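
  // Illustrative sketch (not part of the original header): publishing a
  // pointer from one thread and consuming it in another.  The release
  // store pairs with the load, which this implementation fences
  // conservatively on both sides.  The names __shared and __payload are
  // hypothetical.
  //
  // @code
  //   __atomic2::__atomic_base<int*> __shared(nullptr);
  //   int __payload = 42;
  //
  //   // producer:
  //   __shared.store(&__payload, memory_order_release);
  //
  //   // consumer:
  //   int* __p;
  //   while (!(__p = __shared.load(memory_order_acquire)))
  //     { }   // spin until the pointer is published
  // @endcode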

} // namespace __atomic2

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif