libstdc++
atomic_base.h
// -*- C++ -*- header.

// Copyright (C) 2008-2015 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((__always_inline__))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;

  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };

  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m | int(__mod));
  }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m & int(__mod));
  }

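  // Illustrative note (not part of this header): the modifier bits sit above
  // the low 16 bits, so the base ordering of a combined value can be
  // recovered with the internal mask, e.g.
  //
  //   (memory_order_acquire | __memory_order_hle_acquire) & __memory_order_mask
  //     == memory_order_acquire
  //
  // The __memory_order_hle_* values are hardware lock elision hints for
  // GCC's __atomic built-ins and are internal to libstdc++.
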
  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }

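  // Worked examples (illustrative, not part of this header) of the rule
  // implemented above:
  //
  //   __cmpexch_failure_order(memory_order_acq_rel) == memory_order_acquire
  //   __cmpexch_failure_order(memory_order_release) == memory_order_relaxed
  //   __cmpexch_failure_order(memory_order_seq_cst) == memory_order_seq_cst
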
  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }

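  // Illustrative usage sketch (not part of this header; assumes <atomic> is
  // included and the names data/ready are hypothetical): pairing a release
  // fence with an acquire fence to publish data through a relaxed flag.
  //
  //   int data;
  //   std::atomic<bool> ready(false);
  //
  //   void producer()
  //   {
  //     data = 42;
  //     std::atomic_thread_fence(std::memory_order_release);
  //     ready.store(true, std::memory_order_relaxed);
  //   }
  //
  //   void consumer()
  //   {
  //     while (!ready.load(std::memory_order_relaxed))
  //       { }
  //     std::atomic_thread_fence(std::memory_order_acquire);
  //     // The write to data is now guaranteed to be visible here.
  //   }
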
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }

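  // Illustrative sketch (not part of this header; the names p, q and i are
  // hypothetical, and p is assumed to have been published by another thread):
  // kill_dependency ends a memory_order_consume dependency chain; the
  // argument carries a dependency, the returned value does not.
  //
  //   std::atomic<int*> p;
  //   int* q = p.load(std::memory_order_consume);   // q carries a dependency
  //   int  i = std::kill_dependency(*q);            // i does not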

  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;


#define ATOMIC_VAR_INIT(_VI) { _VI }

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

    /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
    typedef bool __atomic_flag_data_type;
#else
    typedef unsigned char __atomic_flag_data_type;
#endif

  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
  */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };

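  // Illustrative usage sketch (not part of this header; assumes <atomic> is
  // included and the names lock_flag and with_lock are hypothetical):
  // atomic_flag as a minimal spin lock.
  //
  //   std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;
  //
  //   void with_lock()
  //   {
  //     while (lock_flag.test_and_set(std::memory_order_acquire))
  //       { }                                // spin while previously set
  //     // ... critical section ...
  //     lock_flag.clear(std::memory_order_release);
  //   }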

  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp      __int_type;

      static constexpr int _S_alignment =
        sizeof(_ITp) > alignof(_ITp) ? sizeof(_ITp) : alignof(_ITp);

      alignas(_S_alignment) __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-__alignof(_M_i)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-__alignof(_M_i)));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }


      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };

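  // Illustrative usage sketch (not part of this header; assumes <atomic> is
  // included and the names counter, saturating_increment and limit are
  // hypothetical): the integral operations above surface through
  // std::atomic<int>, here as a saturating increment built on
  // compare_exchange_weak.
  //
  //   std::atomic<int> counter = ATOMIC_VAR_INIT(0);
  //
  //   void saturating_increment(int limit)
  //   {
  //     int old = counter.load(std::memory_order_relaxed);
  //     while (old < limit
  //            && !counter.compare_exchange_weak(old, old + 1,
  //                                              std::memory_order_relaxed))
  //       { }   // on failure, old is reloaded with the current value
  //   }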

  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp*     __pointer_type;

      __pointer_type    _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }


      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };

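  // Illustrative usage sketch (not part of this header; assumes <atomic> is
  // included and the names buf, cursor and claim_slot are hypothetical):
  // fetch_add on an atomic pointer advances by whole elements, i.e. by
  // _M_type_size(1) == sizeof(_PTp) bytes per step.
  //
  //   double buf[16];
  //   std::atomic<double*> cursor(buf);
  //
  //   double* claim_slot()                   // returns buf, buf + 1, ...
  //   { return cursor.fetch_add(1, std::memory_order_relaxed); }
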
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif