libstdc++
atomic — annotated source listing of the standard <atomic> header.
(See the libstdc++ online documentation for this file.)
00001 // -*- C++ -*- header.
00002 
00003 // Copyright (C) 2008-2018 Free Software Foundation, Inc.
00004 //
00005 // This file is part of the GNU ISO C++ Library.  This library is free
00006 // software; you can redistribute it and/or modify it under the
00007 // terms of the GNU General Public License as published by the
00008 // Free Software Foundation; either version 3, or (at your option)
00009 // any later version.
00010 
00011 // This library is distributed in the hope that it will be useful,
00012 // but WITHOUT ANY WARRANTY; without even the implied warranty of
00013 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
00014 // GNU General Public License for more details.
00015 
00016 // Under Section 7 of GPL version 3, you are granted additional
00017 // permissions described in the GCC Runtime Library Exception, version
00018 // 3.1, as published by the Free Software Foundation.
00019 
00020 // You should have received a copy of the GNU General Public License and
00021 // a copy of the GCC Runtime Library Exception along with this program;
00022 // see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
00023 // <http://www.gnu.org/licenses/>.
00024 
00025 /** @file include/atomic
00026  *  This is a Standard C++ Library header.
00027  */
00028 
00029 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
00030 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
00031 
00032 #ifndef _GLIBCXX_ATOMIC
00033 #define _GLIBCXX_ATOMIC 1
00034 
00035 #pragma GCC system_header
00036 
00037 #if __cplusplus < 201103L
00038 # include <bits/c++0x_warning.h>
00039 #else
00040 
00041 #include <bits/atomic_base.h>
00042 #include <bits/move.h>
00043 
00044 namespace std _GLIBCXX_VISIBILITY(default)
00045 {
00046 _GLIBCXX_BEGIN_NAMESPACE_VERSION
00047 
00048   /**
00049    * @addtogroup atomics
00050    * @{
00051    */
00052 
00053 #if __cplusplus > 201402L
00054 # define __cpp_lib_atomic_is_always_lock_free 201603
00055 #endif
00056 
  // Forward declaration of the primary template, defined further below;
  // the explicit/partial specializations that follow refer to it.
  template<typename _Tp>
    struct atomic;
00059 
  /// atomic<bool>
  // NB: No operators or fetch-operations for this type.
  // Wraps __atomic_base<bool> by containment (not inheritance) so that
  // the base's arithmetic/bitwise fetch operations are not exposed;
  // every member below simply delegates to _M_base.
  template<>
  struct atomic<bool>
  {
  private:
    // Underlying implementation object; all operations delegate to it.
    __atomic_base<bool> _M_base;

  public:
    atomic() noexcept = default;
    ~atomic() noexcept = default;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    /// Initializes the contained value to @a __i (not an atomic operation).
    constexpr atomic(bool __i) noexcept : _M_base(__i) { }

    /// Atomically store @a __i (seq_cst) and return it.
    bool
    operator=(bool __i) noexcept
    { return _M_base.operator=(__i); }

    bool
    operator=(bool __i) volatile noexcept
    { return _M_base.operator=(__i); }

    /// Atomically load the contained value (seq_cst).
    operator bool() const noexcept
    { return _M_base.load(); }

    operator bool() const volatile noexcept
    { return _M_base.load(); }

    /// True if operations on this object are lock-free.
    bool
    is_lock_free() const noexcept { return _M_base.is_lock_free(); }

    bool
    is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query; the ATOMIC_*_LOCK_FREE
    // macros are 2 exactly when the type is always lock-free.
    static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
#endif

    /// Atomically replace the contained value with @a __i.
    void
    store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { _M_base.store(__i, __m); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
    { _M_base.store(__i, __m); }

    /// Atomically read the contained value.
    bool
    load(memory_order __m = memory_order_seq_cst) const noexcept
    { return _M_base.load(__m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile noexcept
    { return _M_base.load(__m); }

    /// Atomically replace the contained value with @a __i and return
    /// the previous value.
    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.exchange(__i, __m); }

    bool
    exchange(bool __i,
             memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.exchange(__i, __m); }

    /// If the contained value equals @a __i1, replace it with @a __i2
    /// using order @a __m1; otherwise load the current value into
    /// @a __i1 using order @a __m2.  The weak form may fail spuriously.
    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    // Single-order overloads: the failure order is derived from __m
    // inside __atomic_base.
    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                     memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    /// As compare_exchange_weak, but never fails spuriously.
    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
00166 
00167 
  /**
   *  @brief Generic atomic type, primary class template.
   *
   *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
    private:
      // Align 1/2/4/8/16-byte types to at least their size, so a
      // single lock-free hardware instruction can be used where one
      // exists.  Zero (no extra requirement) when sizeof(_Tp) is not
      // a power of two or exceeds 16 bytes.
      static constexpr int _S_min_alignment
        = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
        ? 0 : sizeof(_Tp);

      // Final alignment: the larger of the minimum above and the
      // type's natural alignment.
      static constexpr int _S_alignment
        = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      alignas(_S_alignment) _Tp _M_i;

      static_assert(__is_trivially_copyable(_Tp),
                    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
                    "Incomplete or zero-sized types are not supported");

    public:
      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the contained value to @a __i (not an atomic operation).
      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      /// Atomically load the contained value (seq_cst).
      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      /// Atomically store @a __i (seq_cst) and return it.
      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.  Its low bits
        // encode the guaranteed alignment of any object of this type,
        // which is all __atomic_is_lock_free needs to decide.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-__alignof(_M_i)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-__alignof(_M_i)));
      }

#if __cplusplus > 201402L
      // C++17: second argument 0 queries objects of typical alignment
      // for this size.
      static constexpr bool is_always_lock_free
        = __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      /// Atomically replace the contained value with @a __i.
      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), __m); }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), __m); }

      /// Atomically read the contained value.
      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        // Read into a raw, suitably aligned buffer so _Tp need not be
        // default constructible.
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, __m);
        return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_load(std::__addressof(_M_i), __ptr, __m);
        return *__ptr;
      }

      /// Atomically replace the contained value with @a __i and return
      /// the previous value.
      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        // Buffer for the old value; see load() for why a raw buffer.
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, __m);
        return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
        _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
        __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
                          __ptr, __m);
        return *__ptr;
      }

      /// If the contained value equals @a __e (bytewise), replace it
      /// with @a __i using order @a __s; otherwise load the current
      /// value into @a __e using order @a __f.  The weak form (fourth
      /// builtin argument true) may fail spuriously.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, __s, __f);
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
                            memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         true, __s, __f);
      }

      // Single-order overloads derive the failure order from __m.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                            memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
                     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
                                     __cmpexch_failure_order(__m)); }

      /// As compare_exchange_weak, but never fails spuriously (fourth
      /// builtin argument false).
      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
                              memory_order __f) volatile noexcept
      {
        return __atomic_compare_exchange(std::__addressof(_M_i),
                                         std::__addressof(__e),
                                         std::__addressof(__i),
                                         false, __s, __f);
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                               memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
                     memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
                                       __cmpexch_failure_order(__m)); }
    };
00348 
00349 
  /// Partial specialization for pointer types.
  // Thin wrapper over __atomic_base<_Tp*> (by containment), adding the
  // pointer-arithmetic operators and fetch_add/fetch_sub in units of
  // whole objects (ptrdiff_t elements, not bytes).
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      typedef _Tp*                      __pointer_type;
      typedef __atomic_base<_Tp*>       __base_type;
      // Underlying implementation object; all operations delegate to it.
      __base_type                       _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the contained pointer to @a __p (not atomic).
      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      /// Atomically load the contained pointer (seq_cst).
      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      /// Atomically store @a __p (seq_cst) and return it.
      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      // Atomic pointer increment/decrement; post-forms return the old
      // value, pre-forms the new value.
      __pointer_type
      operator++(int) noexcept
      { return _M_b++; }

      __pointer_type
      operator++(int) volatile noexcept
      { return _M_b++; }

      __pointer_type
      operator--(int) noexcept
      { return _M_b--; }

      __pointer_type
      operator--(int) volatile noexcept
      { return _M_b--; }

      __pointer_type
      operator++() noexcept
      { return ++_M_b; }

      __pointer_type
      operator++() volatile noexcept
      { return ++_M_b; }

      __pointer_type
      operator--() noexcept
      { return --_M_b; }

      __pointer_type
      operator--() volatile noexcept
      { return --_M_b; }

      // Atomic pointer arithmetic in units of _Tp elements.
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator+=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return _M_b.operator-=(__d); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return _M_b.operator-=(__d); }

      /// True if operations on this object are lock-free.
      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query for pointer types.
    static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      /// Atomically replace the contained pointer with @a __p.
      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      /// Atomically read the contained pointer.
      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      /// Atomically replace the contained pointer with @a __p and
      /// return the previous value.
      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      // NB: The weak forms below delegate to the base's *strong* CAS.
      // That is conforming — weak is merely permitted (not required)
      // to fail spuriously, so a strong CAS implements it correctly.
      // Presumably __atomic_base<_Tp*> provides no weak CAS; confirm
      // against <bits/atomic_base.h>.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      // Single-order overloads derive the failure order from __m.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__p1, __p2, __m,
                                     __cmpexch_failure_order(__m));
      }

      /// If the contained pointer equals @a __p1, replace it with
      /// @a __p2 (order @a __m1); otherwise load the current value
      /// into @a __p1 (order @a __m2).  Never fails spuriously.
      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return _M_b.compare_exchange_strong(__p1, __p2, __m,
                                            __cmpexch_failure_order(__m));
      }

      /// Atomically add @a __d elements to the contained pointer and
      /// return the previous value.
      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_add(__d, __m); }

      __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_add(__d, __m); }

      /// Atomically subtract @a __d elements from the contained
      /// pointer and return the previous value.
      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.fetch_sub(__d, __m); }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.fetch_sub(__d, __m); }
    };
00542 
00543 
  /// Explicit specialization for char.
  // Inherits all atomic operations from __atomic_base<char>; only the
  // special member functions and the C++17 lock-free trait are added.
  template<>
    struct atomic<char> : __atomic_base<char>
    {
      typedef char                      __integral_type;
      typedef __atomic_base<char>       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the value to @a __i (not an atomic operation).
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query.
    static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
#endif
    };
00566 
00567   /// Explicit specialization for signed char.
00568   template<>
00569     struct atomic<signed char> : __atomic_base<signed char>
00570     {
00571       typedef signed char               __integral_type;
00572       typedef __atomic_base<signed char>        __base_type;
00573 
00574       atomic() noexcept= default;
00575       ~atomic() noexcept = default;
00576       atomic(const atomic&) = delete;
00577       atomic& operator=(const atomic&) = delete;
00578       atomic& operator=(const atomic&) volatile = delete;
00579 
00580       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00581 
00582       using __base_type::operator __integral_type;
00583       using __base_type::operator=;
00584 
00585 #if __cplusplus > 201402L
00586     static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
00587 #endif
00588     };
00589 
00590   /// Explicit specialization for unsigned char.
00591   template<>
00592     struct atomic<unsigned char> : __atomic_base<unsigned char>
00593     {
00594       typedef unsigned char             __integral_type;
00595       typedef __atomic_base<unsigned char>      __base_type;
00596 
00597       atomic() noexcept= default;
00598       ~atomic() noexcept = default;
00599       atomic(const atomic&) = delete;
00600       atomic& operator=(const atomic&) = delete;
00601       atomic& operator=(const atomic&) volatile = delete;
00602 
00603       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00604 
00605       using __base_type::operator __integral_type;
00606       using __base_type::operator=;
00607 
00608 #if __cplusplus > 201402L
00609     static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
00610 #endif
00611     };
00612 
  /// Explicit specialization for short.
  // Inherits all atomic operations from __atomic_base<short>.
  template<>
    struct atomic<short> : __atomic_base<short>
    {
      typedef short                     __integral_type;
      typedef __atomic_base<short>              __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the value to @a __i (not an atomic operation).
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query.
    static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };
00635 
  /// Explicit specialization for unsigned short.
  // Inherits all atomic operations from __atomic_base<unsigned short>.
  template<>
    struct atomic<unsigned short> : __atomic_base<unsigned short>
    {
      typedef unsigned short            __integral_type;
      typedef __atomic_base<unsigned short>             __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the value to @a __i (not an atomic operation).
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query.
    static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
#endif
    };
00658 
  /// Explicit specialization for int.
  // Inherits all atomic operations from __atomic_base<int>.
  template<>
    struct atomic<int> : __atomic_base<int>
    {
      typedef int                       __integral_type;
      typedef __atomic_base<int>                __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the value to @a __i (not an atomic operation).
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query.
    static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };
00681 
  /// Explicit specialization for unsigned int.
  // Inherits all atomic operations from __atomic_base<unsigned int>.
  template<>
    struct atomic<unsigned int> : __atomic_base<unsigned int>
    {
      typedef unsigned int              __integral_type;
      typedef __atomic_base<unsigned int>       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the value to @a __i (not an atomic operation).
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query.
    static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
#endif
    };
00704 
  /// Explicit specialization for long.
  // Inherits all atomic operations from __atomic_base<long>.
  template<>
    struct atomic<long> : __atomic_base<long>
    {
      typedef long                      __integral_type;
      typedef __atomic_base<long>       __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the value to @a __i (not an atomic operation).
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query.
    static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };
00727 
  /// Explicit specialization for unsigned long.
  // Inherits all atomic operations from __atomic_base<unsigned long>.
  template<>
    struct atomic<unsigned long> : __atomic_base<unsigned long>
    {
      typedef unsigned long             __integral_type;
      typedef __atomic_base<unsigned long>      __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the value to @a __i (not an atomic operation).
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query.
    static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
#endif
    };
00750 
  /// Explicit specialization for long long.
  // Inherits all atomic operations from __atomic_base<long long>.
  template<>
    struct atomic<long long> : __atomic_base<long long>
    {
      typedef long long                 __integral_type;
      typedef __atomic_base<long long>          __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the value to @a __i (not an atomic operation).
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query.
    static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };
00773 
  /// Explicit specialization for unsigned long long.
  // Inherits all atomic operations from __atomic_base<unsigned long long>.
  template<>
    struct atomic<unsigned long long> : __atomic_base<unsigned long long>
    {
      typedef unsigned long long        __integral_type;
      typedef __atomic_base<unsigned long long>         __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the value to @a __i (not an atomic operation).
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query.
    static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
#endif
    };
00796 
  /// Explicit specialization for wchar_t.
  // Inherits all atomic operations from __atomic_base<wchar_t>.
  template<>
    struct atomic<wchar_t> : __atomic_base<wchar_t>
    {
      typedef wchar_t                   __integral_type;
      typedef __atomic_base<wchar_t>    __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the value to @a __i (not an atomic operation).
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query.
    static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
#endif
    };
00819 
  /// Explicit specialization for char16_t.
  // Inherits all atomic operations from __atomic_base<char16_t>.
  template<>
    struct atomic<char16_t> : __atomic_base<char16_t>
    {
      typedef char16_t                  __integral_type;
      typedef __atomic_base<char16_t>   __base_type;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      /// Initializes the value to @a __i (not an atomic operation).
      constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;

#if __cplusplus > 201402L
    // C++17: compile-time lock-freedom query.
    static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
#endif
    };
00842 
00843   /// Explicit specialization for char32_t.
00844   template<>
00845     struct atomic<char32_t> : __atomic_base<char32_t>
00846     {
00847       typedef char32_t                  __integral_type;
00848       typedef __atomic_base<char32_t>   __base_type;
00849 
00850       atomic() noexcept = default;
00851       ~atomic() noexcept = default;
00852       atomic(const atomic&) = delete;
00853       atomic& operator=(const atomic&) = delete;
00854       atomic& operator=(const atomic&) volatile = delete;
00855 
00856       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00857 
00858       using __base_type::operator __integral_type;
00859       using __base_type::operator=;
00860 
00861 #if __cplusplus > 201402L
00862     static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
00863 #endif
00864     };
00865 
00866 
  // Standard convenience typedefs for atomic versions of the builtin
  // character and integer types ([atomics.syn]).  These are plain
  // typedefs of the corresponding atomic<T> specializations, not
  // distinct types.

  /// atomic_bool
  typedef atomic<bool>                  atomic_bool;

  /// atomic_char
  typedef atomic<char>                  atomic_char;

  /// atomic_schar
  typedef atomic<signed char>           atomic_schar;

  /// atomic_uchar
  typedef atomic<unsigned char>         atomic_uchar;

  /// atomic_short
  typedef atomic<short>                 atomic_short;

  /// atomic_ushort
  typedef atomic<unsigned short>        atomic_ushort;

  /// atomic_int
  typedef atomic<int>                   atomic_int;

  /// atomic_uint
  typedef atomic<unsigned int>          atomic_uint;

  /// atomic_long
  typedef atomic<long>                  atomic_long;

  /// atomic_ulong
  typedef atomic<unsigned long>         atomic_ulong;

  /// atomic_llong
  typedef atomic<long long>             atomic_llong;

  /// atomic_ullong
  typedef atomic<unsigned long long>    atomic_ullong;

  /// atomic_wchar_t
  typedef atomic<wchar_t>               atomic_wchar_t;

  /// atomic_char16_t
  typedef atomic<char16_t>              atomic_char16_t;

  /// atomic_char32_t
  typedef atomic<char32_t>              atomic_char32_t;
00911 
#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  // Typedefs for atomic versions of the <cstdint> exact-width,
  // least-width, and fast-width integer types.  Only available when
  // the underlying C99 stdint types are (hence the guard).

  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 2441. Exact-width atomic typedefs should be provided

  /// atomic_int8_t
  typedef atomic<int8_t>                atomic_int8_t;

  /// atomic_uint8_t
  typedef atomic<uint8_t>               atomic_uint8_t;

  /// atomic_int16_t
  typedef atomic<int16_t>               atomic_int16_t;

  /// atomic_uint16_t
  typedef atomic<uint16_t>              atomic_uint16_t;

  /// atomic_int32_t
  typedef atomic<int32_t>               atomic_int32_t;

  /// atomic_uint32_t
  typedef atomic<uint32_t>              atomic_uint32_t;

  /// atomic_int64_t
  typedef atomic<int64_t>               atomic_int64_t;

  /// atomic_uint64_t
  typedef atomic<uint64_t>              atomic_uint64_t;


  /// atomic_int_least8_t
  typedef atomic<int_least8_t>          atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef atomic<uint_least8_t>         atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef atomic<int_least16_t>         atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef atomic<uint_least16_t>        atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef atomic<int_least32_t>         atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef atomic<uint_least32_t>        atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef atomic<int_least64_t>         atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef atomic<uint_least64_t>        atomic_uint_least64_t;


  /// atomic_int_fast8_t
  typedef atomic<int_fast8_t>           atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef atomic<uint_fast8_t>          atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef atomic<int_fast16_t>          atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef atomic<uint_fast16_t>         atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef atomic<int_fast32_t>          atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef atomic<uint_fast32_t>         atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef atomic<int_fast64_t>          atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef atomic<uint_fast64_t>         atomic_uint_fast64_t;
#endif
00990 
00991 
  // Typedefs for atomic versions of the pointer-sized and maximum-width
  // integer types.  intptr_t/uintptr_t/size_t/ptrdiff_t are always
  // available; intmax_t/uintmax_t only with C99 stdint support.

  /// atomic_intptr_t
  typedef atomic<intptr_t>              atomic_intptr_t;

  /// atomic_uintptr_t
  typedef atomic<uintptr_t>             atomic_uintptr_t;

  /// atomic_size_t
  typedef atomic<size_t>                atomic_size_t;

  /// atomic_ptrdiff_t
  typedef atomic<ptrdiff_t>             atomic_ptrdiff_t;

#ifdef _GLIBCXX_USE_C99_STDINT_TR1
  /// atomic_intmax_t
  typedef atomic<intmax_t>              atomic_intmax_t;

  /// atomic_uintmax_t
  typedef atomic<uintmax_t>             atomic_uintmax_t;
#endif
01011 
  // Function definitions, atomic_flag operations.

  /// Atomically set the flag using memory order @p __m and return its
  /// previous value.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  /// Overload for volatile-qualified atomic_flag objects.
  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
                                    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  /// Atomically clear the flag using memory order @p __m.
  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  /// Overload for volatile-qualified atomic_flag objects.
  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
                             memory_order __m) noexcept
  { __a->clear(__m); }

  /// Atomically set the flag with sequentially consistent ordering and
  /// return its previous value.
  inline bool
  atomic_flag_test_and_set(atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic_flag objects.
  inline bool
  atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
  { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }

  /// Atomically clear the flag with sequentially consistent ordering.
  inline void
  atomic_flag_clear(atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }

  /// Overload for volatile-qualified atomic_flag objects.
  inline void
  atomic_flag_clear(volatile atomic_flag* __a) noexcept
  { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
01047 
01048 
  // Function templates generally applicable to atomic types.
  // Each operation comes as a pair of overloads, for atomic<_ITp>*
  // and volatile atomic<_ITp>*, both simply forwarding to the
  // corresponding member function.

  /// Return true if operations on @p *__a are lock-free.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  /// Initialize @p *__a with the value @p __i (implemented here as a
  /// relaxed store).
  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  /// Atomically store @p __i into @p *__a with memory order @p __m.
  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                          memory_order __m) noexcept
    { __a->store(__i, __m); }

  /// Atomically load the value of @p *__a with memory order @p __m.
  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
                         memory_order __m) noexcept
    { return __a->load(__m); }

  /// Atomically replace the value of @p *__a with @p __i using memory
  /// order @p __m; returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  /// Weak compare-and-exchange: if @p *__a equals @p *__i1, store
  /// @p __i2 (order @p __m1) and return true; otherwise copy the
  /// current value into @p *__i1 (order @p __m2) and return false.
  /// May fail spuriously.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1,
                                          memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  /// Strong compare-and-exchange: as the weak form, but does not fail
  /// spuriously.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
01136 
01137 
  // Convenience forms of the operations above: each forwards to the
  // corresponding *_explicit function with memory_order_seq_cst.

  /// Atomically store @p __i into @p *__a (sequentially consistent).
  template<typename _ITp>
    inline void
    atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline void
    atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically load the value of @p *__a (sequentially consistent).
  template<typename _ITp>
    inline _ITp
    atomic_load(const atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile atomic<_ITp>* __a) noexcept
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  /// Atomically replace the value of @p *__a with @p __i (sequentially
  /// consistent); returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  /// Weak compare-and-exchange with sequentially consistent ordering
  /// for both success and failure.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  /// Strong compare-and-exchange with sequentially consistent ordering
  /// for both success and failure.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2) noexcept
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }
01207 
  // Function templates for atomic_integral operations only, using
  // __atomic_base. Template argument should be constrained to
  // integral types as specified in the standard, excluding address
  // types.

  /// Atomically add @p __i to @p *__a with memory order @p __m;
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  /// Atomically subtract @p __i from @p *__a with memory order @p __m;
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  /// Atomically AND @p __i into @p *__a with memory order @p __m;
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  /// Atomically OR @p __i into @p *__a with memory order @p __m;
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  /// Atomically XOR @p __i into @p *__a with memory order @p __m;
  /// returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  // Sequentially consistent convenience forms of the fetch operations.

  /// Atomically add @p __i to @p *__a; returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically subtract @p __i from @p *__a; returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically AND @p __i into @p *__a; returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically OR @p __i into @p *__a; returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  /// Atomically XOR @p __i into @p *__a; returns the previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
01321 
01322 
  // Partial specializations for pointers.
  // The offset argument is a ptrdiff_t; arithmetic is in units of the
  // pointed-to type, as performed by atomic<_ITp*>::fetch_add/fetch_sub.

  /// Atomically add @p __d to the pointer in @p *__a with memory order
  /// @p __m; returns the previous pointer value.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_add(__d, __m); }

  /// Atomically add @p __d to the pointer in @p *__a (default ordering
  /// of the member fetch_add); returns the previous pointer value.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_add(__d); }

  /// Atomically subtract @p __d from the pointer in @p *__a with memory
  /// order @p __m; returns the previous pointer value.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
                              ptrdiff_t __d, memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
                              memory_order __m) noexcept
    { return __a->fetch_sub(__d, __m); }

  /// Atomically subtract @p __d from the pointer in @p *__a (default
  /// ordering of the member fetch_sub); returns the previous value.
  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }

  template<typename _ITp>
    inline _ITp*
    atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
    { return __a->fetch_sub(__d); }
01367   // @} group atomics
01368 
01369 _GLIBCXX_END_NAMESPACE_VERSION
01370 } // namespace
01371 
01372 #endif // C++11
01373 
01374 #endif // _GLIBCXX_ATOMIC