libstdc++
atomic
00001 // -*- C++ -*- header.
00002 
00003 // Copyright (C) 2008-2019 Free Software Foundation, Inc.
00004 //
00005 // This file is part of the GNU ISO C++ Library.  This library is free
00006 // software; you can redistribute it and/or modify it under the
00007 // terms of the GNU General Public License as published by the
00008 // Free Software Foundation; either version 3, or (at your option)
00009 // any later version.
00010 
00011 // This library is distributed in the hope that it will be useful,
00012 // but WITHOUT ANY WARRANTY; without even the implied warranty of
00013 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
00014 // GNU General Public License for more details.
00015 
00016 // Under Section 7 of GPL version 3, you are granted additional
00017 // permissions described in the GCC Runtime Library Exception, version
00018 // 3.1, as published by the Free Software Foundation.
00019 
00020 // You should have received a copy of the GNU General Public License and
00021 // a copy of the GCC Runtime Library Exception along with this program;
00022 // see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
00023 // <http://www.gnu.org/licenses/>.
00024 
00025 /** @file include/atomic
00026  *  This is a Standard C++ Library header.
00027  */
00028 
00029 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
00030 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
00031 
00032 #ifndef _GLIBCXX_ATOMIC
00033 #define _GLIBCXX_ATOMIC 1
00034 
00035 #pragma GCC system_header
00036 
00037 #if __cplusplus < 201103L
00038 # include <bits/c++0x_warning.h>
00039 #else
00040 
00041 #include <bits/atomic_base.h>
00042 #include <bits/move.h>
00043 
00044 namespace std _GLIBCXX_VISIBILITY(default)
00045 {
00046 _GLIBCXX_BEGIN_NAMESPACE_VERSION
00047 
00048   /**
00049    * @addtogroup atomics
00050    * @{
00051    */
00052 
00053 #if __cplusplus >= 201703L
00054 # define __cpp_lib_atomic_is_always_lock_free 201603
00055 #endif
00056 
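(Editorial note, not part of the header.) The feature-test macro above advertises the C++17 is_always_lock_free members. A minimal sketch of how client code might use it, assuming a C++17 compiler and a target where atomic<int> is lock-free:

#include <atomic>

#if defined(__cpp_lib_atomic_is_always_lock_free)
// Compile-time check; fails on targets where atomic<int> may take a lock.
static_assert(std::atomic<int>::is_always_lock_free,
              "this sketch assumes a lock-free atomic<int>");
#endif

int main()
{
  std::atomic<long long> x{0};
  // is_lock_free() can still differ at run time, e.g. when lock-freedom
  // depends on alignment or the exact CPU.
  return x.is_lock_free() ? 0 : 1;
}
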
00057   template<typename _Tp>
00058     struct atomic;
00059 
00060   /// atomic<bool>
00061   // NB: No operators or fetch-operations for this type.
00062   template<>
00063   struct atomic<bool>
00064   {
00065     using value_type = bool;
00066 
00067   private:
00068     __atomic_base<bool> _M_base;
00069 
00070   public:
00071     atomic() noexcept = default;
00072     ~atomic() noexcept = default;
00073     atomic(const atomic&) = delete;
00074     atomic& operator=(const atomic&) = delete;
00075     atomic& operator=(const atomic&) volatile = delete;
00076 
00077     constexpr atomic(bool __i) noexcept : _M_base(__i) { }
00078 
00079     bool
00080     operator=(bool __i) noexcept
00081     { return _M_base.operator=(__i); }
00082 
00083     bool
00084     operator=(bool __i) volatile noexcept
00085     { return _M_base.operator=(__i); }
00086 
00087     operator bool() const noexcept
00088     { return _M_base.load(); }
00089 
00090     operator bool() const volatile noexcept
00091     { return _M_base.load(); }
00092 
00093     bool
00094     is_lock_free() const noexcept { return _M_base.is_lock_free(); }
00095 
00096     bool
00097     is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
00098 
00099 #if __cplusplus >= 201703L
00100     static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
00101 #endif
00102 
00103     void
00104     store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
00105     { _M_base.store(__i, __m); }
00106 
00107     void
00108     store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
00109     { _M_base.store(__i, __m); }
00110 
00111     bool
00112     load(memory_order __m = memory_order_seq_cst) const noexcept
00113     { return _M_base.load(__m); }
00114 
00115     bool
00116     load(memory_order __m = memory_order_seq_cst) const volatile noexcept
00117     { return _M_base.load(__m); }
00118 
00119     bool
00120     exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
00121     { return _M_base.exchange(__i, __m); }
00122 
00123     bool
00124     exchange(bool __i,
00125              memory_order __m = memory_order_seq_cst) volatile noexcept
00126     { return _M_base.exchange(__i, __m); }
00127 
00128     bool
00129     compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
00130                           memory_order __m2) noexcept
00131     { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
00132 
00133     bool
00134     compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
00135                           memory_order __m2) volatile noexcept
00136     { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
00137 
00138     bool
00139     compare_exchange_weak(bool& __i1, bool __i2,
00140                           memory_order __m = memory_order_seq_cst) noexcept
00141     { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
00142 
00143     bool
00144     compare_exchange_weak(bool& __i1, bool __i2,
00145                      memory_order __m = memory_order_seq_cst) volatile noexcept
00146     { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
00147 
00148     bool
00149     compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
00150                             memory_order __m2) noexcept
00151     { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
00152 
00153     bool
00154     compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
00155                             memory_order __m2) volatile noexcept
00156     { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
00157 
00158     bool
00159     compare_exchange_strong(bool& __i1, bool __i2,
00160                             memory_order __m = memory_order_seq_cst) noexcept
00161     { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
00162 
00163     bool
00164     compare_exchange_strong(bool& __i1, bool __i2,
00165                     memory_order __m = memory_order_seq_cst) volatile noexcept
00166     { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
00167   };
00168 
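(Editorial note, not part of the header.) A minimal usage sketch of the atomic<bool> specialization as a one-shot claim flag; the name try_claim is hypothetical:

#include <atomic>

std::atomic<bool> done{false};

// Exactly one caller wins the false -> true transition; losers observe
// 'expected' rewritten to true and return false.
bool try_claim()
{
  bool expected = false;
  return done.compare_exchange_strong(expected, true,
                                      std::memory_order_acq_rel,
                                      std::memory_order_acquire);
}
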
00169 
00170   /**
00171    *  @brief Generic atomic type, primary class template.
00172    *
00173  *  @tparam _Tp  Type to be made atomic, must be trivially copyable.
00174    */
00175   template<typename _Tp>
00176     struct atomic
00177     {
00178       using value_type = _Tp;
00179 
00180     private:
00181       // Align 1/2/4/8/16-byte types to at least their size.
00182       static constexpr int _S_min_alignment
00183         = (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
00184         ? 0 : sizeof(_Tp);
00185 
00186       static constexpr int _S_alignment
00187         = _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);
00188 
00189       alignas(_S_alignment) _Tp _M_i;
00190 
00191       static_assert(__is_trivially_copyable(_Tp),
00192                     "std::atomic requires a trivially copyable type");
00193 
00194       static_assert(sizeof(_Tp) > 0,
00195                     "Incomplete or zero-sized types are not supported");
00196 
00197     public:
00198       atomic() noexcept = default;
00199       ~atomic() noexcept = default;
00200       atomic(const atomic&) = delete;
00201       atomic& operator=(const atomic&) = delete;
00202       atomic& operator=(const atomic&) volatile = delete;
00203 
00204       constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }
00205 
00206       operator _Tp() const noexcept
00207       { return load(); }
00208 
00209       operator _Tp() const volatile noexcept
00210       { return load(); }
00211 
00212       _Tp
00213       operator=(_Tp __i) noexcept
00214       { store(__i); return __i; }
00215 
00216       _Tp
00217       operator=(_Tp __i) volatile noexcept
00218       { store(__i); return __i; }
00219 
00220       bool
00221       is_lock_free() const noexcept
00222       {
00223         // Produce a fake, minimally aligned pointer.
00224         return __atomic_is_lock_free(sizeof(_M_i),
00225             reinterpret_cast<void *>(-_S_alignment));
00226       }
00227 
00228       bool
00229       is_lock_free() const volatile noexcept
00230       {
00231         // Produce a fake, minimally aligned pointer.
00232         return __atomic_is_lock_free(sizeof(_M_i),
00233             reinterpret_cast<void *>(-_S_alignment));
00234       }
00235 
00236 #if __cplusplus >= 201703L
00237       static constexpr bool is_always_lock_free
00238         = __atomic_always_lock_free(sizeof(_M_i), 0);
00239 #endif
00240 
00241       void
00242       store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
00243       { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
00244 
00245       void
00246       store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
00247       { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }
00248 
00249       _Tp
00250       load(memory_order __m = memory_order_seq_cst) const noexcept
00251       {
00252         alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
00253         _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
00254         __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
00255         return *__ptr;
00256       }
00257 
00258       _Tp
00259       load(memory_order __m = memory_order_seq_cst) const volatile noexcept
00260       {
00261         alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
00262         _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
00263         __atomic_load(std::__addressof(_M_i), __ptr, int(__m));
00264         return *__ptr;
00265       }
00266 
00267       _Tp
00268       exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
00269       {
00270         alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
00271         _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
00272         __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
00273                           __ptr, int(__m));
00274         return *__ptr;
00275       }
00276 
00277       _Tp
00278       exchange(_Tp __i,
00279                memory_order __m = memory_order_seq_cst) volatile noexcept
00280       {
00281         alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
00282         _Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
00283         __atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
00284                           __ptr, int(__m));
00285         return *__ptr;
00286       }
00287 
00288       bool
00289       compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
00290                             memory_order __f) noexcept
00291       {
00292         return __atomic_compare_exchange(std::__addressof(_M_i),
00293                                          std::__addressof(__e),
00294                                          std::__addressof(__i),
00295                                          true, int(__s), int(__f));
00296       }
00297 
00298       bool
00299       compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
00300                             memory_order __f) volatile noexcept
00301       {
00302         return __atomic_compare_exchange(std::__addressof(_M_i),
00303                                          std::__addressof(__e),
00304                                          std::__addressof(__i),
00305                                          true, int(__s), int(__f));
00306       }
00307 
00308       bool
00309       compare_exchange_weak(_Tp& __e, _Tp __i,
00310                             memory_order __m = memory_order_seq_cst) noexcept
00311       { return compare_exchange_weak(__e, __i, __m,
00312                                      __cmpexch_failure_order(__m)); }
00313 
00314       bool
00315       compare_exchange_weak(_Tp& __e, _Tp __i,
00316                      memory_order __m = memory_order_seq_cst) volatile noexcept
00317       { return compare_exchange_weak(__e, __i, __m,
00318                                      __cmpexch_failure_order(__m)); }
00319 
00320       bool
00321       compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
00322                               memory_order __f) noexcept
00323       {
00324         return __atomic_compare_exchange(std::__addressof(_M_i),
00325                                          std::__addressof(__e),
00326                                          std::__addressof(__i),
00327                                          false, int(__s), int(__f));
00328       }
00329 
00330       bool
00331       compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
00332                               memory_order __f) volatile noexcept
00333       {
00334         return __atomic_compare_exchange(std::__addressof(_M_i),
00335                                          std::__addressof(__e),
00336                                          std::__addressof(__i),
00337                                          false, int(__s), int(__f));
00338       }
00339 
00340       bool
00341       compare_exchange_strong(_Tp& __e, _Tp __i,
00342                                memory_order __m = memory_order_seq_cst) noexcept
00343       { return compare_exchange_strong(__e, __i, __m,
00344                                        __cmpexch_failure_order(__m)); }
00345 
00346       bool
00347       compare_exchange_strong(_Tp& __e, _Tp __i,
00348                      memory_order __m = memory_order_seq_cst) volatile noexcept
00349       { return compare_exchange_strong(__e, __i, __m,
00350                                        __cmpexch_failure_order(__m)); }
00351     };
00352 
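(Editorial note, not part of the header.) A minimal sketch of the primary template applied to a small trivially copyable aggregate; the names Pair, slot and bump are hypothetical, and whether the 16-byte case is lock-free depends on the target:

#include <atomic>
#include <cstdint>

struct Pair { std::uint64_t lo; std::uint64_t hi; };  // trivially copyable, no padding

std::atomic<Pair> slot{Pair{0, 0}};

void bump()
{
  Pair cur = slot.load(std::memory_order_relaxed);
  Pair next;
  // compare_exchange_weak reloads 'cur' on failure, so the loop retries
  // against the latest stored value.
  do {
    next = Pair{cur.lo + 1, cur.hi};
  } while (!slot.compare_exchange_weak(cur, next,
                                       std::memory_order_release,
                                       std::memory_order_relaxed));
}
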
00353 
00354   /// Partial specialization for pointer types.
00355   template<typename _Tp>
00356     struct atomic<_Tp*>
00357     {
00358       using value_type = _Tp*;
00359       using difference_type = ptrdiff_t;
00360 
00361       typedef _Tp*                      __pointer_type;
00362       typedef __atomic_base<_Tp*>       __base_type;
00363       __base_type                       _M_b;
00364 
00365       atomic() noexcept = default;
00366       ~atomic() noexcept = default;
00367       atomic(const atomic&) = delete;
00368       atomic& operator=(const atomic&) = delete;
00369       atomic& operator=(const atomic&) volatile = delete;
00370 
00371       constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }
00372 
00373       operator __pointer_type() const noexcept
00374       { return __pointer_type(_M_b); }
00375 
00376       operator __pointer_type() const volatile noexcept
00377       { return __pointer_type(_M_b); }
00378 
00379       __pointer_type
00380       operator=(__pointer_type __p) noexcept
00381       { return _M_b.operator=(__p); }
00382 
00383       __pointer_type
00384       operator=(__pointer_type __p) volatile noexcept
00385       { return _M_b.operator=(__p); }
00386 
00387       __pointer_type
00388       operator++(int) noexcept
00389       {
00390 #if __cplusplus >= 201703L
00391         static_assert( is_object<_Tp>::value, "pointer to object type" );
00392 #endif
00393         return _M_b++;
00394       }
00395 
00396       __pointer_type
00397       operator++(int) volatile noexcept
00398       {
00399 #if __cplusplus >= 201703L
00400         static_assert( is_object<_Tp>::value, "pointer to object type" );
00401 #endif
00402         return _M_b++;
00403       }
00404 
00405       __pointer_type
00406       operator--(int) noexcept
00407       {
00408 #if __cplusplus >= 201703L
00409         static_assert( is_object<_Tp>::value, "pointer to object type" );
00410 #endif
00411         return _M_b--;
00412       }
00413 
00414       __pointer_type
00415       operator--(int) volatile noexcept
00416       {
00417 #if __cplusplus >= 201703L
00418         static_assert( is_object<_Tp>::value, "pointer to object type" );
00419 #endif
00420         return _M_b--;
00421       }
00422 
00423       __pointer_type
00424       operator++() noexcept
00425       {
00426 #if __cplusplus >= 201703L
00427         static_assert( is_object<_Tp>::value, "pointer to object type" );
00428 #endif
00429         return ++_M_b;
00430       }
00431 
00432       __pointer_type
00433       operator++() volatile noexcept
00434       {
00435 #if __cplusplus >= 201703L
00436         static_assert( is_object<_Tp>::value, "pointer to object type" );
00437 #endif
00438         return ++_M_b;
00439       }
00440 
00441       __pointer_type
00442       operator--() noexcept
00443       {
00444 #if __cplusplus >= 201703L
00445         static_assert( is_object<_Tp>::value, "pointer to object type" );
00446 #endif
00447         return --_M_b;
00448       }
00449 
00450       __pointer_type
00451       operator--() volatile noexcept
00452       {
00453 #if __cplusplus >= 201703L
00454         static_assert( is_object<_Tp>::value, "pointer to object type" );
00455 #endif
00456         return --_M_b;
00457       }
00458 
00459       __pointer_type
00460       operator+=(ptrdiff_t __d) noexcept
00461       {
00462 #if __cplusplus >= 201703L
00463         static_assert( is_object<_Tp>::value, "pointer to object type" );
00464 #endif
00465         return _M_b.operator+=(__d);
00466       }
00467 
00468       __pointer_type
00469       operator+=(ptrdiff_t __d) volatile noexcept
00470       {
00471 #if __cplusplus >= 201703L
00472         static_assert( is_object<_Tp>::value, "pointer to object type" );
00473 #endif
00474         return _M_b.operator+=(__d);
00475       }
00476 
00477       __pointer_type
00478       operator-=(ptrdiff_t __d) noexcept
00479       {
00480 #if __cplusplus >= 201703L
00481         static_assert( is_object<_Tp>::value, "pointer to object type" );
00482 #endif
00483         return _M_b.operator-=(__d);
00484       }
00485 
00486       __pointer_type
00487       operator-=(ptrdiff_t __d) volatile noexcept
00488       {
00489 #if __cplusplus >= 201703L
00490         static_assert( is_object<_Tp>::value, "pointer to object type" );
00491 #endif
00492         return _M_b.operator-=(__d);
00493       }
00494 
00495       bool
00496       is_lock_free() const noexcept
00497       { return _M_b.is_lock_free(); }
00498 
00499       bool
00500       is_lock_free() const volatile noexcept
00501       { return _M_b.is_lock_free(); }
00502 
00503 #if __cplusplus >= 201703L
00504     static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
00505 #endif
00506 
00507       void
00508       store(__pointer_type __p,
00509             memory_order __m = memory_order_seq_cst) noexcept
00510       { return _M_b.store(__p, __m); }
00511 
00512       void
00513       store(__pointer_type __p,
00514             memory_order __m = memory_order_seq_cst) volatile noexcept
00515       { return _M_b.store(__p, __m); }
00516 
00517       __pointer_type
00518       load(memory_order __m = memory_order_seq_cst) const noexcept
00519       { return _M_b.load(__m); }
00520 
00521       __pointer_type
00522       load(memory_order __m = memory_order_seq_cst) const volatile noexcept
00523       { return _M_b.load(__m); }
00524 
00525       __pointer_type
00526       exchange(__pointer_type __p,
00527                memory_order __m = memory_order_seq_cst) noexcept
00528       { return _M_b.exchange(__p, __m); }
00529 
00530       __pointer_type
00531       exchange(__pointer_type __p,
00532                memory_order __m = memory_order_seq_cst) volatile noexcept
00533       { return _M_b.exchange(__p, __m); }
00534 
00535       bool
00536       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
00537                             memory_order __m1, memory_order __m2) noexcept
00538       { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
00539 
00540       bool
00541       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
00542                             memory_order __m1,
00543                             memory_order __m2) volatile noexcept
00544       { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
00545 
00546       bool
00547       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
00548                             memory_order __m = memory_order_seq_cst) noexcept
00549       {
00550         return compare_exchange_weak(__p1, __p2, __m,
00551                                      __cmpexch_failure_order(__m));
00552       }
00553 
00554       bool
00555       compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
00556                     memory_order __m = memory_order_seq_cst) volatile noexcept
00557       {
00558         return compare_exchange_weak(__p1, __p2, __m,
00559                                      __cmpexch_failure_order(__m));
00560       }
00561 
00562       bool
00563       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
00564                               memory_order __m1, memory_order __m2) noexcept
00565       { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
00566 
00567       bool
00568       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
00569                               memory_order __m1,
00570                               memory_order __m2) volatile noexcept
00571       { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
00572 
00573       bool
00574       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
00575                               memory_order __m = memory_order_seq_cst) noexcept
00576       {
00577         return _M_b.compare_exchange_strong(__p1, __p2, __m,
00578                                             __cmpexch_failure_order(__m));
00579       }
00580 
00581       bool
00582       compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
00583                     memory_order __m = memory_order_seq_cst) volatile noexcept
00584       {
00585         return _M_b.compare_exchange_strong(__p1, __p2, __m,
00586                                             __cmpexch_failure_order(__m));
00587       }
00588 
00589       __pointer_type
00590       fetch_add(ptrdiff_t __d,
00591                 memory_order __m = memory_order_seq_cst) noexcept
00592       {
00593 #if __cplusplus >= 201703L
00594         static_assert( is_object<_Tp>::value, "pointer to object type" );
00595 #endif
00596         return _M_b.fetch_add(__d, __m);
00597       }
00598 
00599       __pointer_type
00600       fetch_add(ptrdiff_t __d,
00601                 memory_order __m = memory_order_seq_cst) volatile noexcept
00602       {
00603 #if __cplusplus >= 201703L
00604         static_assert( is_object<_Tp>::value, "pointer to object type" );
00605 #endif
00606         return _M_b.fetch_add(__d, __m);
00607       }
00608 
00609       __pointer_type
00610       fetch_sub(ptrdiff_t __d,
00611                 memory_order __m = memory_order_seq_cst) noexcept
00612       {
00613 #if __cplusplus >= 201703L
00614         static_assert( is_object<_Tp>::value, "pointer to object type" );
00615 #endif
00616         return _M_b.fetch_sub(__d, __m);
00617       }
00618 
00619       __pointer_type
00620       fetch_sub(ptrdiff_t __d,
00621                 memory_order __m = memory_order_seq_cst) volatile noexcept
00622       {
00623 #if __cplusplus >= 201703L
00624         static_assert( is_object<_Tp>::value, "pointer to object type" );
00625 #endif
00626         return _M_b.fetch_sub(__d, __m);
00627       }
00628     };
00629 
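(Editorial note, not part of the header.) A minimal sketch of the pointer partial specialization handing out distinct array slots; data, cursor and claim_one are hypothetical names, and it assumes no more than eight claims are ever made so the cursor stays within the array:

#include <atomic>

static int data[8] = {1, 2, 3, 4, 5, 6, 7, 8};
static std::atomic<int*> cursor{data};

// fetch_add advances the cursor by one element and returns the previous
// pointer, so concurrent callers receive distinct slots.
int* claim_one()
{
  return cursor.fetch_add(1, std::memory_order_relaxed);
}
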
00630 
00631   /// Explicit specialization for char.
00632   template<>
00633     struct atomic<char> : __atomic_base<char>
00634     {
00635       typedef char                      __integral_type;
00636       typedef __atomic_base<char>       __base_type;
00637 
00638       atomic() noexcept = default;
00639       ~atomic() noexcept = default;
00640       atomic(const atomic&) = delete;
00641       atomic& operator=(const atomic&) = delete;
00642       atomic& operator=(const atomic&) volatile = delete;
00643 
00644       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00645 
00646       using __base_type::operator __integral_type;
00647       using __base_type::operator=;
00648 
00649 #if __cplusplus >= 201703L
00650     static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
00651 #endif
00652     };
00653 
00654   /// Explicit specialization for signed char.
00655   template<>
00656     struct atomic<signed char> : __atomic_base<signed char>
00657     {
00658       typedef signed char               __integral_type;
00659       typedef __atomic_base<signed char>        __base_type;
00660 
00661       atomic() noexcept = default;
00662       ~atomic() noexcept = default;
00663       atomic(const atomic&) = delete;
00664       atomic& operator=(const atomic&) = delete;
00665       atomic& operator=(const atomic&) volatile = delete;
00666 
00667       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00668 
00669       using __base_type::operator __integral_type;
00670       using __base_type::operator=;
00671 
00672 #if __cplusplus >= 201703L
00673     static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
00674 #endif
00675     };
00676 
00677   /// Explicit specialization for unsigned char.
00678   template<>
00679     struct atomic<unsigned char> : __atomic_base<unsigned char>
00680     {
00681       typedef unsigned char             __integral_type;
00682       typedef __atomic_base<unsigned char>      __base_type;
00683 
00684       atomic() noexcept = default;
00685       ~atomic() noexcept = default;
00686       atomic(const atomic&) = delete;
00687       atomic& operator=(const atomic&) = delete;
00688       atomic& operator=(const atomic&) volatile = delete;
00689 
00690       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00691 
00692       using __base_type::operator __integral_type;
00693       using __base_type::operator=;
00694 
00695 #if __cplusplus >= 201703L
00696     static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
00697 #endif
00698     };
00699 
00700   /// Explicit specialization for short.
00701   template<>
00702     struct atomic<short> : __atomic_base<short>
00703     {
00704       typedef short                     __integral_type;
00705       typedef __atomic_base<short>              __base_type;
00706 
00707       atomic() noexcept = default;
00708       ~atomic() noexcept = default;
00709       atomic(const atomic&) = delete;
00710       atomic& operator=(const atomic&) = delete;
00711       atomic& operator=(const atomic&) volatile = delete;
00712 
00713       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00714 
00715       using __base_type::operator __integral_type;
00716       using __base_type::operator=;
00717 
00718 #if __cplusplus >= 201703L
00719     static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
00720 #endif
00721     };
00722 
00723   /// Explicit specialization for unsigned short.
00724   template<>
00725     struct atomic<unsigned short> : __atomic_base<unsigned short>
00726     {
00727       typedef unsigned short            __integral_type;
00728       typedef __atomic_base<unsigned short>             __base_type;
00729 
00730       atomic() noexcept = default;
00731       ~atomic() noexcept = default;
00732       atomic(const atomic&) = delete;
00733       atomic& operator=(const atomic&) = delete;
00734       atomic& operator=(const atomic&) volatile = delete;
00735 
00736       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00737 
00738       using __base_type::operator __integral_type;
00739       using __base_type::operator=;
00740 
00741 #if __cplusplus >= 201703L
00742     static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
00743 #endif
00744     };
00745 
00746   /// Explicit specialization for int.
00747   template<>
00748     struct atomic<int> : __atomic_base<int>
00749     {
00750       typedef int                       __integral_type;
00751       typedef __atomic_base<int>                __base_type;
00752 
00753       atomic() noexcept = default;
00754       ~atomic() noexcept = default;
00755       atomic(const atomic&) = delete;
00756       atomic& operator=(const atomic&) = delete;
00757       atomic& operator=(const atomic&) volatile = delete;
00758 
00759       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00760 
00761       using __base_type::operator __integral_type;
00762       using __base_type::operator=;
00763 
00764 #if __cplusplus >= 201703L
00765     static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
00766 #endif
00767     };
00768 
00769   /// Explicit specialization for unsigned int.
00770   template<>
00771     struct atomic<unsigned int> : __atomic_base<unsigned int>
00772     {
00773       typedef unsigned int              __integral_type;
00774       typedef __atomic_base<unsigned int>       __base_type;
00775 
00776       atomic() noexcept = default;
00777       ~atomic() noexcept = default;
00778       atomic(const atomic&) = delete;
00779       atomic& operator=(const atomic&) = delete;
00780       atomic& operator=(const atomic&) volatile = delete;
00781 
00782       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00783 
00784       using __base_type::operator __integral_type;
00785       using __base_type::operator=;
00786 
00787 #if __cplusplus >= 201703L
00788     static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
00789 #endif
00790     };
00791 
00792   /// Explicit specialization for long.
00793   template<>
00794     struct atomic<long> : __atomic_base<long>
00795     {
00796       typedef long                      __integral_type;
00797       typedef __atomic_base<long>       __base_type;
00798 
00799       atomic() noexcept = default;
00800       ~atomic() noexcept = default;
00801       atomic(const atomic&) = delete;
00802       atomic& operator=(const atomic&) = delete;
00803       atomic& operator=(const atomic&) volatile = delete;
00804 
00805       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00806 
00807       using __base_type::operator __integral_type;
00808       using __base_type::operator=;
00809 
00810 #if __cplusplus >= 201703L
00811     static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
00812 #endif
00813     };
00814 
00815   /// Explicit specialization for unsigned long.
00816   template<>
00817     struct atomic<unsigned long> : __atomic_base<unsigned long>
00818     {
00819       typedef unsigned long             __integral_type;
00820       typedef __atomic_base<unsigned long>      __base_type;
00821 
00822       atomic() noexcept = default;
00823       ~atomic() noexcept = default;
00824       atomic(const atomic&) = delete;
00825       atomic& operator=(const atomic&) = delete;
00826       atomic& operator=(const atomic&) volatile = delete;
00827 
00828       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00829 
00830       using __base_type::operator __integral_type;
00831       using __base_type::operator=;
00832 
00833 #if __cplusplus >= 201703L
00834     static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
00835 #endif
00836     };
00837 
00838   /// Explicit specialization for long long.
00839   template<>
00840     struct atomic<long long> : __atomic_base<long long>
00841     {
00842       typedef long long                 __integral_type;
00843       typedef __atomic_base<long long>          __base_type;
00844 
00845       atomic() noexcept = default;
00846       ~atomic() noexcept = default;
00847       atomic(const atomic&) = delete;
00848       atomic& operator=(const atomic&) = delete;
00849       atomic& operator=(const atomic&) volatile = delete;
00850 
00851       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00852 
00853       using __base_type::operator __integral_type;
00854       using __base_type::operator=;
00855 
00856 #if __cplusplus >= 201703L
00857     static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
00858 #endif
00859     };
00860 
00861   /// Explicit specialization for unsigned long long.
00862   template<>
00863     struct atomic<unsigned long long> : __atomic_base<unsigned long long>
00864     {
00865       typedef unsigned long long        __integral_type;
00866       typedef __atomic_base<unsigned long long>         __base_type;
00867 
00868       atomic() noexcept = default;
00869       ~atomic() noexcept = default;
00870       atomic(const atomic&) = delete;
00871       atomic& operator=(const atomic&) = delete;
00872       atomic& operator=(const atomic&) volatile = delete;
00873 
00874       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00875 
00876       using __base_type::operator __integral_type;
00877       using __base_type::operator=;
00878 
00879 #if __cplusplus >= 201703L
00880     static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
00881 #endif
00882     };
00883 
00884   /// Explicit specialization for wchar_t.
00885   template<>
00886     struct atomic<wchar_t> : __atomic_base<wchar_t>
00887     {
00888       typedef wchar_t                   __integral_type;
00889       typedef __atomic_base<wchar_t>    __base_type;
00890 
00891       atomic() noexcept = default;
00892       ~atomic() noexcept = default;
00893       atomic(const atomic&) = delete;
00894       atomic& operator=(const atomic&) = delete;
00895       atomic& operator=(const atomic&) volatile = delete;
00896 
00897       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00898 
00899       using __base_type::operator __integral_type;
00900       using __base_type::operator=;
00901 
00902 #if __cplusplus >= 201703L
00903     static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
00904 #endif
00905     };
00906 
00907 #ifdef _GLIBCXX_USE_CHAR8_T
00908   /// Explicit specialization for char8_t.
00909   template<>
00910     struct atomic<char8_t> : __atomic_base<char8_t>
00911     {
00912       typedef char8_t                   __integral_type;
00913       typedef __atomic_base<char8_t>    __base_type;
00914 
00915       atomic() noexcept = default;
00916       ~atomic() noexcept = default;
00917       atomic(const atomic&) = delete;
00918       atomic& operator=(const atomic&) = delete;
00919       atomic& operator=(const atomic&) volatile = delete;
00920 
00921       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00922 
00923       using __base_type::operator __integral_type;
00924       using __base_type::operator=;
00925 
00926 #if __cplusplus > 201402L
00927     static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
00928 #endif
00929     };
00930 #endif
00931 
00932   /// Explicit specialization for char16_t.
00933   template<>
00934     struct atomic<char16_t> : __atomic_base<char16_t>
00935     {
00936       typedef char16_t                  __integral_type;
00937       typedef __atomic_base<char16_t>   __base_type;
00938 
00939       atomic() noexcept = default;
00940       ~atomic() noexcept = default;
00941       atomic(const atomic&) = delete;
00942       atomic& operator=(const atomic&) = delete;
00943       atomic& operator=(const atomic&) volatile = delete;
00944 
00945       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00946 
00947       using __base_type::operator __integral_type;
00948       using __base_type::operator=;
00949 
00950 #if __cplusplus >= 201703L
00951     static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
00952 #endif
00953     };
00954 
00955   /// Explicit specialization for char32_t.
00956   template<>
00957     struct atomic<char32_t> : __atomic_base<char32_t>
00958     {
00959       typedef char32_t                  __integral_type;
00960       typedef __atomic_base<char32_t>   __base_type;
00961 
00962       atomic() noexcept = default;
00963       ~atomic() noexcept = default;
00964       atomic(const atomic&) = delete;
00965       atomic& operator=(const atomic&) = delete;
00966       atomic& operator=(const atomic&) volatile = delete;
00967 
00968       constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
00969 
00970       using __base_type::operator __integral_type;
00971       using __base_type::operator=;
00972 
00973 #if __cplusplus >= 201703L
00974     static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
00975 #endif
00976     };
00977 
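(Editorial note, not part of the header.) The integral specializations above inherit their fetch operations and operators from __atomic_base, so they work as plain counters. A minimal reference-count sketch; refs, acquire_ref and release_ref are hypothetical names:

#include <atomic>

std::atomic<int> refs{1};

void acquire_ref() { refs.fetch_add(1, std::memory_order_relaxed); }

// Returns true only for the caller that drops the last reference.
bool release_ref()
{
  return refs.fetch_sub(1, std::memory_order_acq_rel) == 1;
}
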
00978 
00979   /// atomic_bool
00980   typedef atomic<bool>                  atomic_bool;
00981 
00982   /// atomic_char
00983   typedef atomic<char>                  atomic_char;
00984 
00985   /// atomic_schar
00986   typedef atomic<signed char>           atomic_schar;
00987 
00988   /// atomic_uchar
00989   typedef atomic<unsigned char>         atomic_uchar;
00990 
00991   /// atomic_short
00992   typedef atomic<short>                 atomic_short;
00993 
00994   /// atomic_ushort
00995   typedef atomic<unsigned short>        atomic_ushort;
00996 
00997   /// atomic_int
00998   typedef atomic<int>                   atomic_int;
00999 
01000   /// atomic_uint
01001   typedef atomic<unsigned int>          atomic_uint;
01002 
01003   /// atomic_long
01004   typedef atomic<long>                  atomic_long;
01005 
01006   /// atomic_ulong
01007   typedef atomic<unsigned long>         atomic_ulong;
01008 
01009   /// atomic_llong
01010   typedef atomic<long long>             atomic_llong;
01011 
01012   /// atomic_ullong
01013   typedef atomic<unsigned long long>    atomic_ullong;
01014 
01015   /// atomic_wchar_t
01016   typedef atomic<wchar_t>               atomic_wchar_t;
01017 
01018 #ifdef _GLIBCXX_USE_CHAR8_T
01019   /// atomic_char8_t
01020   typedef atomic<char8_t>               atomic_char8_t;
01021 #endif
01022 
01023   /// atomic_char16_t
01024   typedef atomic<char16_t>              atomic_char16_t;
01025 
01026   /// atomic_char32_t
01027   typedef atomic<char32_t>              atomic_char32_t;
01028 
01029 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
01030   // _GLIBCXX_RESOLVE_LIB_DEFECTS
01031   // 2441. Exact-width atomic typedefs should be provided
01032 
01033   /// atomic_int8_t
01034   typedef atomic<int8_t>                atomic_int8_t;
01035 
01036   /// atomic_uint8_t
01037   typedef atomic<uint8_t>               atomic_uint8_t;
01038 
01039   /// atomic_int16_t
01040   typedef atomic<int16_t>               atomic_int16_t;
01041 
01042   /// atomic_uint16_t
01043   typedef atomic<uint16_t>              atomic_uint16_t;
01044 
01045   /// atomic_int32_t
01046   typedef atomic<int32_t>               atomic_int32_t;
01047 
01048   /// atomic_uint32_t
01049   typedef atomic<uint32_t>              atomic_uint32_t;
01050 
01051   /// atomic_int64_t
01052   typedef atomic<int64_t>               atomic_int64_t;
01053 
01054   /// atomic_uint64_t
01055   typedef atomic<uint64_t>              atomic_uint64_t;
01056 
01057 
01058   /// atomic_int_least8_t
01059   typedef atomic<int_least8_t>          atomic_int_least8_t;
01060 
01061   /// atomic_uint_least8_t
01062   typedef atomic<uint_least8_t>         atomic_uint_least8_t;
01063 
01064   /// atomic_int_least16_t
01065   typedef atomic<int_least16_t>         atomic_int_least16_t;
01066 
01067   /// atomic_uint_least16_t
01068   typedef atomic<uint_least16_t>        atomic_uint_least16_t;
01069 
01070   /// atomic_int_least32_t
01071   typedef atomic<int_least32_t>         atomic_int_least32_t;
01072 
01073   /// atomic_uint_least32_t
01074   typedef atomic<uint_least32_t>        atomic_uint_least32_t;
01075 
01076   /// atomic_int_least64_t
01077   typedef atomic<int_least64_t>         atomic_int_least64_t;
01078 
01079   /// atomic_uint_least64_t
01080   typedef atomic<uint_least64_t>        atomic_uint_least64_t;
01081 
01082 
01083   /// atomic_int_fast8_t
01084   typedef atomic<int_fast8_t>           atomic_int_fast8_t;
01085 
01086   /// atomic_uint_fast8_t
01087   typedef atomic<uint_fast8_t>          atomic_uint_fast8_t;
01088 
01089   /// atomic_int_fast16_t
01090   typedef atomic<int_fast16_t>          atomic_int_fast16_t;
01091 
01092   /// atomic_uint_fast16_t
01093   typedef atomic<uint_fast16_t>         atomic_uint_fast16_t;
01094 
01095   /// atomic_int_fast32_t
01096   typedef atomic<int_fast32_t>          atomic_int_fast32_t;
01097 
01098   /// atomic_uint_fast32_t
01099   typedef atomic<uint_fast32_t>         atomic_uint_fast32_t;
01100 
01101   /// atomic_int_fast64_t
01102   typedef atomic<int_fast64_t>          atomic_int_fast64_t;
01103 
01104   /// atomic_uint_fast64_t
01105   typedef atomic<uint_fast64_t>         atomic_uint_fast64_t;
01106 #endif
01107 
01108 
01109   /// atomic_intptr_t
01110   typedef atomic<intptr_t>              atomic_intptr_t;
01111 
01112   /// atomic_uintptr_t
01113   typedef atomic<uintptr_t>             atomic_uintptr_t;
01114 
01115   /// atomic_size_t
01116   typedef atomic<size_t>                atomic_size_t;
01117 
01118   /// atomic_ptrdiff_t
01119   typedef atomic<ptrdiff_t>             atomic_ptrdiff_t;
01120 
01121 #ifdef _GLIBCXX_USE_C99_STDINT_TR1
01122   /// atomic_intmax_t
01123   typedef atomic<intmax_t>              atomic_intmax_t;
01124 
01125   /// atomic_uintmax_t
01126   typedef atomic<uintmax_t>             atomic_uintmax_t;
01127 #endif
01128 
01129   // Function definitions, atomic_flag operations.
01130   inline bool
01131   atomic_flag_test_and_set_explicit(atomic_flag* __a,
01132                                     memory_order __m) noexcept
01133   { return __a->test_and_set(__m); }
01134 
01135   inline bool
01136   atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
01137                                     memory_order __m) noexcept
01138   { return __a->test_and_set(__m); }
01139 
01140   inline void
01141   atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
01142   { __a->clear(__m); }
01143 
01144   inline void
01145   atomic_flag_clear_explicit(volatile atomic_flag* __a,
01146                              memory_order __m) noexcept
01147   { __a->clear(__m); }
01148 
01149   inline bool
01150   atomic_flag_test_and_set(atomic_flag* __a) noexcept
01151   { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
01152 
01153   inline bool
01154   atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
01155   { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
01156 
01157   inline void
01158   atomic_flag_clear(atomic_flag* __a) noexcept
01159   { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
01160 
01161   inline void
01162   atomic_flag_clear(volatile atomic_flag* __a) noexcept
01163   { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
01164 
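(Editorial note, not part of the header.) A minimal test-and-set spinlock built from std::atomic_flag and the free functions above; lock_flag, lock and unlock are hypothetical names:

#include <atomic>

std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;

void lock()
{
  // Spin until the previous owner clears the flag; acquire pairs with the
  // release in unlock().
  while (std::atomic_flag_test_and_set_explicit(&lock_flag,
                                                std::memory_order_acquire))
    { }
}

void unlock()
{ std::atomic_flag_clear_explicit(&lock_flag, std::memory_order_release); }
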
01165 
01166   template<typename _Tp>
01167     using __atomic_val_t = typename atomic<_Tp>::value_type;
01168   template<typename _Tp>
01169     using __atomic_diff_t = typename atomic<_Tp>::difference_type;
01170 
01171   // [atomics.nonmembers] Non-member functions.
01172   // Function templates generally applicable to atomic types.
01173   template<typename _ITp>
01174     inline bool
01175     atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
01176     { return __a->is_lock_free(); }
01177 
01178   template<typename _ITp>
01179     inline bool
01180     atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
01181     { return __a->is_lock_free(); }
01182 
01183   template<typename _ITp>
01184     inline void
01185     atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
01186     { __a->store(__i, memory_order_relaxed); }
01187 
01188   template<typename _ITp>
01189     inline void
01190     atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
01191     { __a->store(__i, memory_order_relaxed); }
01192 
01193   template<typename _ITp>
01194     inline void
01195     atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
01196                           memory_order __m) noexcept
01197     { __a->store(__i, __m); }
01198 
01199   template<typename _ITp>
01200     inline void
01201     atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
01202                           memory_order __m) noexcept
01203     { __a->store(__i, __m); }
01204 
01205   template<typename _ITp>
01206     inline _ITp
01207     atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
01208     { return __a->load(__m); }
01209 
01210   template<typename _ITp>
01211     inline _ITp
01212     atomic_load_explicit(const volatile atomic<_ITp>* __a,
01213                          memory_order __m) noexcept
01214     { return __a->load(__m); }
01215 
01216   template<typename _ITp>
01217     inline _ITp
01218     atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
01219                              memory_order __m) noexcept
01220     { return __a->exchange(__i, __m); }
01221 
01222   template<typename _ITp>
01223     inline _ITp
01224     atomic_exchange_explicit(volatile atomic<_ITp>* __a,
01225                              __atomic_val_t<_ITp> __i,
01226                              memory_order __m) noexcept
01227     { return __a->exchange(__i, __m); }
01228 
01229   template<typename _ITp>
01230     inline bool
01231     atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
01232                                           __atomic_val_t<_ITp>* __i1,
01233                                           __atomic_val_t<_ITp> __i2,
01234                                           memory_order __m1,
01235                                           memory_order __m2) noexcept
01236     { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
01237 
01238   template<typename _ITp>
01239     inline bool
01240     atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
01241                                           __atomic_val_t<_ITp>* __i1,
01242                                           __atomic_val_t<_ITp> __i2,
01243                                           memory_order __m1,
01244                                           memory_order __m2) noexcept
01245     { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
01246 
01247   template<typename _ITp>
01248     inline bool
01249     atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
01250                                             __atomic_val_t<_ITp>* __i1,
01251                                             __atomic_val_t<_ITp> __i2,
01252                                             memory_order __m1,
01253                                             memory_order __m2) noexcept
01254     { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
01255 
01256   template<typename _ITp>
01257     inline bool
01258     atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
01259                                             __atomic_val_t<_ITp>* __i1,
01260                                             __atomic_val_t<_ITp> __i2,
01261                                             memory_order __m1,
01262                                             memory_order __m2) noexcept
01263     { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
01264 
01265 
01266   template<typename _ITp>
01267     inline void
01268     atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
01269     { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
01270 
01271   template<typename _ITp>
01272     inline void
01273     atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
01274     { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
01275 
01276   template<typename _ITp>
01277     inline _ITp
01278     atomic_load(const atomic<_ITp>* __a) noexcept
01279     { return atomic_load_explicit(__a, memory_order_seq_cst); }
01280 
01281   template<typename _ITp>
01282     inline _ITp
01283     atomic_load(const volatile atomic<_ITp>* __a) noexcept
01284     { return atomic_load_explicit(__a, memory_order_seq_cst); }
01285 
01286   template<typename _ITp>
01287     inline _ITp
01288     atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
01289     { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
01290 
01291   template<typename _ITp>
01292     inline _ITp
01293     atomic_exchange(volatile atomic<_ITp>* __a,
01294                     __atomic_val_t<_ITp> __i) noexcept
01295     { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
01296 
01297   template<typename _ITp>
01298     inline bool
01299     atomic_compare_exchange_weak(atomic<_ITp>* __a,
01300                                  __atomic_val_t<_ITp>* __i1,
01301                                  __atomic_val_t<_ITp> __i2) noexcept
01302     {
01303       return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
01304                                                    memory_order_seq_cst,
01305                                                    memory_order_seq_cst);
01306     }
01307 
01308   template<typename _ITp>
01309     inline bool
01310     atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
01311                                  __atomic_val_t<_ITp>* __i1,
01312                                  __atomic_val_t<_ITp> __i2) noexcept
01313     {
01314       return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
01315                                                    memory_order_seq_cst,
01316                                                    memory_order_seq_cst);
01317     }
01318 
01319   template<typename _ITp>
01320     inline bool
01321     atomic_compare_exchange_strong(atomic<_ITp>* __a,
01322                                    __atomic_val_t<_ITp>* __i1,
01323                                    __atomic_val_t<_ITp> __i2) noexcept
01324     {
01325       return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
01326                                                      memory_order_seq_cst,
01327                                                      memory_order_seq_cst);
01328     }
01329 
01330   template<typename _ITp>
01331     inline bool
01332     atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
01333                                    __atomic_val_t<_ITp>* __i1,
01334                                    __atomic_val_t<_ITp> __i2) noexcept
01335     {
01336       return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
01337                                                      memory_order_seq_cst,
01338                                                      memory_order_seq_cst);
01339     }
01340 
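(Editorial note, not part of the header.) A minimal sketch of the C-compatible non-member interface; note that the expected value is passed by pointer, unlike the member functions, which take it by reference. counter, set_if_zero and snapshot are hypothetical names:

#include <atomic>

std::atomic<int> counter{0};

bool set_if_zero(int desired)
{
  int expected = 0;
  // seq_cst on both success and failure, per the non-_explicit overload.
  return std::atomic_compare_exchange_strong(&counter, &expected, desired);
}

int snapshot()
{ return std::atomic_load(&counter); }
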
01341   // Function templates for atomic integral and atomic pointer operations only.
01342   // Some operations (and, or, xor) are only available for atomic integrals;
01343   // this is enforced by taking a parameter of type __atomic_base<_ITp>*.
01344 
01345   template<typename _ITp>
01346     inline _ITp
01347     atomic_fetch_add_explicit(atomic<_ITp>* __a,
01348                               __atomic_diff_t<_ITp> __i,
01349                               memory_order __m) noexcept
01350     { return __a->fetch_add(__i, __m); }
01351 
01352   template<typename _ITp>
01353     inline _ITp
01354     atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
01355                               __atomic_diff_t<_ITp> __i,
01356                               memory_order __m) noexcept
01357     { return __a->fetch_add(__i, __m); }
01358 
01359   template<typename _ITp>
01360     inline _ITp
01361     atomic_fetch_sub_explicit(atomic<_ITp>* __a,
01362                               __atomic_diff_t<_ITp> __i,
01363                               memory_order __m) noexcept
01364     { return __a->fetch_sub(__i, __m); }
01365 
01366   template<typename _ITp>
01367     inline _ITp
01368     atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
01369                               __atomic_diff_t<_ITp> __i,
01370                               memory_order __m) noexcept
01371     { return __a->fetch_sub(__i, __m); }
01372 
01373   template<typename _ITp>
01374     inline _ITp
01375     atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
01376                               __atomic_val_t<_ITp> __i,
01377                               memory_order __m) noexcept
01378     { return __a->fetch_and(__i, __m); }
01379 
01380   template<typename _ITp>
01381     inline _ITp
01382     atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
01383                               __atomic_val_t<_ITp> __i,
01384                               memory_order __m) noexcept
01385     { return __a->fetch_and(__i, __m); }
01386 
01387   template<typename _ITp>
01388     inline _ITp
01389     atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
01390                              __atomic_val_t<_ITp> __i,
01391                              memory_order __m) noexcept
01392     { return __a->fetch_or(__i, __m); }
01393 
01394   template<typename _ITp>
01395     inline _ITp
01396     atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
01397                              __atomic_val_t<_ITp> __i,
01398                              memory_order __m) noexcept
01399     { return __a->fetch_or(__i, __m); }
01400 
01401   template<typename _ITp>
01402     inline _ITp
01403     atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
01404                               __atomic_val_t<_ITp> __i,
01405                               memory_order __m) noexcept
01406     { return __a->fetch_xor(__i, __m); }
01407 
01408   template<typename _ITp>
01409     inline _ITp
01410     atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
01411                               __atomic_val_t<_ITp> __i,
01412                               memory_order __m) noexcept
01413     { return __a->fetch_xor(__i, __m); }
01414 
01415   template<typename _ITp>
01416     inline _ITp
01417     atomic_fetch_add(atomic<_ITp>* __a,
01418                      __atomic_diff_t<_ITp> __i) noexcept
01419     { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
01420 
01421   template<typename _ITp>
01422     inline _ITp
01423     atomic_fetch_add(volatile atomic<_ITp>* __a,
01424                      __atomic_diff_t<_ITp> __i) noexcept
01425     { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
01426 
01427   template<typename _ITp>
01428     inline _ITp
01429     atomic_fetch_sub(atomic<_ITp>* __a,
01430                      __atomic_diff_t<_ITp> __i) noexcept
01431     { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
01432 
01433   template<typename _ITp>
01434     inline _ITp
01435     atomic_fetch_sub(volatile atomic<_ITp>* __a,
01436                      __atomic_diff_t<_ITp> __i) noexcept
01437     { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
01438 
01439   template<typename _ITp>
01440     inline _ITp
01441     atomic_fetch_and(__atomic_base<_ITp>* __a,
01442                      __atomic_val_t<_ITp> __i) noexcept
01443     { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
01444 
01445   template<typename _ITp>
01446     inline _ITp
01447     atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
01448                      __atomic_val_t<_ITp> __i) noexcept
01449     { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
01450 
01451   template<typename _ITp>
01452     inline _ITp
01453     atomic_fetch_or(__atomic_base<_ITp>* __a,
01454                     __atomic_val_t<_ITp> __i) noexcept
01455     { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
01456 
01457   template<typename _ITp>
01458     inline _ITp
01459     atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
01460                     __atomic_val_t<_ITp> __i) noexcept
01461     { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
01462 
01463   template<typename _ITp>
01464     inline _ITp
01465     atomic_fetch_xor(__atomic_base<_ITp>* __a,
01466                      __atomic_val_t<_ITp> __i) noexcept
01467     { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
01468 
01469   template<typename _ITp>
01470     inline _ITp
01471     atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
01472                      __atomic_val_t<_ITp> __i) noexcept
01473     { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
01474 
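(Editorial note, not part of the header.) A minimal sketch of the non-member fetch operations on an atomic bitmask; flags, hits and record are hypothetical names. Passing std::atomic<unsigned>* where __atomic_base<unsigned>* is expected works because the integral specializations derive from __atomic_base:

#include <atomic>

std::atomic<unsigned> flags{0};
std::atomic<unsigned> hits{0};

void record(unsigned bit)
{
  std::atomic_fetch_or_explicit(&flags, bit, std::memory_order_relaxed);
  std::atomic_fetch_add(&hits, 1u);  // non-_explicit form is seq_cst
}
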
01475   /// @} group atomics
01476 
01477 _GLIBCXX_END_NAMESPACE_VERSION
01478 } // namespace
01479 
01480 #endif // C++11
01481 
01482 #endif // _GLIBCXX_ATOMIC