libstdc++
// -*- C++ -*- header.

// Copyright (C) 2008-2019 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */

#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((__always_inline__))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */

  /// Enumeration for memory_order
#if __cplusplus > 201703L
  enum class memory_order : int
    {
      relaxed,
      consume,
      acquire,
      release,
      acq_rel,
      seq_cst
    };

  inline constexpr memory_order memory_order_relaxed = memory_order::relaxed;
  inline constexpr memory_order memory_order_consume = memory_order::consume;
  inline constexpr memory_order memory_order_acquire = memory_order::acquire;
  inline constexpr memory_order memory_order_release = memory_order::release;
  inline constexpr memory_order memory_order_acq_rel = memory_order::acq_rel;
  inline constexpr memory_order memory_order_seq_cst = memory_order::seq_cst;
#else
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;
#endif

  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };

  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(int(__m) | int(__mod));
  }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(int(__m) & int(__mod));
  }

  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | __memory_order_modifier(__m & __memory_order_modifier_mask));
  }
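  // Illustrative note (added, not part of the original header): for a
  // compare-exchange, __cmpexch_failure_order derives the default failure
  // ordering from the success ordering by dropping any release component
  // while preserving the HLE modifier bits, so:
  //
  //   __cmpexch_failure_order(memory_order_seq_cst) == memory_order_seq_cst
  //   __cmpexch_failure_order(memory_order_acq_rel) == memory_order_acquire
  //   __cmpexch_failure_order(memory_order_release) == memory_order_relaxed
  //   __cmpexch_failure_order(memory_order_acquire) == memory_order_acquire
  //   __cmpexch_failure_order(memory_order_relaxed) == memory_order_relaxed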
  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(int(__m)); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(int(__m)); }

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
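  // Illustrative example (added, not part of the original header), assuming
  // data is a plain int and flag a std::atomic<bool>, both shared between
  // two threads: a release fence before a relaxed store pairs with an
  // acquire fence after a relaxed load that reads the stored value.
  //
  //   // thread 1
  //   data = 42;
  //   std::atomic_thread_fence(std::memory_order_release);
  //   flag.store(true, std::memory_order_relaxed);
  //
  //   // thread 2
  //   while (!flag.load(std::memory_order_relaxed)) { }
  //   std::atomic_thread_fence(std::memory_order_acquire);
  //   assert(data == 42);   // guaranteed: the write to data is visible
  //
  // atomic_signal_fence gives the same ordering, but only with respect to a
  // signal handler executed in the same thread.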
  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;


#define ATOMIC_VAR_INIT(_VI) { _VI }

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;

  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif

  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
  */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, int(__m));
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, int(__m));
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, int(__m));
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, int(__m));
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
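  // Illustrative example (added, not part of the original header):
  // atomic_flag is the building block for a minimal spinlock.  A sketch,
  // assuming a flag shared between threads:
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //
  //   void critical_section()
  //   {
  //     while (lock.test_and_set(std::memory_order_acquire))
  //       ;                                    // spin until the flag was clear
  //     // ... exclusive access here ...
  //     lock.clear(std::memory_order_release); // release the lock
  //   }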
  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char8_t  char8_t
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
      using value_type = _ITp;
      using difference_type = value_type;

    private:
      typedef _ITp __int_type;

      static constexpr int _S_alignment =
        sizeof(_ITp) > alignof(_ITp) ? sizeof(_ITp) : alignof(_ITp);

      alignas(_S_alignment) __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      bool
      is_lock_free() const noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Use a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }
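      // Explanatory note (added, not in the original header): the second
      // argument of __atomic_is_lock_free is normally a pointer to the
      // object, used to take its alignment into account.  Passing the fake
      // address reinterpret_cast<void*>(-_S_alignment) instead describes an
      // object with only the minimal guaranteed alignment, so the answer is
      // the same for every object of this type rather than depending on
      // where a particular instance happens to be placed.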
      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, int(__m));
      }


      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
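      // Illustrative example (added, not part of the original header): the
      // "weak" form may fail spuriously, so it is normally used in a retry
      // loop.  A sketch, assuming counter is a std::atomic<int>:
      //
      //   int expected = counter.load(std::memory_order_relaxed);
      //   while (!counter.compare_exchange_weak(expected, expected * 2,
      //                                         std::memory_order_acq_rel,
      //                                         std::memory_order_relaxed))
      //     { }  // on failure, expected is reloaded with the current value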
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                 memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, int(__m)); }
    };
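  // Illustrative example (added, not part of the original header):
  // __atomic_base is the base used by std::atomic for integral types, so a
  // statistics counter that only needs atomicity can use relaxed ordering:
  //
  //   std::atomic<unsigned long> hits{0};
  //
  //   void record_hit()
  //   { hits.fetch_add(1, std::memory_order_relaxed); }  // __atomic_fetch_add
  //
  //   // hits++ and hits += n go through the operators above and always use
  //   // memory_order_seq_cst.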

  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      bool
      is_lock_free() const noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        // Produce a fake, minimally aligned pointer.
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, int(__m));
      }


      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); }
    };
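  // Illustrative example (added, not part of the original header):
  // arithmetic on an atomic pointer is scaled by sizeof(*ptr) via
  // _M_type_size, just like ordinary pointer arithmetic.  A sketch,
  // assuming an int array:
  //
  //   int buf[8];
  //   std::atomic<int*> cursor{buf};
  //
  //   int* old = cursor.fetch_add(2);   // adds 2 * sizeof(int) bytes
  //   // old == buf, cursor now points at buf + 2
  //   ++cursor;                         // advances by one element, seq_cst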

  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif
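
// Illustrative example (added, not part of the original header): as the
// @file note says, this header is internal; user code reaches these types
// through <atomic>.  A minimal sketch:
//
//   #include <atomic>
//   #include <thread>
//
//   std::atomic<int> ready{0};
//
//   int main()
//   {
//     std::thread t([] { ready.store(1, std::memory_order_release); });
//     while (ready.load(std::memory_order_acquire) != 1)
//       { }                              // spin until the store is visible
//     t.join();
//   }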