// Copyright (C) 2008-2014 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.
/** @file bits/atomic_base.h
 *  This is an internal header file, included by other library headers.
 *  Do not attempt to use it directly. @headername{atomic}
 */
#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#include <bits/c++config.h>
#include <stdbool.h>
#include <stdint.h>
#include <bits/atomic_lockfree_defines.h>

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((always_inline))
#endif
namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  /**
   * @defgroup atomics Atomics
   *
   * Components for performing atomic operations.
   * @{
   */
  /// Enumeration for memory_order
  typedef enum memory_order
    {
      memory_order_relaxed,
      memory_order_consume,
      memory_order_acquire,
      memory_order_release,
      memory_order_acq_rel,
      memory_order_seq_cst
    } memory_order;
  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m | int(__mod));
  }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  {
    return memory_order(__m & int(__mod));
  }
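  // Usage sketch (assumes a target with hardware lock elision; the
  // operators above exist so a base order can carry an HLE hint;
  // __lock below stands for a hypothetical atomic integer):
  //
  //   memory_order __acq_hle = memory_order_acquire | __memory_order_hle_acquire;
  //   memory_order __rel_hle = memory_order_release | __memory_order_hle_release;
  //   __lock.exchange(1, __acq_hle);
  //   // ... critical section ...
  //   __lock.store(0, __rel_hle);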
  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | (__m & __memory_order_modifier_mask));
  }
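  // For reference, the failure order derived for each success order by
  // the function above (modifier bits are carried through unchanged):
  //
  //   memory_order_relaxed -> memory_order_relaxed
  //   memory_order_consume -> memory_order_consume
  //   memory_order_acquire -> memory_order_acquire
  //   memory_order_release -> memory_order_relaxed
  //   memory_order_acq_rel -> memory_order_acquire
  //   memory_order_seq_cst -> memory_order_seq_cst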
  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(__m); }

  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(__m); }
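  // Usage sketch (fence-based release/acquire pairing; __data and
  // __ready stand for hypothetical atomic<int> objects):
  //
  //   // Producer:
  //   __data.store(42, memory_order_relaxed);
  //   atomic_thread_fence(memory_order_release);
  //   __ready.store(1, memory_order_relaxed);
  //
  //   // Consumer:
  //   while (!__ready.load(memory_order_relaxed))
  //     { }
  //   atomic_thread_fence(memory_order_acquire);
  //   int __d = __data.load(memory_order_relaxed); // guaranteed to read 42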
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }
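  // Usage sketch: after a memory_order_consume load, kill_dependency
  // terminates the dependency chain, so values computed from its
  // result no longer require dependency ordering (__ptr below is a
  // hypothetical atomic pointer):
  //
  //   int* __p = __ptr.load(memory_order_consume);
  //   int __i  = kill_dependency(__p[0]); // the read __p[0] is ordered;
  //   // ... later uses of __i carry no dependency from the load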
  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;
  /// atomic_char
  typedef __atomic_base<char>                    atomic_char;

  /// atomic_schar
  typedef __atomic_base<signed char>             atomic_schar;

  /// atomic_uchar
  typedef __atomic_base<unsigned char>           atomic_uchar;

  /// atomic_short
  typedef __atomic_base<short>                   atomic_short;

  /// atomic_ushort
  typedef __atomic_base<unsigned short>          atomic_ushort;

  /// atomic_int
  typedef __atomic_base<int>                     atomic_int;

  /// atomic_uint
  typedef __atomic_base<unsigned int>            atomic_uint;

  /// atomic_long
  typedef __atomic_base<long>                    atomic_long;

  /// atomic_ulong
  typedef __atomic_base<unsigned long>           atomic_ulong;

  /// atomic_llong
  typedef __atomic_base<long long>               atomic_llong;

  /// atomic_ullong
  typedef __atomic_base<unsigned long long>      atomic_ullong;

  /// atomic_wchar_t
  typedef __atomic_base<wchar_t>                 atomic_wchar_t;

  /// atomic_char16_t
  typedef __atomic_base<char16_t>                atomic_char16_t;

  /// atomic_char32_t
  typedef __atomic_base<char32_t>                atomic_char32_t;
  /// atomic_int_least8_t
  typedef __atomic_base<int_least8_t>            atomic_int_least8_t;

  /// atomic_uint_least8_t
  typedef __atomic_base<uint_least8_t>           atomic_uint_least8_t;

  /// atomic_int_least16_t
  typedef __atomic_base<int_least16_t>           atomic_int_least16_t;

  /// atomic_uint_least16_t
  typedef __atomic_base<uint_least16_t>          atomic_uint_least16_t;

  /// atomic_int_least32_t
  typedef __atomic_base<int_least32_t>           atomic_int_least32_t;

  /// atomic_uint_least32_t
  typedef __atomic_base<uint_least32_t>          atomic_uint_least32_t;

  /// atomic_int_least64_t
  typedef __atomic_base<int_least64_t>           atomic_int_least64_t;

  /// atomic_uint_least64_t
  typedef __atomic_base<uint_least64_t>          atomic_uint_least64_t;

  /// atomic_int_fast8_t
  typedef __atomic_base<int_fast8_t>             atomic_int_fast8_t;

  /// atomic_uint_fast8_t
  typedef __atomic_base<uint_fast8_t>            atomic_uint_fast8_t;

  /// atomic_int_fast16_t
  typedef __atomic_base<int_fast16_t>            atomic_int_fast16_t;

  /// atomic_uint_fast16_t
  typedef __atomic_base<uint_fast16_t>           atomic_uint_fast16_t;

  /// atomic_int_fast32_t
  typedef __atomic_base<int_fast32_t>            atomic_int_fast32_t;

  /// atomic_uint_fast32_t
  typedef __atomic_base<uint_fast32_t>           atomic_uint_fast32_t;

  /// atomic_int_fast64_t
  typedef __atomic_base<int_fast64_t>            atomic_int_fast64_t;

  /// atomic_uint_fast64_t
  typedef __atomic_base<uint_fast64_t>           atomic_uint_fast64_t;

  /// atomic_intptr_t
  typedef __atomic_base<intptr_t>                atomic_intptr_t;

  /// atomic_uintptr_t
  typedef __atomic_base<uintptr_t>               atomic_uintptr_t;

  /// atomic_size_t
  typedef __atomic_base<size_t>                  atomic_size_t;

  /// atomic_intmax_t
  typedef __atomic_base<intmax_t>                atomic_intmax_t;

  /// atomic_uintmax_t
  typedef __atomic_base<uintmax_t>               atomic_uintmax_t;

  /// atomic_ptrdiff_t
  typedef __atomic_base<ptrdiff_t>               atomic_ptrdiff_t;
#define ATOMIC_VAR_INIT(_VI) { _VI }

  template<typename _Tp>
    struct atomic;

  template<typename _Tp>
    struct atomic<_Tp*>;
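  // Usage sketch (constant initialization with the macro above):
  //
  //   std::atomic<int> __counter = ATOMIC_VAR_INIT(0);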
  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
    typedef bool __atomic_flag_data_type;
#else
    typedef unsigned char __atomic_flag_data_type;
#endif
  /**
   *  @brief Base type for atomic_flag.
   *
   *  Base type is POD with data, allowing atomic_flag to derive from
   *  it and meet the standard layout type requirement. In addition to
   *  compatibility with a C interface, this allows different
   *  implementations of atomic_flag to use the same atomic operation
   *  functions, via a standard conversion to the __atomic_flag_base
   *  argument.
  */
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C
#define ATOMIC_FLAG_INIT { 0 }
  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    atomic_flag() noexcept = default;
    ~atomic_flag() noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    // Conversion to ATOMIC_FLAG_INIT.
    constexpr atomic_flag(bool __i) noexcept
      : __atomic_flag_base{ _S_init(__i) }
    { }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      return __atomic_test_and_set (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, __m);
    }

  private:
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
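  // Usage sketch (a minimal test-and-set spin lock built on the
  // operations above; illustrative only):
  //
  //   std::atomic_flag __lock = ATOMIC_FLAG_INIT;
  //
  //   void __acquire_lock()
  //   {
  //     while (__lock.test_and_set(memory_order_acquire))
  //       { } // spin while the flag was already set
  //   }
  //
  //   void __release_lock()
  //   { __lock.clear(memory_order_release); }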
  /// Base class for atomic integrals.
  //
  // For each of the integral types, define atomic_[integral type] struct
  //
  // atomic_bool     bool
  // atomic_char     char
  // atomic_schar    signed char
  // atomic_uchar    unsigned char
  // atomic_short    short
  // atomic_ushort   unsigned short
  // atomic_int      int
  // atomic_uint     unsigned int
  // atomic_long     long
  // atomic_ulong    unsigned long
  // atomic_llong    long long
  // atomic_ullong   unsigned long long
  // atomic_char16_t char16_t
  // atomic_char32_t char32_t
  // atomic_wchar_t  wchar_t
  //
  // NB: Assuming _ITp is an integral scalar type that is 1, 2, 4, or
  // 8 bytes, since that is what GCC built-in functions for atomic
  // memory access expect.
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __int_type;

      __int_type _M_i;

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __int_type convertible to _M_i.
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }

      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(_M_i), nullptr); }
      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_i, __i, __m);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                   memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
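      // Usage sketch (the canonical CAS loop; __a stands for a
      // hypothetical __atomic_base<int>):
      //
      //   int __expected = __a.load(memory_order_relaxed);
      //   while (!__a.compare_exchange_weak(__expected, __expected * 2,
      //                                     memory_order_acq_rel,
      //                                     memory_order_relaxed))
      //     { } // on failure __expected is refreshed with the current
      //         // value, so the new value is recomputed each iteration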
      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, __m); }
    };
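  // Usage sketch (fetch_add as a plain event counter; relaxed ordering
  // suffices when only the final count matters):
  //
  //   std::atomic_uint __hits(0);
  //   __hits.fetch_add(1, memory_order_relaxed); // returns the prior value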
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) volatile { return __d * sizeof(_PTp); }

    public:
      __atomic_base() noexcept = default;
      ~__atomic_base() noexcept = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) volatile = delete;

      // Requires __pointer_type convertible to _M_p.
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }

      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  memory_order_seq_cst); }
      bool
      is_lock_free() const noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }

      bool
      is_lock_free() const volatile noexcept
      { return __atomic_is_lock_free(sizeof(__pointer_type), nullptr); }
      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return __atomic_exchange_n(&_M_p, __p, __m);
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
    };
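  // Usage sketch (pointer fetch_add advances by whole elements, via
  // the _M_type_size helper above; __arr is a hypothetical array):
  //
  //   int __arr[4] = { 0, 1, 2, 3 };
  //   __atomic_base<int*> __p(__arr);
  //   int* __old = __p.fetch_add(2); // __old == __arr, __p now __arr + 2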
  // @} group atomics

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif