3 // Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 3, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // Under Section 7 of GPL version 3, you are granted additional
17 // permissions described in the GCC Runtime Library Exception, version
18 // 3.1, as published by the Free Software Foundation.
20 // You should have received a copy of the GNU General Public License and
21 // a copy of the GCC Runtime Library Exception along with this program;
22 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23 // <http://www.gnu.org/licenses/>.
26 * This is a Standard C++ Library header.
29 // Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30 // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
32 #ifndef _GLIBCXX_ATOMIC
33 #define _GLIBCXX_ATOMIC 1
35 #pragma GCC system_header
37 #ifndef __GXX_EXPERIMENTAL_CXX0X__
38 # include <bits/c++0x_warning.h>
41 #include <bits/atomic_base.h>
44 _GLIBCXX_BEGIN_NAMESPACE(std)
// [C++0x 29.3] kill_dependency: ends a memory_order_consume dependency
// chain and returns its argument unchanged.
// NOTE(review): the return type (_Tp) and the body are on lines elided
// from this excerpt — confirm against the full header.
52 template<typename _Tp>
54 kill_dependency(_Tp __y)
// Derives the failure ordering for the single-order compare_exchange
// overloads from the caller-supplied success ordering __m:
//   release -> relaxed, acq_rel -> acquire, anything else unchanged.
// (A failed CAS performs no store, so the release component is dropped.)
// NOTE(review): the trailing "return __mo2;" is on an elided line.
61 __calculate_memory_order(memory_order __m)
63 const bool __cond1 = __m == memory_order_release;
64 const bool __cond2 = __m == memory_order_acq_rel;
65 memory_order __mo1(__cond1 ? memory_order_relaxed : __m);
66 memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
71 // Three nested namespaces for atomic implementation details.
73 // The nested namespace inlined into std:: is determined by the value
74 // of the _GLIBCXX_ATOMIC_PROPERTY macro and the resulting
75 // ATOMIC_*_LOCK_FREE macros. See file atomic_base.h.
77 // 0 == __atomic0 == Never lock-free
78 // 1 == __atomic1 == Best available, sometimes lock-free
79 // 2 == __atomic2 == Always lock-free
80 #include <bits/atomic_0.h>
81 #include <bits/atomic_2.h>
84 /// 29.4.3, Generic atomic type, primary class template.
85 template<typename _Tp>
// An atomic object is neither copy-constructible nor copy-assignable.
94 atomic(const atomic&) = delete;
95 atomic& operator=(const atomic&) volatile = delete;
// Converting constructor: initializes the stored value _M_i from __i.
97 atomic(_Tp __i) : _M_i(__i) { }
// Assignment from _Tp performs a store (default seq_cst ordering, per
// store()'s default argument below) and yields the assigned value.
102 operator=(_Tp __i) { store(__i); return __i; }
// True when operations on this object are implemented lock-free.
105 is_lock_free() const volatile;
// Basic operations; memory_order_seq_cst is the default ordering.
108 store(_Tp, memory_order = memory_order_seq_cst) volatile;
111 load(memory_order = memory_order_seq_cst) const volatile;
114 exchange(_Tp __i, memory_order = memory_order_seq_cst) volatile;
// Compare-and-swap with separate success/failure orderings...
117 compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order) volatile;
120 compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order) volatile;
// ...and the single-order convenience overloads.
123 compare_exchange_weak(_Tp&, _Tp,
124 memory_order = memory_order_seq_cst) volatile;
127 compare_exchange_strong(_Tp&, _Tp,
128 memory_order = memory_order_seq_cst) volatile;
132 /// Partial specialization for pointer types.
// Typed facade over atomic_address: each operation delegates to the
// void*-based base class and static_casts the result back to _Tp*.
133 template<typename _Tp>
134 struct atomic<_Tp*> : atomic_address
// Non-copyable, like every atomic type.
138 atomic(const atomic&) = delete;
139 atomic& operator=(const atomic&) volatile = delete;
141 atomic(_Tp* __v) : atomic_address(__v) { }
144 store(_Tp* __v, memory_order __m = memory_order_seq_cst)
145 { atomic_address::store(__v, __m); }
148 load(memory_order __m = memory_order_seq_cst) const
149 { return static_cast<_Tp*>(atomic_address::load(__m)); }
152 exchange(_Tp* __v, memory_order __m = memory_order_seq_cst)
153 { return static_cast<_Tp*>(atomic_address::exchange(__v, __m)); }
// CAS overloads are defined out of line, later in this header.
156 compare_exchange_weak(_Tp*&, _Tp*, memory_order, memory_order);
159 compare_exchange_strong(_Tp*&, _Tp*, memory_order, memory_order);
162 compare_exchange_weak(_Tp*&, _Tp*, memory_order = memory_order_seq_cst);
165 compare_exchange_strong(_Tp*&, _Tp*, memory_order = memory_order_seq_cst);
// Pointer arithmetic in units of _Tp; the out-of-line definitions
// scale the offset by sizeof(_Tp).
168 fetch_add(ptrdiff_t, memory_order = memory_order_seq_cst);
171 fetch_sub(ptrdiff_t, memory_order = memory_order_seq_cst);
173 operator _Tp*() const
// Increment/decrement and compound assignment, all expressed through
// fetch_add/fetch_sub with the default seq_cst ordering.
184 operator++(int) { return fetch_add(1); }
187 operator--(int) { return fetch_sub(1); }
190 operator++() { return fetch_add(1) + 1; }
193 operator--() { return fetch_sub(1) - 1; }
196 operator+=(ptrdiff_t __d)
197 { return fetch_add(__d) + __d; }
200 operator-=(ptrdiff_t __d)
201 { return fetch_sub(__d) - __d; }
205 /// Explicit specialization for void*
207 struct atomic<void*> : public atomic_address
209 typedef void* __integral_type;
210 typedef atomic_address __base_type;
// Atomics are not copy-constructible or copy-assignable.
214 atomic(const atomic&) = delete;
215 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
217 atomic(__integral_type __i) : __base_type(__i) { }
219 using __base_type::operator __integral_type;
220 using __base_type::operator=;
223 /// Explicit specialization for bool.
225 struct atomic<bool> : public atomic_bool
227 typedef bool __integral_type;
228 typedef atomic_bool __base_type;
// Atomics are not copy-constructible or copy-assignable.
232 atomic(const atomic&) = delete;
233 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
235 atomic(__integral_type __i) : __base_type(__i) { }
237 using __base_type::operator __integral_type;
238 using __base_type::operator=;
241 /// Explicit specialization for char.
243 struct atomic<char> : public atomic_char
245 typedef char __integral_type;
246 typedef atomic_char __base_type;
// Atomics are not copy-constructible or copy-assignable.
250 atomic(const atomic&) = delete;
251 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
253 atomic(__integral_type __i) : __base_type(__i) { }
255 using __base_type::operator __integral_type;
256 using __base_type::operator=;
259 /// Explicit specialization for signed char.
261 struct atomic<signed char> : public atomic_schar
263 typedef signed char __integral_type;
264 typedef atomic_schar __base_type;
// Atomics are not copy-constructible or copy-assignable.
268 atomic(const atomic&) = delete;
269 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
271 atomic(__integral_type __i) : __base_type(__i) { }
273 using __base_type::operator __integral_type;
274 using __base_type::operator=;
277 /// Explicit specialization for unsigned char.
279 struct atomic<unsigned char> : public atomic_uchar
281 typedef unsigned char __integral_type;
282 typedef atomic_uchar __base_type;
// Atomics are not copy-constructible or copy-assignable.
286 atomic(const atomic&) = delete;
287 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
289 atomic(__integral_type __i) : __base_type(__i) { }
291 using __base_type::operator __integral_type;
292 using __base_type::operator=;
295 /// Explicit specialization for short.
297 struct atomic<short> : public atomic_short
299 typedef short __integral_type;
300 typedef atomic_short __base_type;
// Atomics are not copy-constructible or copy-assignable.
304 atomic(const atomic&) = delete;
305 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
307 atomic(__integral_type __i) : __base_type(__i) { }
309 using __base_type::operator __integral_type;
310 using __base_type::operator=;
313 /// Explicit specialization for unsigned short.
315 struct atomic<unsigned short> : public atomic_ushort
317 typedef unsigned short __integral_type;
318 typedef atomic_ushort __base_type;
// Atomics are not copy-constructible or copy-assignable.
322 atomic(const atomic&) = delete;
323 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
325 atomic(__integral_type __i) : __base_type(__i) { }
327 using __base_type::operator __integral_type;
328 using __base_type::operator=;
331 /// Explicit specialization for int.
// NOTE(review): base is spelled ": atomic_int" while the sibling
// specializations say ": public atomic_*"; public is the default for
// struct, so behavior is identical — style inconsistency only.
333 struct atomic<int> : atomic_int
335 typedef int __integral_type;
336 typedef atomic_int __base_type;
// Atomics are not copy-constructible or copy-assignable.
340 atomic(const atomic&) = delete;
341 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
343 atomic(__integral_type __i) : __base_type(__i) { }
345 using __base_type::operator __integral_type;
346 using __base_type::operator=;
349 /// Explicit specialization for unsigned int.
351 struct atomic<unsigned int> : public atomic_uint
353 typedef unsigned int __integral_type;
354 typedef atomic_uint __base_type;
// Atomics are not copy-constructible or copy-assignable.
358 atomic(const atomic&) = delete;
359 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
361 atomic(__integral_type __i) : __base_type(__i) { }
363 using __base_type::operator __integral_type;
364 using __base_type::operator=;
367 /// Explicit specialization for long.
369 struct atomic<long> : public atomic_long
371 typedef long __integral_type;
372 typedef atomic_long __base_type;
// Atomics are not copy-constructible or copy-assignable.
376 atomic(const atomic&) = delete;
377 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
379 atomic(__integral_type __i) : __base_type(__i) { }
381 using __base_type::operator __integral_type;
382 using __base_type::operator=;
385 /// Explicit specialization for unsigned long.
387 struct atomic<unsigned long> : public atomic_ulong
389 typedef unsigned long __integral_type;
390 typedef atomic_ulong __base_type;
// Atomics are not copy-constructible or copy-assignable.
394 atomic(const atomic&) = delete;
395 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
397 atomic(__integral_type __i) : __base_type(__i) { }
399 using __base_type::operator __integral_type;
400 using __base_type::operator=;
403 /// Explicit specialization for long long.
405 struct atomic<long long> : public atomic_llong
407 typedef long long __integral_type;
408 typedef atomic_llong __base_type;
// Atomics are not copy-constructible or copy-assignable.
412 atomic(const atomic&) = delete;
413 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
415 atomic(__integral_type __i) : __base_type(__i) { }
417 using __base_type::operator __integral_type;
418 using __base_type::operator=;
421 /// Explicit specialization for unsigned long long.
423 struct atomic<unsigned long long> : public atomic_ullong
425 typedef unsigned long long __integral_type;
426 typedef atomic_ullong __base_type;
// Atomics are not copy-constructible or copy-assignable.
430 atomic(const atomic&) = delete;
431 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
433 atomic(__integral_type __i) : __base_type(__i) { }
435 using __base_type::operator __integral_type;
436 using __base_type::operator=;
439 /// Explicit specialization for wchar_t.
441 struct atomic<wchar_t> : public atomic_wchar_t
443 typedef wchar_t __integral_type;
444 typedef atomic_wchar_t __base_type;
// Atomics are not copy-constructible or copy-assignable.
448 atomic(const atomic&) = delete;
449 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
451 atomic(__integral_type __i) : __base_type(__i) { }
453 using __base_type::operator __integral_type;
454 using __base_type::operator=;
457 /// Explicit specialization for char16_t.
459 struct atomic<char16_t> : public atomic_char16_t
461 typedef char16_t __integral_type;
462 typedef atomic_char16_t __base_type;
// Atomics are not copy-constructible or copy-assignable.
466 atomic(const atomic&) = delete;
467 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
469 atomic(__integral_type __i) : __base_type(__i) { }
471 using __base_type::operator __integral_type;
472 using __base_type::operator=;
475 /// Explicit specialization for char32_t.
477 struct atomic<char32_t> : public atomic_char32_t
479 typedef char32_t __integral_type;
480 typedef atomic_char32_t __base_type;
// Atomics are not copy-constructible or copy-assignable.
484 atomic(const atomic&) = delete;
485 atomic& operator=(const atomic&) volatile = delete;
// Converting ctor; conversion and assignment come from the base.
487 atomic(__integral_type __i) : __base_type(__i) { }
489 using __base_type::operator __integral_type;
490 using __base_type::operator=;
// Out-of-line definitions of the four-argument CAS overloads: forward
// the typed _Tp* expected/desired values to the void*-based base-class
// implementation.
// NOTE(review): the reinterpret_cast treats a _Tp** as a void**, which
// assumes _Tp* and void* have identical object representation — true on
// the targets this header supports, but formally an aliasing liberty.
493 template<typename _Tp>
495 atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v, memory_order __m1,
498 void** __vr = reinterpret_cast<void**>(&__r);
499 void* __vv = static_cast<void*>(__v);
500 return atomic_address::compare_exchange_weak(*__vr, __vv, __m1, __m2);
503 template<typename _Tp>
505 atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
509 void** __vr = reinterpret_cast<void**>(&__r);
510 void* __vv = static_cast<void*>(__v);
511 return atomic_address::compare_exchange_strong(*__vr, __vv, __m1, __m2);
// Single-order convenience overloads: derive the failure ordering from
// the success ordering via __calculate_memory_order and delegate to the
// four-argument overloads above.
514 template<typename _Tp>
516 atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v,
519 return compare_exchange_weak(__r, __v, __m,
520 __calculate_memory_order(__m));
523 template<typename _Tp>
525 atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
528 return compare_exchange_strong(__r, __v, __m,
529 __calculate_memory_order(__m));
// Pointer arithmetic: __d is a count of _Tp elements, so the byte
// offset handed to the void*-based atomic_address primitive is scaled
// by sizeof(_Tp).  Returns the previous value, cast back to _Tp*.
532 template<typename _Tp>
534 atomic<_Tp*>::fetch_add(ptrdiff_t __d, memory_order __m)
536 void* __p = atomic_fetch_add_explicit(this, sizeof(_Tp) * __d, __m);
537 return static_cast<_Tp*>(__p);
540 template<typename _Tp>
542 atomic<_Tp*>::fetch_sub(ptrdiff_t __d, memory_order __m)
544 void* __p = atomic_fetch_sub_explicit(this, sizeof(_Tp) * __d, __m);
545 return static_cast<_Tp*>(__p);
548 // Convenience function definitions, atomic_flag.
// C-compatible free-function forms of the atomic_flag members.
550 atomic_flag_test_and_set_explicit(atomic_flag* __a, memory_order __m)
551 { return __a->test_and_set(__m); }
// clear() returns void; "return" of a void expression is well-formed
// and keeps the one-line forwarding style uniform.
554 atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m)
555 { return __a->clear(__m); }
558 // Convenience function definitions, atomic_address.
// C-compatible free-function forms of the atomic_address members.  The
// non-_explicit variants use the members' seq_cst defaults; the CAS
// wrappers take the expected value through a void** out-parameter,
// matching the C interface.
560 atomic_is_lock_free(const atomic_address* __a)
561 { return __a->is_lock_free(); }
564 atomic_store(atomic_address* __a, void* __v)
568 atomic_store_explicit(atomic_address* __a, void* __v, memory_order __m)
569 { __a->store(__v, __m); }
572 atomic_load(const atomic_address* __a)
573 { return __a->load(); }
576 atomic_load_explicit(const atomic_address* __a, memory_order __m)
577 { return __a->load(__m); }
580 atomic_exchange(atomic_address* __a, void* __v)
581 { return __a->exchange(__v); }
584 atomic_exchange_explicit(atomic_address* __a, void* __v, memory_order __m)
585 { return __a->exchange(__v, __m); }
// Non-explicit CAS: both success and failure orderings are seq_cst.
588 atomic_compare_exchange_weak(atomic_address* __a, void** __v1, void* __v2)
590 return __a->compare_exchange_weak(*__v1, __v2, memory_order_seq_cst,
591 memory_order_seq_cst);
595 atomic_compare_exchange_strong(atomic_address* __a,
596 void** __v1, void* __v2)
598 return __a->compare_exchange_strong(*__v1, __v2, memory_order_seq_cst,
599 memory_order_seq_cst);
603 atomic_compare_exchange_weak_explicit(atomic_address* __a,
604 void** __v1, void* __v2,
605 memory_order __m1, memory_order __m2)
606 { return __a->compare_exchange_weak(*__v1, __v2, __m1, __m2); }
609 atomic_compare_exchange_strong_explicit(atomic_address* __a,
610 void** __v1, void* __v2,
611 memory_order __m1, memory_order __m2)
612 { return __a->compare_exchange_strong(*__v1, __v2, __m1, __m2); }
// Address arithmetic: __d is a raw byte offset at this level.
615 atomic_fetch_add_explicit(atomic_address* __a, ptrdiff_t __d,
617 { return __a->fetch_add(__d, __m); }
620 atomic_fetch_add(atomic_address* __a, ptrdiff_t __d)
621 { return __a->fetch_add(__d); }
624 atomic_fetch_sub_explicit(atomic_address* __a, ptrdiff_t __d,
626 { return __a->fetch_sub(__d, __m); }
629 atomic_fetch_sub(atomic_address* __a, ptrdiff_t __d)
630 { return __a->fetch_sub(__d); }
633 // Convenience function definitions, atomic_bool.
// C-compatible free-function forms of the atomic_bool members; the
// non-_explicit variants rely on the members' seq_cst defaults.
635 atomic_is_lock_free(const atomic_bool* __a)
636 { return __a->is_lock_free(); }
639 atomic_store(atomic_bool* __a, bool __i)
643 atomic_store_explicit(atomic_bool* __a, bool __i, memory_order __m)
644 { __a->store(__i, __m); }
647 atomic_load(const atomic_bool* __a)
648 { return __a->load(); }
651 atomic_load_explicit(const atomic_bool* __a, memory_order __m)
652 { return __a->load(__m); }
655 atomic_exchange(atomic_bool* __a, bool __i)
656 { return __a->exchange(__i); }
659 atomic_exchange_explicit(atomic_bool* __a, bool __i, memory_order __m)
660 { return __a->exchange(__i, __m); }
// Non-explicit CAS: both success and failure orderings are seq_cst;
// the expected value is read from and written back through *__i1.
663 atomic_compare_exchange_weak(atomic_bool* __a, bool* __i1, bool __i2)
665 return __a->compare_exchange_weak(*__i1, __i2, memory_order_seq_cst,
666 memory_order_seq_cst);
670 atomic_compare_exchange_strong(atomic_bool* __a, bool* __i1, bool __i2)
672 return __a->compare_exchange_strong(*__i1, __i2, memory_order_seq_cst,
673 memory_order_seq_cst);
677 atomic_compare_exchange_weak_explicit(atomic_bool* __a, bool* __i1,
678 bool __i2, memory_order __m1,
680 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
683 atomic_compare_exchange_strong_explicit(atomic_bool* __a,
684 bool* __i1, bool __i2,
685 memory_order __m1, memory_order __m2)
686 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
690 // Free standing functions. Template argument should be constrained
691 // to integral types as specified in the standard.
// _explicit variants: forward the caller's memory_order to the
// corresponding __atomic_base member.
692 template<typename _ITp>
694 atomic_store_explicit(__atomic_base<_ITp>* __a, _ITp __i, memory_order __m)
695 { __a->store(__i, __m); }
697 template<typename _ITp>
699 atomic_load_explicit(const __atomic_base<_ITp>* __a, memory_order __m)
700 { return __a->load(__m); }
702 template<typename _ITp>
704 atomic_exchange_explicit(__atomic_base<_ITp>* __a, _ITp __i,
706 { return __a->exchange(__i, __m); }
// CAS: *__i1 is the expected value (updated on failure), __i2 the
// desired value; __m1/__m2 are the success/failure orderings.
708 template<typename _ITp>
710 atomic_compare_exchange_weak_explicit(__atomic_base<_ITp>* __a,
711 _ITp* __i1, _ITp __i2,
712 memory_order __m1, memory_order __m2)
713 { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
715 template<typename _ITp>
717 atomic_compare_exchange_strong_explicit(__atomic_base<_ITp>* __a,
718 _ITp* __i1, _ITp __i2,
721 { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
// Read-modify-write _explicit variants; each returns the prior value.
723 template<typename _ITp>
725 atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
727 { return __a->fetch_add(__i, __m); }
729 template<typename _ITp>
731 atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
733 { return __a->fetch_sub(__i, __m); }
735 template<typename _ITp>
737 atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
739 { return __a->fetch_and(__i, __m); }
741 template<typename _ITp>
743 atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
745 { return __a->fetch_or(__i, __m); }
747 template<typename _ITp>
749 atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
751 { return __a->fetch_xor(__i, __m); }
753 template<typename _ITp>
755 atomic_is_lock_free(const __atomic_base<_ITp>* __a)
756 { return __a->is_lock_free(); }
// Non-_explicit variants: delegate to the _explicit forms with
// memory_order_seq_cst, the default ordering required by the standard.
758 template<typename _ITp>
760 atomic_store(__atomic_base<_ITp>* __a, _ITp __i)
761 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
763 template<typename _ITp>
765 atomic_load(const __atomic_base<_ITp>* __a)
766 { return atomic_load_explicit(__a, memory_order_seq_cst); }
768 template<typename _ITp>
770 atomic_exchange(__atomic_base<_ITp>* __a, _ITp __i)
771 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
773 template<typename _ITp>
775 atomic_compare_exchange_weak(__atomic_base<_ITp>* __a,
776 _ITp* __i1, _ITp __i2)
778 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
779 memory_order_seq_cst,
780 memory_order_seq_cst);
783 template<typename _ITp>
785 atomic_compare_exchange_strong(__atomic_base<_ITp>* __a,
786 _ITp* __i1, _ITp __i2)
788 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
789 memory_order_seq_cst,
790 memory_order_seq_cst);
793 template<typename _ITp>
795 atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i)
796 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
798 template<typename _ITp>
800 atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i)
801 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
803 template<typename _ITp>
805 atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i)
806 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
808 template<typename _ITp>
810 atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i)
811 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
813 template<typename _ITp>
815 atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i)
816 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
820 _GLIBCXX_END_NAMESPACE