// Support for atomic operations -*- C++ -*-

// Copyright (C) 2004, 2005, 2006, 2008, 2009, 2010
// Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.
//
// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.
/** @file ext/atomicity.h
 *  This file is a GNU extension to the Standard C++ Library.
 */

#ifndef _GLIBCXX_ATOMICITY_H
#define _GLIBCXX_ATOMICITY_H 1

#include <bits/c++config.h>
#include <bits/gthr.h>
#include <bits/atomic_word.h>
37 namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
39 _GLIBCXX_BEGIN_NAMESPACE_VERSION
41 // Functions for portable atomic access.
42 // To abstract locking primitives across all thread policies, use:
43 // __exchange_and_add_dispatch
44 // __atomic_add_dispatch
45 #ifdef _GLIBCXX_ATOMIC_BUILTINS_4
46 static inline _Atomic_word
47 __exchange_and_add(volatile _Atomic_word* __mem, int __val)
48 { return __sync_fetch_and_add(__mem, __val); }
51 __atomic_add(volatile _Atomic_word* __mem, int __val)
52 { __sync_fetch_and_add(__mem, __val); }
55 __attribute__ ((__unused__))
56 __exchange_and_add(volatile _Atomic_word*, int) throw ();
59 __attribute__ ((__unused__))
60 __atomic_add(volatile _Atomic_word*, int) throw ();
63 static inline _Atomic_word
64 __exchange_and_add_single(_Atomic_word* __mem, int __val)
66 _Atomic_word __result = *__mem;
72 __atomic_add_single(_Atomic_word* __mem, int __val)
75 static inline _Atomic_word
76 __attribute__ ((__unused__))
77 __exchange_and_add_dispatch(_Atomic_word* __mem, int __val)
80 if (__gthread_active_p())
81 return __exchange_and_add(__mem, __val);
83 return __exchange_and_add_single(__mem, __val);
85 return __exchange_and_add_single(__mem, __val);
90 __attribute__ ((__unused__))
91 __atomic_add_dispatch(_Atomic_word* __mem, int __val)
94 if (__gthread_active_p())
95 __atomic_add(__mem, __val);
97 __atomic_add_single(__mem, __val);
99 __atomic_add_single(__mem, __val);
103 _GLIBCXX_END_NAMESPACE_VERSION
// Even if the CPU doesn't need a memory barrier, we need to ensure
// that the compiler doesn't reorder memory accesses across the
// barriers.  The empty asm with a "memory" clobber is a pure
// compiler barrier; CPU-specific headers may pre-define stronger ones.
#ifndef _GLIBCXX_READ_MEM_BARRIER
#define _GLIBCXX_READ_MEM_BARRIER __asm __volatile ("":::"memory")
#endif
#ifndef _GLIBCXX_WRITE_MEM_BARRIER
#define _GLIBCXX_WRITE_MEM_BARRIER __asm __volatile ("":::"memory")
#endif