/* Linux-specific atomic operations for ARM EABI.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by CodeSourcery.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */
26 /* Kernel helper for compare-and-exchange. */
29 #include <l4/sys/atomic.h>
30 #include <l4/sys/kip.h>
31 #include <l4/sys/compiler.h>
/* Compare-and-exchange: if *PTR == OLDVAL, store NEWVAL.  Returns 0 on
   success and non-zero on failure, matching the convention of the Linux
   kuser helper this replaces (callers loop on "failure != 0").
   NOTE(review): l4_atomic_cmpxchg presumably returns non-zero on success,
   hence the negation — confirm against <l4/sys/atomic.h>.  */
static inline int __kernel_cmpxchg(int oldval, int newval, int *ptr)
{
  return !l4_atomic_cmpxchg((long *)ptr, oldval, newval);
}
39 /* This is just a temporary way of doing it */
40 static inline void __kernel_dmb(void)
42 extern char const __L4_KIP_ADDR__[];
43 l4_kernel_info_t *k = (l4_kernel_info_t *)__L4_KIP_ADDR__;
45 static_assert( (offsetof(l4_kernel_info_t, platform_info.is_mp) == 0x100)
46 && (offsetof(l4_kernel_info_t, platform_info.arch.cpuinfo.MIDR) == 0x104),
47 "Changed KIP layout, adapt");
49 if (k->platform_info.is_mp)
51 unsigned arch = (k->platform_info.arch.cpuinfo.MIDR >> 16) & 0xf;
53 asm volatile(".inst 0xf57ff05f" : : : "memory");
55 asm volatile("mcr p15, 0, r0, c7, c10, 5" : : : "memory");
/* Upstream (Linux) kernel helpers living at fixed addresses in the
   vector page.  That page does not exist under L4, and if these macros
   were left active they would shadow the static inline L4
   implementations above and redirect every call to an unmapped address.
   Kept for reference only.  */
#if 0
typedef int (__kernel_cmpxchg_t) (int oldval, int newval, int *ptr);
#define __kernel_cmpxchg (*(__kernel_cmpxchg_t *) 0xffff0fc0)

/* Kernel helper for memory barrier.  */
typedef void (__kernel_dmb_t) (void);
#define __kernel_dmb (*(__kernel_dmb_t *) 0xffff0fa0)
#endif
/* Note: we implement byte, short and int versions of atomic operations using
   the above kernel helpers; see linux-atomic-64bit.c for "long long" (64-bit)
   operations.  */
#define HIDDEN __attribute__ ((visibility ("hidden")))

/* Shift needed to place a byte/halfword lane within its containing word.
   The shift computed from the low address bits assumes little-endian
   lane order and must be flipped on big-endian targets.  */
#ifdef __ARMEL__
#define INVERT_MASK_1 0
#define INVERT_MASK_2 0
#else
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16
#endif

/* Lane masks for 1- and 2-byte quantities (pre-shift).  */
#define MASK_1 0xffu
#define MASK_2 0xffffu
84 #define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP) \
86 __sync_fetch_and_##OP##_4 (int *ptr, int val) \
92 failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr); \
93 } while (failure != 0); \
98 FETCH_AND_OP_WORD (add, , +)
99 FETCH_AND_OP_WORD (sub, , -)
100 FETCH_AND_OP_WORD (or, , |)
101 FETCH_AND_OP_WORD (and, , &)
102 FETCH_AND_OP_WORD (xor, , ^)
103 FETCH_AND_OP_WORD (nand, ~, &)
105 #define NAME_oldval(OP, WIDTH) __sync_fetch_and_##OP##_##WIDTH
106 #define NAME_newval(OP, WIDTH) __sync_##OP##_and_fetch_##WIDTH
108 /* Implement both __sync_<op>_and_fetch and __sync_fetch_and_<op> for
109 subword-sized quantities. */
111 #define SUBWORD_SYNC_OP(OP, PFX_OP, INF_OP, TYPE, WIDTH, RETURN) \
113 NAME##_##RETURN (OP, WIDTH) (TYPE *ptr, TYPE val) \
115 int *wordptr = (int *) ((unsigned int) ptr & ~3); \
116 unsigned int mask, shift, oldval, newval; \
119 shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
120 mask = MASK_##WIDTH << shift; \
124 newval = ((PFX_OP (((oldval & mask) >> shift) \
125 INF_OP (unsigned int) val)) << shift) & mask; \
126 newval |= oldval & ~mask; \
127 failure = __kernel_cmpxchg (oldval, newval, wordptr); \
128 } while (failure != 0); \
130 return (RETURN & mask) >> shift; \
133 SUBWORD_SYNC_OP (add, , +, unsigned short, 2, oldval)
134 SUBWORD_SYNC_OP (sub, , -, unsigned short, 2, oldval)
135 SUBWORD_SYNC_OP (or, , |, unsigned short, 2, oldval)
136 SUBWORD_SYNC_OP (and, , &, unsigned short, 2, oldval)
137 SUBWORD_SYNC_OP (xor, , ^, unsigned short, 2, oldval)
138 SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, oldval)
140 SUBWORD_SYNC_OP (add, , +, unsigned char, 1, oldval)
141 SUBWORD_SYNC_OP (sub, , -, unsigned char, 1, oldval)
142 SUBWORD_SYNC_OP (or, , |, unsigned char, 1, oldval)
143 SUBWORD_SYNC_OP (and, , &, unsigned char, 1, oldval)
144 SUBWORD_SYNC_OP (xor, , ^, unsigned char, 1, oldval)
145 SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, oldval)
147 #define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP) \
149 __sync_##OP##_and_fetch_4 (int *ptr, int val) \
155 failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr); \
156 } while (failure != 0); \
158 return PFX_OP (tmp INF_OP val); \
161 OP_AND_FETCH_WORD (add, , +)
162 OP_AND_FETCH_WORD (sub, , -)
163 OP_AND_FETCH_WORD (or, , |)
164 OP_AND_FETCH_WORD (and, , &)
165 OP_AND_FETCH_WORD (xor, , ^)
166 OP_AND_FETCH_WORD (nand, ~, &)
168 SUBWORD_SYNC_OP (add, , +, unsigned short, 2, newval)
169 SUBWORD_SYNC_OP (sub, , -, unsigned short, 2, newval)
170 SUBWORD_SYNC_OP (or, , |, unsigned short, 2, newval)
171 SUBWORD_SYNC_OP (and, , &, unsigned short, 2, newval)
172 SUBWORD_SYNC_OP (xor, , ^, unsigned short, 2, newval)
173 SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, newval)
175 SUBWORD_SYNC_OP (add, , +, unsigned char, 1, newval)
176 SUBWORD_SYNC_OP (sub, , -, unsigned char, 1, newval)
177 SUBWORD_SYNC_OP (or, , |, unsigned char, 1, newval)
178 SUBWORD_SYNC_OP (and, , &, unsigned char, 1, newval)
179 SUBWORD_SYNC_OP (xor, , ^, unsigned char, 1, newval)
180 SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, newval)
183 __sync_val_compare_and_swap_4 (int *ptr, int oldval, int newval)
185 int actual_oldval, fail;
189 actual_oldval = *ptr;
191 if (__builtin_expect (oldval != actual_oldval, 0))
192 return actual_oldval;
194 fail = __kernel_cmpxchg (actual_oldval, newval, ptr);
196 if (__builtin_expect (!fail, 1))
201 #define SUBWORD_VAL_CAS(TYPE, WIDTH) \
203 __sync_val_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
206 int *wordptr = (int *)((unsigned int) ptr & ~3), fail; \
207 unsigned int mask, shift, actual_oldval, actual_newval; \
209 shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
210 mask = MASK_##WIDTH << shift; \
214 actual_oldval = *wordptr; \
216 if (__builtin_expect (((actual_oldval & mask) >> shift) != \
217 (unsigned int) oldval, 0)) \
218 return (actual_oldval & mask) >> shift; \
220 actual_newval = (actual_oldval & ~mask) \
221 | (((unsigned int) newval << shift) & mask); \
223 fail = __kernel_cmpxchg (actual_oldval, actual_newval, \
226 if (__builtin_expect (!fail, 1)) \
231 SUBWORD_VAL_CAS (unsigned short, 2)
232 SUBWORD_VAL_CAS (unsigned char, 1)
234 typedef unsigned char bool;
237 __sync_bool_compare_and_swap_4 (int *ptr, int oldval, int newval)
239 int failure = __kernel_cmpxchg (oldval, newval, ptr);
240 return (failure == 0);
243 #define SUBWORD_BOOL_CAS(TYPE, WIDTH) \
245 __sync_bool_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
249 = __sync_val_compare_and_swap_##WIDTH (ptr, oldval, newval); \
250 return (oldval == actual_oldval); \
253 SUBWORD_BOOL_CAS (unsigned short, 2)
254 SUBWORD_BOOL_CAS (unsigned char, 1)
257 __sync_synchronize (void)
263 __sync_lock_test_and_set_4 (int *ptr, int val)
269 failure = __kernel_cmpxchg (oldval, val, ptr);
270 } while (failure != 0);
275 #define SUBWORD_TEST_AND_SET(TYPE, WIDTH) \
277 __sync_lock_test_and_set_##WIDTH (TYPE *ptr, TYPE val) \
280 unsigned int oldval, newval, shift, mask; \
281 int *wordptr = (int *) ((unsigned int) ptr & ~3); \
283 shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
284 mask = MASK_##WIDTH << shift; \
288 newval = (oldval & ~mask) \
289 | (((unsigned int) val << shift) & mask); \
290 failure = __kernel_cmpxchg (oldval, newval, wordptr); \
291 } while (failure != 0); \
293 return (oldval & mask) >> shift; \
296 SUBWORD_TEST_AND_SET (unsigned short, 2)
297 SUBWORD_TEST_AND_SET (unsigned char, 1)
299 #define SYNC_LOCK_RELEASE(TYPE, WIDTH) \
301 __sync_lock_release_##WIDTH (TYPE *ptr) \
303 /* All writes before this point must be seen before we release \
304 the lock itself. */ \
309 SYNC_LOCK_RELEASE (long long, 8)
310 SYNC_LOCK_RELEASE (int, 4)
311 SYNC_LOCK_RELEASE (short, 2)
312 SYNC_LOCK_RELEASE (char, 1)