/* l4/pkg/l4re-core/libgcc/lib/ARCH-arm/l4-atomic.c */
/* Linux-specific atomic operations for ARM EABI.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by CodeSourcery.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */

/* Kernel helper for compare-and-exchange.  */
#ifdef IS_L4

#include <l4/sys/atomic.h>
#include <l4/sys/kip.h>
#include <l4/sys/compiler.h>
#include <stddef.h>

/* Same contract as the Linux kuser helper: return 0 if *ptr was oldval
   and has been updated to newval, non-zero otherwise.  l4_atomic_cmpxchg()
   reports success as non-zero, hence the negation.  */
static inline int __kernel_cmpxchg(int oldval, int newval, int *ptr)
{
  return !l4_atomic_cmpxchg((long *)ptr, oldval, newval);
}

/* This is only a temporary way of doing it.  */
static inline void __kernel_dmb(void)
{
  extern char const __L4_KIP_ADDR__[];
  l4_kernel_info_t *k = (l4_kernel_info_t *)__L4_KIP_ADDR__;

  static_assert(   (offsetof(l4_kernel_info_t, platform_info.is_mp) == 0x100)
                && (offsetof(l4_kernel_info_t, platform_info.arch.cpuinfo.MIDR) == 0x104),
                "KIP layout changed, adapt the offsets above");

  if (k->platform_info.is_mp)
    {
      /* MIDR bits [19:16] hold the architecture field.  */
      unsigned arch = (k->platform_info.arch.cpuinfo.MIDR >> 16) & 0xf;
      if (arch == 0xf)
        /* ARMv7 or later (CPUID scheme): DMB SY, emitted as a raw encoding.  */
        asm volatile(".inst 0xf57ff05f" : : : "memory");
      else if (arch == 0x7)
        /* ARMv6: CP15 data memory barrier.  */
        asm volatile("mcr p15, 0, r0, c7, c10, 5" : : : "memory");
    }
}
#else
typedef int (__kernel_cmpxchg_t) (int oldval, int newval, int *ptr);
#define __kernel_cmpxchg (*(__kernel_cmpxchg_t *) 0xffff0fc0)

/* Kernel helper for memory barrier.  */
typedef void (__kernel_dmb_t) (void);
#define __kernel_dmb (*(__kernel_dmb_t *) 0xffff0fa0)
#endif

/* Note: we implement byte, short and int versions of atomic operations using
   the above kernel helpers; see linux-atomic-64bit.c for "long long" (64-bit)
   operations.  */
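
/* Usage sketch (illustration only): when GCC cannot expand a __sync_*
   builtin inline, it calls the width-suffixed out-of-line helper defined
   in this file, e.g.

       int counter;
       int old = __sync_fetch_and_add (&counter, 1);

   resolves to a call to __sync_fetch_and_add_4 (&counter, 1), which loops
   on a plain load plus __kernel_cmpxchg until the update wins the race and
   then returns the previous value.  */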

#define HIDDEN __attribute__ ((visibility ("hidden")))

#ifdef __ARMEL__
#define INVERT_MASK_1 0
#define INVERT_MASK_2 0
#else
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16
#endif

#define MASK_1 0xffu
#define MASK_2 0xffffu

#define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP)                           \
  int HIDDEN                                                            \
  __sync_fetch_and_##OP##_4 (int *ptr, int val)                         \
  {                                                                     \
    int failure, tmp;                                                   \
                                                                        \
    do {                                                                \
      tmp = *ptr;                                                       \
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr);   \
    } while (failure != 0);                                             \
                                                                        \
    return tmp;                                                         \
  }
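
/* For example, FETCH_AND_OP_WORD (add, , +) below defines
   __sync_fetch_and_add_4, which atomically adds val to *ptr and returns
   the value *ptr held before the addition.  */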

FETCH_AND_OP_WORD (add,   , +)
FETCH_AND_OP_WORD (sub,   , -)
FETCH_AND_OP_WORD (or,    , |)
FETCH_AND_OP_WORD (and,   , &)
FETCH_AND_OP_WORD (xor,   , ^)
FETCH_AND_OP_WORD (nand, ~, &)

#define NAME_oldval(OP, WIDTH) __sync_fetch_and_##OP##_##WIDTH
#define NAME_newval(OP, WIDTH) __sync_##OP##_and_fetch_##WIDTH

/* Implement both __sync_<op>_and_fetch and __sync_fetch_and_<op> for
   subword-sized quantities.  */

#define SUBWORD_SYNC_OP(OP, PFX_OP, INF_OP, TYPE, WIDTH, RETURN)        \
  TYPE HIDDEN                                                           \
  NAME##_##RETURN (OP, WIDTH) (TYPE *ptr, TYPE val)                     \
  {                                                                     \
    int *wordptr = (int *) ((unsigned int) ptr & ~3);                   \
    unsigned int mask, shift, oldval, newval;                           \
    int failure;                                                        \
                                                                        \
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;      \
    mask = MASK_##WIDTH << shift;                                       \
                                                                        \
    do {                                                                \
      oldval = *wordptr;                                                \
      newval = ((PFX_OP (((oldval & mask) >> shift)                     \
                         INF_OP (unsigned int) val)) << shift) & mask;  \
      newval |= oldval & ~mask;                                         \
      failure = __kernel_cmpxchg (oldval, newval, wordptr);             \
    } while (failure != 0);                                             \
                                                                        \
    return (RETURN & mask) >> shift;                                    \
  }
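
/* Worked example (illustration only): on a little-endian target, a 16-bit
   operand at address 0x1002 gives wordptr = 0x1000, shift = 16 and
   mask = 0xffff0000, so the operation is applied to the upper halfword of
   the containing word while the lower halfword is carried over unchanged
   into newval.  */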

SUBWORD_SYNC_OP (add,   , +, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (sub,   , -, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (or,    , |, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (and,   , &, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, oldval)

SUBWORD_SYNC_OP (add,   , +, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (sub,   , -, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (or,    , |, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (and,   , &, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, oldval)

#define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP)                           \
  int HIDDEN                                                            \
  __sync_##OP##_and_fetch_4 (int *ptr, int val)                         \
  {                                                                     \
    int tmp, failure;                                                   \
                                                                        \
    do {                                                                \
      tmp = *ptr;                                                       \
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr);   \
    } while (failure != 0);                                             \
                                                                        \
    return PFX_OP (tmp INF_OP val);                                     \
  }

OP_AND_FETCH_WORD (add,   , +)
OP_AND_FETCH_WORD (sub,   , -)
OP_AND_FETCH_WORD (or,    , |)
OP_AND_FETCH_WORD (and,   , &)
OP_AND_FETCH_WORD (xor,   , ^)
OP_AND_FETCH_WORD (nand, ~, &)

SUBWORD_SYNC_OP (add,   , +, unsigned short, 2, newval)
SUBWORD_SYNC_OP (sub,   , -, unsigned short, 2, newval)
SUBWORD_SYNC_OP (or,    , |, unsigned short, 2, newval)
SUBWORD_SYNC_OP (and,   , &, unsigned short, 2, newval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned short, 2, newval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, newval)

SUBWORD_SYNC_OP (add,   , +, unsigned char, 1, newval)
SUBWORD_SYNC_OP (sub,   , -, unsigned char, 1, newval)
SUBWORD_SYNC_OP (or,    , |, unsigned char, 1, newval)
SUBWORD_SYNC_OP (and,   , &, unsigned char, 1, newval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned char, 1, newval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, newval)

int HIDDEN
__sync_val_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int actual_oldval, fail;

  while (1)
    {
      actual_oldval = *ptr;

      if (__builtin_expect (oldval != actual_oldval, 0))
        return actual_oldval;

      fail = __kernel_cmpxchg (actual_oldval, newval, ptr);

      if (__builtin_expect (!fail, 1))
        return oldval;
    }
}
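
/* Usage sketch (illustration only): __sync_val_compare_and_swap returns
   the value actually found in *ptr, so callers detect success by comparing
   it against the expected value:

       if (__sync_val_compare_and_swap (&flag, 0, 1) == 0)
         ;  /* we installed the new value */
*/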

#define SUBWORD_VAL_CAS(TYPE, WIDTH)                                    \
  TYPE HIDDEN                                                           \
  __sync_val_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval,          \
                                       TYPE newval)                     \
  {                                                                     \
    int *wordptr = (int *)((unsigned int) ptr & ~3), fail;              \
    unsigned int mask, shift, actual_oldval, actual_newval;             \
                                                                        \
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;      \
    mask = MASK_##WIDTH << shift;                                       \
                                                                        \
    while (1)                                                           \
      {                                                                 \
        actual_oldval = *wordptr;                                       \
                                                                        \
        if (__builtin_expect (((actual_oldval & mask) >> shift) !=      \
                              (unsigned int) oldval, 0))                \
          return (actual_oldval & mask) >> shift;                       \
                                                                        \
        actual_newval = (actual_oldval & ~mask)                         \
                        | (((unsigned int) newval << shift) & mask);    \
                                                                        \
        fail = __kernel_cmpxchg (actual_oldval, actual_newval,          \
                                 wordptr);                              \
                                                                        \
        if (__builtin_expect (!fail, 1))                                \
          return oldval;                                                \
      }                                                                 \
  }

SUBWORD_VAL_CAS (unsigned short, 2)
SUBWORD_VAL_CAS (unsigned char,  1)

typedef unsigned char bool;

bool HIDDEN
__sync_bool_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int failure = __kernel_cmpxchg (oldval, newval, ptr);
  return (failure == 0);
}

#define SUBWORD_BOOL_CAS(TYPE, WIDTH)                                   \
  bool HIDDEN                                                           \
  __sync_bool_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval,         \
                                        TYPE newval)                    \
  {                                                                     \
    TYPE actual_oldval                                                  \
      = __sync_val_compare_and_swap_##WIDTH (ptr, oldval, newval);      \
    return (oldval == actual_oldval);                                   \
  }

SUBWORD_BOOL_CAS (unsigned short, 2)
SUBWORD_BOOL_CAS (unsigned char,  1)

void HIDDEN
__sync_synchronize (void)
{
  __kernel_dmb ();
}

int HIDDEN
__sync_lock_test_and_set_4 (int *ptr, int val)
{
  int failure, oldval;

  do {
    oldval = *ptr;
    failure = __kernel_cmpxchg (oldval, val, ptr);
  } while (failure != 0);

  return oldval;
}

#define SUBWORD_TEST_AND_SET(TYPE, WIDTH)                               \
  TYPE HIDDEN                                                           \
  __sync_lock_test_and_set_##WIDTH (TYPE *ptr, TYPE val)                \
  {                                                                     \
    int failure;                                                        \
    unsigned int oldval, newval, shift, mask;                           \
    int *wordptr = (int *) ((unsigned int) ptr & ~3);                   \
                                                                        \
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH;      \
    mask = MASK_##WIDTH << shift;                                       \
                                                                        \
    do {                                                                \
      oldval = *wordptr;                                                \
      newval = (oldval & ~mask)                                         \
               | (((unsigned int) val << shift) & mask);                \
      failure = __kernel_cmpxchg (oldval, newval, wordptr);             \
    } while (failure != 0);                                             \
                                                                        \
    return (oldval & mask) >> shift;                                    \
  }

SUBWORD_TEST_AND_SET (unsigned short, 2)
SUBWORD_TEST_AND_SET (unsigned char,  1)

#define SYNC_LOCK_RELEASE(TYPE, WIDTH)                                  \
  void HIDDEN                                                           \
  __sync_lock_release_##WIDTH (TYPE *ptr)                               \
  {                                                                     \
    /* All writes before this point must be seen before we release     \
       the lock itself.  */                                             \
    __kernel_dmb ();                                                    \
    *ptr = 0;                                                           \
  }

SYNC_LOCK_RELEASE (long long,   8)
SYNC_LOCK_RELEASE (int,   4)
SYNC_LOCK_RELEASE (short, 2)
SYNC_LOCK_RELEASE (char,  1)
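
/* Usage sketch (illustration only): a minimal spinlock built on the two
   helpers above.  __sync_lock_test_and_set acquires by atomically
   exchanging in a non-zero value; __sync_lock_release orders prior writes
   via the barrier and then clears the lock word.

       static int lock;

       void lock_acquire (void) { while (__sync_lock_test_and_set (&lock, 1)) ; }
       void lock_release (void) { __sync_lock_release (&lock); }
*/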