/* Copyright (C) 2003 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#include <stdint.h>

typedef int8_t atomic8_t;
typedef uint8_t uatomic8_t;
typedef int_fast8_t atomic_fast8_t;
typedef uint_fast8_t uatomic_fast8_t;

typedef int16_t atomic16_t;
typedef uint16_t uatomic16_t;
typedef int_fast16_t atomic_fast16_t;
typedef uint_fast16_t uatomic_fast16_t;

typedef int32_t atomic32_t;
typedef uint32_t uatomic32_t;
typedef int_fast32_t atomic_fast32_t;
typedef uint_fast32_t uatomic_fast32_t;

typedef int64_t atomic64_t;
typedef uint64_t uatomic64_t;
typedef int_fast64_t atomic_fast64_t;
typedef uint_fast64_t uatomic_fast64_t;

typedef intptr_t atomicptr_t;
typedef uintptr_t uatomicptr_t;
typedef intmax_t atomic_max_t;
typedef uintmax_t uatomic_max_t;


#ifdef UP
# define __MB           /* nothing */
#else
# define __MB           "       mb\n"
#endif
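
/* UP is defined for uniprocessor builds; there the "mb" memory-barrier
   instruction is unnecessary, so __MB expands to nothing in the asm
   templates below.  */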


/* Compare and exchange.  For all of the "xxx" routines, we expect a
   "__prev" and a "__cmp" variable to be provided by the enclosing scope,
   in which values are returned.  */
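
/* A minimal usage sketch (illustrative; it mirrors exactly what the
   "bool" and "val" wrappers further below do):

     unsigned long __prev; int __cmp;
     __arch_compare_and_exchange_xxx_32_int (mem, new, old, "", __MB);
     // __prev now holds the value found in *mem;
     // __cmp is nonzero iff the exchange succeeded
*/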

#define __arch_compare_and_exchange_xxx_8_int(mem, new, old, mb1, mb2)  \
({                                                                      \
  unsigned long __tmp, __snew, __addr64;                                \
  __asm__ __volatile__ (                                                \
                mb1                                                     \
        "       andnot  %[__addr8],7,%[__addr64]\n"                     \
        "       insbl   %[__new],%[__addr8],%[__snew]\n"                \
        "1:     ldq_l   %[__tmp],0(%[__addr64])\n"                      \
        "       extbl   %[__tmp],%[__addr8],%[__prev]\n"                \
        "       cmpeq   %[__prev],%[__old],%[__cmp]\n"                  \
        "       beq     %[__cmp],2f\n"                                  \
        "       mskbl   %[__tmp],%[__addr8],%[__tmp]\n"                 \
        "       or      %[__snew],%[__tmp],%[__tmp]\n"                  \
        "       stq_c   %[__tmp],0(%[__addr64])\n"                      \
        "       beq     %[__tmp],1b\n"                                  \
                mb2                                                     \
        "2:"                                                            \
        : [__prev] "=&r" (__prev),                                      \
          [__snew] "=&r" (__snew),                                      \
          [__tmp] "=&r" (__tmp),                                        \
          [__cmp] "=&r" (__cmp),                                        \
          [__addr64] "=&r" (__addr64)                                   \
        : [__addr8] "r" (mem),                                          \
          [__old] "Ir" ((uint64_t)(uint8_t)(uint64_t)(old)),            \
          [__new] "r" (new)                                             \
        : "memory");                                                    \
})

#define __arch_compare_and_exchange_xxx_16_int(mem, new, old, mb1, mb2) \
({                                                                      \
  unsigned long __tmp, __snew, __addr64;                                \
  __asm__ __volatile__ (                                                \
                mb1                                                     \
        "       andnot  %[__addr16],7,%[__addr64]\n"                    \
        "       inswl   %[__new],%[__addr16],%[__snew]\n"               \
        "1:     ldq_l   %[__tmp],0(%[__addr64])\n"                      \
        "       extwl   %[__tmp],%[__addr16],%[__prev]\n"               \
        "       cmpeq   %[__prev],%[__old],%[__cmp]\n"                  \
        "       beq     %[__cmp],2f\n"                                  \
        "       mskwl   %[__tmp],%[__addr16],%[__tmp]\n"                \
        "       or      %[__snew],%[__tmp],%[__tmp]\n"                  \
        "       stq_c   %[__tmp],0(%[__addr64])\n"                      \
        "       beq     %[__tmp],1b\n"                                  \
                mb2                                                     \
        "2:"                                                            \
        : [__prev] "=&r" (__prev),                                      \
          [__snew] "=&r" (__snew),                                      \
          [__tmp] "=&r" (__tmp),                                        \
          [__cmp] "=&r" (__cmp),                                        \
          [__addr64] "=&r" (__addr64)                                   \
        : [__addr16] "r" (mem),                                         \
          [__old] "Ir" ((uint64_t)(uint16_t)(uint64_t)(old)),           \
          [__new] "r" (new)                                             \
        : "memory");                                                    \
})

#define __arch_compare_and_exchange_xxx_32_int(mem, new, old, mb1, mb2) \
({                                                                      \
  __asm__ __volatile__ (                                                \
                mb1                                                     \
        "1:     ldl_l   %[__prev],%[__mem]\n"                           \
        "       cmpeq   %[__prev],%[__old],%[__cmp]\n"                  \
        "       beq     %[__cmp],2f\n"                                  \
        "       mov     %[__new],%[__cmp]\n"                            \
        "       stl_c   %[__cmp],%[__mem]\n"                            \
        "       beq     %[__cmp],1b\n"                                  \
                mb2                                                     \
        "2:"                                                            \
        : [__prev] "=&r" (__prev),                                      \
          [__cmp] "=&r" (__cmp)                                         \
        : [__mem] "m" (*(mem)),                                         \
          [__old] "Ir" ((uint64_t)(atomic32_t)(uint64_t)(old)),         \
          [__new] "Ir" (new)                                            \
        : "memory");                                                    \
})

#define __arch_compare_and_exchange_xxx_64_int(mem, new, old, mb1, mb2) \
({                                                                      \
  __asm__ __volatile__ (                                                \
                mb1                                                     \
        "1:     ldq_l   %[__prev],%[__mem]\n"                           \
        "       cmpeq   %[__prev],%[__old],%[__cmp]\n"                  \
        "       beq     %[__cmp],2f\n"                                  \
        "       mov     %[__new],%[__cmp]\n"                            \
        "       stq_c   %[__cmp],%[__mem]\n"                            \
        "       beq     %[__cmp],1b\n"                                  \
                mb2                                                     \
        "2:"                                                            \
        : [__prev] "=&r" (__prev),                                      \
          [__cmp] "=&r" (__cmp)                                         \
        : [__mem] "m" (*(mem)),                                         \
          [__old] "Ir" ((uint64_t)(old)),                               \
          [__new] "Ir" (new)                                            \
        : "memory");                                                    \
})

/* For all "bool" routines, we return FALSE if the exchange was successful.  */

#define __arch_compare_and_exchange_bool_8_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                                     \
   __arch_compare_and_exchange_xxx_8_int(mem, new, old, mb1, mb2);      \
   !__cmp; })

#define __arch_compare_and_exchange_bool_16_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                                     \
   __arch_compare_and_exchange_xxx_16_int(mem, new, old, mb1, mb2);     \
   !__cmp; })

#define __arch_compare_and_exchange_bool_32_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                                     \
   __arch_compare_and_exchange_xxx_32_int(mem, new, old, mb1, mb2);     \
   !__cmp; })

#define __arch_compare_and_exchange_bool_64_int(mem, new, old, mb1, mb2) \
({ unsigned long __prev; int __cmp;                                     \
   __arch_compare_and_exchange_xxx_64_int(mem, new, old, mb1, mb2);     \
   !__cmp; })

/* For all "val" routines, return the old value whether the exchange
   was successful or not.  */
177
178 #define __arch_compare_and_exchange_val_8_int(mem, new, old, mb1, mb2)  \
179 ({ unsigned long __prev; int __cmp;                                     \
180    __arch_compare_and_exchange_xxx_8_int(mem, new, old, mb1, mb2);      \
181    (__typeof (*mem))__prev; })
182
183 #define __arch_compare_and_exchange_val_16_int(mem, new, old, mb1, mb2) \
184 ({ unsigned long __prev; int __cmp;                                     \
185    __arch_compare_and_exchange_xxx_16_int(mem, new, old, mb1, mb2);     \
186    (__typeof (*mem))__prev; })
187
188 #define __arch_compare_and_exchange_val_32_int(mem, new, old, mb1, mb2) \
189 ({ unsigned long __prev; int __cmp;                                     \
190    __arch_compare_and_exchange_xxx_32_int(mem, new, old, mb1, mb2);     \
191    (__typeof (*mem))__prev; })
192
193 #define __arch_compare_and_exchange_val_64_int(mem, new, old, mb1, mb2) \
194 ({ unsigned long __prev; int __cmp;                                     \
195    __arch_compare_and_exchange_xxx_64_int(mem, new, old, mb1, mb2);     \
196    (__typeof (*mem))__prev; })
197
/* Compare and exchange with "acquire" semantics, i.e., barrier after.  */

#define atomic_compare_and_exchange_bool_acq(mem, new, old)     \
  __atomic_bool_bysize (__arch_compare_and_exchange_bool, int,  \
                        mem, new, old, "", __MB)

#define atomic_compare_and_exchange_val_acq(mem, new, old)      \
  __atomic_val_bysize (__arch_compare_and_exchange_val, int,    \
                       mem, new, old, "", __MB)

/* Compare and exchange with "release" semantics, i.e., barrier before.  */

#define atomic_compare_and_exchange_bool_rel(mem, new, old)     \
  __atomic_bool_bysize (__arch_compare_and_exchange_bool, int,  \
                        mem, new, old, __MB, "")

#define atomic_compare_and_exchange_val_rel(mem, new, old)      \
  __atomic_val_bysize (__arch_compare_and_exchange_val, int,    \
                       mem, new, old, __MB, "")
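
/* Usage sketch (illustrative only; 'flag' is a hypothetical variable):

     static int flag;
     if (atomic_compare_and_exchange_bool_acq (&flag, 1, 0) == 0)
       {
         // exchange succeeded: flag was 0 and is now 1
       }

   Note the inverted convention: the "bool" forms return FALSE (zero)
   when the exchange succeeds.  */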


/* Atomically store value and return the previous value.  */

#define __arch_exchange_8_int(mem, value, mb1, mb2)                     \
({                                                                      \
  unsigned long __ret, __tmp, __addr64, __sval;                         \
  __asm__ __volatile__ (                                                \
                mb1                                                     \
        "       andnot  %[__addr8],7,%[__addr64]\n"                     \
        "       insbl   %[__value],%[__addr8],%[__sval]\n"              \
        "1:     ldq_l   %[__tmp],0(%[__addr64])\n"                      \
        "       extbl   %[__tmp],%[__addr8],%[__ret]\n"                 \
        "       mskbl   %[__tmp],%[__addr8],%[__tmp]\n"                 \
        "       or      %[__sval],%[__tmp],%[__tmp]\n"                  \
        "       stq_c   %[__tmp],0(%[__addr64])\n"                      \
        "       beq     %[__tmp],1b\n"                                  \
                mb2                                                     \
        : [__ret] "=&r" (__ret),                                        \
          [__sval] "=&r" (__sval),                                      \
          [__tmp] "=&r" (__tmp),                                        \
          [__addr64] "=&r" (__addr64)                                   \
        : [__addr8] "r" (mem),                                          \
          [__value] "r" (value)                                         \
        : "memory");                                                    \
  __ret; })

#define __arch_exchange_16_int(mem, value, mb1, mb2)                    \
({                                                                      \
  unsigned long __ret, __tmp, __addr64, __sval;                         \
  __asm__ __volatile__ (                                                \
                mb1                                                     \
        "       andnot  %[__addr16],7,%[__addr64]\n"                    \
        "       inswl   %[__value],%[__addr16],%[__sval]\n"             \
        "1:     ldq_l   %[__tmp],0(%[__addr64])\n"                      \
        "       extwl   %[__tmp],%[__addr16],%[__ret]\n"                \
        "       mskwl   %[__tmp],%[__addr16],%[__tmp]\n"                \
        "       or      %[__sval],%[__tmp],%[__tmp]\n"                  \
        "       stq_c   %[__tmp],0(%[__addr64])\n"                      \
        "       beq     %[__tmp],1b\n"                                  \
                mb2                                                     \
        : [__ret] "=&r" (__ret),                                        \
          [__sval] "=&r" (__sval),                                      \
          [__tmp] "=&r" (__tmp),                                        \
          [__addr64] "=&r" (__addr64)                                   \
        : [__addr16] "r" (mem),                                         \
          [__value] "r" (value)                                         \
        : "memory");                                                    \
  __ret; })

#define __arch_exchange_32_int(mem, value, mb1, mb2)                    \
({                                                                      \
  signed int __ret, __tmp;                                              \
  __asm__ __volatile__ (                                                \
                mb1                                                     \
        "1:     ldl_l   %[__ret],%[__mem]\n"                            \
        "       mov     %[__val],%[__tmp]\n"                            \
        "       stl_c   %[__tmp],%[__mem]\n"                            \
        "       beq     %[__tmp],1b\n"                                  \
                mb2                                                     \
        : [__ret] "=&r" (__ret),                                        \
          [__tmp] "=&r" (__tmp)                                         \
        : [__mem] "m" (*(mem)),                                         \
          [__val] "Ir" (value)                                          \
        : "memory");                                                    \
  __ret; })

#define __arch_exchange_64_int(mem, value, mb1, mb2)                    \
({                                                                      \
  unsigned long __ret, __tmp;                                           \
  __asm__ __volatile__ (                                                \
                mb1                                                     \
        "1:     ldq_l   %[__ret],%[__mem]\n"                            \
        "       mov     %[__val],%[__tmp]\n"                            \
        "       stq_c   %[__tmp],%[__mem]\n"                            \
        "       beq     %[__tmp],1b\n"                                  \
                mb2                                                     \
        : [__ret] "=&r" (__ret),                                        \
          [__tmp] "=&r" (__tmp)                                         \
        : [__mem] "m" (*(mem)),                                         \
          [__val] "Ir" (value)                                          \
        : "memory");                                                    \
  __ret; })

#define atomic_exchange_acq(mem, value) \
  __atomic_val_bysize (__arch_exchange, int, mem, value, "", __MB)

#define atomic_exchange_rel(mem, value) \
  __atomic_val_bysize (__arch_exchange, int, mem, value, __MB, "")
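
/* Usage sketch (illustrative only; 'lock' is a hypothetical variable):
   a simple test-and-set spin lock built from the exchange macros above.

     static int lock;
     while (atomic_exchange_acq (&lock, 1) != 0)
       ;                                // spin until the old value was 0
     // ... critical section ...
     atomic_exchange_rel (&lock, 0);    // release the lock
*/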


/* Atomically add value and return the previous (unincremented) value.  */

#define __arch_exchange_and_add_8_int(mem, value, mb1, mb2) \
  ({ __builtin_trap (); 0; })

#define __arch_exchange_and_add_16_int(mem, value, mb1, mb2) \
  ({ __builtin_trap (); 0; })
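
/* The 8- and 16-bit variants above are deliberately left unimplemented
   and trap at run time; presumably no caller needs an atomic sub-word
   add.  */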

#define __arch_exchange_and_add_32_int(mem, value, mb1, mb2)            \
({                                                                      \
  signed int __ret, __tmp;                                              \
  __asm__ __volatile__ (                                                \
                mb1                                                     \
        "1:     ldl_l   %[__ret],%[__mem]\n"                            \
        "       addl    %[__ret],%[__val],%[__tmp]\n"                   \
        "       stl_c   %[__tmp],%[__mem]\n"                            \
        "       beq     %[__tmp],1b\n"                                  \
                mb2                                                     \
        : [__ret] "=&r" (__ret),                                        \
          [__tmp] "=&r" (__tmp)                                         \
        : [__mem] "m" (*(mem)),                                         \
          [__val] "Ir" ((signed int)(value))                            \
        : "memory");                                                    \
  __ret; })

#define __arch_exchange_and_add_64_int(mem, value, mb1, mb2)            \
({                                                                      \
  unsigned long __ret, __tmp;                                           \
  __asm__ __volatile__ (                                                \
                mb1                                                     \
        "1:     ldq_l   %[__ret],%[__mem]\n"                            \
        "       addq    %[__ret],%[__val],%[__tmp]\n"                   \
        "       stq_c   %[__tmp],%[__mem]\n"                            \
        "       beq     %[__tmp],1b\n"                                  \
                mb2                                                     \
        : [__ret] "=&r" (__ret),                                        \
          [__tmp] "=&r" (__tmp)                                         \
        : [__mem] "m" (*(mem)),                                         \
          [__val] "Ir" ((unsigned long)(value))                         \
        : "memory");                                                    \
  __ret; })

/* ??? Barrier semantics for atomic_exchange_and_add appear to be
   undefined.  Use full barrier for now, as that's safe.  */
#define atomic_exchange_and_add(mem, value) \
  __atomic_val_bysize (__arch_exchange_and_add, int, mem, value, __MB, __MB)
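
/* Usage sketch (illustrative only; 'counter' is a hypothetical variable):

     static int counter;
     int old = atomic_exchange_and_add (&counter, 1);
     // 'old' holds the value of counter before the increment
*/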


/* ??? Not yet implemented; these could do better than the
   compare-and-exchange loop provided by generic code.

#define atomic_decrement_if_positive(mem)
#define atomic_bit_test_set(mem, bit)

*/

#ifndef UP
# define atomic_full_barrier()  __asm__ ("mb" : : : "memory")
# define atomic_read_barrier()  __asm__ ("mb" : : : "memory")
# define atomic_write_barrier() __asm__ ("wmb" : : : "memory")
#endif
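
/* Barrier usage sketch (illustrative only; 'data' and 'ready' are
   hypothetical): "mb" orders all prior memory operations before all
   subsequent ones, while "wmb" orders only stores.

     data = 42;
     atomic_write_barrier ();   // make the store to 'data' visible first
     ready = 1;

   On UP builds these macros are left undefined here, so generic code is
   expected to supply (presumably empty) fallbacks.  */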