#ifndef __M68K_UACCESS_H
#define __M68K_UACCESS_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <linux/sched.h>
#include <asm/segment.h>

#define VERIFY_READ     0
#define VERIFY_WRITE    1

/* ColdFire doesn't have the moves instruction; use move. */
#ifdef CONFIG_COLDFIRE
#define moves move
#endif

/* We let the MMU do all checking */
static inline int access_ok(int type, const void __user *addr,
                            unsigned long size)
{
        return 1;
}
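
/*
 * Usage sketch (illustrative only, hypothetical identifiers): callers are
 * still expected to pair access_ok() with the copy routines, even though
 * this implementation always succeeds and relies on the MMU to fault:
 *
 *      if (!access_ok(VERIFY_WRITE, ubuf, len))
 *              return -EFAULT;
 *      if (copy_to_user(ubuf, kbuf, len))
 *              return -EFAULT;
 */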

/*
 * The exception table consists of pairs of addresses: the first is the
 * address of an instruction that is allowed to fault, and the second is
 * the address at which the program should continue.  No registers are
 * modified, so it is entirely up to the continuation code to figure out
 * what to do.
 *
 * All the routines below use bits of fixup code that are out of line
 * with the main instruction path.  This means when everything is well,
 * we don't even have to jump over them.  Further, they do not intrude
 * on our cache or tlb entries.
 */

struct exception_table_entry
{
        unsigned long insn, fixup;
};
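
/*
 * Illustrative sketch (not part of this header): on an access fault the
 * trap handler looks up the faulting PC in the exception table and, if an
 * entry exists, resumes at its fixup address instead of oopsing, roughly:
 *
 *      const struct exception_table_entry *entry;
 *
 *      entry = search_exception_tables(regs->pc);
 *      if (entry) {
 *              regs->pc = entry->fixup;
 *              return;
 *      }
 */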

extern int __put_user_bad(void);
extern int __get_user_bad(void);

/*
 * Store a single value of size "bwl" (byte/word/long) to the user pointer
 * "ptr"; on a fault the exception-table fixup sets "res" to "err".
 */
#define __put_user_asm(res, x, ptr, bwl, reg, err)      \
asm volatile ("\n"                                      \
        "1:     moves."#bwl"    %2,%1\n"                \
        "2:\n"                                          \
        "       .section .fixup,\"ax\"\n"               \
        "       .even\n"                                \
        "10:    moveq.l %3,%0\n"                        \
        "       jra 2b\n"                               \
        "       .previous\n"                            \
        "\n"                                            \
        "       .section __ex_table,\"a\"\n"            \
        "       .align  4\n"                            \
        "       .long   1b,10b\n"                       \
        "       .long   2b,10b\n"                       \
        "       .previous"                              \
        : "+d" (res), "=m" (*(ptr))                     \
        : #reg (x), "i" (err))

/*
 * These are the main single-value transfer routines.  They automatically
 * use the right size if we just have the right pointer type.
 */

#define __put_user(x, ptr)                                              \
({                                                                      \
        typeof(*(ptr)) __pu_val = (x);                                  \
        int __pu_err = 0;                                               \
        __chk_user_ptr(ptr);                                            \
        switch (sizeof (*(ptr))) {                                      \
        case 1:                                                         \
                __put_user_asm(__pu_err, __pu_val, ptr, b, d, -EFAULT); \
                break;                                                  \
        case 2:                                                         \
                __put_user_asm(__pu_err, __pu_val, ptr, w, d, -EFAULT); \
                break;                                                  \
        case 4:                                                         \
                __put_user_asm(__pu_err, __pu_val, ptr, l, r, -EFAULT); \
                break;                                                  \
        case 8:                                                         \
            {                                                           \
                const void __user *__pu_ptr = (ptr);                    \
                asm volatile ("\n"                                      \
                        "1:     moves.l %2,(%1)+\n"                     \
                        "2:     moves.l %R2,(%1)\n"                     \
                        "3:\n"                                          \
                        "       .section .fixup,\"ax\"\n"               \
                        "       .even\n"                                \
                        "10:    movel %3,%0\n"                          \
                        "       jra 3b\n"                               \
                        "       .previous\n"                            \
                        "\n"                                            \
                        "       .section __ex_table,\"a\"\n"            \
                        "       .align 4\n"                             \
                        "       .long 1b,10b\n"                         \
                        "       .long 2b,10b\n"                         \
                        "       .long 3b,10b\n"                         \
                        "       .previous"                              \
                        : "+d" (__pu_err), "+a" (__pu_ptr)              \
                        : "r" (__pu_val), "i" (-EFAULT)                 \
                        : "memory");                                    \
                break;                                                  \
            }                                                           \
        default:                                                        \
                __pu_err = __put_user_bad();                            \
                break;                                                  \
        }                                                               \
        __pu_err;                                                       \
})
#define put_user(x, ptr)        __put_user(x, ptr)
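
/*
 * Usage sketch (illustrative only, hypothetical identifiers): put_user()
 * picks the 1/2/4/8-byte variant from the pointer type and returns 0 on
 * success or -EFAULT if the store faulted:
 *
 *      int __user *uaddr;
 *      ...
 *      if (put_user(status, uaddr))
 *              return -EFAULT;
 */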

/*
 * Load a single value of size "bwl" (byte/word/long) of type "type" from
 * the user pointer "ptr" into "x"; on a fault the fixup sets "res" to
 * "err" and zeroes the loaded value.
 */
#define __get_user_asm(res, x, ptr, type, bwl, reg, err) ({     \
        type __gu_val;                                          \
        asm volatile ("\n"                                      \
                "1:     moves."#bwl"    %2,%1\n"                \
                "2:\n"                                          \
                "       .section .fixup,\"ax\"\n"               \
                "       .even\n"                                \
                "10:    move.l  %3,%0\n"                        \
                "       sub."#bwl"      %1,%1\n"                \
                "       jra     2b\n"                           \
                "       .previous\n"                            \
                "\n"                                            \
                "       .section __ex_table,\"a\"\n"            \
                "       .align  4\n"                            \
                "       .long   1b,10b\n"                       \
                "       .previous"                              \
                : "+d" (res), "=&" #reg (__gu_val)              \
                : "m" (*(ptr)), "i" (err));                     \
        (x) = (typeof(*(ptr)))(unsigned long)__gu_val;          \
})

#define __get_user(x, ptr)                                              \
({                                                                      \
        int __gu_err = 0;                                               \
        __chk_user_ptr(ptr);                                            \
        switch (sizeof(*(ptr))) {                                       \
        case 1:                                                         \
                __get_user_asm(__gu_err, x, ptr, u8, b, d, -EFAULT);    \
                break;                                                  \
        case 2:                                                         \
                __get_user_asm(__gu_err, x, ptr, u16, w, d, -EFAULT);   \
                break;                                                  \
        case 4:                                                         \
                __get_user_asm(__gu_err, x, ptr, u32, l, r, -EFAULT);   \
                break;                                                  \
/*      case 8: disabled because gcc-4.1 has a broken typeof            \
            {                                                           \
                const void *__gu_ptr = (ptr);                           \
                u64 __gu_val;                                           \
                asm volatile ("\n"                                      \
                        "1:     moves.l (%2)+,%1\n"                     \
                        "2:     moves.l (%2),%R1\n"                     \
                        "3:\n"                                          \
                        "       .section .fixup,\"ax\"\n"               \
                        "       .even\n"                                \
                        "10:    move.l  %3,%0\n"                        \
                        "       sub.l   %1,%1\n"                        \
                        "       sub.l   %R1,%R1\n"                      \
                        "       jra     3b\n"                           \
                        "       .previous\n"                            \
                        "\n"                                            \
                        "       .section __ex_table,\"a\"\n"            \
                        "       .align  4\n"                            \
                        "       .long   1b,10b\n"                       \
                        "       .long   2b,10b\n"                       \
                        "       .previous"                              \
                        : "+d" (__gu_err), "=&r" (__gu_val),            \
                          "+a" (__gu_ptr)                               \
                        : "i" (-EFAULT)                                 \
                        : "memory");                                    \
                (x) = (typeof(*(ptr)))__gu_val;                         \
                break;                                                  \
            }   */                                                      \
        default:                                                        \
                __gu_err = __get_user_bad();                            \
                break;                                                  \
        }                                                               \
        __gu_err;                                                       \
})
#define get_user(x, ptr) __get_user(x, ptr)
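
/*
 * Usage sketch (illustrative only, hypothetical identifiers): get_user()
 * reads a single value through a user pointer; on a fault the destination
 * is zeroed by the fixup code and -EFAULT is returned:
 *
 *      u32 val;
 *      u32 __user *uaddr;
 *      ...
 *      if (get_user(val, uaddr))
 *              return -EFAULT;
 */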

unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);

#define __constant_copy_from_user_asm(res, to, from, tmp, n, s1, s2, s3)\
        asm volatile ("\n"                                              \
                "1:     moves."#s1"     (%2)+,%3\n"                     \
                "       move."#s1"      %3,(%1)+\n"                     \
                "2:     moves."#s2"     (%2)+,%3\n"                     \
                "       move."#s2"      %3,(%1)+\n"                     \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "3:     moves."#s3"     (%2)+,%3\n"                     \
                "       move."#s3"      %3,(%1)+\n"                     \
                "       .endif\n"                                       \
                "4:\n"                                                  \
                "       .section __ex_table,\"a\"\n"                    \
                "       .align  4\n"                                    \
                "       .long   1b,10f\n"                               \
                "       .long   2b,20f\n"                               \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "       .long   3b,30f\n"                               \
                "       .endif\n"                                       \
                "       .previous\n"                                    \
                "\n"                                                    \
                "       .section .fixup,\"ax\"\n"                       \
                "       .even\n"                                        \
                "10:    clr."#s1"       (%1)+\n"                        \
                "20:    clr."#s2"       (%1)+\n"                        \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "30:    clr."#s3"       (%1)+\n"                        \
                "       .endif\n"                                       \
                "       moveq.l #"#n",%0\n"                             \
                "       jra     4b\n"                                   \
                "       .previous\n"                                    \
                : "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)      \
                : : "memory")

static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
        unsigned long res = 0, tmp;

        switch (n) {
        case 1:
                __get_user_asm(res, *(u8 *)to, (u8 __user *)from, u8, b, d, 1);
                break;
        case 2:
                __get_user_asm(res, *(u16 *)to, (u16 __user *)from, u16, w, d, 2);
                break;
        case 3:
                __constant_copy_from_user_asm(res, to, from, tmp, 3, w, b,);
                break;
        case 4:
                __get_user_asm(res, *(u32 *)to, (u32 __user *)from, u32, l, r, 4);
                break;
        case 5:
                __constant_copy_from_user_asm(res, to, from, tmp, 5, l, b,);
                break;
        case 6:
                __constant_copy_from_user_asm(res, to, from, tmp, 6, l, w,);
                break;
        case 7:
                __constant_copy_from_user_asm(res, to, from, tmp, 7, l, w, b);
                break;
        case 8:
                __constant_copy_from_user_asm(res, to, from, tmp, 8, l, l,);
                break;
        case 9:
                __constant_copy_from_user_asm(res, to, from, tmp, 9, l, l, b);
                break;
        case 10:
                __constant_copy_from_user_asm(res, to, from, tmp, 10, l, l, w);
                break;
        case 12:
                __constant_copy_from_user_asm(res, to, from, tmp, 12, l, l, l);
                break;
        default:
                /* we limit the inlined version to 3 moves */
                return __generic_copy_from_user(to, from, n);
        }

        return res;
}

#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3)  \
        asm volatile ("\n"                                              \
                "       move."#s1"      (%2)+,%3\n"                     \
                "11:    moves."#s1"     %3,(%1)+\n"                     \
                "12:    move."#s2"      (%2)+,%3\n"                     \
                "21:    moves."#s2"     %3,(%1)+\n"                     \
                "22:\n"                                                 \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "       move."#s3"      (%2)+,%3\n"                     \
                "31:    moves."#s3"     %3,(%1)+\n"                     \
                "32:\n"                                                 \
                "       .endif\n"                                       \
                "4:\n"                                                  \
                "\n"                                                    \
                "       .section __ex_table,\"a\"\n"                    \
                "       .align  4\n"                                    \
                "       .long   11b,5f\n"                               \
                "       .long   12b,5f\n"                               \
                "       .long   21b,5f\n"                               \
                "       .long   22b,5f\n"                               \
                "       .ifnc   \""#s3"\",\"\"\n"                       \
                "       .long   31b,5f\n"                               \
                "       .long   32b,5f\n"                               \
                "       .endif\n"                                       \
                "       .previous\n"                                    \
                "\n"                                                    \
                "       .section .fixup,\"ax\"\n"                       \
                "       .even\n"                                        \
                "5:     moveq.l #"#n",%0\n"                             \
                "       jra     4b\n"                                   \
                "       .previous\n"                                    \
                : "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp)       \
                : : "memory")

static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{
        unsigned long res = 0, tmp;

        switch (n) {
        case 1:
                __put_user_asm(res, *(u8 *)from, (u8 __user *)to, b, d, 1);
                break;
        case 2:
                __put_user_asm(res, *(u16 *)from, (u16 __user *)to, w, d, 2);
                break;
        case 3:
                __constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
                break;
        case 4:
                __put_user_asm(res, *(u32 *)from, (u32 __user *)to, l, r, 4);
                break;
        case 5:
                __constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
                break;
        case 6:
                __constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
                break;
        case 7:
                __constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
                break;
        case 8:
                __constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
                break;
        case 9:
                __constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
                break;
        case 10:
                __constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
                break;
        case 12:
                __constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
                break;
        default:
                /* limit the inlined version to 3 moves */
                return __generic_copy_to_user(to, from, n);
        }

        return res;
}

#define __copy_from_user(to, from, n)           \
(__builtin_constant_p(n) ?                      \
 __constant_copy_from_user(to, from, n) :       \
 __generic_copy_from_user(to, from, n))

#define __copy_to_user(to, from, n)             \
(__builtin_constant_p(n) ?                      \
 __constant_copy_to_user(to, from, n) :         \
 __generic_copy_to_user(to, from, n))

#define __copy_to_user_inatomic         __copy_to_user
#define __copy_from_user_inatomic       __copy_from_user

#define copy_from_user(to, from, n)     __copy_from_user(to, from, n)
#define copy_to_user(to, from, n)       __copy_to_user(to, from, n)
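
/*
 * Usage sketch (illustrative only, hypothetical identifiers): both copy
 * helpers return the number of bytes left uncopied, so 0 means success:
 *
 *      struct foo_args args;
 *
 *      if (copy_from_user(&args, uarg, sizeof(args)))
 *              return -EFAULT;
 *      ...
 *      if (copy_to_user(uarg, &args, sizeof(args)))
 *              return -EFAULT;
 */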

long strncpy_from_user(char *dst, const char __user *src, long count);
long strnlen_user(const char __user *src, long n);
unsigned long __clear_user(void __user *to, unsigned long n);

#define clear_user      __clear_user

#define strlen_user(str) strnlen_user(str, 32767)
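
/*
 * Usage sketch (illustrative only, hypothetical identifiers):
 * strncpy_from_user() returns the number of bytes copied (excluding the
 * terminating NUL) or -EFAULT on a faulting access:
 *
 *      char name[32];
 *      long len;
 *
 *      len = strncpy_from_user(name, uname, sizeof(name) - 1);
 *      if (len < 0)
 *              return len;
 *      name[len] = '\0';
 */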

#endif /* __M68K_UACCESS_H */