arch/m68k/lib/checksum.c (mcf548x/linux.git)
/*
 * INET         An implementation of the TCP/IP protocol suite for the LINUX
 *              operating system.  INET is implemented using the  BSD Socket
 *              interface as the means of communication with the user level.
 *
 *              IP/TCP/UDP checksumming routines
 *
 * Authors:     Jorge Cwik, <jorge@laser.satlink.net>
 *              Arnt Gulbrandsen, <agulbra@nvg.unit.no>
 *              Tom May, <ftom@netcom.com>
 *              Andreas Schwab, <schwab@issan.informatik.uni-dortmund.de>
 *              Lots of code moved from tcp.c and ip.c; see those files
 *              for more names.
 *
 * 03/02/96     Jes Sorensen, Andreas Schwab, Roman Hodek:
 *              Fixed some nasty bugs, causing some horrible crashes.
 *              A: At some points, the sum (%0) was used as
 *              length-counter instead of the length counter
 *              (%1). Thanks to Roman Hodek for pointing this out.
 *              B: GCC seems to mess up if one uses too many
 *              data-registers to hold input values and one tries to
 *              specify d0 and d1 as scratch registers. Letting gcc
 *              choose these registers itself solves the problem.
 *
 *              This program is free software; you can redistribute it and/or
 *              modify it under the terms of the GNU General Public License
 *              as published by the Free Software Foundation; either version
 *              2 of the License, or (at your option) any later version.
 *
 * 1998/8/31    Andreas Schwab:
 *              Zero out rest of buffer on exception in
 *              csum_partial_copy_from_user.
 */

#include <linux/module.h>
#include <net/checksum.h>

#ifdef CONFIG_COLDFIRE

static inline unsigned short from32to16(unsigned long x)
{
	/* add up 16-bit and 16-bit for 16+c bit */
	x = (x & 0xffff) + (x >> 16);
	/* add up carry.. */
	x = (x & 0xffff) + (x >> 16);
	return x;
}
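
/*
 * Illustrative sketch (not part of the original file): the folding above
 * can be checked in plain C.  Two folds always suffice because the first
 * sum of two 16-bit halves fits in 17 bits, so the second fold only has a
 * carry of at most 1 to add back in.
 */
#if 0	/* example only */
static unsigned short example_fold(void)
{
	unsigned long x = 0x8001ffffUL;

	x = (x & 0xffff) + (x >> 16);	/* 0xffff + 0x8001 = 0x18000 */
	x = (x & 0xffff) + (x >> 16);	/* 0x8000 + 0x1    = 0x8001  */
	return (unsigned short)x;	/* 0x8001 */
}
#endif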

static unsigned long do_csum(const unsigned char *buff, int len)
{
	int odd, count;
	unsigned long result = 0;

	if (len <= 0)
		goto out;
	odd = 1 & (unsigned long) buff;
	if (odd) {
		result = *buff;
		len--;
		buff++;
	}
	count = len >> 1;		/* nr of 16-bit words.. */
	if (count) {
		if (2 & (unsigned long) buff) {
			result += *(unsigned short *) buff;
			count--;
			len -= 2;
			buff += 2;
		}
		count >>= 1;		/* nr of 32-bit words.. */
		if (count) {
			unsigned long carry = 0;
			do {
				unsigned long w = *(unsigned long *) buff;
				count--;
				buff += 4;
				result += carry;
				result += w;
				carry = (w > result);
			} while (count);
			result += carry;
			result = (result & 0xffff) + (result >> 16);
		}
		if (len & 2) {
			result += *(unsigned short *) buff;
			buff += 2;
		}
	}
	if (len & 1)
		result += (*buff << 8);
	result = from32to16(result);
	if (odd)
		result = ((result >> 8) & 0xff) | ((result & 0xff) << 8);
out:
	return result;
}
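
/*
 * Example (added for illustration, not part of the original file): the
 * "carry = (w > result)" test in the loop above detects 32-bit overflow.
 * With result == 0xffffffff and w == 2 the addition wraps to 1, which is
 * smaller than w, so a carry of 1 is folded back in on the next pass
 * (and once more after the loop ends).
 */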

/*
 * computes the checksum of a memory block at buff, length len,
 * and adds in "sum" (32-bit)
 *
 * returns a 32-bit number suitable for feeding into itself
 * or csum_tcpudp_magic
 *
 * this function must be called with even lengths, except
 * for the last fragment, which may be odd
 *
 * it's best to have buff aligned on a 32-bit boundary
 */
__wsum csum_partial(const void *buff, int len, __wsum sum)
{
	unsigned int result = do_csum(buff, len);

	/* add in old sum, and carry.. */
	result += sum;
	if (sum > result)
		result += 1;
	return result;
}
EXPORT_SYMBOL(csum_partial);
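
/*
 * Usage sketch (added for illustration, not part of the original file;
 * "buf" and "len" are placeholder names): a typical caller folds the
 * 32-bit partial sum down to the final 16-bit Internet checksum with
 * csum_fold(), e.g.
 *
 *	__wsum partial = csum_partial(buf, len, 0);
 *	__sum16 check = csum_fold(partial);
 *
 * or feeds it to csum_tcpudp_magic() together with the pseudo-header
 * fields to build a TCP/UDP checksum, as the comment above notes.
 */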

/*
 * copy from user space while checksumming, otherwise like csum_partial
 */

__wsum
csum_partial_copy_from_user(const void __user *src, void *dst, int len,
			    __wsum sum, int *csum_err)
{
	if (csum_err)
		*csum_err = 0;
	memcpy(dst, src, len);
	return csum_partial(dst, len, sum);
}
EXPORT_SYMBOL(csum_partial_copy_from_user);
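
/*
 * Caller sketch (added for illustration, not part of the original file;
 * "user_ptr" and "kbuf" are placeholder names):
 *
 *	int err = 0;
 *	csum = csum_partial_copy_from_user(user_ptr, kbuf, len, csum, &err);
 *	if (err)
 *		return -EFAULT;
 *
 * On this ColdFire variant err is always left at 0, since the copy is a
 * plain memcpy() with no fault handling; the non-ColdFire version below
 * reports -EFAULT through the same pointer when a user access faults.
 */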

/*
 * copy from kernel space while checksumming, otherwise like csum_partial
 */

__wsum
csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
{
	memcpy(dst, src, len);
	return csum_partial(dst, len, sum);
}
EXPORT_SYMBOL(csum_partial_copy_nocheck);

#else /* !CONFIG_COLDFIRE */

/*
 * computes a partial checksum, e.g. for TCP/UDP fragments
 */

__wsum csum_partial(const void *buff, int len, __wsum sum)
{
	unsigned long tmp1, tmp2;
	/*
	 * Experiments with ethernet and slip connections show that buff
	 * is aligned on either a 2-byte or 4-byte boundary.
	 */
	__asm__("movel %2,%3\n\t"
		"btst #1,%3\n\t"	/* Check alignment */
		"jeq 2f\n\t"
		"subql #2,%1\n\t"	/* buff%4==2: treat first word */
		"jgt 1f\n\t"
		"addql #2,%1\n\t"	/* len was == 2, treat only rest */
		"jra 4f\n"
	     "1:\t"
		"addw %2@+,%0\n\t"	/* add first word to sum */
		"clrl %3\n\t"
		"addxl %3,%0\n"		/* add X bit */
	     "2:\t"
		/* unrolled loop for the main part: do 8 longs at once */
		"movel %1,%3\n\t"	/* save len in tmp1 */
		"lsrl #5,%1\n\t"	/* len/32 */
		"jeq 2f\n\t"		/* not enough... */
		"subql #1,%1\n"
	     "1:\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"dbra %1,1b\n\t"
		"clrl %4\n\t"
		"addxl %4,%0\n\t"	/* add X bit */
		"clrw %1\n\t"
		"subql #1,%1\n\t"
		"jcc 1b\n"
	     "2:\t"
		"movel %3,%1\n\t"	/* restore len from tmp1 */
		"andw #0x1c,%3\n\t"	/* number of rest longs */
		"jeq 4f\n\t"
		"lsrw #2,%3\n\t"
		"subqw #1,%3\n"
	     "3:\t"
		/* loop for rest longs */
		"movel %2@+,%4\n\t"
		"addxl %4,%0\n\t"
		"dbra %3,3b\n\t"
		"clrl %4\n\t"
		"addxl %4,%0\n"		/* add X bit */
	     "4:\t"
		/* now check for rest bytes that do not fit into longs */
		"andw #3,%1\n\t"
		"jeq 7f\n\t"
		"clrl %4\n\t"		/* clear tmp2 for rest bytes */
		"subqw #2,%1\n\t"
		"jlt 5f\n\t"
		"movew %2@+,%4\n\t"	/* have rest >= 2: get word */
		"swap %4\n\t"		/* into bits 16..31 */
		"tstw %1\n\t"		/* another byte? */
		"jeq 6f\n"
	     "5:\t"
		"moveb %2@,%4\n\t"	/* have odd rest: get byte */
		"lslw #8,%4\n\t"	/* into bits 8..15; 16..31 untouched */
	     "6:\t"
		"addl %4,%0\n\t"	/* now add rest long to sum */
		"clrl %4\n\t"
		"addxl %4,%0\n"		/* add X bit */
	     "7:\t"
		: "=d" (sum), "=d" (len), "=a" (buff),
		  "=&d" (tmp1), "=&d" (tmp2)
		: "0" (sum), "1" (len), "2" (buff)
	    );
	return(sum);
}

EXPORT_SYMBOL(csum_partial);
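
/*
 * Note on the unrolled loop above (added for illustration, not part of the
 * original file): each "addxl" adds a long plus the carry (X bit) left by
 * the previous addition, so the loop performs a 32-bit ones'-complement
 * accumulation.  A rough C equivalent of one step, using a 64-bit
 * intermediate:
 *
 *	unsigned long long t = (unsigned long long)sum + word + carry;
 *	sum   = (unsigned long)t;		// low 32 bits
 *	carry = (unsigned long)(t >> 32);	// becomes the next X bit
 */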


/*
 * copy from user space while checksumming, with exception handling.
 */

__wsum
csum_partial_copy_from_user(const void __user *src, void *dst,
			    int len, __wsum sum, int *csum_err)
{
	/*
	 * GCC doesn't like more than 10 operands for the asm
	 * statements so we have to use tmp2 for the error
	 * code.
	 */
	unsigned long tmp1, tmp2;

	__asm__("movel %2,%4\n\t"
		"btst #1,%4\n\t"	/* Check alignment */
		"jeq 2f\n\t"
		"subql #2,%1\n\t"	/* buff%4==2: treat first word */
		"jgt 1f\n\t"
		"addql #2,%1\n\t"	/* len was == 2, treat only rest */
		"jra 4f\n"
	     "1:\n"
	     "10:\t"
		"movesw %2@+,%4\n\t"	/* add first word to sum */
		"addw %4,%0\n\t"
		"movew %4,%3@+\n\t"
		"clrl %4\n\t"
		"addxl %4,%0\n"		/* add X bit */
	     "2:\t"
		/* unrolled loop for the main part: do 8 longs at once */
		"movel %1,%4\n\t"	/* save len in tmp1 */
		"lsrl #5,%1\n\t"	/* len/32 */
		"jeq 2f\n\t"		/* not enough... */
		"subql #1,%1\n"
	     "1:\n"
	     "11:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
	     "12:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
	     "13:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
	     "14:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
	     "15:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
	     "16:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
	     "17:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
	     "18:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"dbra %1,1b\n\t"
		"clrl %5\n\t"
		"addxl %5,%0\n\t"	/* add X bit */
		"clrw %1\n\t"
		"subql #1,%1\n\t"
		"jcc 1b\n"
	     "2:\t"
		"movel %4,%1\n\t"	/* restore len from tmp1 */
		"andw #0x1c,%4\n\t"	/* number of rest longs */
		"jeq 4f\n\t"
		"lsrw #2,%4\n\t"
		"subqw #1,%4\n"
	     "3:\n"
		/* loop for rest longs */
	     "19:\t"
		"movesl %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"dbra %4,3b\n\t"
		"clrl %5\n\t"
		"addxl %5,%0\n"		/* add X bit */
	     "4:\t"
		/* now check for rest bytes that do not fit into longs */
		"andw #3,%1\n\t"
		"jeq 7f\n\t"
		"clrl %5\n\t"		/* clear tmp2 for rest bytes */
		"subqw #2,%1\n\t"
		"jlt 5f\n\t"
	     "20:\t"
		"movesw %2@+,%5\n\t"	/* have rest >= 2: get word */
		"movew %5,%3@+\n\t"
		"swap %5\n\t"		/* into bits 16..31 */
		"tstw %1\n\t"		/* another byte? */
		"jeq 6f\n"
	     "5:\n"
	     "21:\t"
		"movesb %2@,%5\n\t"	/* have odd rest: get byte */
		"moveb %5,%3@+\n\t"
		"lslw #8,%5\n\t"	/* into bits 8..15; 16..31 untouched */
	     "6:\t"
		"addl %5,%0\n\t"	/* now add rest long to sum */
		"clrl %5\n\t"
		"addxl %5,%0\n\t"	/* add X bit */
	     "7:\t"
		"clrl %5\n"		/* no error - clear return value */
	     "8:\n"
		".section .fixup,\"ax\"\n"
		".even\n"
		/* If any exception occurs zero out the rest.
		   Similarities with the code above are intentional :-) */
	     "90:\t"
		"clrw %3@+\n\t"
		"movel %1,%4\n\t"
		"lsrl #5,%1\n\t"
		"jeq 1f\n\t"
		"subql #1,%1\n"
	     "91:\t"
		"clrl %3@+\n"
	     "92:\t"
		"clrl %3@+\n"
	     "93:\t"
		"clrl %3@+\n"
	     "94:\t"
		"clrl %3@+\n"
	     "95:\t"
		"clrl %3@+\n"
	     "96:\t"
		"clrl %3@+\n"
	     "97:\t"
		"clrl %3@+\n"
	     "98:\t"
		"clrl %3@+\n\t"
		"dbra %1,91b\n\t"
		"clrw %1\n\t"
		"subql #1,%1\n\t"
		"jcc 91b\n"
	     "1:\t"
		"movel %4,%1\n\t"
		"andw #0x1c,%4\n\t"
		"jeq 1f\n\t"
		"lsrw #2,%4\n\t"
		"subqw #1,%4\n"
	     "99:\t"
		"clrl %3@+\n\t"
		"dbra %4,99b\n\t"
	     "1:\t"
		"andw #3,%1\n\t"
		"jeq 9f\n"
	     "100:\t"
		"clrw %3@+\n\t"
		"tstw %1\n\t"
		"jeq 9f\n"
	     "101:\t"
		"clrb %3@+\n"
	     "9:\t"
#define STR(X) STR1(X)
#define STR1(X) #X
		"moveq #-" STR(EFAULT) ",%5\n\t"
		"jra 8b\n"
		".previous\n"
		".section __ex_table,\"a\"\n"
		".long 10b,90b\n"
		".long 11b,91b\n"
		".long 12b,92b\n"
		".long 13b,93b\n"
		".long 14b,94b\n"
		".long 15b,95b\n"
		".long 16b,96b\n"
		".long 17b,97b\n"
		".long 18b,98b\n"
		".long 19b,99b\n"
		".long 20b,100b\n"
		".long 21b,101b\n"
		".previous"
		: "=d" (sum), "=d" (len), "=a" (src), "=a" (dst),
		  "=&d" (tmp1), "=d" (tmp2)
		: "0" (sum), "1" (len), "2" (src), "3" (dst)
	    );

	*csum_err = tmp2;

	return(sum);
}

EXPORT_SYMBOL(csum_partial_copy_from_user);
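
/*
 * Note (added for illustration, not part of the original file): each
 * ".long <label>b,<fixup>b" pair above is an __ex_table entry mapping a
 * possibly-faulting moves instruction to its fixup code.  When a user
 * access faults, the fault handler looks up the faulting PC and resumes
 * at the fixup, which zero-fills the rest of the destination, loads
 * -EFAULT into tmp2 and jumps back to label 8; the C code then stores
 * tmp2 into *csum_err.  Roughly, at the C level (hypothetical names):
 *
 *	if (fault_during_copy) {
 *		memset(dst_remaining, 0, bytes_left);
 *		*csum_err = -EFAULT;
 *	}
 */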


/*
 * copy from kernel space while checksumming, otherwise like csum_partial
 */

__wsum
csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
{
	unsigned long tmp1, tmp2;
	__asm__("movel %2,%4\n\t"
		"btst #1,%4\n\t"	/* Check alignment */
		"jeq 2f\n\t"
		"subql #2,%1\n\t"	/* buff%4==2: treat first word */
		"jgt 1f\n\t"
		"addql #2,%1\n\t"	/* len was == 2, treat only rest */
		"jra 4f\n"
	     "1:\t"
		"movew %2@+,%4\n\t"	/* add first word to sum */
		"addw %4,%0\n\t"
		"movew %4,%3@+\n\t"
		"clrl %4\n\t"
		"addxl %4,%0\n"		/* add X bit */
	     "2:\t"
		/* unrolled loop for the main part: do 8 longs at once */
		"movel %1,%4\n\t"	/* save len in tmp1 */
		"lsrl #5,%1\n\t"	/* len/32 */
		"jeq 2f\n\t"		/* not enough... */
		"subql #1,%1\n"
	     "1:\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"dbra %1,1b\n\t"
		"clrl %5\n\t"
		"addxl %5,%0\n\t"	/* add X bit */
		"clrw %1\n\t"
		"subql #1,%1\n\t"
		"jcc 1b\n"
	     "2:\t"
		"movel %4,%1\n\t"	/* restore len from tmp1 */
		"andw #0x1c,%4\n\t"	/* number of rest longs */
		"jeq 4f\n\t"
		"lsrw #2,%4\n\t"
		"subqw #1,%4\n"
	     "3:\t"
		/* loop for rest longs */
		"movel %2@+,%5\n\t"
		"addxl %5,%0\n\t"
		"movel %5,%3@+\n\t"
		"dbra %4,3b\n\t"
		"clrl %5\n\t"
		"addxl %5,%0\n"		/* add X bit */
	     "4:\t"
		/* now check for rest bytes that do not fit into longs */
		"andw #3,%1\n\t"
		"jeq 7f\n\t"
		"clrl %5\n\t"		/* clear tmp2 for rest bytes */
		"subqw #2,%1\n\t"
		"jlt 5f\n\t"
		"movew %2@+,%5\n\t"	/* have rest >= 2: get word */
		"movew %5,%3@+\n\t"
		"swap %5\n\t"		/* into bits 16..31 */
		"tstw %1\n\t"		/* another byte? */
		"jeq 6f\n"
	     "5:\t"
		"moveb %2@,%5\n\t"	/* have odd rest: get byte */
		"moveb %5,%3@+\n\t"
		"lslw #8,%5\n"		/* into bits 8..15; 16..31 untouched */
	     "6:\t"
		"addl %5,%0\n\t"	/* now add rest long to sum */
		"clrl %5\n\t"
		"addxl %5,%0\n"		/* add X bit */
	     "7:\t"
		: "=d" (sum), "=d" (len), "=a" (src), "=a" (dst),
		  "=&d" (tmp1), "=&d" (tmp2)
		: "0" (sum), "1" (len), "2" (src), "3" (dst)
	    );
	return(sum);
}
EXPORT_SYMBOL(csum_partial_copy_nocheck);
#endif /* CONFIG_COLDFIRE */