/*
 * INET		An implementation of the TCP/IP protocol suite for the LINUX
 *		operating system.  INET is implemented using the  BSD Socket
 *		interface as the means of communication with the user level.
 *
 *		IP/TCP/UDP checksumming routines
 *
 * Authors:	Jorge Cwik, <jorge@laser.satlink.net>
 *		Arnt Gulbrandsen, <agulbra@nvg.unit.no>
 *		Tom May, <ftom@netcom.com>
 *		Andreas Schwab, <schwab@issan.informatik.uni-dortmund.de>
 *		Lots of code moved from tcp.c and ip.c; see those files
 *		for more names.
 *
 * 03/02/96	Jes Sorensen, Andreas Schwab, Roman Hodek:
 *		Fixed some nasty bugs, causing some horrible crashes.
 *		A: At some points, the sum (%0) was used as
 *		length-counter instead of the length counter
 *		(%1). Thanks to Roman Hodek for pointing this out.
 *		B: GCC seems to mess up if one uses too many
 *		data-registers to hold input values and one tries to
 *		specify d0 and d1 as scratch registers. Letting gcc
 *		choose these registers itself solves the problem.
 *
 *		This program is free software; you can redistribute it and/or
 *		modify it under the terms of the GNU General Public License
 *		as published by the Free Software Foundation; either version
 *		2 of the License, or (at your option) any later version.
 *
 * 1998/8/31	Andreas Schwab:
 *		Zero out rest of buffer on exception in
 *		csum_partial_copy_from_user.
 */
35 #include <linux/module.h>
36 #include <net/checksum.h>
38 #ifdef CONFIG_COLDFIRE
/*
 * Fold a 32-bit partial sum to 16 bits: add the two 16-bit halves,
 * then add back the carry that addition may produce (ones-complement
 * "end-around carry").  Result fits in 16 bits.
 */
static inline unsigned short from32to16(unsigned long x)
{
	/* add up 16-bit and 16-bit for 16+c bit */
	x = (x & 0xffff) + (x >> 16);
	/* add up carry.. */
	x = (x & 0xffff) + (x >> 16);
	return x;
}
/*
 * do_csum - compute the Internet (ones-complement) checksum of a buffer
 * @buff: data, any alignment
 * @len:  length in bytes (len <= 0 yields 0)
 *
 * Returns the folded 16-bit sum in the low bits of an unsigned long.
 * NOTE(review): the carry handling assumes 32-bit unsigned long, as on
 * m68k/ColdFire; big-endian byte order is assumed for the odd-byte
 * shifts below.
 */
static unsigned long do_csum(const unsigned char *buff, int len)
{
	int odd, count;
	unsigned long result = 0;

	if (len <= 0)
		goto out;
	odd = 1 & (unsigned long) buff;
	if (odd) {
		/* leading odd byte: fold in now, byte-swap the total at the end */
		result = *buff;
		len--;
		buff++;
	}
	count = len >> 1;		/* nr of 16-bit words.. */
	if (count) {
		if (2 & (unsigned long) buff) {
			/* align to a 4-byte boundary before the word loop */
			result += *(unsigned short *) buff;
			count--;
			len -= 2;
			buff += 2;
		}
		count >>= 1;		/* nr of 32-bit words.. */
		if (count) {
			unsigned long carry = 0;
			do {
				unsigned long w = *(unsigned long *) buff;
				count--;
				buff += 4;
				result += carry;
				result += w;
				/* unsigned wraparound == carry out of the add */
				carry = (w > result);
			} while (count);
			result += carry;
			result = (result & 0xffff) + (result >> 16);
		}
		if (len & 2) {
			result += *(unsigned short *) buff;
			buff += 2;
		}
	}
	if (len & 1)
		/* big-endian: a trailing odd byte is the high byte of a word */
		result += (*buff << 8);
	result = from32to16(result);
	if (odd)
		/* undo the byte shift introduced by the odd start address */
		result = ((result >> 8) & 0xff) | ((result & 0xff) << 8);
out:
	return result;
}
99 * computes the checksum of a memory block at buff, length len,
100 * and adds in "sum" (32-bit)
102 * returns a 32-bit number suitable for feeding into itself
103 * or csum_tcpudp_magic
105 * this function must be called with even lengths, except
106 * for the last fragment, which may be odd
108 * it's best to have buff aligned on a 32-bit boundary
110 __wsum csum_partial(const void *buff, int len, __wsum sum)
112 unsigned int result = do_csum(buff, len);
114 /* add in old sum, and carry.. */
120 EXPORT_SYMBOL(csum_partial);
123 * copy from fs while checksumming, otherwise like csum_partial
127 csum_partial_copy_from_user(const void __user *src, void *dst, int len,
128 __wsum sum, int *csum_err)
130 if (csum_err) *csum_err = 0;
131 memcpy(dst, src, len);
132 return csum_partial(dst, len, sum);
134 EXPORT_SYMBOL(csum_partial_copy_from_user);
137 * copy from ds while checksumming, otherwise like csum_partial
141 csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
143 memcpy(dst, src, len);
144 return csum_partial(dst, len, sum);
146 EXPORT_SYMBOL(csum_partial_copy_nocheck);
148 #else /* !CONFIG_COLDFIRE */
151 * computes a partial checksum, e.g. for TCP/UDP fragments
154 __wsum csum_partial(const void *buff, int len, __wsum sum)
156 unsigned long tmp1, tmp2;
158 * Experiments with ethernet and slip connections show that buff
159 * is aligned on either a 2-byte or 4-byte boundary.
161 __asm__("movel %2,%3\n\t"
162 "btst #1,%3\n\t" /* Check alignment */
164 "subql #2,%1\n\t" /* buff%4==2: treat first word */
166 "addql #2,%1\n\t" /* len was == 2, treat only rest */
169 "addw %2@+,%0\n\t" /* add first word to sum */
171 "addxl %3,%0\n" /* add X bit */
173 /* unrolled loop for the main part: do 8 longs at once */
174 "movel %1,%3\n\t" /* save len in tmp1 */
175 "lsrl #5,%1\n\t" /* len/32 */
176 "jeq 2f\n\t" /* not enough... */
197 "addxl %4,%0\n\t" /* add X bit */
202 "movel %3,%1\n\t" /* restore len from tmp1 */
203 "andw #0x1c,%3\n\t" /* number of rest longs */
208 /* loop for rest longs */
213 "addxl %4,%0\n" /* add X bit */
215 /* now check for rest bytes that do not fit into longs */
218 "clrl %4\n\t" /* clear tmp2 for rest bytes */
221 "movew %2@+,%4\n\t" /* have rest >= 2: get word */
222 "swap %4\n\t" /* into bits 16..31 */
223 "tstw %1\n\t" /* another byte? */
226 "moveb %2@,%4\n\t" /* have odd rest: get byte */
227 "lslw #8,%4\n\t" /* into bits 8..15; 16..31 untouched */
229 "addl %4,%0\n\t" /* now add rest long to sum */
231 "addxl %4,%0\n" /* add X bit */
233 : "=d" (sum), "=d" (len), "=a" (buff),
234 "=&d" (tmp1), "=&d" (tmp2)
235 : "0" (sum), "1" (len), "2" (buff)
240 EXPORT_SYMBOL(csum_partial);
/*
 * NOTE(review): this block is mangled -- stray original line numbers are
 * fused into the text and large spans are missing (the unrolled
 * "movesl" copy loop, most of the .fixup zero-out code, the STR1 macro
 * and the __ex_table entries).  Code is left byte-identical here;
 * restore the full function from the original source before building.
 * Comments added below describe only what the surviving lines show.
 */
244 * copy from user space while checksumming, with exception handling.
/* Returns the checksum; per the .fixup comment below, a fault zeroes
   the rest of the destination and (presumably) returns -EFAULT through
   tmp2 into *csum_err -- TODO confirm against the complete source. */
248 csum_partial_copy_from_user(const void __user *src, void *dst,
249 int len, __wsum sum, int *csum_err)
252 * GCC doesn't like more than 10 operands for the asm
253 * statements so we have to use tmp2 for the error
256 unsigned long tmp1, tmp2;
/* asm operand map: %0=sum %1=len %2=src %3=dst %4=tmp1 %5=tmp2
   (see the constraint lists at the bottom) */
258 __asm__("movel %2,%4\n\t"
259 "btst #1,%4\n\t" /* Check alignment */
261 "subql #2,%1\n\t" /* buff%4==2: treat first word */
263 "addql #2,%1\n\t" /* len was == 2, treat only rest */
/* moves* forms access the user address space and are the faultable
   instructions covered by the exception table below */
267 "movesw %2@+,%4\n\t" /* add first word to sum */
271 "addxl %4,%0\n" /* add X bit */
273 /* unrolled loop for the main part: do 8 longs at once */
274 "movel %1,%4\n\t" /* save len in tmp1 */
275 "lsrl #5,%1\n\t" /* len/32 */
276 "jeq 2f\n\t" /* not enough... */
/* (unrolled movesl/addxl/movel copy loop missing here) */
313 "addxl %5,%0\n\t" /* add X bit */
318 "movel %4,%1\n\t" /* restore len from tmp1 */
319 "andw #0x1c,%4\n\t" /* number of rest longs */
324 /* loop for rest longs */
331 "addxl %5,%0\n" /* add X bit */
333 /* now check for rest bytes that do not fit into longs */
336 "clrl %5\n\t" /* clear tmp2 for rest bytes */
340 "movesw %2@+,%5\n\t" /* have rest >= 2: get word */
342 "swap %5\n\t" /* into bits 16..31 */
343 "tstw %1\n\t" /* another byte? */
347 "movesb %2@,%5\n\t" /* have odd rest: get byte */
349 "lslw #8,%5\n\t" /* into bits 8..15; 16..31 untouched */
351 "addl %5,%0\n\t" /* now add rest long to sum */
353 "addxl %5,%0\n\t" /* add X bit */
355 "clrl %5\n" /* no error - clear return value */
357 ".section .fixup,\"ax\"\n"
359 /* If any exception occurs zero out the rest.
360 Similarities with the code above are intentional :-) */
/* (fixup zero-out loops missing here; STR1 definition also missing) */
406 #define STR(X) STR1(X)
408 "moveq #-" STR(EFAULT) ",%5\n\t"
411 ".section __ex_table,\"a\"\n"
/* (.long fault,fixup entry pairs missing here) */
425 : "=d" (sum), "=d" (len), "=a" (src), "=a" (dst),
426 "=&d" (tmp1), "=d" (tmp2)
427 : "0" (sum), "1" (len), "2" (src), "3" (dst)
435 EXPORT_SYMBOL(csum_partial_copy_from_user);
439 * copy from kernel space while checksumming, otherwise like csum_partial
443 csum_partial_copy_nocheck(const void *src, void *dst, int len, __wsum sum)
445 unsigned long tmp1, tmp2;
446 __asm__("movel %2,%4\n\t"
447 "btst #1,%4\n\t" /* Check alignment */
449 "subql #2,%1\n\t" /* buff%4==2: treat first word */
451 "addql #2,%1\n\t" /* len was == 2, treat only rest */
454 "movew %2@+,%4\n\t" /* add first word to sum */
458 "addxl %4,%0\n" /* add X bit */
460 /* unrolled loop for the main part: do 8 longs at once */
461 "movel %1,%4\n\t" /* save len in tmp1 */
462 "lsrl #5,%1\n\t" /* len/32 */
463 "jeq 2f\n\t" /* not enough... */
492 "addxl %5,%0\n\t" /* add X bit */
497 "movel %4,%1\n\t" /* restore len from tmp1 */
498 "andw #0x1c,%4\n\t" /* number of rest longs */
503 /* loop for rest longs */
509 "addxl %5,%0\n" /* add X bit */
511 /* now check for rest bytes that do not fit into longs */
514 "clrl %5\n\t" /* clear tmp2 for rest bytes */
517 "movew %2@+,%5\n\t" /* have rest >= 2: get word */
519 "swap %5\n\t" /* into bits 16..31 */
520 "tstw %1\n\t" /* another byte? */
523 "moveb %2@,%5\n\t" /* have odd rest: get byte */
525 "lslw #8,%5\n" /* into bits 8..15; 16..31 untouched */
527 "addl %5,%0\n\t" /* now add rest long to sum */
529 "addxl %5,%0\n" /* add X bit */
531 : "=d" (sum), "=d" (len), "=a" (src), "=a" (dst),
532 "=&d" (tmp1), "=&d" (tmp2)
533 : "0" (sum), "1" (len), "2" (src), "3" (dst)
537 EXPORT_SYMBOL(csum_partial_copy_nocheck);
538 #endif /* CONFIG_COLDFIRE */