
Linux/arch/s390/lib/csum-partial.c


// SPDX-License-Identifier: GPL-2.0

#include <linux/export.h>
#include <asm/checksum.h>
#include <asm/fpu.h>

/*
 * Computes the checksum of a memory block at src, length len,
 * and adds in "sum" (32-bit). If copy is true, the data is also
 * copied to dst.
 *
 * Returns a 32-bit number suitable for feeding into itself
 * or csum_tcpudp_magic.
 *
 * This function must be called with even lengths, except
 * for the last fragment, which may be odd.
 *
 * It's best to have src and dst aligned on a 64-bit boundary.
 */
static __always_inline __wsum csum_copy(void *dst, const void *src, int len, __wsum sum, bool copy)
{
        DECLARE_KERNEL_FPU_ONSTACK8(vxstate);

        if (!cpu_has_vx()) {
                if (copy)
                        memcpy(dst, src, len);
                /* dst may be NULL when not copying, so checksum the valid buffer */
                return cksm(copy ? dst : src, len, sum);
        }
        kernel_fpu_begin(&vxstate, KERNEL_VXR_V16V23);
        /*
         * Accumulate the checksum with the VCKSM instruction, using four
         * independent accumulators (V16-V19) to break the dependency chain.
         * The incoming 32-bit "sum" seeds word element 1 of V16.
         */
        fpu_vlvgf(16, (__force u32)sum, 1);
        fpu_vzero(17);
        fpu_vzero(18);
        fpu_vzero(19);
        /* Main loop: checksum (and optionally copy) 64 bytes per iteration. */
        while (len >= 64) {
                fpu_vlm(20, 23, src);
                if (copy) {
                        fpu_vstm(20, 23, dst);
                        dst += 64;
                }
                fpu_vcksm(16, 20, 16);
                fpu_vcksm(17, 21, 17);
                fpu_vcksm(18, 22, 18);
                fpu_vcksm(19, 23, 19);
                src += 64;
                len -= 64;
        }
        while (len >= 32) {
                fpu_vlm(20, 21, src);
                if (copy) {
                        fpu_vstm(20, 21, dst);
                        dst += 32;
                }
                fpu_vcksm(16, 20, 16);
                fpu_vcksm(17, 21, 17);
                src += 32;
                len -= 32;
        }
        while (len >= 16) {
                fpu_vl(20, src);
                if (copy) {
                        fpu_vst(20, dst);
                        dst += 16;
                }
                fpu_vcksm(16, 20, 16);
                src += 16;
                len -= 16;
        }
        /* Remaining 1..15 tail bytes: length-limited vector load/store. */
        if (len) {
                fpu_vll(20, len - 1, src);
                if (copy)
                        fpu_vstl(20, len - 1, dst);
                fpu_vcksm(16, 20, 16);
        }
        /* Fold the four partial sums into a single 32-bit result in V16. */
        fpu_vcksm(18, 19, 18);
        fpu_vcksm(16, 17, 16);
        fpu_vcksm(16, 18, 16);
        sum = (__force __wsum)fpu_vlgvf(16, 1);
        kernel_fpu_end(&vxstate, KERNEL_VXR_V16V23);
        return sum;
}

__wsum csum_partial(const void *buff, int len, __wsum sum)
{
        return csum_copy(NULL, buff, len, sum, false);
}
EXPORT_SYMBOL(csum_partial);

__wsum csum_partial_copy_nocheck(const void *src, void *dst, int len)
{
        return csum_copy(dst, src, len, 0, true);
}
EXPORT_SYMBOL(csum_partial_copy_nocheck);
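
/*
 * A minimal usage sketch, for illustration only (not part of the kernel
 * tree, hence guarded by "#if 0"): it shows how the 32-bit running sum
 * returned by csum_partial() can be fed back into csum_partial() across
 * fragments and finally combined with the pseudo-header via
 * csum_tcpudp_magic(), as the comment above csum_copy() describes.
 * The function name and its parameters are hypothetical; csum_partial(),
 * csum_tcpudp_magic() and IPPROTO_UDP are existing kernel interfaces.
 */
#if 0
#include <linux/in.h>           /* IPPROTO_UDP */
#include <net/checksum.h>       /* csum_partial(), csum_tcpudp_magic() */

static __sum16 example_udp_csum(const void *payload, int len,
                                __be32 saddr, __be32 daddr)
{
        /*
         * Checksum the payload in two fragments. Intermediate fragments
         * must have even length; only the last fragment may be odd.
         */
        int first = (len / 2) & ~1;
        __wsum sum = csum_partial(payload, first, 0);

        sum = csum_partial(payload + first, len - first, sum);

        /* Add the UDP pseudo-header and fold to the final 16-bit checksum. */
        return csum_tcpudp_magic(saddr, daddr, len, IPPROTO_UDP, sum);
}
#endif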