TOMOYO Linux Cross Reference
Linux/lib/crypto/utils.c

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Crypto library utility functions
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <asm/unaligned.h>
#include <crypto/utils.h>
#include <linux/module.h>

/*
 * XOR @len bytes from @src1 and @src2 together, writing the result to @dst
 * (which may alias one of the sources).  Don't call this directly; call
 * crypto_xor() or crypto_xor_cpy() instead (a usage sketch follows the
 * function below).
 */
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
        int relalign = 0;

        if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
                int size = sizeof(unsigned long);
                int d = (((unsigned long)dst ^ (unsigned long)src1) |
                         ((unsigned long)dst ^ (unsigned long)src2)) &
                        (size - 1);

                /*
                 * d has a bit set for every low-order address bit in which
                 * the three pointers disagree; relalign is the largest
                 * power-of-two stride at which they can all be aligned
                 * simultaneously.
                 */
                relalign = d ? 1 << __ffs(d) : size;

                /*
                 * If we care about alignment, process as many bytes as
                 * needed to advance dst and src to values whose alignments
                 * equal their relative alignment. This will allow us to
                 * process the remainder of the input using optimal strides.
                 */
                while (((unsigned long)dst & (relalign - 1)) && len > 0) {
                        *dst++ = *src1++ ^ *src2++;
                        len--;
                }
        }
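        /*
         * Worked example (hypothetical addresses, assuming a 64-bit machine
         * without efficient unaligned access): dst ending in 0x13, src1 in
         * 0x17 and src2 in 0x1b give d = (0x04 | 0x08) & 7 = 0x04, hence
         * relalign = 4.  The byte loop above then consumes one byte, after
         * which all three pointers are 4-byte aligned and the 4-byte loop
         * below handles the bulk of the data.
         */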

        /*
         * Main loops: use the widest stride the relative alignment allows.
         * With CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS, relalign stays 0 and
         * the unaligned accessors are used unconditionally; otherwise the
         * plain dereferences are safe because the fixup above aligned all
         * three pointers to relalign bytes.
         */
        while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
                if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
                        u64 l = get_unaligned((u64 *)src1) ^
                                get_unaligned((u64 *)src2);
                        put_unaligned(l, (u64 *)dst);
                } else {
                        *(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
                }
                dst += 8;
                src1 += 8;
                src2 += 8;
                len -= 8;
        }

        while (len >= 4 && !(relalign & 3)) {
                if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
                        u32 l = get_unaligned((u32 *)src1) ^
                                get_unaligned((u32 *)src2);
                        put_unaligned(l, (u32 *)dst);
                } else {
                        *(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
                }
                dst += 4;
                src1 += 4;
                src2 += 4;
                len -= 4;
        }

        while (len >= 2 && !(relalign & 1)) {
                if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
                        u16 l = get_unaligned((u16 *)src1) ^
                                get_unaligned((u16 *)src2);
                        put_unaligned(l, (u16 *)dst);
                } else {
                        *(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
                }
                dst += 2;
                src1 += 2;
                src2 += 2;
                len -= 2;
        }

        while (len--)
                *dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);

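/*
 * Minimal usage sketch (illustrative only -- the function name, buffer names
 * and the 16-byte size are assumptions, not taken from a real caller).  It
 * shows the two public entry points mentioned above: crypto_xor() XORs @src
 * into @dst in place, while crypto_xor_cpy() writes @src1 ^ @src2 to a
 * separate destination.  Both end up in __crypto_xor() for the general case.
 */
static void __maybe_unused crypto_xor_usage_sketch(void)
{
        u8 keystream[16] = { 0 };       /* e.g. output of a stream cipher */
        u8 buf[16] = { 0 };             /* data to be masked in place */
        u8 out[16];

        /* In place: buf[i] ^= keystream[i] for all 16 bytes. */
        crypto_xor(buf, keystream, sizeof(buf));

        /* Out of place: out[i] = buf[i] ^ keystream[i]. */
        crypto_xor_cpy(out, buf, keystream, sizeof(out));
}
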
MODULE_DESCRIPTION("Crypto library utility functions");
MODULE_LICENSE("GPL");
