author    Jason A. Donenfeld <Jason@zx2c4.com>  2016-12-11 14:37:45 +0100
committer Jason A. Donenfeld <Jason@zx2c4.com>  2016-12-11 14:37:45 +0100
commit    26eeaa0bcb54e40cda62aeed528e5d50138fd40b (patch)
tree      ecb78dad7ee2131b9f7735ebd6228d470be931d8 /src/crypto
parent    blake2s: move self tests to correct directory (diff)
download  wireguard-monolithic-historical-26eeaa0bcb54e40cda62aeed528e5d50138fd40b.tar.xz
          wireguard-monolithic-historical-26eeaa0bcb54e40cda62aeed528e5d50138fd40b.zip
crypto: use kernel's bitops functions
Diffstat (limited to 'src/crypto')
-rw-r--r--  src/crypto/blake2s.c    | 13
-rw-r--r--  src/crypto/siphash24.c  | 20
2 files changed, 15 insertions(+), 18 deletions(-)
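
The commit drops the file-local rotate helpers (rotr32() in blake2s.c and the ROTL macro in siphash24.c) in favor of ror32() and rol64() from the kernel's <linux/bitops.h>. A minimal sketch of what those kernel helpers do, assuming the usual bitops definitions; the sketch_ names below are illustrative only and not part of this patch:

    #include <linux/bitops.h>       /* provides ror32() and rol64() in mainline kernels */

    /* Illustrative stand-ins for the kernel helpers adopted by this commit. */
    static inline u32 sketch_ror32(u32 word, unsigned int shift)
    {
            /* rotate a 32-bit word right by 'shift' bits */
            return (word >> shift) | (word << (32 - shift));
    }

    static inline u64 sketch_rol64(u64 word, unsigned int shift)
    {
            /* rotate a 64-bit word left by 'shift' bits */
            return (word << shift) | (word >> (64 - shift));
    }
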
diff --git a/src/crypto/blake2s.c b/src/crypto/blake2s.c
index 7b3e169..0d3281e 100644
--- a/src/crypto/blake2s.c
+++ b/src/crypto/blake2s.c
@@ -40,11 +40,6 @@ static const u8 blake2s_sigma[10][16] = {
{10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0},
};
-static inline u32 rotr32(const u32 w, const u8 c)
-{
- return (w >> c) | (w << (32 - c));
-}
-
static inline u32 le32_to_cpuvp(const void *p)
{
return le32_to_cpup(p);
@@ -135,13 +130,13 @@ static inline void blake2s_compress(struct blake2s_state *state, const u8 block[
#define G(r,i,a,b,c,d) \
do { \
a = a + b + m[blake2s_sigma[r][2 * i + 0]]; \
- d = rotr32(d ^ a, 16); \
+ d = ror32(d ^ a, 16); \
c = c + d; \
- b = rotr32(b ^ c, 12); \
+ b = ror32(b ^ c, 12); \
a = a + b + m[blake2s_sigma[r][2 * i + 1]]; \
- d = rotr32(d ^ a, 8); \
+ d = ror32(d ^ a, 8); \
c = c + d; \
- b = rotr32(b ^ c, 7); \
+ b = ror32(b ^ c, 7); \
} while(0)
#define ROUND(r) \
do { \
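
The substitution in G() is behavior-preserving: for any 32-bit word and any shift in 1..31, the removed rotr32() and the kernel's ror32() compute the same value, so the BLAKE2s output is unchanged. A self-contained userspace check, assuming only standard C (uint32_t stands in for the kernel's u32):

    #include <assert.h>
    #include <stdint.h>

    /* The helper removed by this commit. */
    static uint32_t old_rotr32(uint32_t w, uint8_t c)
    {
            return (w >> c) | (w << (32 - c));
    }

    /* Userspace stand-in for the kernel's ror32(). */
    static uint32_t ror32_sketch(uint32_t word, unsigned int shift)
    {
            return (word >> shift) | (word << (32 - shift));
    }

    int main(void)
    {
            uint32_t w = 0xdeadbeef;
            unsigned int c;

            /* G() only rotates by 16, 12, 8 and 7, but check all nonzero shifts. */
            for (c = 1; c < 32; ++c)
                    assert(old_rotr32(w, (uint8_t)c) == ror32_sketch(w, c));
            return 0;
    }
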
diff --git a/src/crypto/siphash24.c b/src/crypto/siphash24.c
index d841894..c9d4127 100644
--- a/src/crypto/siphash24.c
+++ b/src/crypto/siphash24.c
@@ -4,15 +4,17 @@
#include <linux/kernel.h>
-#define ROTL(x,b) (u64)(((x) << (b)) | ((x) >> (64 - (b))))
-#define U8TO64(p) le64_to_cpu(*(__le64 *)(p))
+static inline u64 le64_to_cpuvp(const void *p)
+{
+ return le64_to_cpup(p);
+}
#define SIPROUND \
do { \
- v0 += v1; v1 = ROTL(v1, 13); v1 ^= v0; v0 = ROTL(v0, 32); \
- v2 += v3; v3 = ROTL(v3, 16); v3 ^= v2; \
- v0 += v3; v3 = ROTL(v3, 21); v3 ^= v0; \
- v2 += v1; v1 = ROTL(v1, 17); v1 ^= v2; v2 = ROTL(v2, 32); \
+ v0 += v1; v1 = rol64(v1, 13); v1 ^= v0; v0 = rol64(v0, 32); \
+ v2 += v3; v3 = rol64(v3, 16); v3 ^= v2; \
+ v0 += v3; v3 = rol64(v3, 21); v3 ^= v0; \
+ v2 += v1; v1 = rol64(v1, 17); v1 ^= v2; v2 = rol64(v2, 32); \
} while(0)
__attribute__((optimize("unroll-loops")))
@@ -23,8 +25,8 @@ u64 siphash24(const u8 *data, size_t len, const u8 key[SIPHASH24_KEY_LEN])
u64 v2 = 0x6c7967656e657261ULL;
u64 v3 = 0x7465646279746573ULL;
u64 b;
- u64 k0 = U8TO64(key);
- u64 k1 = U8TO64(key + sizeof(u64));
+ u64 k0 = le64_to_cpuvp(key);
+ u64 k1 = le64_to_cpuvp(key + sizeof(u64));
u64 m;
const u8 *end = data + len - (len % sizeof(u64));
const u8 left = len & (sizeof(u64) - 1);
@@ -34,7 +36,7 @@ u64 siphash24(const u8 *data, size_t len, const u8 key[SIPHASH24_KEY_LEN])
v1 ^= k1;
v0 ^= k0;
for (; data != end; data += sizeof(u64)) {
- m = U8TO64(data);
+ m = le64_to_cpuvp(data);
v3 ^= m;
SIPROUND;
SIPROUND;
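
On the SipHash side, the U8TO64 macro is likewise replaced by a typed wrapper, le64_to_cpuvp(), around the kernel's le64_to_cpup(): the key and message words are still read as little-endian 64-bit values, but the pointer cast now goes through a checked inline helper. A portable userspace sketch of the same load, for illustration only (the kernel path uses le64_to_cpup() rather than byte-by-byte assembly):

    #include <stdint.h>

    /* Illustrative little-endian 64-bit load, equivalent in effect to
     * le64_to_cpuvp() above regardless of host byte order. */
    static uint64_t load_le64_sketch(const void *p)
    {
            const uint8_t *b = p;
            uint64_t v = 0;
            int i;

            for (i = 7; i >= 0; --i)
                    v = (v << 8) | b[i];
            return v;
    }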