Diffstat:
-rw-r--r--  src/crypto/include/zinc/chacha20.h               | 47
-rw-r--r--  src/crypto/zinc/chacha20/chacha20-arm-glue.h     | 10
-rw-r--r--  src/crypto/zinc/chacha20/chacha20-mips-glue.h    |  4
-rw-r--r--  src/crypto/zinc/chacha20/chacha20-x86_64-glue.h  | 12
-rw-r--r--  src/crypto/zinc/chacha20/chacha20.c              | 22
5 files changed, 50 insertions, 45 deletions
diff --git a/src/crypto/include/zinc/chacha20.h b/src/crypto/include/zinc/chacha20.h
index 14bbadd..37ec3b4 100644
--- a/src/crypto/include/zinc/chacha20.h
+++ b/src/crypto/include/zinc/chacha20.h
@@ -29,33 +29,38 @@ enum { /* expand 32-byte k */
};
struct chacha20_ctx {
- u32 constant[4];
- u32 key[8];
- u32 counter[4];
+ union {
+ u32 state[16];
+ struct {
+ u32 constant[4];
+ u32 key[8];
+ u32 counter[4];
+ };
+ };
} __aligned(32);
-static inline void chacha20_init(struct chacha20_ctx *state,
+static inline void chacha20_init(struct chacha20_ctx *ctx,
const u8 key[CHACHA20_KEY_SIZE],
const u64 nonce)
{
- state->constant[0] = CHACHA20_CONSTANT_EXPA;
- state->constant[1] = CHACHA20_CONSTANT_ND_3;
- state->constant[2] = CHACHA20_CONSTANT_2_BY;
- state->constant[3] = CHACHA20_CONSTANT_TE_K;
- state->key[0] = get_unaligned_le32(key + 0);
- state->key[1] = get_unaligned_le32(key + 4);
- state->key[2] = get_unaligned_le32(key + 8);
- state->key[3] = get_unaligned_le32(key + 12);
- state->key[4] = get_unaligned_le32(key + 16);
- state->key[5] = get_unaligned_le32(key + 20);
- state->key[6] = get_unaligned_le32(key + 24);
- state->key[7] = get_unaligned_le32(key + 28);
- state->counter[0] = 0;
- state->counter[1] = 0;
- state->counter[2] = nonce & U32_MAX;
- state->counter[3] = nonce >> 32;
+ ctx->constant[0] = CHACHA20_CONSTANT_EXPA;
+ ctx->constant[1] = CHACHA20_CONSTANT_ND_3;
+ ctx->constant[2] = CHACHA20_CONSTANT_2_BY;
+ ctx->constant[3] = CHACHA20_CONSTANT_TE_K;
+ ctx->key[0] = get_unaligned_le32(key + 0);
+ ctx->key[1] = get_unaligned_le32(key + 4);
+ ctx->key[2] = get_unaligned_le32(key + 8);
+ ctx->key[3] = get_unaligned_le32(key + 12);
+ ctx->key[4] = get_unaligned_le32(key + 16);
+ ctx->key[5] = get_unaligned_le32(key + 20);
+ ctx->key[6] = get_unaligned_le32(key + 24);
+ ctx->key[7] = get_unaligned_le32(key + 28);
+ ctx->counter[0] = 0;
+ ctx->counter[1] = 0;
+ ctx->counter[2] = nonce & U32_MAX;
+ ctx->counter[3] = nonce >> 32;
}
-void chacha20(struct chacha20_ctx *state, u8 *dst, const u8 *src, u32 len,
+void chacha20(struct chacha20_ctx *ctx, u8 *dst, const u8 *src, u32 len,
simd_context_t *simd_context);
void hchacha20(u32 derived_key[CHACHA20_KEY_WORDS],
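
For orientation, a minimal sketch of how a caller would use the renamed context after this change. The helper name and buffer handling are hypothetical and not part of the patch; it assumes the pointer-based simd_get()/simd_put() helpers used elsewhere in this tree.

#include <zinc/chacha20.h>
#include <linux/string.h>

/* Hypothetical caller: encrypts a buffer in place with a one-shot context. */
static void example_encrypt(u8 *buf, u32 len,
                            const u8 key[CHACHA20_KEY_SIZE], const u64 nonce)
{
        struct chacha20_ctx ctx;
        simd_context_t simd_context;

        simd_get(&simd_context);
        chacha20_init(&ctx, key, nonce);        /* fills constant, key, counter */
        chacha20(&ctx, buf, buf, len, &simd_context);
        simd_put(&simd_context);
        memzero_explicit(&ctx, sizeof(ctx));    /* wipe key material */
}
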
diff --git a/src/crypto/zinc/chacha20/chacha20-arm-glue.h b/src/crypto/zinc/chacha20/chacha20-arm-glue.h
index 881e16c..2504448 100644
--- a/src/crypto/zinc/chacha20/chacha20-arm-glue.h
+++ b/src/crypto/zinc/chacha20/chacha20-arm-glue.h
@@ -37,7 +37,7 @@ static void __init chacha20_fpu_init(void)
#endif
}
-static inline bool chacha20_arch(struct chacha20_ctx *state, u8 *dst,
+static inline bool chacha20_arch(struct chacha20_ctx *ctx, u8 *dst,
const u8 *src, size_t len,
simd_context_t *simd_context)
{
@@ -50,8 +50,8 @@ static inline bool chacha20_arch(struct chacha20_ctx *state, u8 *dst,
len >= CHACHA20_BLOCK_SIZE * 3 && simd_use(simd_context)) {
const size_t bytes = min_t(size_t, len, PAGE_SIZE);
- chacha20_neon(dst, src, bytes, state->key, state->counter);
- state->counter[0] += (bytes + 63) / 64;
+ chacha20_neon(dst, src, bytes, ctx->key, ctx->counter);
+ ctx->counter[0] += (bytes + 63) / 64;
len -= bytes;
if (!len)
break;
@@ -59,8 +59,8 @@ static inline bool chacha20_arch(struct chacha20_ctx *state, u8 *dst,
src += bytes;
simd_relax(simd_context);
} else {
- chacha20_arm(dst, src, len, state->key, state->counter);
- state->counter[0] += (len + 63) / 64;
+ chacha20_arm(dst, src, len, ctx->key, ctx->counter);
+ ctx->counter[0] += (len + 63) / 64;
break;
}
}
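
The counter bookkeeping in both branches above advances ctx->counter[0] by one per 64-byte keystream block, rounding up when the tail is a partial block. A small illustrative helper (hypothetical name, not part of zinc) that states the same arithmetic:

#include <linux/kernel.h>       /* DIV_ROUND_UP */
#include <linux/types.h>

/* (bytes + 63) / 64 is simply DIV_ROUND_UP(bytes, CHACHA20_BLOCK_SIZE). */
static inline u32 chacha20_blocks_consumed(size_t bytes)
{
        return DIV_ROUND_UP(bytes, 64);
}

/* e.g. 64 bytes -> 1 block, 100 bytes -> 2 blocks, 4096 bytes -> 64 blocks */
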
diff --git a/src/crypto/zinc/chacha20/chacha20-mips-glue.h b/src/crypto/zinc/chacha20/chacha20-mips-glue.h
index 3904b34..9630cd0 100644
--- a/src/crypto/zinc/chacha20/chacha20-mips-glue.h
+++ b/src/crypto/zinc/chacha20/chacha20-mips-glue.h
@@ -10,11 +10,11 @@ static void __init chacha20_fpu_init(void)
{
}
-static inline bool chacha20_arch(struct chacha20_ctx *state, u8 *dst,
+static inline bool chacha20_arch(struct chacha20_ctx *ctx, u8 *dst,
const u8 *src, size_t len,
simd_context_t *simd_context)
{
- chacha20_mips((u32 *)state, dst, src, len);
+ chacha20_mips(ctx->state, dst, src, len);
return true;
}
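
The MIPS glue can now pass ctx->state directly instead of casting the struct, because the new union aliases the named fields onto a flat 16-word array in the standard ChaCha20 word order. A hypothetical compile-time check of that layout assumption (not part of the patch):

#include <linux/bug.h>          /* BUILD_BUG_ON */
#include <linux/stddef.h>       /* offsetof */
#include <zinc/chacha20.h>

/* state[0..3] = constant, state[4..11] = key, state[12..15] = counter */
static inline void chacha20_ctx_layout_check(void)
{
        BUILD_BUG_ON(offsetof(struct chacha20_ctx, constant) != 0);
        BUILD_BUG_ON(offsetof(struct chacha20_ctx, key) != 4 * sizeof(u32));
        BUILD_BUG_ON(offsetof(struct chacha20_ctx, counter) != 12 * sizeof(u32));
        BUILD_BUG_ON(sizeof(((struct chacha20_ctx *)NULL)->state) != 16 * sizeof(u32));
}
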
diff --git a/src/crypto/zinc/chacha20/chacha20-x86_64-glue.h b/src/crypto/zinc/chacha20/chacha20-x86_64-glue.h
index 2939f1e..b1fc4c8 100644
--- a/src/crypto/zinc/chacha20/chacha20-x86_64-glue.h
+++ b/src/crypto/zinc/chacha20/chacha20-x86_64-glue.h
@@ -50,7 +50,7 @@ static void __init chacha20_fpu_init(void)
#endif
}
-static inline bool chacha20_arch(struct chacha20_ctx *state, u8 *dst,
+static inline bool chacha20_arch(struct chacha20_ctx *ctx, u8 *dst,
const u8 *src, size_t len,
simd_context_t *simd_context)
{
@@ -67,16 +67,16 @@ static inline bool chacha20_arch(struct chacha20_ctx *state, u8 *dst,
if (IS_ENABLED(CONFIG_AS_AVX512) && chacha20_use_avx512 &&
len >= CHACHA20_BLOCK_SIZE * 8)
- chacha20_avx512(dst, src, bytes, state->key, state->counter);
+ chacha20_avx512(dst, src, bytes, ctx->key, ctx->counter);
else if (IS_ENABLED(CONFIG_AS_AVX512) && chacha20_use_avx512vl &&
len >= CHACHA20_BLOCK_SIZE * 4)
- chacha20_avx512vl(dst, src, bytes, state->key, state->counter);
+ chacha20_avx512vl(dst, src, bytes, ctx->key, ctx->counter);
else if (IS_ENABLED(CONFIG_AS_AVX2) && chacha20_use_avx2 &&
len >= CHACHA20_BLOCK_SIZE * 4)
- chacha20_avx2(dst, src, bytes, state->key, state->counter);
+ chacha20_avx2(dst, src, bytes, ctx->key, ctx->counter);
else
- chacha20_ssse3(dst, src, bytes, state->key, state->counter);
- state->counter[0] += (bytes + 63) / 64;
+ chacha20_ssse3(dst, src, bytes, ctx->key, ctx->counter);
+ ctx->counter[0] += (bytes + 63) / 64;
len -= bytes;
if (!len)
break;
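
The tiered selection above picks AVX-512 at eight or more blocks, AVX-512VL or AVX2 at four or more blocks, and SSSE3 otherwise. A rough sketch of that decision, illustrative only, with the boolean parameters standing in for the chacha20_use_* flags probed in chacha20_fpu_init:

#include <linux/types.h>
#include <zinc/chacha20.h>      /* CHACHA20_BLOCK_SIZE */

/* Which SIMD routine the loop would pick for a given remaining length. */
static const char *chacha20_x86_64_tier(size_t len, bool use_avx512,
                                        bool use_avx512vl, bool use_avx2)
{
        if (use_avx512 && len >= CHACHA20_BLOCK_SIZE * 8)
                return "chacha20_avx512";
        if (use_avx512vl && len >= CHACHA20_BLOCK_SIZE * 4)
                return "chacha20_avx512vl";
        if (use_avx2 && len >= CHACHA20_BLOCK_SIZE * 4)
                return "chacha20_avx2";
        return "chacha20_ssse3";
}
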
diff --git a/src/crypto/zinc/chacha20/chacha20.c b/src/crypto/zinc/chacha20/chacha20.c
index 3415105..3f0392f 100644
--- a/src/crypto/zinc/chacha20/chacha20.c
+++ b/src/crypto/zinc/chacha20/chacha20.c
@@ -25,7 +25,7 @@
static void __init chacha20_fpu_init(void)
{
}
-static inline bool chacha20_arch(struct chacha20_ctx *state, u8 *dst,
+static inline bool chacha20_arch(struct chacha20_ctx *ctx, u8 *dst,
const u8 *src, size_t len,
simd_context_t *simd_context)
{
@@ -79,45 +79,45 @@ static inline bool hchacha20_arch(u32 derived_key[CHACHA20_KEY_WORDS],
DOUBLE_ROUND(x) \
)
-static void chacha20_block_generic(__le32 *stream, u32 *state)
+static void chacha20_block_generic(struct chacha20_ctx *ctx, __le32 *stream)
{
u32 x[CHACHA20_BLOCK_WORDS];
int i;
for (i = 0; i < ARRAY_SIZE(x); ++i)
- x[i] = state[i];
+ x[i] = ctx->state[i];
TWENTY_ROUNDS(x);
for (i = 0; i < ARRAY_SIZE(x); ++i)
- stream[i] = cpu_to_le32(x[i] + state[i]);
+ stream[i] = cpu_to_le32(x[i] + ctx->state[i]);
- ++state[12];
+ ctx->counter[0] += 1;
}
-static void chacha20_generic(struct chacha20_ctx *state, u8 *out, const u8 *in,
+static void chacha20_generic(struct chacha20_ctx *ctx, u8 *out, const u8 *in,
u32 len)
{
__le32 buf[CHACHA20_BLOCK_WORDS];
while (len >= CHACHA20_BLOCK_SIZE) {
- chacha20_block_generic(buf, (u32 *)state);
+ chacha20_block_generic(ctx, buf);
crypto_xor_cpy(out, in, (u8 *)buf, CHACHA20_BLOCK_SIZE);
len -= CHACHA20_BLOCK_SIZE;
out += CHACHA20_BLOCK_SIZE;
in += CHACHA20_BLOCK_SIZE;
}
if (len) {
- chacha20_block_generic(buf, (u32 *)state);
+ chacha20_block_generic(ctx, buf);
crypto_xor_cpy(out, in, (u8 *)buf, len);
}
}
-void chacha20(struct chacha20_ctx *state, u8 *dst, const u8 *src, u32 len,
+void chacha20(struct chacha20_ctx *ctx, u8 *dst, const u8 *src, u32 len,
simd_context_t *simd_context)
{
- if (!chacha20_arch(state, dst, src, len, simd_context))
- chacha20_generic(state, dst, src, len);
+ if (!chacha20_arch(ctx, dst, src, len, simd_context))
+ chacha20_generic(ctx, dst, src, len);
}
EXPORT_SYMBOL(chacha20);
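
As a final illustration of the exported interface, a hypothetical helper (not part of zinc) that produces raw keystream through the public chacha20() entry point by encrypting an all-zero buffer:

#include <zinc/chacha20.h>
#include <linux/string.h>

/* XORing the keystream into zeroes yields the keystream itself, so this emits
 * `len` bytes of raw keystream and advances ctx->counter accordingly. */
static void chacha20_keystream(struct chacha20_ctx *ctx, u8 *out, u32 len,
                               simd_context_t *simd_context)
{
        memset(out, 0, len);
        chacha20(ctx, out, out, len, simd_context);
}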