path: root/arch/arm/crypto/aes-neonbs-glue.c
Diffstat:
-rw-r--r--  arch/arm/crypto/aes-neonbs-glue.c | 103
 1 file changed, 56 insertions(+), 47 deletions(-)
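
This diff makes two related changes to the bit-sliced NEON AES glue. First, CBC encryption stops going through the crypto_cipher helpers from <crypto/cbc.h> and instead delegates whole requests to a nested "cbc(aes)" skcipher allocated as a fallback. Second, the CTR path is reworked so the NEON core is handed a byte count, including any partial final block, rather than a whole-block count plus a separate tail buffer. Short annotations with illustrative sketches follow the relevant hunks below.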
diff --git a/arch/arm/crypto/aes-neonbs-glue.c b/arch/arm/crypto/aes-neonbs-glue.c
index e85839a8aaeb..f00f042ef357 100644
--- a/arch/arm/crypto/aes-neonbs-glue.c
+++ b/arch/arm/crypto/aes-neonbs-glue.c
@@ -8,8 +8,8 @@
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
-#include <crypto/cbc.h>
#include <crypto/ctr.h>
+#include <crypto/internal/cipher.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
@@ -20,10 +20,12 @@ MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ecb(aes)");
-MODULE_ALIAS_CRYPTO("cbc(aes)");
+MODULE_ALIAS_CRYPTO("cbc(aes)-all");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
+MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+
asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);
asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
@@ -35,7 +37,7 @@ asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
int rounds, int blocks, u8 iv[]);
asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
- int rounds, int blocks, u8 ctr[], u8 final[]);
+ int rounds, int bytes, u8 ctr[]);
asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
int rounds, int blocks, u8 iv[], int);
@@ -49,7 +51,7 @@ struct aesbs_ctx {
struct aesbs_cbc_ctx {
struct aesbs_ctx key;
- struct crypto_cipher *enc_tfm;
+ struct crypto_skcipher *enc_tfm;
};
struct aesbs_xts_ctx {
@@ -138,20 +140,25 @@ static int aesbs_cbc_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
kernel_neon_begin();
aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
kernel_neon_end();
+ memzero_explicit(&rk, sizeof(rk));
- return crypto_cipher_setkey(ctx->enc_tfm, in_key, key_len);
+ return crypto_skcipher_setkey(ctx->enc_tfm, in_key, key_len);
}
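
The added memzero_explicit() call scrubs the expanded key schedule from the stack once it has been converted into bit-sliced form. A plain memset() would not be reliable for this; the sketch below (a hypothetical function, not from this file) shows the distinction:

	#include <crypto/aes.h>
	#include <linux/string.h>

	static void scrub_example(const u8 *in_key, unsigned int key_len)
	{
		struct crypto_aes_ctx rk;

		if (aes_expandkey(&rk, in_key, key_len))
			return;
		/* ... consume rk.key_enc ... */

		/*
		 * memset(&rk, 0, sizeof(rk)) would be a dead store that the
		 * compiler is free to eliminate; memzero_explicit() adds a
		 * barrier so the key material is really wiped before the
		 * stack frame is reused.
		 */
		memzero_explicit(&rk, sizeof(rk));
	}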
-static void cbc_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
+static int cbc_encrypt(struct skcipher_request *req)
{
+ struct skcipher_request *subreq = skcipher_request_ctx(req);
+ struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
- crypto_cipher_encrypt_one(ctx->enc_tfm, dst, src);
-}
+ skcipher_request_set_tfm(subreq, ctx->enc_tfm);
+ skcipher_request_set_callback(subreq,
+ skcipher_request_flags(req),
+ NULL, NULL);
+ skcipher_request_set_crypt(subreq, req->src, req->dst,
+ req->cryptlen, req->iv);
-static int cbc_encrypt(struct skcipher_request *req)
-{
- return crypto_cbc_encrypt_walk(req, cbc_encrypt_one);
+ return crypto_skcipher_encrypt(subreq);
}
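
CBC encryption is inherently serial, since each block's input depends on the previous ciphertext block, so the 8-way bit-sliced NEON code cannot be applied to it; the rewritten cbc_encrypt() simply forwards the whole request to the fallback tfm. The sub-request is carved out of the outer request's context rather than allocated: cbc_init() below enlarges the request size to make room, which is what makes the skcipher_request_ctx() cast safe. A hypothetical caller (not part of this patch) showing where that memory comes from:

	#include <crypto/aes.h>
	#include <crypto/skcipher.h>
	#include <linux/scatterlist.h>

	/*
	 * skcipher_request_alloc() sizes the buffer as
	 * sizeof(struct skcipher_request) + crypto_skcipher_reqsize(tfm),
	 * so the nested fallback request fits in space the caller has
	 * already allocated here.
	 */
	static int encrypt_one_block(struct crypto_skcipher *tfm,
				     struct scatterlist *src,
				     struct scatterlist *dst, u8 *iv)
	{
		struct skcipher_request *req;
		int err;

		req = skcipher_request_alloc(tfm, GFP_KERNEL);
		if (!req)
			return -ENOMEM;

		skcipher_request_set_callback(req, 0, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE, iv);
		err = crypto_skcipher_encrypt(req);

		skcipher_request_free(req);
		return err;
	}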
static int cbc_decrypt(struct skcipher_request *req)
@@ -182,20 +189,28 @@ static int cbc_decrypt(struct skcipher_request *req)
return err;
}
-static int cbc_init(struct crypto_tfm *tfm)
+static int cbc_init(struct crypto_skcipher *tfm)
{
- struct aesbs_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
+ unsigned int reqsize;
+
+ ctx->enc_tfm = crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_NEED_FALLBACK);
+ if (IS_ERR(ctx->enc_tfm))
+ return PTR_ERR(ctx->enc_tfm);
- ctx->enc_tfm = crypto_alloc_cipher("aes", 0, 0);
+ reqsize = sizeof(struct skcipher_request);
+ reqsize += crypto_skcipher_reqsize(ctx->enc_tfm);
+ crypto_skcipher_set_reqsize(tfm, reqsize);
- return PTR_ERR_OR_ZERO(ctx->enc_tfm);
+ return 0;
}
-static void cbc_exit(struct crypto_tfm *tfm)
+static void cbc_exit(struct crypto_skcipher *tfm)
{
- struct aesbs_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
- crypto_free_cipher(ctx->enc_tfm);
+ crypto_free_skcipher(ctx->enc_tfm);
}
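
A note on the allocation flags in cbc_init(): in crypto_alloc_skcipher(name, type, mask), an algorithm is eligible only if its flags match type on every bit set in mask. With type == 0, every bit set in mask must be clear in the candidate's cra_flags, so the call above requests a synchronous "cbc(aes)" that does not itself need a fallback. Since this driver's own cra_flags now include CRYPTO_ALG_NEED_FALLBACK (see the alg definition further down), the lookup can never resolve to the driver recursively. A simplified sketch of the rule:

	#include <linux/types.h>

	/* Simplified from the algorithm lookup in crypto/api.c. */
	static bool alg_eligible(u32 cra_flags, u32 type, u32 mask)
	{
		return ((cra_flags ^ type) & mask) == 0;
	}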
static int aesbs_ctr_setkey_sync(struct crypto_skcipher *tfm, const u8 *in_key,
@@ -228,32 +243,25 @@ static int ctr_encrypt(struct skcipher_request *req)
err = skcipher_walk_virt(&walk, req, false);
while (walk.nbytes > 0) {
- unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
- u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;
+ const u8 *src = walk.src.virt.addr;
+ u8 *dst = walk.dst.virt.addr;
+ int bytes = walk.nbytes;
- if (walk.nbytes < walk.total) {
- blocks = round_down(blocks,
- walk.stride / AES_BLOCK_SIZE);
- final = NULL;
- }
+ if (unlikely(bytes < AES_BLOCK_SIZE))
+ src = dst = memcpy(buf + sizeof(buf) - bytes,
+ src, bytes);
+ else if (walk.nbytes < walk.total)
+ bytes &= ~(8 * AES_BLOCK_SIZE - 1);
kernel_neon_begin();
- aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
- ctx->rk, ctx->rounds, blocks, walk.iv, final);
+ aesbs_ctr_encrypt(dst, src, ctx->rk, ctx->rounds, bytes, walk.iv);
kernel_neon_end();
- if (final) {
- u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
- u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
-
- crypto_xor_cpy(dst, src, final,
- walk.total % AES_BLOCK_SIZE);
+ if (unlikely(bytes < AES_BLOCK_SIZE))
+ memcpy(walk.dst.virt.addr,
+ buf + sizeof(buf) - bytes, bytes);
- err = skcipher_walk_done(&walk, 0);
- break;
- }
- err = skcipher_walk_done(&walk,
- walk.nbytes - blocks * AES_BLOCK_SIZE);
+ err = skcipher_walk_done(&walk, walk.nbytes - bytes);
}
return err;
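
The rewritten loop hands the NEON core a byte count instead of a block count plus tail pointer. While more data follows in the walk, the count is rounded down to a multiple of 8 blocks, the width the bit-sliced code processes at once; on the final step the core also consumes the partial block. A tail shorter than one block is bounced through the end of a 16-byte stack buffer, so that a full-block access ending at the final byte still stays inside the buffer. A compact model of the chunking policy (hypothetical helper, assuming the asm accepts arbitrary byte counts on the final call):

	#include <crypto/aes.h>

	static int ctr_chunk_len(int nbytes, bool more_to_come)
	{
		if (nbytes < AES_BLOCK_SIZE)
			return nbytes;	/* bounced via the stack buffer */
		if (more_to_come)	/* keep the walk 8-block aligned */
			return nbytes & ~(8 * AES_BLOCK_SIZE - 1);
		return nbytes;		/* final step: partial block included */
	}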
@@ -303,9 +311,9 @@ static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
return aesbs_setkey(tfm, in_key, key_len);
}
-static int xts_init(struct crypto_tfm *tfm)
+static int xts_init(struct crypto_skcipher *tfm)
{
- struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
ctx->cts_tfm = crypto_alloc_cipher("aes", 0, 0);
if (IS_ERR(ctx->cts_tfm))
@@ -318,9 +326,9 @@ static int xts_init(struct crypto_tfm *tfm)
return PTR_ERR_OR_ZERO(ctx->tweak_tfm);
}
-static void xts_exit(struct crypto_tfm *tfm)
+static void xts_exit(struct crypto_skcipher *tfm)
{
- struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
+ struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
crypto_free_cipher(ctx->tweak_tfm);
crypto_free_cipher(ctx->cts_tfm);
@@ -430,9 +438,8 @@ static struct skcipher_alg aes_algs[] = { {
.base.cra_blocksize = AES_BLOCK_SIZE,
.base.cra_ctxsize = sizeof(struct aesbs_cbc_ctx),
.base.cra_module = THIS_MODULE,
- .base.cra_flags = CRYPTO_ALG_INTERNAL,
- .base.cra_init = cbc_init,
- .base.cra_exit = cbc_exit,
+ .base.cra_flags = CRYPTO_ALG_INTERNAL |
+ CRYPTO_ALG_NEED_FALLBACK,
.min_keysize = AES_MIN_KEY_SIZE,
.max_keysize = AES_MAX_KEY_SIZE,
@@ -441,6 +448,8 @@ static struct skcipher_alg aes_algs[] = { {
.setkey = aesbs_cbc_setkey,
.encrypt = cbc_encrypt,
.decrypt = cbc_decrypt,
+ .init = cbc_init,
+ .exit = cbc_exit,
}, {
.base.cra_name = "__ctr(aes)",
.base.cra_driver_name = "__ctr-aes-neonbs",
@@ -482,8 +491,6 @@ static struct skcipher_alg aes_algs[] = { {
.base.cra_ctxsize = sizeof(struct aesbs_xts_ctx),
.base.cra_module = THIS_MODULE,
.base.cra_flags = CRYPTO_ALG_INTERNAL,
- .base.cra_init = xts_init,
- .base.cra_exit = xts_exit,
.min_keysize = 2 * AES_MIN_KEY_SIZE,
.max_keysize = 2 * AES_MAX_KEY_SIZE,
@@ -492,6 +499,8 @@ static struct skcipher_alg aes_algs[] = { {
.setkey = aesbs_xts_setkey,
.encrypt = xts_encrypt,
.decrypt = xts_decrypt,
+ .init = xts_init,
+ .exit = xts_exit,
} };
static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
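
The __-prefixed, CRYPTO_ALG_INTERNAL algorithms above are never used directly; the module's init path (unchanged by this diff, hence not shown) wraps each of them in a SIMD helper that defers to an asynchronous context when NEON is not usable. A sketch of how the wrapper is created in kernels of this vintage (based on the v5.x crypto/simd.c API; loop body abbreviated):

	/*
	 * Strip the "__" prefix to form the public names, then front the
	 * internal NEON-only implementation with the simd wrapper.
	 */
	const char *algname  = aes_algs[i].base.cra_name + 2;	      /* "cbc(aes)" */
	const char *drvname  = aes_algs[i].base.cra_driver_name + 2; /* "cbc-aes-neonbs" */
	const char *basename = aes_algs[i].base.cra_driver_name;     /* "__cbc-aes-neonbs" */
	struct simd_skcipher_alg *simd =
		simd_skcipher_create_compat(algname, drvname, basename);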