author     Ard Biesheuvel <ardb@kernel.org>          2021-01-16 17:48:09 +0100
committer  Herbert Xu <herbert@gondor.apana.org.au>  2021-01-22 14:58:04 +1100
commit     64a49b85953cafeaba2b4c2c13d089b3ed41cca6 (patch)
tree       551cd65b656fe0640c3ddcf921619dd67f2c5cd4
parent     crypto: keembay - use 64-bit arithmetic for computing bit_len (diff)
crypto: aesni - replace CTR function pointer with static call
Indirect calls are very expensive on x86, so use a static call to set the system-wide AES-NI/CTR asm helper.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r--  arch/x86/crypto/aesni-intel_glue.c  13
1 file changed, 7 insertions(+), 6 deletions(-)
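For readers unfamiliar with the mechanism, here is a minimal sketch of the static_call() pattern this patch adopts, assuming a kernel module build context. The my_xor key and the my_xor_generic/my_xor_fast helpers are hypothetical stand-ins; only DEFINE_STATIC_CALL(), static_call() and static_call_update() are the real kernel interfaces used in the diff that follows.

/*
 * Minimal sketch of the static_call() pattern, assuming a kernel module
 * build. The "my_xor" key and the my_xor_generic/my_xor_fast helpers are
 * hypothetical; only DEFINE_STATIC_CALL(), static_call() and
 * static_call_update() come from <linux/static_call.h>.
 */
#include <linux/init.h>
#include <linux/module.h>
#include <linux/static_call.h>
#include <linux/types.h>

static u32 my_xor_generic(u32 a, u32 b)
{
	return a ^ b;
}

static u32 my_xor_fast(u32 a, u32 b)
{
	return a ^ b;		/* stand-in for a CPU-feature-specific variant */
}

/* Declare the call key with its default target; no function pointer is kept. */
DEFINE_STATIC_CALL(my_xor, my_xor_generic);

static int __init my_xor_init(void)
{
	/* Retarget every call site once, e.g. after a CPU feature check. */
	static_call_update(my_xor, my_xor_fast);

	/*
	 * On architectures with static call support this compiles to a
	 * direct (patched) call; otherwise it falls back to an indirect call.
	 */
	pr_info("my_xor: %u\n", static_call(my_xor)(2, 3));
	return 0;
}
module_init(my_xor_init);

MODULE_LICENSE("GPL");

Compared with a writable function pointer, the call target here is fixed by patching the call site, so the hot path avoids retpoline/indirect-branch overhead on x86, which is the motivation stated in the commit message.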
diff --git a/arch/x86/crypto/aesni-intel_glue.c b/arch/x86/crypto/aesni-intel_glue.c
index a548fdbc3073..d96685457196 100644
--- a/arch/x86/crypto/aesni-intel_glue.c
+++ b/arch/x86/crypto/aesni-intel_glue.c
@@ -34,6 +34,7 @@
#include <linux/jump_label.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
+#include <linux/static_call.h>
#define AESNI_ALIGN 16
@@ -107,10 +108,9 @@ asmlinkage void aesni_xts_decrypt(const struct crypto_aes_ctx *ctx, u8 *out,
#ifdef CONFIG_X86_64
-static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
-				 const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
+DEFINE_STATIC_CALL(aesni_ctr_enc_tfm, aesni_ctr_enc);
/* Scatter / Gather routines, with args similar to above */
asmlinkage void aesni_gcm_init(void *ctx,
@@ -520,8 +520,10 @@ static int ctr_crypt(struct skcipher_request *req)
	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
-		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
-				  nbytes & AES_BLOCK_MASK, walk.iv);
+		static_call(aesni_ctr_enc_tfm)(ctx, walk.dst.virt.addr,
+					       walk.src.virt.addr,
+					       nbytes & AES_BLOCK_MASK,
+					       walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
@@ -1160,10 +1162,9 @@ static int __init aesni_init(void)
	} else {
		pr_info("SSE version of gcm_enc/dec engaged.\n");
	}
-	aesni_ctr_enc_tfm = aesni_ctr_enc;
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
-		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
+		static_call_update(aesni_ctr_enc_tfm, aesni_ctr_enc_avx_tfm);
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif