From 8691ccd764f9ecc69a6812dfe76214c86ac9ba06 Mon Sep 17 00:00:00 2001
From: Josh Poimboeuf
Date: Thu, 21 Jan 2016 16:49:19 -0600
Subject: x86/asm/crypto: Create stack frames in crypto functions

The crypto code has several callable non-leaf functions which don't
honor CONFIG_FRAME_POINTER, which can result in bad stack traces.

Create stack frames for them when CONFIG_FRAME_POINTER is enabled.

Signed-off-by: Josh Poimboeuf
Cc: Andrew Morton
Cc: Andy Lutomirski
Cc: Andy Lutomirski
Cc: Arnaldo Carvalho de Melo
Cc: Bernd Petrovitsch
Cc: Borislav Petkov
Cc: Brian Gerst
Cc: Chris J Arges
Cc: David S. Miller
Cc: Denys Vlasenko
Cc: H. Peter Anvin
Cc: Herbert Xu
Cc: Jiri Slaby
Cc: Linus Torvalds
Cc: Michal Marek
Cc: Namhyung Kim
Cc: Pedro Alves
Cc: Peter Zijlstra
Cc: Thomas Gleixner
Cc: live-patching@vger.kernel.org
Link: http://lkml.kernel.org/r/6c20192bcf1102ae18ae5a242cabf30ce9b29895.1453405861.git.jpoimboe@redhat.com
Signed-off-by: Ingo Molnar
---
 arch/x86/crypto/aesni-intel_asm.S | 73 ++++++++++++++++++++++++---------------
 1 file changed, 46 insertions(+), 27 deletions(-)

(limited to 'arch/x86/crypto/aesni-intel_asm.S')
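A minimal sketch of the frame-pointer macros this patch relies on, assuming
the usual CONFIG_FRAME_POINTER=y definitions from <asm/frame.h> (32-bit
flavour shown for illustration; the exact spelling in the header may differ,
and the macros expand to nothing when frame pointers are disabled):

	# FRAME_BEGIN (assumed 32-bit expansion)
	pushl	%ebp			# save the caller's frame pointer
	movl	%esp, %ebp		# %ebp now marks this function's frame

	# FRAME_END (assumed 32-bit expansion)
	popl	%ebp			# restore the caller's frame pointer before ret

FRAME_BEGIN pushes one extra word onto the stack, so on 32-bit every argument
sits FRAME_OFFSET bytes (the size of that saved frame pointer) further away
from %esp. That is why each 32-bit argument load below changes from
"movl N(%esp), REG" to "movl (FRAME_OFFSET+N)(%esp), REG", while the 64-bit
paths, which take their arguments in registers, only gain the
FRAME_BEGIN/FRAME_END pair.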
diff --git a/arch/x86/crypto/aesni-intel_asm.S b/arch/x86/crypto/aesni-intel_asm.S
index c44cfedbe1c3..383a6f84a060 100644
--- a/arch/x86/crypto/aesni-intel_asm.S
+++ b/arch/x86/crypto/aesni-intel_asm.S
@@ -31,6 +31,7 @@
 #include <linux/linkage.h>
 #include <asm/inst.h>
+#include <asm/frame.h>
 
 /*
  * The following macros are used to move an (un)aligned 16 byte value to/from
@@ -1800,11 +1801,12 @@ ENDPROC(_key_expansion_256b)
  *                   unsigned int key_len)
  */
 ENTRY(aesni_set_key)
+	FRAME_BEGIN
 #ifndef __x86_64__
 	pushl KEYP
-	movl 8(%esp), KEYP		# ctx
-	movl 12(%esp), UKEYP		# in_key
-	movl 16(%esp), %edx		# key_len
+	movl (FRAME_OFFSET+8)(%esp), KEYP	# ctx
+	movl (FRAME_OFFSET+12)(%esp), UKEYP	# in_key
+	movl (FRAME_OFFSET+16)(%esp), %edx	# key_len
 #endif
 	movups (UKEYP), %xmm0		# user key (first 16 bytes)
 	movaps %xmm0, (KEYP)
@@ -1905,6 +1907,7 @@ ENTRY(aesni_set_key)
 #ifndef __x86_64__
 	popl KEYP
 #endif
+	FRAME_END
 	ret
 ENDPROC(aesni_set_key)
 
@@ -1912,12 +1915,13 @@ ENDPROC(aesni_set_key)
  * void aesni_enc(struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src)
  */
 ENTRY(aesni_enc)
+	FRAME_BEGIN
 #ifndef __x86_64__
 	pushl KEYP
 	pushl KLEN
-	movl 12(%esp), KEYP
-	movl 16(%esp), OUTP
-	movl 20(%esp), INP
+	movl (FRAME_OFFSET+12)(%esp), KEYP	# ctx
+	movl (FRAME_OFFSET+16)(%esp), OUTP	# dst
+	movl (FRAME_OFFSET+20)(%esp), INP	# src
 #endif
 	movl 480(KEYP), KLEN		# key length
 	movups (INP), STATE		# input
@@ -1927,6 +1931,7 @@ ENTRY(aesni_enc)
 	popl KLEN
 	popl KEYP
 #endif
+	FRAME_END
 	ret
 ENDPROC(aesni_enc)
 
@@ -2101,12 +2106,13 @@ ENDPROC(_aesni_enc4)
  * void aesni_dec (struct crypto_aes_ctx *ctx, u8 *dst, const u8 *src)
  */
 ENTRY(aesni_dec)
+	FRAME_BEGIN
 #ifndef __x86_64__
 	pushl KEYP
 	pushl KLEN
-	movl 12(%esp), KEYP
-	movl 16(%esp), OUTP
-	movl 20(%esp), INP
+	movl (FRAME_OFFSET+12)(%esp), KEYP	# ctx
+	movl (FRAME_OFFSET+16)(%esp), OUTP	# dst
+	movl (FRAME_OFFSET+20)(%esp), INP	# src
 #endif
 	mov 480(KEYP), KLEN		# key length
 	add $240, KEYP
@@ -2117,6 +2123,7 @@ ENTRY(aesni_dec)
 	popl KLEN
 	popl KEYP
 #endif
+	FRAME_END
 	ret
 ENDPROC(aesni_dec)
 
@@ -2292,14 +2299,15 @@ ENDPROC(_aesni_dec4)
  *		      size_t len)
  */
 ENTRY(aesni_ecb_enc)
+	FRAME_BEGIN
 #ifndef __x86_64__
 	pushl LEN
 	pushl KEYP
 	pushl KLEN
-	movl 16(%esp), KEYP
-	movl 20(%esp), OUTP
-	movl 24(%esp), INP
-	movl 28(%esp), LEN
+	movl (FRAME_OFFSET+16)(%esp), KEYP	# ctx
+	movl (FRAME_OFFSET+20)(%esp), OUTP	# dst
+	movl (FRAME_OFFSET+24)(%esp), INP	# src
+	movl (FRAME_OFFSET+28)(%esp), LEN	# len
 #endif
 	test LEN, LEN		# check length
 	jz .Lecb_enc_ret
@@ -2342,6 +2350,7 @@ ENTRY(aesni_ecb_enc)
 	popl KEYP
 	popl LEN
 #endif
+	FRAME_END
 	ret
 ENDPROC(aesni_ecb_enc)
 
@@ -2350,14 +2359,15 @@ ENDPROC(aesni_ecb_enc)
  *		      size_t len);
  */
 ENTRY(aesni_ecb_dec)
+	FRAME_BEGIN
 #ifndef __x86_64__
 	pushl LEN
 	pushl KEYP
 	pushl KLEN
-	movl 16(%esp), KEYP
-	movl 20(%esp), OUTP
-	movl 24(%esp), INP
-	movl 28(%esp), LEN
+	movl (FRAME_OFFSET+16)(%esp), KEYP	# ctx
+	movl (FRAME_OFFSET+20)(%esp), OUTP	# dst
+	movl (FRAME_OFFSET+24)(%esp), INP	# src
+	movl (FRAME_OFFSET+28)(%esp), LEN	# len
 #endif
 	test LEN, LEN
 	jz .Lecb_dec_ret
@@ -2401,6 +2411,7 @@ ENTRY(aesni_ecb_dec)
 	popl KEYP
 	popl LEN
 #endif
+	FRAME_END
 	ret
 ENDPROC(aesni_ecb_dec)
 
@@ -2409,16 +2420,17 @@ ENDPROC(aesni_ecb_dec)
  *		      size_t len, u8 *iv)
  */
 ENTRY(aesni_cbc_enc)
+	FRAME_BEGIN
 #ifndef __x86_64__
 	pushl IVP
 	pushl LEN
 	pushl KEYP
 	pushl KLEN
-	movl 20(%esp), KEYP
-	movl 24(%esp), OUTP
-	movl 28(%esp), INP
-	movl 32(%esp), LEN
-	movl 36(%esp), IVP
+	movl (FRAME_OFFSET+20)(%esp), KEYP	# ctx
+	movl (FRAME_OFFSET+24)(%esp), OUTP	# dst
+	movl (FRAME_OFFSET+28)(%esp), INP	# src
+	movl (FRAME_OFFSET+32)(%esp), LEN	# len
+	movl (FRAME_OFFSET+36)(%esp), IVP	# iv
 #endif
 	cmp $16, LEN
 	jb .Lcbc_enc_ret
@@ -2443,6 +2455,7 @@ ENTRY(aesni_cbc_enc)
 	popl LEN
 	popl IVP
 #endif
+	FRAME_END
 	ret
 ENDPROC(aesni_cbc_enc)
 
@@ -2451,16 +2464,17 @@ ENDPROC(aesni_cbc_enc)
  *		      size_t len, u8 *iv)
  */
 ENTRY(aesni_cbc_dec)
+	FRAME_BEGIN
 #ifndef __x86_64__
 	pushl IVP
 	pushl LEN
 	pushl KEYP
 	pushl KLEN
-	movl 20(%esp), KEYP
-	movl 24(%esp), OUTP
-	movl 28(%esp), INP
-	movl 32(%esp), LEN
-	movl 36(%esp), IVP
+	movl (FRAME_OFFSET+20)(%esp), KEYP	# ctx
+	movl (FRAME_OFFSET+24)(%esp), OUTP	# dst
+	movl (FRAME_OFFSET+28)(%esp), INP	# src
+	movl (FRAME_OFFSET+32)(%esp), LEN	# len
+	movl (FRAME_OFFSET+36)(%esp), IVP	# iv
 #endif
 	cmp $16, LEN
 	jb .Lcbc_dec_just_ret
@@ -2534,6 +2548,7 @@ ENTRY(aesni_cbc_dec)
 	popl LEN
 	popl IVP
 #endif
+	FRAME_END
 	ret
 ENDPROC(aesni_cbc_dec)
 
@@ -2600,6 +2615,7 @@ ENDPROC(_aesni_inc)
 *		      size_t len, u8 *iv)
 */
 ENTRY(aesni_ctr_enc)
+	FRAME_BEGIN
 	cmp $16, LEN
 	jb .Lctr_enc_just_ret
 	mov 480(KEYP), KLEN
@@ -2653,6 +2669,7 @@ ENTRY(aesni_ctr_enc)
 .Lctr_enc_ret:
 	movups IV, (IVP)
 .Lctr_enc_just_ret:
+	FRAME_END
 	ret
 ENDPROC(aesni_ctr_enc)
 
@@ -2679,6 +2696,7 @@ ENDPROC(aesni_ctr_enc)
 *			 bool enc, u8 *iv)
 */
 ENTRY(aesni_xts_crypt8)
+	FRAME_BEGIN
 	cmpb $0, %cl
 	movl $0, %ecx
 	movl $240, %r10d
@@ -2779,6 +2797,7 @@ ENTRY(aesni_xts_crypt8)
 	pxor INC, STATE4
 	movdqu STATE4, 0x70(OUTP)
 
+	FRAME_END
 	ret
 ENDPROC(aesni_xts_crypt8)
-- 
cgit v1.2.3-59-g8ed1b