diff options
author | David Woodhouse <dwmw@amazon.co.uk> | 2025-01-09 14:04:15 +0000 | |
---|---|---|
committer | Borislav Petkov (AMD) <bp@alien8.de> | 2025-01-14 12:54:36 +0100 | |
commit | 85d724df8c82c060dcdeb8d0de0bd986e6c37b72 (patch) | |
tree | 803538f291b8b92a60936474ff2b082764d98bc6 | |
parent | x86/kexec: Ensure preserve_context flag is set on return to kernel (diff) | |
download | wireguard-linux-85d724df8c82c060dcdeb8d0de0bd986e6c37b72.tar.xz wireguard-linux-85d724df8c82c060dcdeb8d0de0bd986e6c37b72.zip |
x86/kexec: Use correct swap page in swap_pages function
The swap_pages function expects the swap page to be in %r10, but there
was no documentation to that effect. Once upon a time the setup code
used to load its value from a kernel virtual address and save it to an
address which is accessible in the identity-mapped page tables, and
*happened* to use %r10 to do so, with no comment that it was left there
on *purpose* instead of just being a scratch register. Once that was no
longer necessary, %r10 just holds whatever the kernel happened to leave
in it.
Now that the original value passed by the kernel is accessible via
%rip-relative addressing, load directly from there instead of using %r10
for it. But document the other parameters that the swap_pages function
*does* expect in registers.
Fixes: b3adabae8a96 ("x86/kexec: Drop page_list argument from relocate_kernel()")
Signed-off-by: David Woodhouse <dwmw@amazon.co.uk>
Signed-off-by: Borislav Petkov (AMD) <bp@alien8.de>
Link: https://lore.kernel.org/r/20250109140757.2841269-4-dwmw2@infradead.org
-rw-r--r-- | arch/x86/kernel/relocate_kernel_64.S | 8 |
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/arch/x86/kernel/relocate_kernel_64.S b/arch/x86/kernel/relocate_kernel_64.S
index 6fce4b459652..3ca3bf6b3f49 100644
--- a/arch/x86/kernel/relocate_kernel_64.S
+++ b/arch/x86/kernel/relocate_kernel_64.S
@@ -264,6 +264,10 @@ SYM_CODE_END(virtual_mapped)
 	/* Do the copies */
 SYM_CODE_START_LOCAL_NOALIGN(swap_pages)
 	UNWIND_HINT_END_OF_STACK
+	/*
+	 * %rdi indirection page
+	 * %r11 preserve_context
+	 */
 	movq	%rdi, %rcx	/* Put the indirection_page in %rcx */
 	xorl	%edi, %edi
 	xorl	%esi, %esi
@@ -302,7 +306,7 @@ SYM_CODE_START_LOCAL_NOALIGN(swap_pages)
 	jz	.Lnoswap

 	/* copy source page to swap page */
-	movq	%r10, %rdi
+	movq	kexec_pa_swap_page(%rip), %rdi
 	movl	$512, %ecx
 	rep ; movsq

@@ -314,7 +318,7 @@ SYM_CODE_START_LOCAL_NOALIGN(swap_pages)

 	/* copy swap page to destination page */
 	movq	%rdx, %rdi
-	movq	%r10, %rsi
+	movq	kexec_pa_swap_page(%rip), %rsi
 .Lnoswap:
 	movl	$512, %ecx
 	rep ; movsq