author    Sean Christopherson <sean.j.christopherson@intel.com>  2019-01-25 07:41:15 -0800
committer Paolo Bonzini <pbonzini@redhat.com>  2019-02-20 22:48:15 +0100
commit    a62fd5a76c99dd96c74c6638408961b7ff3c71c4 (patch)
tree      04a1631f17bbfd156d6f1332103eedfcac56b973 /arch/x86/kvm/vmx/vmenter.S
parent    KVM: VMX: Rename ____vmx_vcpu_run() to __vmx_vcpu_run() (diff)
KVM: VMX: Use RAX as the scratch register during vCPU-run
...to prepare for making the sub-routine callable from C code. That means returning the result in RAX. Since RAX will be used to return the result, use it as the scratch register as well to make the code readable and to document that the scratch register is more or less arbitrary.

Signed-off-by: Sean Christopherson <sean.j.christopherson@intel.com>
Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
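For context, a minimal sketch of the C-callable shape this change is preparing for. The prototype and the run_guest() wrapper below are illustrative assumptions, not part of this patch; the point is only that the x86 calling convention returns integer results in RAX/EAX, so having the asm routine produce its result in RAX lets a later patch call it straight from C.

	#include <linux/types.h>	/* bool */

	struct vcpu_vmx;		/* opaque here; defined in KVM's VMX headers */

	/* Hypothetical prototype, for illustration only; the real C-callable
	 * declaration is added by a later patch.  The return value comes back
	 * in RAX/EAX per the x86 calling convention. */
	bool __vmx_vcpu_run(struct vcpu_vmx *vmx, unsigned long *regs, bool launched);

	/* Hypothetical caller: because the asm routine already leaves its result
	 * in RAX, plain C code can consume it with no extra glue. */
	static bool run_guest(struct vcpu_vmx *vmx, unsigned long *regs, bool launched)
	{
		/* In this sketch, "true" would indicate VM-Fail and "false" a
		 * normal VM-Exit. */
		return __vmx_vcpu_run(vmx, regs, launched);
	}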
Diffstat (limited to 'arch/x86/kvm/vmx/vmenter.S')
-rw-r--r--  arch/x86/kvm/vmx/vmenter.S  76
1 file changed, 38 insertions(+), 38 deletions(-)
diff --git a/arch/x86/kvm/vmx/vmenter.S b/arch/x86/kvm/vmx/vmenter.S
index fa6e03b36348..7d8b09abcdec 100644
--- a/arch/x86/kvm/vmx/vmenter.S
+++ b/arch/x86/kvm/vmx/vmenter.S
@@ -103,31 +103,31 @@ ENTRY(__vmx_vcpu_run)
lea -WORD_SIZE(%_ASM_SP), %_ASM_ARG2
call vmx_update_host_rsp
- /* Load @regs to RCX. */
- mov (%_ASM_SP), %_ASM_CX
+ /* Load @regs to RAX. */
+ mov (%_ASM_SP), %_ASM_AX
/* Check if vmlaunch or vmresume is needed */
cmpb $0, %bl
/* Load guest registers. Don't clobber flags. */
- mov VCPU_RAX(%_ASM_CX), %_ASM_AX
- mov VCPU_RBX(%_ASM_CX), %_ASM_BX
- mov VCPU_RDX(%_ASM_CX), %_ASM_DX
- mov VCPU_RSI(%_ASM_CX), %_ASM_SI
- mov VCPU_RDI(%_ASM_CX), %_ASM_DI
- mov VCPU_RBP(%_ASM_CX), %_ASM_BP
+ mov VCPU_RBX(%_ASM_AX), %_ASM_BX
+ mov VCPU_RCX(%_ASM_AX), %_ASM_CX
+ mov VCPU_RDX(%_ASM_AX), %_ASM_DX
+ mov VCPU_RSI(%_ASM_AX), %_ASM_SI
+ mov VCPU_RDI(%_ASM_AX), %_ASM_DI
+ mov VCPU_RBP(%_ASM_AX), %_ASM_BP
#ifdef CONFIG_X86_64
- mov VCPU_R8 (%_ASM_CX), %r8
- mov VCPU_R9 (%_ASM_CX), %r9
- mov VCPU_R10(%_ASM_CX), %r10
- mov VCPU_R11(%_ASM_CX), %r11
- mov VCPU_R12(%_ASM_CX), %r12
- mov VCPU_R13(%_ASM_CX), %r13
- mov VCPU_R14(%_ASM_CX), %r14
- mov VCPU_R15(%_ASM_CX), %r15
+ mov VCPU_R8 (%_ASM_AX), %r8
+ mov VCPU_R9 (%_ASM_AX), %r9
+ mov VCPU_R10(%_ASM_AX), %r10
+ mov VCPU_R11(%_ASM_AX), %r11
+ mov VCPU_R12(%_ASM_AX), %r12
+ mov VCPU_R13(%_ASM_AX), %r13
+ mov VCPU_R14(%_ASM_AX), %r14
+ mov VCPU_R15(%_ASM_AX), %r15
#endif
- /* Load guest RCX. This kills the vmx_vcpu pointer! */
- mov VCPU_RCX(%_ASM_CX), %_ASM_CX
+ /* Load guest RAX. This kills the vmx_vcpu pointer! */
+ mov VCPU_RAX(%_ASM_AX), %_ASM_AX
/* Enter guest mode */
call vmx_vmenter
@@ -135,29 +135,29 @@ ENTRY(__vmx_vcpu_run)
/* Jump on VM-Fail. */
jbe 2f
- /* Temporarily save guest's RCX. */
- push %_ASM_CX
+ /* Temporarily save guest's RAX. */
+ push %_ASM_AX
- /* Reload @regs to RCX. */
- mov WORD_SIZE(%_ASM_SP), %_ASM_CX
+ /* Reload @regs to RAX. */
+ mov WORD_SIZE(%_ASM_SP), %_ASM_AX
- /* Save all guest registers, including RCX from the stack */
- mov %_ASM_AX, VCPU_RAX(%_ASM_CX)
- mov %_ASM_BX, VCPU_RBX(%_ASM_CX)
- __ASM_SIZE(pop) VCPU_RCX(%_ASM_CX)
- mov %_ASM_DX, VCPU_RDX(%_ASM_CX)
- mov %_ASM_SI, VCPU_RSI(%_ASM_CX)
- mov %_ASM_DI, VCPU_RDI(%_ASM_CX)
- mov %_ASM_BP, VCPU_RBP(%_ASM_CX)
+ /* Save all guest registers, including RAX from the stack */
+ __ASM_SIZE(pop) VCPU_RAX(%_ASM_AX)
+ mov %_ASM_BX, VCPU_RBX(%_ASM_AX)
+ mov %_ASM_CX, VCPU_RCX(%_ASM_AX)
+ mov %_ASM_DX, VCPU_RDX(%_ASM_AX)
+ mov %_ASM_SI, VCPU_RSI(%_ASM_AX)
+ mov %_ASM_DI, VCPU_RDI(%_ASM_AX)
+ mov %_ASM_BP, VCPU_RBP(%_ASM_AX)
#ifdef CONFIG_X86_64
- mov %r8, VCPU_R8 (%_ASM_CX)
- mov %r9, VCPU_R9 (%_ASM_CX)
- mov %r10, VCPU_R10(%_ASM_CX)
- mov %r11, VCPU_R11(%_ASM_CX)
- mov %r12, VCPU_R12(%_ASM_CX)
- mov %r13, VCPU_R13(%_ASM_CX)
- mov %r14, VCPU_R14(%_ASM_CX)
- mov %r15, VCPU_R15(%_ASM_CX)
+ mov %r8, VCPU_R8 (%_ASM_AX)
+ mov %r9, VCPU_R9 (%_ASM_AX)
+ mov %r10, VCPU_R10(%_ASM_AX)
+ mov %r11, VCPU_R11(%_ASM_AX)
+ mov %r12, VCPU_R12(%_ASM_AX)
+ mov %r13, VCPU_R13(%_ASM_AX)
+ mov %r14, VCPU_R14(%_ASM_AX)
+ mov %r15, VCPU_R15(%_ASM_AX)
#endif
/* Clear EBX to indicate VM-Exit (as opposed to VM-Fail). */