#include "assym.h"
#include <machine/param.h>
#include <machine/asm.h>
#include <machine/codepatch.h>
#include <machine/psl.h>
#include <machine/specialreg.h>
#define VMX_FAIL_LAUNCH_UNKNOWN 1
#define VMX_FAIL_LAUNCH_INVALID_VMCS 2
#define VMX_FAIL_LAUNCH_VALID_VMCS 3
.global vmxon
.global vmxoff
.global vmclear
.global vmptrld
.global vmptrst
.global vmwrite
.global vmread
.global invvpid
.global invept
.global vmx_enter_guest
.global vmm_dispatch_intr
.global svm_enter_guest
.global svm_seves_enter_guest
.text
.code64
.align 16,0xcc
/*
 * vmm_dispatch_intr(void (*handler)(void)):
 *
 * Invoke the kernel interrupt handler whose address is in %rdi.
 * Such handlers terminate with iretq, so an interrupt-style frame is
 * hand-built here: SS, the original RSP, RFLAGS and CS are pushed
 * below, and the return-RIP slot is supplied by the callq itself.
 * The handler's iretq then resumes at the instruction after the call
 * with the original stack pointer and flags restored.
 */
vmm_dispatch_intr:
movq %rsp, %r11				/* keep the original (possibly unaligned) RSP */
andq $0xFFFFFFFFFFFFFFF0, %rsp		/* 16-byte align the stack */
movw %ss, %ax
pushq %rax				/* frame: SS */
pushq %r11				/* frame: saved RSP (reloaded by the iretq) */
pushfq					/* frame: RFLAGS */
movw %cs, %ax
pushq %rax				/* frame: CS */
cli					/* enter the handler with interrupts off */
callq *%rdi				/* pushes RIP, completing the iretq frame */
movq $0,-8(%rsp)			/* scrub the stale word just below the restored RSP */
ret
lfence					/* speculation barrier behind the ret */
/*
 * int vmxon(uint64_t *pa):
 *
 * Enter VMX root operation using the VMXON region whose physical
 * address is stored at *pa.  Returns 0 on success, 1 on failure.
 */
ENTRY(vmxon)
RETGUARD_SETUP(vmxon, r11)
xorq %rax, %rax				/* preset return value: success */
vmxon (%rdi)
setna %al				/* VMfail sets CF or ZF -> return 1 */
RETGUARD_CHECK(vmxon, r11)
ret
lfence					/* speculation barrier behind the ret */
END(vmxon)
/*
 * int vmxoff(void):
 *
 * Leave VMX root operation on this CPU.
 * Returns 0 on success, 1 on failure.
 */
ENTRY(vmxoff)
RETGUARD_SETUP(vmxoff, r11)
xorq %rax, %rax				/* preset return value: success */
vmxoff
setna %al				/* VMfail sets CF or ZF -> return 1 */
RETGUARD_CHECK(vmxoff, r11)
ret
lfence					/* speculation barrier behind the ret */
END(vmxoff)
/*
 * int vmclear(uint64_t *pa):
 *
 * Clear (deactivate) the VMCS whose physical address is stored
 * at *pa.  Returns 0 on success, 1 on failure.
 */
ENTRY(vmclear)
RETGUARD_SETUP(vmclear, r11)
xorq %rax, %rax				/* preset return value: success */
vmclear (%rdi)
setna %al				/* VMfail sets CF or ZF -> return 1 */
RETGUARD_CHECK(vmclear, r11)
ret
lfence					/* speculation barrier behind the ret */
END(vmclear)
/*
 * int vmptrld(uint64_t *pa):
 *
 * Make the VMCS whose physical address is stored at *pa the
 * current VMCS.  Returns 0 on success, 1 on failure.
 */
ENTRY(vmptrld)
RETGUARD_SETUP(vmptrld, r11)
xorq %rax, %rax				/* preset return value: success */
vmptrld (%rdi)
setna %al				/* VMfail sets CF or ZF -> return 1 */
RETGUARD_CHECK(vmptrld, r11)
ret
lfence					/* speculation barrier behind the ret */
END(vmptrld)
/*
 * int vmptrst(uint64_t *pa):
 *
 * Store the physical address of the current VMCS at *pa.
 * Returns 0 on success, 1 on failure.
 */
ENTRY(vmptrst)
RETGUARD_SETUP(vmptrst, r11)
xorq %rax, %rax				/* preset return value: success */
vmptrst (%rdi)
setna %al				/* VMfail sets CF or ZF -> return 1 */
RETGUARD_CHECK(vmptrst, r11)
ret
lfence					/* speculation barrier behind the ret */
END(vmptrst)				/* was missing; pairs with ENTRY like every other function here */
/*
 * int vmwrite(uint64_t field, uint64_t value):
 *
 * Write value (%rsi) into the current VMCS field whose encoding
 * is in %rdi.  Returns 0 on success, 1 on failure.
 */
ENTRY(vmwrite)
RETGUARD_SETUP(vmwrite, r11)
xorq %rax, %rax				/* preset return value: success */
vmwrite %rsi, %rdi			/* AT&T order: value, field */
setna %al				/* VMfail sets CF or ZF -> return 1 */
RETGUARD_CHECK(vmwrite, r11)
ret
lfence					/* speculation barrier behind the ret */
END(vmwrite)
/*
 * int vmread(uint64_t field, uint64_t *value):
 *
 * Read the current VMCS field whose encoding is in %rdi and store
 * it at *value (%rsi).  Returns 0 on success, 1 on failure.
 */
ENTRY(vmread)
RETGUARD_SETUP(vmread, r11)
xorq %rax, %rax				/* preset return value: success */
vmread %rdi, (%rsi)			/* AT&T order: field, destination */
setna %al				/* VMfail sets CF or ZF -> return 1 */
RETGUARD_CHECK(vmread, r11)
ret
lfence					/* speculation barrier behind the ret */
END(vmread)
/*
 * int invvpid(uint64_t type, void *descriptor):
 *
 * Invalidate VPID-tagged TLB mappings: invalidation type in %rdi,
 * 128-bit descriptor at (%rsi).  Returns 0 on success, 1 on failure.
 */
ENTRY(invvpid)
RETGUARD_SETUP(invvpid, r11)
invvpid (%rsi), %rdi			/* AT&T order: descriptor, type */
jbe invvpid_fail			/* CF or ZF set -> VMfail */
xorq %rax, %rax				/* success */
jmp invvpid_ret
invvpid_fail:
movq $1, %rax				/* failure */
invvpid_ret:
RETGUARD_CHECK(invvpid, r11)
ret
lfence					/* speculation barrier behind the ret */
END(invvpid)
/*
 * int invept(uint64_t type, void *descriptor):
 *
 * Invalidate EPT-derived mappings: invalidation type in %rdi,
 * 128-bit descriptor at (%rsi).  Returns 0 on success, 1 on failure.
 */
ENTRY(invept)
RETGUARD_SETUP(invept, r11)
invept (%rsi), %rdi			/* AT&T order: descriptor, type */
jbe invept_fail				/* CF or ZF set -> VMfail */
xorq %rax, %rax				/* success */
jmp invept_ret
invept_fail:
movq $1, %rax				/* failure */
invept_ret:
RETGUARD_CHECK(invept, r11)
ret
lfence					/* speculation barrier behind the ret */
END(invept)
/*
 * int vmx_enter_guest(arg0, guest_state, resume, l1d_flush_mode):
 *
 * Enter the guest via vmlaunch/vmresume on the already-current VMCS.
 *   %rdi: first argument -- not read by this code (presumably the VMCS
 *         pointer; the VMCS must already be loaded -- confirm with caller)
 *   %rsi: guest register block; GPRs/CR2/DR0-DR3/DR6 at fixed offsets
 *         (0x00 rsi ... 0x70 r15, 0x78 cr2, 0xa0-0xc0 debug regs)
 *   %rdx: nonzero -> resume a launched guest (vmresume);
 *         zero -> first entry (vmlaunch, host RIP/RSP initialized here)
 *   %rcx: L1D flush mode: VMX_SKIP_L1D_FLUSH = none, other nonzero =
 *         IA32_FLUSH_CMD MSR, zero = software flush loop
 *
 * Returns 0 after a normal VM exit, or a VMX_FAIL_LAUNCH_* code if
 * vmlaunch/vmresume itself failed.
 */
ENTRY(vmx_enter_guest)
RETGUARD_SETUP(vmx_enter_guest, r11)
movq %rdx, %r8				/* %r8 = resume flag */
movq %rcx, %r9				/* %r9 = L1D flush mode */
testq %r8, %r8
jnz skip_init
/* First launch: point the VMCS host-resume RIP at our exit handler. */
movq $VMCS_HOST_IA32_RIP, %rdi
movq $vmx_exit_handler_asm, %rax
vmwrite %rax, %rdi
skip_init:
RETGUARD_PUSH(r11)
/* Save callee-saved regs; guest-state pointer ends up on stack top. */
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbp
pushq %rbx
pushq %rsi
/*
 * Codepatch region tagged CPTAG_MDS_VMM -- presumably the MDS
 * mitigation, patched out when not needed: zero the GPRs and issue
 * verw on a valid selector to flush CPU-internal buffers.
 */
CODEPATCH_START
xorl %eax,%eax
xorl %ebx,%ebx
xorl %ecx,%ecx
xorl %edx,%edx
xorl %esi,%esi
xorl %edi,%edi
xorl %ebp,%ebp
xorl %r10d,%r10d
xorl %r11d,%r11d
xorl %r12d,%r12d
xorl %r13d,%r13d
xorl %r14d,%r14d
xorl %r15d,%r15d
subq $8, %rsp
movw %ds, (%rsp)			/* a known-valid selector for verw */
verw (%rsp)
addq $8, %rsp
CODEPATCH_END(CPTAG_MDS_VMM)
movq (%rsp),%rsi			/* reload guest-state ptr (%rsi zeroed above) */
/* Record the current host stack pointer for the VM exit. */
movq $VMCS_HOST_IA32_RSP, %rdi
movq %rsp, %rax
vmwrite %rax, %rdi
/* L1D flush before guest entry (L1TF-style mitigation). */
cmpq $VMX_SKIP_L1D_FLUSH, %r9
je done_flush
testq %r9, %r9
jz no_l1df_msr
/* CPU has the flush MSR: a single wrmsr flushes the L1D. */
movq $MSR_FLUSH_CMD, %rcx
xorq %rdx, %rdx
movq $FLUSH_CMD_L1D_FLUSH, %rax
wrmsr
jmp done_flush
no_l1df_msr:
/* Software flush, pass 1: touch one byte per page of the flush
 * region so its translations are present before the cache pass. */
xorq %r9, %r9
l1df_tlb_loop:
cmpq $VMX_L1D_FLUSH_SIZE, %r9
je l1df_tlb_done
movb l1tf_flush_region(%r9), %al
addq $PAGE_SIZE, %r9
jmp l1df_tlb_loop
l1df_tlb_done:
xorq %rax, %rax				/* cpuid leaf 0: serialize before pass 2 */
cpuid
xorq %r9, %r9
l1df_load_cache:
/* Pass 2: read every 64-byte line to displace old L1D contents. */
movb l1tf_flush_region(%r9), %al
addq $0x40, %r9
cmpq $VMX_L1D_FLUSH_SIZE, %r9
jne l1df_load_cache
lfence
done_flush:
testq %r8, %r8
jnz do_resume
/*
 * Launch path: load guest DR0-DR3, DR6, CR2 and all GPRs from the
 * guest register block.  %rsi is loaded last since it is the base.
 */
movq 0xa0(%rsi), %rax
movq %rax, %dr0
movq 0xa8(%rsi), %rax
movq %rax, %dr1
movq 0xb0(%rsi), %rax
movq %rax, %dr2
movq 0xb8(%rsi), %rax
movq %rax, %dr3
movq 0xc0(%rsi), %rax
movq %rax, %dr6
movq 0x78(%rsi), %rax
movq %rax, %cr2
movq 0x70(%rsi), %r15
movq 0x68(%rsi), %r14
movq 0x60(%rsi), %r13
movq 0x58(%rsi), %r12
movq 0x50(%rsi), %r11
movq 0x48(%rsi), %r10
movq 0x40(%rsi), %r9
movq %rsi, %r8
addq $0x40, %r8
clflush (%r8)				/* flush the register block's cache lines --
					 * reason not visible here; presumably an
					 * errata/side-channel workaround, confirm */
movq 0x38(%rsi), %r8
movq 0x30(%rsi), %rbp
movq 0x28(%rsi), %rdi
movq 0x20(%rsi), %rdx
movq 0x18(%rsi), %rcx
movq 0x10(%rsi), %rbx
movq 0x08(%rsi), %rax
clflush (%rsi)
movq 0x00(%rsi), %rsi
vmlaunch
jmp fail_launch_or_resume		/* reached only if vmlaunch failed */
do_resume:
/* Resume path: identical guest-state reload, then vmresume. */
movq 0xa0(%rsi), %rax
movq %rax, %dr0
movq 0xa8(%rsi), %rax
movq %rax, %dr1
movq 0xb0(%rsi), %rax
movq %rax, %dr2
movq 0xb8(%rsi), %rax
movq %rax, %dr3
movq 0xc0(%rsi), %rax
movq %rax, %dr6
movq 0x78(%rsi), %rax
movq %rax, %cr2
movq 0x70(%rsi), %r15
movq 0x68(%rsi), %r14
movq 0x60(%rsi), %r13
movq 0x58(%rsi), %r12
movq 0x50(%rsi), %r11
movq 0x48(%rsi), %r10
movq 0x40(%rsi), %r9
movq %rsi, %r8
addq $0x40, %r8
clflush (%r8)				/* see launch-path clflush note */
movq 0x38(%rsi), %r8
movq 0x30(%rsi), %rbp
movq 0x28(%rsi), %rdi
movq 0x20(%rsi), %rdx
movq 0x18(%rsi), %rcx
movq 0x10(%rsi), %rbx
movq 0x08(%rsi), %rax
clflush (%rsi)
movq 0x00(%rsi), %rsi
vmresume
fail_launch_or_resume:
/* vmlaunch/vmresume fell through: decode the failure from RFLAGS. */
RET_STACK_REFILL_WITH_RCX
jc fail_launch_invalid_vmcs		/* CF set: VMfailInvalid */
jz fail_launch_valid_vmcs		/* ZF set: VMfailValid, error in VMCS */
fail_launch_unknown:
movq $VMX_FAIL_LAUNCH_UNKNOWN, %rdi
popq %rsi				/* drop saved guest-state ptr */
jmp restore_host
fail_launch_invalid_vmcs:
movq $VMX_FAIL_LAUNCH_INVALID_VMCS, %rdi
popq %rsi				/* drop saved guest-state ptr */
jmp restore_host
fail_launch_valid_vmcs:
movq $VMCS_INSTRUCTION_ERROR, %rdi
popq %rsi				/* %rsi = guest-state ptr */
vmread %rdi, %rax
movl %eax, 0x80(%rsi)			/* record VM-instruction error code */
movq $VMX_FAIL_LAUNCH_VALID_VMCS, %rdi
jmp restore_host
/*
 * VM-exit entry point (installed as VMCS host RIP above).  The host
 * RSP is restored by hardware, so the stack top holds the saved
 * guest-state pointer; every register still holds a guest value and
 * must be saved before being clobbered.
 */
vmx_exit_handler_asm:
pushq %rsi				/* stash guest %rsi */
pushq %rdi				/* stash guest %rdi */
movq 0x10(%rsp), %rdi			/* %rdi = guest-state ptr */
movq 0x8(%rsp), %rsi			/* %rsi = guest %rsi */
movq %rsi, (%rdi)			/* save guest %rsi at offset 0 */
popq %rdi				/* restore guest %rdi */
popq %rsi				/* reload guest %rsi (dead: overwritten next) */
popq %rsi				/* %rsi = guest-state ptr */
movq %rax, 0x8(%rsi)			/* save remaining guest GPRs */
movq %rbx, 0x10(%rsi)
movq %rcx, 0x18(%rsi)
movq %rdx, 0x20(%rsi)
movq %rdi, 0x28(%rsi)
movq %rbp, 0x30(%rsi)
movq %r8, 0x38(%rsi)
movq %r9, 0x40(%rsi)
movq %r10, 0x48(%rsi)
movq %r11, 0x50(%rsi)
movq %r12, 0x58(%rsi)
movq %r13, 0x60(%rsi)
movq %r14, 0x68(%rsi)
movq %r15, 0x70(%rsi)
movq %cr2, %rax				/* save guest CR2 and debug regs */
movq %rax, 0x78(%rsi)
movq %dr0, %rax
movq %rax, 0xa0(%rsi)
movq %dr1, %rax
movq %rax, 0xa8(%rsi)
movq %dr2, %rax
movq %rax, 0xb0(%rsi)
movq %dr3, %rax
movq %rax, 0xb8(%rsi)
movq %dr6, %rax
movq %rax, 0xc0(%rsi)
xorq %rdi, %rdi				/* return value 0: normal exit */
RET_STACK_REFILL_WITH_RCX		/* refill RSB after running the guest */
restore_host:
/* Restore callee-saved registers (reverse of the entry pushes). */
popq %rbx
popq %rbp
popq %r12
popq %r13
popq %r14
popq %r15
RETGUARD_POP(r11)
movq %rdi, %rax				/* return code selected above */
RETGUARD_CHECK(vmx_enter_guest, r11)
ret
lfence					/* speculation barrier behind the ret */
END(vmx_enter_guest)
/*
 * int svm_enter_guest(vmcb_pa, guest_state, gdt_desc):
 *
 *   %rdi: VMCB physical address (vmload/vmrun/vmsave take it in %rax)
 *   %rsi: guest register block; GPRs/CR2/DR0-DR3 at fixed offsets
 *         (guest %rax is kept in the VMCB, not in this block)
 *   %rdx: pointer to a GDT descriptor (16-bit limit + 64-bit base),
 *         used on exit to clear the TSS busy bit before ltr
 *
 * Saves host segment state and the MSRs a guest can modify, runs the
 * guest until #VMEXIT, saves guest registers back into the block and
 * restores host state.  Returns 0; the exit reason is read from the
 * VMCB by the caller.
 */
ENTRY(svm_enter_guest)
RETGUARD_SETUP(svm_enter_guest, r11)
clgi					/* GIF=0: block all interrupts -- presumably
					 * the caller re-enables via stgi; confirm */
movq %rdi, %r8				/* %r8 = VMCB PA */
pushfq					/* save host RFLAGS */
pushq %rdx				/* save GDT descriptor pointer */
strw %ax
pushw %ax				/* save task register selector */
movw %es, %ax
pushw %ax				/* save %es */
movw %ds, %ax
pushw %ax				/* save %ds */
movw %ss, %ax
pushw %ax				/* save %ss */
/* Save host MSRs the guest can change (rdmsr returns EDX:EAX). */
movq $MSR_FSBASE, %rcx
rdmsr
pushq %rax
pushq %rdx
pushw %fs				/* save %fs selector */
movq $MSR_GSBASE, %rcx
rdmsr
pushq %rax
pushq %rdx
pushw %gs				/* save %gs selector */
movq $MSR_KERNELGSBASE, %rcx
rdmsr
pushq %rax
pushq %rdx
movq $MSR_STAR, %rcx
rdmsr
pushq %rax
pushq %rdx
movq $MSR_LSTAR, %rcx
rdmsr
pushq %rax
pushq %rdx
movq $MSR_SFMASK, %rcx
rdmsr
pushq %rax
pushq %rdx
RETGUARD_PUSH(r11)
/* Save callee-saved regs; guest-state pointer ends up on stack top. */
pushq %r15
pushq %r14
pushq %r13
pushq %r12
pushq %rbp
pushq %rbx
pushq %rsi
movq %r8, %rax				/* %rax = VMCB PA for vmload/vmrun/vmsave */
/* Load guest DR0-DR3, CR2 and GPRs; %rsi last since it is the base. */
movq 0xa0(%rsi), %r8
movq %r8, %dr0
movq 0xa8(%rsi), %r8
movq %r8, %dr1
movq 0xb0(%rsi), %r8
movq %r8, %dr2
movq 0xb8(%rsi), %r8
movq %r8, %dr3
movq 0x78(%rsi), %r8
movq %r8, %cr2
movq 0x70(%rsi), %r15
movq 0x68(%rsi), %r14
movq 0x60(%rsi), %r13
movq 0x58(%rsi), %r12
movq 0x50(%rsi), %r11
movq 0x48(%rsi), %r10
movq 0x40(%rsi), %r9
movq 0x38(%rsi), %r8
movq 0x30(%rsi), %rbp
movq 0x28(%rsi), %rdi
movq 0x20(%rsi), %rdx
movq 0x18(%rsi), %rcx
movq 0x10(%rsi), %rbx
movq 0x00(%rsi), %rsi
vmload %rax				/* load additional guest state from VMCB */
vmrun %rax				/* run the guest until #VMEXIT */
vmsave %rax				/* stash additional guest state into VMCB */
/* #VMEXIT: registers hold guest values; save them before clobbering. */
pushq %rsi				/* stash guest %rsi */
pushq %rdi				/* stash guest %rdi */
movq 0x10(%rsp), %rdi			/* %rdi = guest-state ptr */
movq 0x8(%rsp), %rsi			/* %rsi = guest %rsi */
movq %rsi, (%rdi)			/* save guest %rsi at offset 0 */
popq %rdi				/* restore guest %rdi */
popq %rsi				/* reload guest %rsi (dead: overwritten next) */
popq %rsi				/* %rsi = guest-state ptr */
movq %rbx, 0x10(%rsi)			/* save guest GPRs (%rax is in the VMCB) */
movq %rcx, 0x18(%rsi)
movq %rdx, 0x20(%rsi)
movq %rdi, 0x28(%rsi)
movq %rbp, 0x30(%rsi)
movq %r8, 0x38(%rsi)
movq %r9, 0x40(%rsi)
movq %r10, 0x48(%rsi)
movq %r11, 0x50(%rsi)
movq %r12, 0x58(%rsi)
movq %r13, 0x60(%rsi)
movq %r14, 0x68(%rsi)
movq %r15, 0x70(%rsi)
movq %cr2, %rax				/* save guest CR2 and debug regs */
movq %rax, 0x78(%rsi)
movq %dr0, %rax
movq %rax, 0xa0(%rsi)
movq %dr1, %rax
movq %rax, 0xa8(%rsi)
movq %dr2, %rax
movq %rax, 0xb0(%rsi)
movq %dr3, %rax
movq %rax, 0xb8(%rsi)
xorq %rdi, %rdi				/* return value 0 */
restore_host_svm:
/* Restore callee-saved registers (reverse of the entry pushes). */
popq %rbx
popq %rbp
popq %r12
popq %r13
popq %r14
popq %r15
RETGUARD_POP(r11)
/* Restore saved host MSRs (wrmsr takes EDX:EAX). */
popq %rdx
popq %rax
movq $MSR_SFMASK, %rcx
wrmsr
/* CSTAR was not saved; just clear whatever the guest may have set --
 * presumably unused by the host kernel; confirm. */
xorl %edx, %edx
xorl %eax, %eax
movq $MSR_CSTAR, %rcx
wrmsr
popq %rdx
popq %rax
movq $MSR_LSTAR, %rcx
wrmsr
popq %rdx
popq %rax
movq $MSR_STAR, %rcx
wrmsr
cli					/* interrupts stay off while GS state is
					 * inconsistent below -- NOTE(review): confirm */
popq %rdx
popq %rax
movq $MSR_KERNELGSBASE, %rcx
wrmsr
popw %gs				/* selector reload clobbers GSBASE... */
popq %rdx
popq %rax
movq $MSR_GSBASE, %rcx
wrmsr					/* ...so GSBASE is rewritten afterwards */
popw %fs				/* same ordering for %fs / FSBASE */
popq %rdx
popq %rax
movq $MSR_FSBASE, %rcx
wrmsr
popw %ax				/* restore segment selectors (reverse order) */
movw %ax, %ss
popw %ax
movw %ax, %ds
popw %ax
movw %ax, %es
xorq %rax, %rax
lldtw %ax				/* null LDT */
popw %ax				/* %ax = saved TR selector */
popq %rdx				/* %rdx = GDT descriptor pointer */
addq $0x2, %rdx				/* skip the 16-bit limit field */
movq (%rdx), %rdx			/* %rdx = GDT base */
andb $0xF9, 5(%rdx, %rax)		/* clear TSS busy bit so ltr won't fault */
ltrw %ax				/* reload task register */
popfq					/* restore host RFLAGS (incl. IF) */
movq %rdi, %rax				/* return 0 */
RETGUARD_CHECK(svm_enter_guest, r11)
ret
lfence					/* speculation barrier behind the ret */
END(svm_enter_guest)
/*
 * int svm_seves_enter_guest(vmcb_pa, save_area, gdt_desc):
 *
 * SEV-ES variant of svm_enter_guest.  Guest register state lives in
 * the encrypted VMSA, so no guest GPRs are loaded before vmrun or
 * saved after it, and no vmload/vmsave is issued here.
 *
 *   %rdi: VMCB physical address
 *   %rsi: save area written below at offsets matching the VMSA GPR
 *         layout (0x318 rbx ... 0x378 r15) -- presumably the host
 *         save area the CPU restores host registers from on #VMEXIT,
 *         since the callee-saved registers are never reloaded here;
 *         TODO confirm against the SEV-ES documentation
 *   %rdx: pointer to a GDT descriptor, used to clear the TSS busy
 *         bit before ltr on the way out
 *
 * Returns 0; the exit reason is read from the VMCB by the caller.
 */
ENTRY(svm_seves_enter_guest)
RETGUARD_SETUP(svm_seves_enter_guest, r11)
clgi					/* GIF=0: block all interrupts -- presumably
					 * the caller re-enables via stgi; confirm */
movq %rdi, %r8				/* %r8 = VMCB PA */
pushfq					/* save host RFLAGS */
pushq %rdx				/* save GDT descriptor pointer */
strw %ax
pushw %ax				/* save task register selector */
movw %es, %ax
pushw %ax				/* save %es */
movw %ds, %ax
pushw %ax				/* save %ds */
movw %ss, %ax
pushw %ax				/* save %ss */
/* Save host MSRs the guest can change (rdmsr returns EDX:EAX). */
movq $MSR_FSBASE, %rcx
rdmsr
pushq %rax
pushq %rdx
pushw %fs				/* save %fs selector */
movq $MSR_GSBASE, %rcx
rdmsr
pushq %rax
pushq %rdx
pushw %gs				/* save %gs selector */
movq $MSR_KERNELGSBASE, %rcx
rdmsr
pushq %rax
pushq %rdx
movq $MSR_STAR, %rcx
rdmsr
pushq %rax
pushq %rdx
movq $MSR_LSTAR, %rcx
rdmsr
pushq %rax
pushq %rdx
movq $MSR_SFMASK, %rcx
rdmsr
pushq %rax
pushq %rdx
RETGUARD_PUSH(r11)
/* Write callee-saved GPRs into the save area (VMSA-layout offsets).
 * They are not popped after vmrun; see the header note. */
movq %r15, 0x378(%rsi)
movq %r14, 0x370(%rsi)
movq %r13, 0x368(%rsi)
movq %r12, 0x360(%rsi)
movq %rbp, 0x328(%rsi)
movq %rbx, 0x318(%rsi)
movq %r8, %rax				/* %rax = VMCB PA for vmrun */
vmrun %rax				/* run the guest until #VMEXIT */
xorq %rdi, %rdi				/* return value 0 */
RETGUARD_POP(r11)
/* Restore saved host MSRs (wrmsr takes EDX:EAX). */
popq %rdx
popq %rax
movq $MSR_SFMASK, %rcx
wrmsr
/* CSTAR was not saved; just clear whatever the guest may have set --
 * presumably unused by the host kernel; confirm. */
xorl %edx, %edx
xorl %eax, %eax
movq $MSR_CSTAR, %rcx
wrmsr
popq %rdx
popq %rax
movq $MSR_LSTAR, %rcx
wrmsr
popq %rdx
popq %rax
movq $MSR_STAR, %rcx
wrmsr
cli					/* interrupts stay off while GS state is
					 * inconsistent below -- NOTE(review): confirm */
popq %rdx
popq %rax
movq $MSR_KERNELGSBASE, %rcx
wrmsr
popw %gs				/* selector reload clobbers GSBASE... */
popq %rdx
popq %rax
movq $MSR_GSBASE, %rcx
wrmsr					/* ...so GSBASE is rewritten afterwards */
popw %fs				/* same ordering for %fs / FSBASE */
popq %rdx
popq %rax
movq $MSR_FSBASE, %rcx
wrmsr
popw %ax				/* restore segment selectors (reverse order) */
movw %ax, %ss
popw %ax
movw %ax, %ds
popw %ax
movw %ax, %es
xorq %rax, %rax
lldtw %ax				/* null LDT */
popw %ax				/* %ax = saved TR selector */
popq %rdx				/* %rdx = GDT descriptor pointer */
addq $0x2, %rdx				/* skip the 16-bit limit field */
movq (%rdx), %rdx			/* %rdx = GDT base */
andb $0xF9, 5(%rdx, %rax)		/* clear TSS busy bit so ltr won't fault */
ltrw %ax				/* reload task register */
popfq					/* restore host RFLAGS (incl. IF) */
movq %rdi, %rax				/* return 0 */
RETGUARD_CHECK(svm_seves_enter_guest, r11)
ret
lfence					/* speculation barrier behind the ret */
END(svm_seves_enter_guest)