#include "assym.h"
#include "efi.h"
#include "lapic.h"
#include "ksyms.h"
#include "xen.h"
#include "hyperv.h"
#include <sys/syscall.h>
#include <machine/param.h>
#include <machine/codepatch.h>
#include <machine/psl.h>
#include <machine/segments.h>
#include <machine/specialreg.h>
#include <machine/trap.h>
#include <machine/frameasm.h>
#if NLAPIC > 0
#include <machine/i82489reg.h>
#endif
/* int3-style fill for data alignment; must be defined before <machine/asm.h> */
#define ALIGN_DATA .align 8,0xcc
#include <machine/asm.h>
/*
 * SET_CURPROC(proc,cpu): install 'proc' as this CPU's current process.
 * Loads curcpu's self pointer into 'cpu' (clobbering it), stores 'proc'
 * in ci_curproc, and points proc->p_cpu back at this CPU.
 */
#define SET_CURPROC(proc,cpu) \
movq CPUVAR(SELF),cpu ; \
movq proc,CPUVAR(CURPROC) ; \
movq cpu,P_CPU(proc)
/* read/write curcpu's cached pointer to the current process's PCB */
#define GET_CURPCB(reg) movq CPUVAR(CURPCB),reg
#define SET_CURPCB(reg) movq reg,CPUVAR(CURPCB)
.data
#if NLAPIC > 0
/*
 * Page-aligned, page-sized reservation for the local APIC register
 * window.  NOTE(review): only the space is defined here; presumably
 * the APIC MMIO range is mapped over it elsewhere -- confirm against
 * the lapic attach code.
 */
.align NBPG, 0xcc
.globl local_apic
local_apic:
.space NBPG
#endif
.section .rodata
/*
 * Signal trampoline, copied out to userland.  Control enters at
 * 'sigcode': the call pushes the address of the sigreturn stub as the
 * handler's return address, then the retpoline at 1: jumps to the
 * handler (NOTE(review): presumably sendsig places the handler address
 * in %rax -- confirm).  When the handler returns, the stub invokes
 * SYS_sigreturn to restore the interrupted context.
 */
.globl sigcode
sigcode:
endbr64
call 1f /* push return address: the sigreturn stub below */
movq %rsp,%rdi /* handler returned: %rdi = sigcontext on the stack */
pushq %rdi
movq $SYS_sigreturn,%rax
.globl sigcodecall
sigcodecall: /* exported so the kernel can locate the syscall insn */
syscall
.globl sigcoderet
sigcoderet: /* sigreturn must not return; trap if it does */
int3
1: CODEPATCH_START
JMP_RETPOLINE(rax) /* jump to the signal handler */
CODEPATCH_END(CPTAG_RETPOLINE_RAX)
.globl esigcode
esigcode:
/* fill pattern (all int3) used for the rest of the signal page */
.globl sigfill
sigfill:
int3
esigfill:
.globl sigfillsiz
sigfillsiz:
.long esigfill - sigfill /* size of the fill pattern */
.text
/*
 * lgdt(struct region_descriptor *rdp);
 * Load a new GDT and reload every segment register so it takes effect.
 * %cs cannot be written directly, so it is reloaded by building a far
 * return frame (new CS selector + original return address) and lretq.
 */
NENTRY(lgdt)
RETGUARD_SETUP(lgdt, r11)
movq %rdi,%rax
lgdt (%rax) /* install the new GDT */
jmp 1f /* flush the instruction prefetch queue */
nop
1:
movl $GSEL(GDATA_SEL, SEL_KPL),%eax /* kernel data selector */
movl %eax,%ds
movl %eax,%es
movl %eax,%ss
popq %rax /* grab the caller's return address */
pushq $GSEL(GCODE_SEL, SEL_KPL) /* far-return frame: new %cs ... */
pushq %rax /* ... and the original return address */
RETGUARD_CHECK(lgdt, r11)
lretq /* reloads %cs on the way back to the caller */
END(lgdt)
#if defined(DDB) || NEFI > 0
/*
 * setjmp(label_t *l);
 * Save the callee-saved registers, stack pointer, frame pointer and
 * return address into *%rdi.  Returns 0 on the direct path; a later
 * longjmp resumes at this call site returning 1.
 */
ENTRY(setjmp)
RETGUARD_SETUP(setjmp, r11)
movq %rdi,%rax
movq %rbx,(%rax)
movq %rsp,8(%rax)
movq %rbp,16(%rax)
movq %r12,24(%rax)
movq %r13,32(%rax)
movq %r14,40(%rax)
movq %r15,48(%rax)
movq (%rsp),%rdx /* return address */
movq %rdx,56(%rax)
xorl %eax,%eax /* direct path returns 0 */
RETGUARD_CHECK(setjmp, r11)
ret
lfence /* speculation barrier after ret */
END(setjmp)
/*
 * longjmp(label_t *l);
 * Restore the context saved by setjmp and return 1 from that setjmp.
 */
ENTRY(longjmp)
movq %rdi,%rax
movq 8(%rax),%rsp /* switch to the saved stack first */
movq 56(%rax),%rdx
movq %rdx,(%rsp) /* replant the saved return address */
RETGUARD_SETUP(longjmp, r11)
movq (%rax),%rbx
movq 16(%rax),%rbp
movq 24(%rax),%r12
movq 32(%rax),%r13
movq 40(%rax),%r14
movq 48(%rax),%r15
xorl %eax,%eax
incl %eax /* return 1 to setjmp's caller */
RETGUARD_CHECK(longjmp, r11)
ret
lfence
END(longjmp)
#endif
/*
 * cpu_switchto(struct proc *old, struct proc *new);
 * Switch context from 'old' (NULL if it exited) to 'new': save old's
 * kernel stack and FPU state, switch address space if needed, and
 * resume on new's saved kernel stack.
 */
ENTRY(cpu_switchto)
pushq %rbx /* preserve callee-saved registers */
pushq %rbp
pushq %r12
pushq %r13
pushq %r14
pushq %r15
movq %rdi, %r13 /* %r13 = old proc */
movq %rsi, %r12 /* %r12 = new proc */
movb $SONPROC,P_STAT(%r12) # p->p_stat = SONPROC
SET_CURPROC(%r12,%rcx)
movl CPUVAR(CPUID),%r9d
/* xsave_mask split into %eax:%edx for the codepatched FPU ops below */
movq xsave_mask(%rip),%rdx
movl %edx,%eax
shrq $32,%rdx
xorl %ecx,%ecx /* %rcx = old proc's pmap, 0 until known */
testq %r13,%r13 /* old exited? then nothing to save */
jz switch_exited
testl $P_SYSTEM,P_FLAG(%r13) /* kernel threads have no user pmap */
movq P_ADDR(%r13),%r13 /* %r13 = old proc's PCB */
jnz 0f
movq PCB_PMAP(%r13),%rcx
0:
movq %rsp,PCB_RSP(%r13) /* save stack and frame pointers */
movq %rbp,PCB_RBP(%r13)
/* save the extended FPU state if this CPU still holds userland's */
testl $CPUPF_USERXSTATE,CPUVAR(PFLAGS)
jz .Lxstate_reset
movq %r13, %rdi
#if PCB_SAVEFPU != 0
addq $PCB_SAVEFPU,%rdi
#endif
CODEPATCH_START
fxsave64 (%rdi) /* patched to the xsave variant in use */
CODEPATCH_END(CPTAG_XSAVE)
switch_exited:
/* reset the FPU to proc0's clean state */
movq proc0paddr(%rip),%rdi
#if PCB_SAVEFPU != 0
addq $PCB_SAVEFPU,%rdi
#endif
CODEPATCH_START
fxrstor64 (%rdi) /* patched to the xrstor variant in use */
CODEPATCH_END(CPTAG_XRSTORS)
andl $~CPUPF_USERXSTATE,CPUVAR(PFLAGS)
.Lxstate_reset:
/* if userland segment registers are live, reset them to defaults */
testl $CPUPF_USERSEGS,CPUVAR(PFLAGS)
jz restore_saved
andl $~CPUPF_USERSEGS,CPUVAR(PFLAGS)
movw $(GSEL(GUDATA_SEL, SEL_UPL)),%ax
movw %ax,%ds
movw %ax,%es
movw %ax,%fs
/*
 * Writing %gs clobbers the hidden GSBASE: swap to the user GSBASE,
 * load the selector, then swap the kernel GSBASE back.
 */
cli
swapgs
movw %ax,%gs
swapgs
restore_saved:
movq P_ADDR(%r12),%r13 /* %r13 = new proc's PCB */
xorl %ebx,%ebx /* %rbx = new pmap, 0 for kernel threads */
testl $P_SYSTEM,P_FLAG(%r12)
jnz 1f
movq PCB_PMAP(%r13),%rbx
1:
cli /* block interrupts across the stack/CR3 switch */
movq PCB_RSP(%r13),%rsp
movq PCB_RBP(%r13),%rbp
RETGUARD_SETUP_OFF(cpu_switchto, r11, 6*8) /* cookie is 6 pushes deep */
/* switch address space, skipping the CR3 load when it is unchanged */
movq PCB_CR3(%r13),%rax
movq %cr3,%rdi
xorq %rax,%rdi
btrq $63,%rdi /* ignore bit 63 (PCID no-flush bit) in the compare */
testq %rdi,%rdi
jz .Lsame_cr3
#ifdef DIAGNOSTIC
cmpq %rcx,CPUVAR(PROC_PMAP) /* must still record old proc's pmap */
jnz .Lbogus_proc_pmap
#endif
movq %rbx,CPUVAR(PROC_PMAP)
.Lset_cr3:
movq %rax,%cr3
.Lsame_cr3:
RET_STACK_REFILL_WITH_RCX /* refill the return stack (RSB defense) */
testq %rbx,%rbx /* kernel thread? skip userspace CR3 setup */
jz switch_restored
/* prepare the return-to-userland state for the new proc */
movq PCB_KSTACK(%r13),%rdx
subq $FRAMESIZE,%rdx
movq %rdx,CPUVAR(KERN_RSP)
CODEPATCH_START /* NOPed out unless the Meltdown workaround is on */
movq PM_PDIRPA_INTEL(%rbx),%rdx
orq cr3_reuse_pcid,%rax
orq cr3_pcid_proc_intel,%rdx
movq %rax,CPUVAR(KERN_CR3)
movq %rdx,CPUVAR(USER_CR3)
CODEPATCH_END(CPTAG_MELTDOWN_NOP)
switch_restored:
SET_CURPCB(%r13)
sti
popq %r15
popq %r14
popq %r13
popq %r12
popq %rbp
popq %rbx
RETGUARD_CHECK(cpu_switchto, r11)
ret
lfence
#ifdef DIAGNOSTIC
.Lbogus_proc_pmap:
leaq bogus_proc_pmap,%rdi
call panic
int3 /* panic does not return */
.pushsection .rodata
bogus_proc_pmap:
.asciz "curcpu->ci_proc_pmap didn't point to previous pmap"
.popsection
#endif
END(cpu_switchto)
/*
 * Retpoline thunks: perform an indirect jump through %rax / %r11 in a
 * speculation-safe way; codepatched to a plain "jmp *%reg" on CPUs
 * that don't need the mitigation (see the _jmprax/_jmpr11 snippets).
 */
NENTRY(retpoline_rax)
CODEPATCH_START
JMP_RETPOLINE(rax)
CODEPATCH_END(CPTAG_RETPOLINE_RAX)
END(retpoline_rax)
NENTRY(__x86_indirect_thunk_r11)
CODEPATCH_START
JMP_RETPOLINE(r11)
CODEPATCH_END(CPTAG_RETPOLINE_R11)
END(__x86_indirect_thunk_r11)
/*
 * cpu_idle_cycle_hlt: idle by enabling interrupts and halting.
 * sti takes effect only after the following instruction, so an
 * interrupt cannot slip in between the sti and the hlt.
 */
ENTRY(cpu_idle_cycle_hlt)
RETGUARD_SETUP(cpu_idle_cycle_hlt, r11)
sti
hlt
RETGUARD_CHECK(cpu_idle_cycle_hlt, r11)
ret
lfence
END(cpu_idle_cycle_hlt)
/*
 * savectx(struct pcb *pcb);
 * Record the current kernel stack and frame pointers in the PCB.
 */
ENTRY(savectx)
RETGUARD_SETUP(savectx, r11)
movq %rsp,PCB_RSP(%rdi)
movq %rbp,PCB_RBP(%rdi)
RETGUARD_CHECK(savectx, r11)
ret
lfence
END(savectx)
/*
 * Syscall entry used when the Meltdown workaround is enabled.  This
 * copy lives on the user-visible (U-K) trampoline page: it switches
 * to the kernel CR3, after which instruction fetch continues at the
 * matching offset in the kernel text copy below (Xsyscall_meltdown /
 * Xsyscall).  The spin loop should be unreachable; it only runs if
 * the CR3 switch somehow failed to bring the kernel text into view.
 */
KUTEXT_PAGE_START
.align NBPG, 0xcc
XUsyscall_meltdown:
endbr64
swapgs /* kernel GSBASE for the CPUVAR accesses */
movq %rax,CPUVAR(SCRATCH) /* stash user %rax (syscall number) */
movq CPUVAR(KERN_CR3),%rax
movq %rax,%cr3
0: pause
lfence
jmp 0b
KUTEXT_PAGE_END
KTEXT_PAGE_START
.align NBPG, 0xcc
/*
 * Kernel-text syscall entry.  Xsyscall_meltdown is the continuation
 * point for the U-K trampoline above (it repeats the kernel CR3 load
 * at the matching page offset); Xsyscall is the direct entry when the
 * Meltdown workaround is off.
 */
GENTRY(Xsyscall_meltdown)
movq CPUVAR(KERN_CR3),%rax
movq %rax,%cr3
GENTRY(Xsyscall)
endbr64
swapgs /* switch to kernel GSBASE */
movq %rax,CPUVAR(SCRATCH) /* stash the syscall number */
SYSCALL_ENTRY /* macro: switch stacks, build the trapframe */
sti
movq CPUVAR(CURPROC),%r14
movq %rsp,P_MD_REGS(%r14) # save pointer to frame
andl $~MDP_IRET,P_MD_FLAGS(%r14)
movq %rsp,%rdi
call syscall
.Lsyscall_check_asts:
/* handle pending ASTs with interrupts blocked, looping until clear */
cli
CHECK_ASTPENDING(%r11)
je 2f
CLEAR_ASTPENDING(%r11)
sti
movq %rsp,%rdi
call ast
jmp .Lsyscall_check_asts
2:
#ifdef DIAGNOSTIC
cmpl $IPL_NONE,CPUVAR(ILEVEL)
jne .Lsyscall_spl_not_lowered
#endif
/* frame needs a full iretq return instead of sysretq? */
testl $MDP_IRET, P_MD_FLAGS(%r14)
jne intr_user_exit_post_ast
/* restore FPU state / segment bases if the CPU doesn't hold userland's */
testl $CPUPF_USERXSTATE,CPUVAR(PFLAGS)
jz .Lsyscall_restore_xstate
testl $CPUPF_USERSEGS,CPUVAR(PFLAGS)
jz .Lsyscall_restore_fsbase
.Lsyscall_restore_registers:
CODEPATCH_START /* IBPB when switching to a different user pmap */
movq CPUVAR(PROC_PMAP),%rbx
cmpq CPUVAR(USER_PMAP),%rbx
je 1f
xorl %edx,%edx
movl $PRED_CMD_IBPB,%eax
movl $MSR_PRED_CMD,%ecx
wrmsr
movq %rbx,CPUVAR(USER_PMAP)
1:
CODEPATCH_END(CPTAG_IBPB_NOP)
call pku_xonly /* re-arm the PKU execute-only key */
RET_STACK_REFILL_WITH_RCX
movq TF_R8(%rsp),%r8
movq TF_R9(%rsp),%r9
movq TF_R10(%rsp),%r10
movq TF_R12(%rsp),%r12
movq TF_R13(%rsp),%r13
movq TF_R14(%rsp),%r14
movq TF_R15(%rsp),%r15
movq TF_RBX(%rsp),%rbx
movq TF_RDX(%rsp),%rdx
CODEPATCH_START /* MDS mitigation, NOPed out on unaffected CPUs */
xorl %edi,%edi
xorl %esi,%esi
xorl %r11d,%r11d
xorl %eax,%eax
xorl %ecx,%ecx
movw %ds,TF_R8(%rsp)
verw TF_R8(%rsp) /* flushes CPU buffers on affected parts */
CODEPATCH_END(CPTAG_MDS)
movq TF_RDI(%rsp),%rdi
movq TF_RSI(%rsp),%rsi
movq TF_RBP(%rsp),%rbp
movq TF_RAX(%rsp),%rax /* syscall return value */
movq TF_RIP(%rsp),%rcx /* sysretq restores RIP from %rcx ... */
movq TF_RFLAGS(%rsp),%r11 /* ... and RFLAGS from %r11 */
movq TF_RSP(%rsp),%rsp /* back onto the user stack */
CODEPATCH_START /* Meltdown: leave via the user-CR3 trampoline */
movq %rax,CPUVAR(SCRATCH)
movq CPUVAR(USER_CR3),%rax
PCID_SET_REUSE_NOP
movq %rax,%cr3
Xsyscall_trampback:
0: pause /* unreachable: the U-K copy takes over at this offset */
lfence
jmp 0b
CODEPATCH_END(CPTAG_MELTDOWN_NOP)
swapgs /* non-Meltdown path: restore user GSBASE */
sysretq
END(Xsyscall)
END(Xsyscall_meltdown)
KTEXT_PAGE_END
/*
 * U-K mapped tail of the sysretq path, padded so it lines up with the
 * Xsyscall_trampback offset above: once the user CR3 is live, fetch
 * lands here to restore %rax and complete the return to userspace.
 */
KUTEXT_PAGE_START
.space (Xsyscall_trampback - Xsyscall_meltdown) - \
(. - XUsyscall_meltdown), 0xcc
movq %rax,%cr3
movq CPUVAR(SCRATCH),%rax
swapgs
sysretq
KUTEXT_PAGE_END
.text
_ALIGN_TRAPS
/* syscall exit: only the user FSBASE needs reloading */
.Lsyscall_restore_fsbase:
orl $CPUPF_USERSEGS,CPUVAR(PFLAGS)
movq CPUVAR(CURPCB),%rdi
jmp .Lsyscall_restore_fsbase_real
_ALIGN_TRAPS
/* syscall exit: reload the user FPU state, then the FSBASE */
.Lsyscall_restore_xstate:
orl $(CPUPF_USERXSTATE|CPUPF_USERSEGS),CPUVAR(PFLAGS)
movq CPUVAR(CURPCB),%rdi
movq xsave_mask(%rip),%rdx /* mask in %eax:%edx for xrstor */
movl %edx,%eax
shrq $32,%rdx
#if PCB_SAVEFPU != 0
addq $PCB_SAVEFPU,%rdi
#endif
CODEPATCH_START
fxrstor64 (%rdi) /* patched to the xrstor variant in use */
CODEPATCH_END(CPTAG_XRSTORS)
#if PCB_SAVEFPU != 0
subq $PCB_SAVEFPU,%rdi /* back to the PCB for the FSBASE load */
#endif
.Lsyscall_restore_fsbase_real:
movq PCB_FSBASE(%rdi),%rdx /* wrmsr takes the value in %edx:%eax */
movl %edx,%eax
shrq $32,%rdx
movl $MSR_FSBASE,%ecx
wrmsr
jmp .Lsyscall_restore_registers
#ifdef DIAGNOSTIC
/* IPL was not back at IPL_NONE: warn, force it down, and retry */
.Lsyscall_spl_not_lowered:
leaq spl_lowered(%rip), %rdi
movl TF_ERR(%rsp),%esi /* NOTE(review): presumably the syscall number
 * was stashed in tf_err -- confirm against the entry path */
movl TF_RDI(%rsp),%edx
movl %ebx,%ecx
movl CPUVAR(ILEVEL),%r8d
xorq %rax,%rax /* no vector args for the variadic printf */
call printf
#ifdef DDB
int $3
#endif
movl $IPL_NONE,CPUVAR(ILEVEL)
jmp .Lsyscall_check_asts
.section .rodata
spl_lowered:
.asciz "WARNING: SPL NOT LOWERED ON SYSCALL %d %d EXIT %x %x\n"
.text
#endif
/*
 * proc_trampoline: first code run by a newly forked thread.
 * NOTE(review): presumably cpu_fork stashed a callback in %r12 and its
 * argument in %r13 -- confirm against cpu_fork.  The callback is
 * invoked through the %rax retpoline, then the thread leaves through
 * the normal syscall-return path.
 */
NENTRY(proc_trampoline)
call proc_trampoline_mi
movq %r13,%rdi /* argument for the callback */
movq %r12,%rax
call retpoline_rax /* call the callback via the retpoline */
movq CPUVAR(CURPROC),%r14
jmp .Lsyscall_check_asts /* finish as if returning from a syscall */
END(proc_trampoline)
KTEXT_PAGE_START
_ALIGN_TRAPS
/*
 * intr_user_exit: return to userspace via iretq.  Used by interrupts
 * and traps, and by syscalls that require a full-frame return
 * (MDP_IRET).  Must be entered with interrupts blocked.
 */
GENTRY(intr_user_exit)
#ifdef DIAGNOSTIC
pushfq /* verify interrupts really are blocked */
popq %rdx
testq $PSL_I,%rdx
jnz .Lintr_user_exit_not_blocked
#endif
/* process pending ASTs, re-checking after each one */
CHECK_ASTPENDING(%r11)
je intr_user_exit_post_ast
CLEAR_ASTPENDING(%r11)
sti
movq %rsp,%rdi
call ast
cli
jmp intr_user_exit
intr_user_exit_post_ast:
/* restore FPU state / segment bases if the CPU doesn't hold userland's */
testl $CPUPF_USERXSTATE,CPUVAR(PFLAGS)
jz .Lintr_restore_xstate
testl $CPUPF_USERSEGS,CPUVAR(PFLAGS)
jz .Lintr_restore_fsbase
.Lintr_restore_registers:
#ifdef DIAGNOSTIC
cmpl $0,CPUVAR(ILEVEL) /* IPL must be fully lowered by now */
jne .Luser_spl_not_lowered
#endif
CODEPATCH_START /* IBPB when switching to a different user pmap */
movq CPUVAR(PROC_PMAP),%rbx
cmpq CPUVAR(USER_PMAP),%rbx
je 1f
xorl %edx,%edx
movl $PRED_CMD_IBPB,%eax
movl $MSR_PRED_CMD,%ecx
wrmsr
movq %rbx,CPUVAR(USER_PMAP)
1:
CODEPATCH_END(CPTAG_IBPB_NOP)
call pku_xonly /* re-arm the PKU execute-only key */
RET_STACK_REFILL_WITH_RCX
movq TF_R8(%rsp),%r8
movq TF_R9(%rsp),%r9
movq TF_R10(%rsp),%r10
movq TF_R12(%rsp),%r12
movq TF_R13(%rsp),%r13
movq TF_R14(%rsp),%r14
movq TF_R15(%rsp),%r15
movq TF_RBX(%rsp),%rbx
CODEPATCH_START /* MDS mitigation, NOPed out on unaffected CPUs */
xorl %edi,%edi
xorl %esi,%esi
xorl %r11d,%r11d
xorl %eax,%eax
xorl %edx,%edx
xorl %ecx,%ecx
movw %ds,TF_R8(%rsp)
verw TF_R8(%rsp) /* flushes CPU buffers on affected parts */
CODEPATCH_END(CPTAG_MDS)
movq TF_RDI(%rsp),%rdi
movq TF_RSI(%rsp),%rsi
movq TF_RBP(%rsp),%rbp
/* build the hardware iretq frame on the per-CPU intr stack */
movq CPUVAR(INTR_RSP),%rdx
movq $(GSEL(GUCODE_SEL,SEL_UPL)),IRETQ_CS(%rdx)
movq TF_RIP(%rsp),%rax
movq %rax,IRETQ_RIP(%rdx)
movq TF_RFLAGS(%rsp),%rax
movq %rax,IRETQ_RFLAGS(%rdx)
movq TF_RSP(%rsp),%rax
movq %rax,IRETQ_RSP(%rdx)
movq $(GSEL(GUDATA_SEL,SEL_UPL)),IRETQ_SS(%rdx)
movq TF_RAX(%rsp),%rax
movq TF_RCX(%rsp),%rcx
movq TF_R11(%rsp),%r11
xchgq %rdx,%rsp /* switch onto the iretq frame ... */
movq TF_RDX(%rdx),%rdx /* ... old frame now addressed via %rdx */
CODEPATCH_START /* Meltdown: leave via the user-CR3 trampoline */
movq %rax,CPUVAR(SCRATCH)
movq CPUVAR(USER_CR3),%rax
PCID_SET_REUSE_NOP
movq %rax,%cr3
Xiretq_trampback:
KTEXT_PAGE_END
/* U-K alias, padded so it lands at the Xiretq_trampback offset */
KUTEXT_PAGE_START
.space (Xiretq_trampback - Xsyscall_meltdown) - \
(. - XUsyscall_meltdown), 0xcc
movq CPUVAR(SCRATCH),%rax
.Liretq_swapgs:
swapgs
doreti_iret_meltdown:
iretq
KUTEXT_PAGE_END
KTEXT_PAGE_START
0: pause /* unreachable when the Meltdown patch is active */
lfence
jmp 0b
.LKiretq_swapgs:
.space 2, 0xcc /* padding; presumably keeps this offset in step
 * with .Liretq_swapgs above -- confirm */
CODEPATCH_END(CPTAG_MELTDOWN_NOP)
swapgs /* non-Meltdown path: restore user GSBASE ... */
.globl doreti_iret
doreti_iret:
iretq /* ... and return to userspace */
KTEXT_PAGE_END
.text
_ALIGN_TRAPS
/* iretq exit: reload the user FPU state, then fall into FSBASE reload */
.Lintr_restore_xstate:
orl $CPUPF_USERXSTATE,CPUVAR(PFLAGS)
movq CPUVAR(CURPCB),%rdi
#if PCB_SAVEFPU != 0
addq $PCB_SAVEFPU,%rdi
#endif
movq xsave_mask(%rip),%rdx /* mask in %eax:%edx for xrstor */
movl %edx,%eax
shrq $32, %rdx
CODEPATCH_START
fxrstor64 (%rdi) /* patched to the xrstor variant in use */
CODEPATCH_END(CPTAG_XRSTORS)
/* iretq exit: reload the user FSBASE */
.Lintr_restore_fsbase:
orl $CPUPF_USERSEGS,CPUVAR(PFLAGS)
movq CPUVAR(CURPCB),%rdx
movq PCB_FSBASE(%rdx),%rdx
movl %edx,%eax /* wrmsr takes the value in %edx:%eax */
shrq $32,%rdx
movl $MSR_FSBASE,%ecx
wrmsr
jmp .Lintr_restore_registers
/*
 * Recovery point if restoring user FPU state faulted: load proc0's
 * clean state instead and recast the event as a protection fault.
 */
.Lintr_xrstor_faulted:
sti
movq proc0paddr(%rip),%rdi
#if PCB_SAVEFPU != 0
addq $PCB_SAVEFPU,%rdi
#endif
CODEPATCH_START
fxrstor64 (%rdi)
CODEPATCH_END(CPTAG_XRSTORS)
movq $T_PROTFLT,TF_TRAPNO(%rsp)
jmp recall_trap
#ifdef DIAGNOSTIC
/* warn (once) that we arrived with interrupts enabled, then retry */
.Lintr_user_exit_not_blocked:
movl warn_once(%rip),%edi
testl %edi,%edi
jnz 1f
incl %edi
movl %edi,warn_once(%rip)
leaq .Lnot_blocked(%rip),%rdi
call printf
#ifdef DDB
int $3
#endif
1: cli
jmp intr_user_exit
/* warn that the IPL wasn't lowered, force it down, and retry */
.Luser_spl_not_lowered:
sti
leaq intr_spl_lowered(%rip),%rdi
movl CPUVAR(ILEVEL),%esi
xorl %edx,%edx
xorl %eax,%eax /* no vector args for the variadic printf */
call printf
#ifdef DDB
int $3
#endif
movl $0,CPUVAR(ILEVEL)
cli
jmp intr_user_exit
.section .rodata
intr_spl_lowered:
.asciz "WARNING: SPL NOT LOWERED ON TRAP EXIT %x %x\n"
.text
#endif
/* fixed: was END(Xintr_user_exit), but the function is declared
 * GENTRY(intr_user_exit) and no Xintr_user_exit symbol exists */
END(intr_user_exit)
/*
 * intr_fast_exit: return from an interrupt without the user-return
 * work (no segment/FPU/CR3 handling is done here, so this is the
 * return-to-kernel-context path).  Restores the full trapframe and
 * iretq's.  Must be entered with interrupts blocked.
 */
NENTRY(intr_fast_exit)
#ifdef DIAGNOSTIC
pushfq /* verify interrupts really are blocked */
popq %rdx
testq $PSL_I,%rdx
jnz .Lintr_exit_not_blocked
#endif
movq TF_RDI(%rsp),%rdi
movq TF_RSI(%rsp),%rsi
movq TF_R8(%rsp),%r8
movq TF_R9(%rsp),%r9
movq TF_R10(%rsp),%r10
movq TF_R12(%rsp),%r12
movq TF_R13(%rsp),%r13
movq TF_R14(%rsp),%r14
movq TF_R15(%rsp),%r15
movq TF_RBP(%rsp),%rbp
movq TF_RBX(%rsp),%rbx
movq TF_RDX(%rsp),%rdx
movq TF_RCX(%rsp),%rcx
movq TF_R11(%rsp),%r11
movq TF_RAX(%rsp),%rax
addq $TF_RIP,%rsp /* drop everything below the hardware frame */
iretq
#ifdef DIAGNOSTIC
/* warn (once) that interrupts were enabled, then retry the exit */
.Lintr_exit_not_blocked:
movl warn_once(%rip),%edi
testl %edi,%edi
jnz 1f
incl %edi
movl %edi,warn_once(%rip)
leaq .Lnot_blocked(%rip),%rdi
call printf
#ifdef DDB
int $3
#endif
1: cli
jmp intr_fast_exit
.data
.global warn_once
warn_once: /* latch so the diagnostic prints only once */
.long 0
.section .rodata
.Lnot_blocked:
.asciz "WARNING: INTERRUPTS NOT BLOCKED ON INTERRUPT RETURN: 0x%x 0x%x\n"
.text
#endif
END(intr_fast_exit)
/*
 * xrstor_kern(struct savefpu *addr, uint64_t mask);
 * Restore FPU/extended state from *%rdi in kernel context; the mask
 * in %rsi is split into %edx:%eax as the xrstor variants require.
 * The insn is codepatched (fxrstor64 -> xrstor64/xrstors64).
 */
ENTRY(xrstor_kern)
RETGUARD_SETUP(xrstor_kern, r11)
movq %rsi, %rdx
movl %esi, %eax
shrq $32, %rdx
CODEPATCH_START
fxrstor64 (%rdi)
CODEPATCH_END(CPTAG_XRSTORS)
RETGUARD_CHECK(xrstor_kern, r11)
ret
lfence
END(xrstor_kern)
/*
 * xrstor_user(struct savefpu *addr, uint64_t mask);
 * Restore (possibly invalid) user FPU/extended state.  Returns 0 on
 * success; if the restore faults, the trap handler resumes at
 * xrstor_resume (xrstor_fault marks the faulting insn) and 1 is
 * returned.  Mask in %rsi is split into %edx:%eax for xrstor.
 */
ENTRY(xrstor_user)
RETGUARD_SETUP(xrstor_user, r11)
movq %rsi, %rdx
movl %esi, %eax
shrq $32, %rdx
.globl xrstor_fault
xrstor_fault: /* faulting address recognized by the trap handler */
CODEPATCH_START
fxrstor64 (%rdi)
CODEPATCH_END(CPTAG_XRSTOR)
xorl %eax, %eax /* success */
RETGUARD_CHECK(xrstor_user, r11)
ret
lfence
NENTRY(xrstor_resume)
movl $1, %eax /* faulted: report failure */
RETGUARD_CHECK(xrstor_user, r11)
ret
lfence
END(xrstor_user)
/*
 * fpusave(struct savefpu *addr);
 * Save the FPU/extended state to *%rdi using the full xsave_mask
 * (split into %edx:%eax); codepatched to the xsave variant in use.
 */
ENTRY(fpusave)
RETGUARD_SETUP(fpusave, r11)
movq xsave_mask(%rip),%rdx
movl %edx,%eax
shrq $32,%rdx
CODEPATCH_START
fxsave64 (%rdi)
CODEPATCH_END(CPTAG_XSAVE)
RETGUARD_CHECK(fpusave, r11)
ret
lfence
END(fpusave)
/*
 * fpusavereset(struct savefpu *addr);
 * Save the FPU/extended state to *%rdi, then reload proc0's clean
 * state so the FPU is left in a known-reset condition.
 */
ENTRY(fpusavereset)
RETGUARD_SETUP(fpusavereset, r11)
movq xsave_mask(%rip),%rdx /* mask in %eax:%edx for xsave/xrstor */
movl %edx,%eax
shrq $32,%rdx
CODEPATCH_START
fxsave64 (%rdi) /* patched to the xsave variant in use */
CODEPATCH_END(CPTAG_XSAVE)
movq proc0paddr(%rip),%rdi
#if PCB_SAVEFPU != 0
addq $PCB_SAVEFPU,%rdi
#endif
CODEPATCH_START
fxrstor64 (%rdi) /* patched to the xrstor variant in use */
CODEPATCH_END(CPTAG_XRSTORS)
RETGUARD_CHECK(fpusavereset, r11)
ret
lfence
END(fpusavereset)
/*
 * xsetbv_user(uint32_t reg, uint64_t mask);
 * Set extended control register %edi to the value in %rsi (split into
 * %edx:%eax as xsetbv requires).  Returns 0 on success; if xsetbv
 * faults on a bad value, the trap handler resumes at xsetbv_resume
 * and 1 is returned.
 */
ENTRY(xsetbv_user)
RETGUARD_SETUP(xsetbv_user, r11)
movl %edi, %ecx /* XCR index */
movq %rsi, %rdx
movl %esi, %eax
shrq $32, %rdx
.globl xsetbv_fault
xsetbv_fault: /* faulting address recognized by the trap handler */
xsetbv
xorl %eax, %eax /* success */
RETGUARD_CHECK(xsetbv_user, r11)
ret
lfence
NENTRY(xsetbv_resume)
movl $1, %eax /* faulted: report failure */
RETGUARD_CHECK(xsetbv_user, r11)
ret
lfence
END(xsetbv_user)
/*
 * Instruction snippets copied over codepatch sites at boot: the
 * xsave/xrstor family variants, the PCID-reuse tagging, and the plain
 * indirect jumps that replace retpolines on unaffected CPUs.
 */
CODEPATCH_CODE(_xrstor, xrstor64 (%rdi))
CODEPATCH_CODE(_xrstors, xrstors64 (%rdi))
CODEPATCH_CODE(_xsave, xsave64 (%rdi))
CODEPATCH_CODE(_xsaves, xsaves64 (%rdi))
CODEPATCH_CODE(_xsaveopt, xsaveopt64 (%rdi))
CODEPATCH_CODE(_pcid_set_reuse,
orl $(CR3_REUSE_PCID >> 32),CPUVAR(USER_CR3 + 4))
CODEPATCH_CODE_LEN(_jmprax, jmp *%rax; int3)
CODEPATCH_CODE_LEN(_jmpr11, jmp *%r11; int3)
CODEPATCH_CODE_LEN(_jmpr13, jmp *%r13; int3)
/*
 * pagezero(vaddr_t page);
 * Zero one page, 32 bytes per iteration, using non-temporal stores
 * to avoid polluting the cache.  %rdx counts from -PAGE_SIZE up to 0
 * while %rdi points at the end of the page, so (%rdi,%rdx) walks the
 * page forward.  sfence orders the weakly-ordered movnti stores.
 */
ENTRY(pagezero)
RETGUARD_SETUP(pagezero, r11)
movq $-PAGE_SIZE,%rdx
subq %rdx,%rdi /* %rdi = page end */
xorq %rax,%rax
1:
movnti %rax,(%rdi,%rdx)
movnti %rax,8(%rdi,%rdx)
movnti %rax,16(%rdi,%rdx)
movnti %rax,24(%rdi,%rdx)
addq $32,%rdx
jne 1b
sfence /* make the non-temporal stores visible */
RETGUARD_CHECK(pagezero, r11)
ret
lfence
END(pagezero)
/*
 * pku_xonly: if protection keys are in use (pg_xo non-zero), reset
 * PKRU to PGK_VALUE so the execute-only key is enforced again.
 * wrpkru requires %ecx = 0 and %edx = 0, value in %eax.
 */
ENTRY(pku_xonly)
movq pg_xo,%rax /* PKU in use at all? */
cmpq $0,%rax
je 1f
movl $0,%ecx /* wrpkru demands %ecx == 0 */
movl $0,%edx /* ... and %edx == 0 */
movl $PGK_VALUE,%eax
wrpkru
1: ret
lfence
END(pku_xonly)
/*
 * rdmsr_safe(u_int msr, uint64_t *data);
 * Read MSR %edi into *%rsi.  Returns 0 on success; if the rdmsr
 * faults (unsupported MSR), the trap handler resumes at rdmsr_resume
 * and 1 is returned.
 */
ENTRY(rdmsr_safe)
RETGUARD_SETUP(rdmsr_safe, r10)
movl %edi, %ecx /* MSR number for rdmsr */
.globl rdmsr_safe_fault
rdmsr_safe_fault: /* faulting address recognized by the trap handler */
rdmsr
salq $32, %rdx /* combine %edx:%eax into one 64-bit value */
movl %eax, %eax /* zero-extend the low half */
orq %rdx, %rax
movq %rax, (%rsi)
xorq %rax, %rax /* success */
RETGUARD_CHECK(rdmsr_safe, r10)
ret
lfence
NENTRY(rdmsr_resume)
movl $0x1, %eax /* faulted: report failure */
RETGUARD_CHECK(rdmsr_safe, r10)
ret
lfence
END(rdmsr_safe)
#if NHYPERV > 0
/*
 * Hyper-V hypercall trampoline: translate SysV AMD64 argument
 * registers (%rdi, %rsi, %rdx) into the Microsoft x64 convention
 * (%rcx, %rdx, %r8) expected by the hypercall page, then tail-jump
 * into it.
 */
NENTRY(hv_hypercall_trampoline)
endbr64
mov %rdx, %r8
mov %rsi, %rdx
mov %rdi, %rcx
jmp hv_hypercall_page
END(hv_hypercall_trampoline)
/*
 * Page-aligned reservation for the hypercall code; presumably filled
 * in at runtime by the hyperv driver -- confirm against its attach.
 */
.text
.align NBPG, 0xcc
.globl hv_hypercall_page
hv_hypercall_page:
.skip 0x1000, 0xcc
#endif
#if NXEN > 0
/*
 * Page-aligned reservation for the Xen hypercall code; presumably
 * populated at runtime by the pvbus/xen driver -- confirm.
 */
.text
.align NBPG, 0xcc
.globl xen_hypercall_page
xen_hypercall_page:
.skip 0x1000, 0xcc
#endif