#include <sys/segments.h>
#include <sys/asm_linkage.h>
#include <sys/controlregs.h>
#include <sys/x86_archext.h>
#include <sys/privregs.h>
#include <sys/machprivregs.h>
#include <sys/kdi_regs.h>
#include <sys/psw.h>
#include <sys/uadmin.h>
#ifdef __xpv
#include <sys/hypervisor.h>
#endif
#include <kdi_assym.h>
#include <assym.h>
/*
 * Load the address of the current CPU's save area into %rax:
 *
 *	%rax = *kdi_cpusave + CPU_ID * KRS_SIZE
 *
 * (kdi_cpusave holds a pointer to the array; addq (%rdx) adds the value
 * stored there, not the symbol address.)  The CPU id is deliberately
 * left in %rbx for later use by callers.
 * Clobbers: %rax, %rbx, %rcx, %rdx (mulq writes %rdx:%rax).
 */
#define GET_CPUSAVE_ADDR \
movzbq %gs:CPU_ID, %rbx; \
movq %rbx, %rax; \
movq $KRS_SIZE, %rcx; \
mulq %rcx; \
movq $kdi_cpusave, %rdx; \
\
addq (%rdx), %rax
/*
 * Save the CPU's current IDT and GDT descriptor-table pointers into the
 * cpusave area addressed by %rax -- but only when the active IDT is not
 * already the debugger's own kdi_idt.  If it is, we would be capturing
 * the debugger's tables rather than the kernel's, so both saves are
 * skipped.
 * Clobbers: %r11, %rsi.
 */
#define SAVE_IDTGDT \
movq %gs:CPU_IDT, %r11; \
leaq kdi_idt(%rip), %rsi; \
cmpq %rsi, %r11; \
je 1f; \
movq %r11, KRS_IDT(%rax); \
movq %gs:CPU_GDT, %r11; \
movq %r11, KRS_GDT(%rax); \
1:
/*
 * Save/restore of the GSBASE MSRs around debugger entry and exit.
 *
 * Under the hypervisor (__xpv) these are no-ops.  On bare metal, both
 * MSR_AMD_GSBASE and MSR_AMD_KGSBASE are captured into the saved
 * register area so the debugger may inspect (and alter) them, and both
 * are written back on resume.  rdmsr/wrmsr move the 64-bit value
 * through %edx:%eax with the MSR number in %ecx.
 * Both macros clobber %rax, %rcx and %rdx.
 */
#ifdef __xpv
#define SAVE_GSBASE(reg)
#define RESTORE_GSBASE(reg)
#else
#define SAVE_GSBASE(base) \
movl $MSR_AMD_GSBASE, %ecx; \
rdmsr; \
shlq $32, %rdx; \
orq %rax, %rdx; \
movq %rdx, REG_OFF(KDIREG_GSBASE)(base); \
movl $MSR_AMD_KGSBASE, %ecx; \
rdmsr; \
shlq $32, %rdx; \
orq %rax, %rdx; \
movq %rdx, REG_OFF(KDIREG_KGSBASE)(base)
/*
 * FIX: SAVE_GSBASE above captures both GSBASE and KGSBASE, but the
 * restore previously wrote back only GSBASE, leaving whatever KGSBASE
 * value the debugger session ended with in place for the resumed
 * kernel -- a stale value there would surface at the kernel's next
 * swapgs.  Restore both saved MSRs; KGSBASE first, so that GSBASE (the
 * one live in %gs) is the last thing written before returning.
 */
#define RESTORE_GSBASE(base) \
movq REG_OFF(KDIREG_KGSBASE)(base), %rdx; \
movq %rdx, %rax; \
shrq $32, %rdx; \
movl $MSR_AMD_KGSBASE, %ecx; \
wrmsr; \
movq REG_OFF(KDIREG_GSBASE)(base), %rdx; \
movq %rdx, %rax; \
shrq $32, %rdx; \
movl $MSR_AMD_GSBASE, %ecx; \
wrmsr
#endif
/*
 * Save the general-purpose register state into the kdi register area at
 * (base).  The trap-frame portion (TRAPNO through SS, including RIP)
 * must already be valid at (base) before this runs.  %rbp and the
 * already-saved RIP are duplicated into the SAVFP/SAVPC slots so stack
 * walkers see a conventional frame.  %cr2, the data segment selectors
 * (zero-extended to 64 bits via the clrq), and -- on bare metal -- the
 * GSBASE/KGSBASE MSRs are captured as well.
 * Clobbers: %rax, and (via SAVE_GSBASE) %rcx and %rdx; all three are
 * clobbered only after their values have been stored.
 */
#define KDI_SAVE_REGS(base) \
movq %rdi, REG_OFF(KDIREG_RDI)(base); \
movq %rsi, REG_OFF(KDIREG_RSI)(base); \
movq %rdx, REG_OFF(KDIREG_RDX)(base); \
movq %rcx, REG_OFF(KDIREG_RCX)(base); \
movq %r8, REG_OFF(KDIREG_R8)(base); \
movq %r9, REG_OFF(KDIREG_R9)(base); \
movq %rax, REG_OFF(KDIREG_RAX)(base); \
movq %rbx, REG_OFF(KDIREG_RBX)(base); \
movq %rbp, REG_OFF(KDIREG_RBP)(base); \
movq %r10, REG_OFF(KDIREG_R10)(base); \
movq %r11, REG_OFF(KDIREG_R11)(base); \
movq %r12, REG_OFF(KDIREG_R12)(base); \
movq %r13, REG_OFF(KDIREG_R13)(base); \
movq %r14, REG_OFF(KDIREG_R14)(base); \
movq %r15, REG_OFF(KDIREG_R15)(base); \
movq %rbp, REG_OFF(KDIREG_SAVFP)(base); \
movq REG_OFF(KDIREG_RIP)(base), %rax; \
movq %rax, REG_OFF(KDIREG_SAVPC)(base); \
movq %cr2, %rax; \
movq %rax, REG_OFF(KDIREG_CR2)(base); \
clrq %rax; \
movw %ds, %ax; \
movq %rax, REG_OFF(KDIREG_DS)(base); \
movw %es, %ax; \
movq %rax, REG_OFF(KDIREG_ES)(base); \
movw %fs, %ax; \
movq %rax, REG_OFF(KDIREG_FS)(base); \
movw %gs, %ax; \
movq %rax, REG_OFF(KDIREG_GS)(base); \
SAVE_GSBASE(base)
/*
 * Restore the register state saved by KDI_SAVE_REGS.  The gregs address
 * is copied into %rdi first so that `base' may itself be a register
 * that is overwritten during restoration; %rdi is reloaded last for the
 * same reason.  GSBASE, %es/%ds and %cr2 are put back before the
 * general-purpose registers (the MSR restore clobbers %rax/%rcx/%rdx,
 * which are reloaded afterwards).  %fs/%gs are deliberately not
 * reloaded here.
 */
#define KDI_RESTORE_REGS(base) \
movq base, %rdi; \
RESTORE_GSBASE(%rdi); \
movq REG_OFF(KDIREG_ES)(%rdi), %rax; \
movw %ax, %es; \
movq REG_OFF(KDIREG_DS)(%rdi), %rax; \
movw %ax, %ds; \
movq REG_OFF(KDIREG_CR2)(base), %rax; \
movq %rax, %cr2; \
movq REG_OFF(KDIREG_R15)(%rdi), %r15; \
movq REG_OFF(KDIREG_R14)(%rdi), %r14; \
movq REG_OFF(KDIREG_R13)(%rdi), %r13; \
movq REG_OFF(KDIREG_R12)(%rdi), %r12; \
movq REG_OFF(KDIREG_R11)(%rdi), %r11; \
movq REG_OFF(KDIREG_R10)(%rdi), %r10; \
movq REG_OFF(KDIREG_RBP)(%rdi), %rbp; \
movq REG_OFF(KDIREG_RBX)(%rdi), %rbx; \
movq REG_OFF(KDIREG_RAX)(%rdi), %rax; \
movq REG_OFF(KDIREG_R9)(%rdi), %r9; \
movq REG_OFF(KDIREG_R8)(%rdi), %r8; \
movq REG_OFF(KDIREG_RCX)(%rdi), %rcx; \
movq REG_OFF(KDIREG_RDX)(%rdi), %rdx; \
movq REG_OFF(KDIREG_RSI)(%rdi), %rsi; \
movq REG_OFF(KDIREG_RDI)(%rdi), %rdi
/*
 * Rearm the hardware debug registers from the debugger's kdi_drreg
 * state, one kdi_dreg_set(regno, value) call per register:
 *	%dr7 <- kdi_drreg.dr_ctl (breakpoint control),
 *	%dr6 <- KDIREG_DRSTAT_RESERVED (clear status to reserved bits),
 *	%dr0..%dr3 <- the stored breakpoint addresses.
 * %rdi is preserved around the calls; %r15 is used to hold the
 * kdi_drreg base and is clobbered, as are %rsi and the other
 * caller-saved registers by the calls themselves.
 */
#define KDI_RESTORE_DEBUGGING_STATE \
pushq %rdi; \
leaq kdi_drreg(%rip), %r15; \
movl $7, %edi; \
movq DR_CTL(%r15), %rsi; \
call kdi_dreg_set; \
\
movl $6, %edi; \
movq $KDIREG_DRSTAT_RESERVED, %rsi; \
call kdi_dreg_set; \
\
movl $0, %edi; \
movq DRADDR_OFF(0)(%r15), %rsi; \
call kdi_dreg_set; \
movl $1, %edi; \
movq DRADDR_OFF(1)(%r15), %rsi; \
call kdi_dreg_set; \
movl $2, %edi; \
movq DRADDR_OFF(2)(%r15), %rsi; \
call kdi_dreg_set; \
movl $3, %edi; \
movq DRADDR_OFF(3)(%r15), %rsi; \
call kdi_dreg_set; \
popq %rdi;
/*
 * Advance cpusave's current-crumb index and pointer to the next entry
 * of the circular crumb buffer, wrapping back to the start of
 * KRS_CRUMBS once the index reaches KDI_NCRUMBS - 1, then zero the
 * newly selected crumb.  tmp1 and tmp2 are scratch registers; tmp1
 * ends up holding the new crumb pointer.
 * NOTE(review): the clearing loop issues overlapping 8-byte stores at a
 * 4-byte stride (KDI_NCRUMBS iterations of `movq $0, -4(tmp1,tmp2,4)`),
 * which assumes KRM_SIZE and the crumb layout tolerate writing slightly
 * past KDI_NCRUMBS*4 bytes -- confirm against the kdi_regs.h
 * definitions.
 */
#define ADVANCE_CRUMB_POINTER(cpusave, tmp1, tmp2) \
movq KRS_CURCRUMBIDX(cpusave), tmp1; \
cmpq $[KDI_NCRUMBS - 1], tmp1; \
jge 1f; \
\
addq $1, tmp1; \
movq tmp1, KRS_CURCRUMBIDX(cpusave); \
movq KRS_CURCRUMB(cpusave), tmp1; \
addq $KRM_SIZE, tmp1; \
jmp 2f; \
1: \
movq $0, KRS_CURCRUMBIDX(cpusave); \
leaq KRS_CRUMBS(cpusave), tmp1; \
2: movq tmp1, KRS_CURCRUMB(cpusave); \
\
movq $KDI_NCRUMBS, tmp2; \
3: movq $0, -4(tmp1, tmp2, 4); \
decq tmp2; \
jnz 3b
/*
 * Record `value' at byte offset `offset' within cpusave's current
 * crumb.  tmp is a scratch register and is left holding the crumb
 * pointer.
 */
#define ADD_CRUMB(cpusave, offset, value, tmp) \
movq KRS_CURCRUMB(cpusave), tmp; \
movq value, offset(tmp)
/*
 * kdi_nmiint: NMI entry point.  The debugger is not prepared to field
 * NMIs, so force a machine reset: clear %rcx and load through the
 * resulting NULL pointer, which faults with no usable IDT state and
 * triple-faults the CPU.
 */
ENTRY_NP(kdi_nmiint)
clrq %rcx
movq (%rcx), %rcx /* deliberate NULL dereference -> triple fault */
SET_SIZE(kdi_nmiint)
/*
 * kdi_cmnint / kdi_master_entry: common trap/interrupt entry for the
 * master CPU.  On entry the TRAPNO..SS portion of the frame has already
 * been pushed.  We save the full register state, switch to a
 * trustworthy GSBASE and %cr3, record a crumb, and then decide whether
 * this is a fresh debugger entry or a fault taken from within the
 * debugger itself.
 */
ENTRY_NP(kdi_cmnint)
ALTENTRY(kdi_master_entry)
/* Disable interrupts without perturbing %rax. */
pushq %rax
CLI(%rax)
popq %rax
/* Reserve room for the GP registers below the trap frame and save. */
subq $REG_OFF(KDIREG_TRAPNO), %rsp
KDI_SAVE_REGS(%rsp)
#ifdef __xpv
/*
 * Clear the hypervisor's saved upcall mask kept in the otherwise-unused
 * fifth byte of the %cs slot; leaving it set could only confuse later
 * frame handling.
 */
movb $0, REG_OFF(KDIREG_CS)+4(%rsp)
#endif
#if !defined(__xpv)
/*
 * Neither GSBASE nor KGSBASE can be trusted here (the kernel may or may
 * not have performed swapgs yet), so recover the kernel's GSBASE from
 * the GDT: sgdt into a 10-byte temporary, pass the base (bytes 2..9 of
 * the pseudo-descriptor) to kdi_gdt2gsbase(), then write the returned
 * value to MSR_AMD_GSBASE.  The incoming GSBASE was already preserved
 * by KDI_SAVE_REGS above.
 */
subq $10, %rsp
sgdt (%rsp)
movq 2(%rsp), %rdi /* GDT base -> arg 0 */
addq $10, %rsp
call kdi_gdt2gsbase
/* wrmsr takes the value in %edx:%eax, MSR number in %ecx. */
movq %rax, %rdx
shrq $32, %rdx
movl $MSR_AMD_GSBASE, %ecx
wrmsr
/*
 * The KPTI trampoline stashed the %cr3 we arrived with; record it in
 * the saved frame for restoration at resume.  If it isn't already the
 * debugger-safe %cr3, switch to the kernel's full %cr3 (when one is
 * recorded for this CPU).
 */
mov %gs:(CPU_KPTI_DBG+KPTI_TR_CR3), %rdx
mov %rdx, REG_OFF(KDIREG_CR3)(%rsp)
cmpq %rdx, kpti_safe_cr3
je .no_kcr3
mov %gs:CPU_KPTI_KCR3, %rdx
cmpq $0, %rdx
je .no_kcr3
mov %rdx, %cr3
.no_kcr3:
#endif
/* %rax = this CPU's cpusave area; the cpu id is left in %rbx. */
GET_CPUSAVE_ADDR
/* Leave a crumb describing this master entry. */
ADVANCE_CRUMB_POINTER(%rax, %rcx, %rdx)
ADD_CRUMB(%rax, KRM_CPU_STATE, $KDI_CPU_STATE_MASTER, %rdx)
movq REG_OFF(KDIREG_RIP)(%rsp), %rcx
ADD_CRUMB(%rax, KRM_PC, %rcx, %rdx)
ADD_CRUMB(%rax, KRM_SP, %rsp, %rdx)
movq REG_OFF(KDIREG_TRAPNO)(%rsp), %rcx
ADD_CRUMB(%rax, KRM_TRAPNO, %rcx, %rdx)
/* Keep the saved-register base in %rbp for the fault path below. */
movq %rsp, %rbp
pushq %rax /* cpusave, consumed by kdi_save_common_state */
/*
 * Was the trap taken from within the debugger, i.e. is %rsp inside one
 * of the kdi_memranges[0..kdi_nmemranges) ranges?  If the scan runs out
 * of ranges this is a normal debugger entry; otherwise treat it as a
 * fault raised by the debugger itself.
 */
leaq kdi_memranges, %rcx
movl kdi_nmemranges, %edx
1:
cmpq MR_BASE(%rcx), %rsp
jl 2f
cmpq MR_LIM(%rcx), %rsp
jg 2f
jmp 3f /* %rsp within [MR_BASE, MR_LIM] -> debugger fault */
2:
decl %edx
jz kdi_save_common_state
addq $MR_SIZE, %rcx
jmp 1b
3:
/*
 * Fault from within the debugger:
 * kdi_dvec_handle_fault(trapno, pc, sp, cpuid); %rbx still holds the
 * cpu id left there by GET_CPUSAVE_ADDR.
 */
movq REG_OFF(KDIREG_TRAPNO)(%rbp), %rdi
movq REG_OFF(KDIREG_RIP)(%rbp), %rsi
movq REG_OFF(KDIREG_RSP)(%rbp), %rdx
movq %rbx, %rcx
call kdi_dvec_handle_fault
jmp kdi_save_common_state
SET_SIZE(kdi_master_entry)
SET_SIZE(kdi_cmnint)
/*
 * kdi_slave_entry: entry point for slave CPUs summoned by the master.
 * We arrive here via a call, not a trap, so the caller's return PC is
 * at the top of the stack and no hardware trap frame exists; fake one
 * up so the saved state looks like every other debugger entry.
 */
ENTRY_NP(kdi_slave_entry)
/*
 * Build the fake frame.  These six pushes land in the RSP..TRAPNO slots
 * of the kdi register frame, one slot "rotated" relative to their final
 * homes (the caller's return PC sits just above them, in what becomes
 * the SS slot, and %ss lands in the RIP slot).  The rotation is
 * untangled after KDI_SAVE_REGS below.
 */
pushq %rsp
pushfq
CLI(%rax)
pushq $KCS_SEL
clrq %rax
movw %ss, %ax
pushq %rax
pushq $-1 /* phony error code */
pushq $-1 /* phony trap number */
subq $REG_OFF(KDIREG_TRAPNO), %rsp
KDI_SAVE_REGS(%rsp)
/* Record the %cr3 we arrived with for restoration at resume. */
movq %cr3, %rax
movq %rax, REG_OFF(KDIREG_CR3)(%rsp)
/*
 * Untangle the rotated frame.  Here the SS slot holds the caller's
 * return PC and the RIP slot holds %ss:
 *	%rax = return PC (read from the SS slot);
 *	SAVPC = PC, fixing the bogus value KDI_SAVE_REGS copied from the
 *	RIP slot;
 *	swap PC into the RIP slot, leaving %ss in %rax;
 *	store %ss into the SS slot;
 *	finally bump the saved %rsp past the return PC so it reflects
 *	the pre-call stack pointer.
 */
movq REG_OFF(KDIREG_SS)(%rsp), %rax
movq %rax, REG_OFF(KDIREG_SAVPC)(%rsp)
xchgq REG_OFF(KDIREG_RIP)(%rsp), %rax
movq %rax, REG_OFF(KDIREG_SS)(%rsp)
movq REG_OFF(KDIREG_RSP)(%rsp), %rax
addq $8, %rax
movq %rax, REG_OFF(KDIREG_RSP)(%rsp)
/* %rax = this CPU's cpusave area; leave a slave-entry crumb. */
GET_CPUSAVE_ADDR
ADVANCE_CRUMB_POINTER(%rax, %rcx, %rdx)
ADD_CRUMB(%rax, KRM_CPU_STATE, $KDI_CPU_STATE_SLAVE, %rdx)
movq REG_OFF(KDIREG_RIP)(%rsp), %rcx
ADD_CRUMB(%rax, KRM_PC, %rcx, %rdx)
movq REG_OFF(KDIREG_RSP)(%rsp), %rcx
ADD_CRUMB(%rax, KRM_SP, %rcx, %rdx)
ADD_CRUMB(%rax, KRM_TRAPNO, $-1, %rdx) /* no real trap number */
movq $KDI_CPU_STATE_SLAVE, KRS_CPU_STATE(%rax)
pushq %rax /* cpusave, consumed by kdi_save_common_state */
jmp kdi_save_common_state
SET_SIZE(kdi_slave_entry)
/*
 * kdi_save_common_state: state saving common to master and slave entry.
 * On entry the cpusave pointer is on top of the stack and %rsp points
 * at the saved gregs.  First ask kdi_trap_pass() whether the kernel
 * should field this event; if not, finish saving state (IDT/GDT, %cr0,
 * debug registers) and enter the debugger proper, falling through to
 * kdi_resume when it returns.
 */
ENTRY_NP(kdi_save_common_state)
popq %rdi /* cpusave */
movq %rsp, KRS_GREGS(%rdi) /* remember where the gregs live */
/*
 * kdi_trap_pass(cpusave) != 0 means the kernel's own handlers should
 * take this trap; cpusave stays pushed for kdi_pass_to_kernel to pop.
 */
pushq %rdi
call kdi_trap_pass
testq %rax, %rax
jnz kdi_pass_to_kernel
popq %rax /* cpusave */
SAVE_IDTGDT
#if !defined(__xpv)
/*
 * Save %cr0 and clear CR0_WP so the debugger may write through
 * write-protected mappings while it runs.
 */
movq %cr0, %rcx
movq %rcx, KRS_CR0(%rax)
andq $_BITNOT(CR0_WP), %rcx
movq %rcx, %cr0
#endif
/*
 * Save the debug registers via kdi_dreg_get(regno) and disable the
 * hardware watchpoints (clear the WPALLEN enable bits in %dr7) for the
 * duration of the debugger session.  cpusave is kept in the
 * call-preserved %r15 across the calls.
 */
movq %rax, %r15
movl $7, %edi
call kdi_dreg_get
movq %rax, KRS_DRCTL(%r15)
andq $_BITNOT(KDIREG_DRCTL_WPALLEN_MASK), %rax
movq %rax, %rsi
movl $7, %edi
call kdi_dreg_set
movl $6, %edi
call kdi_dreg_get
movq %rax, KRS_DRSTAT(%r15)
movl $0, %edi
call kdi_dreg_get
movq %rax, KRS_DROFF(0)(%r15)
movl $1, %edi
call kdi_dreg_get
movq %rax, KRS_DROFF(1)(%r15)
movl $2, %edi
call kdi_dreg_get
movq %rax, KRS_DROFF(2)(%r15)
movl $3, %edi
call kdi_dreg_get
movq %rax, KRS_DROFF(3)(%r15)
movq %r15, %rax
/* Clear %rbp so stack traces terminate at this frame. */
clrq %rbp
pushq %rax
movq %rax, %rdi
call kdi_debugger_entry /* kdi_debugger_entry(cpusave) */
popq %rdi /* cpusave -> arg for kdi_resume */
jmp kdi_resume
SET_SIZE(kdi_save_common_state)
/*
 * kdi_resume: return control to the interrupted world.  %rdi = cpusave.
 * Restores %cr0 and the debug registers, rebuilds register state from
 * the saved gregs, and irets.  On bare metal the return %cr3 and the
 * real %r13 value are staged in the per-cpu kpti_dbg area, with the
 * saved R13 slot redirected to point there, so the tr_iret_kdi
 * trampoline can complete the page-table switch after registers are
 * reloaded.
 */
ENTRY_NP(kdi_resume)
#if !defined(__xpv)
/* Put back the %cr0 (including CR0_WP) saved on entry. */
movq KRS_CR0(%rdi), %rdx
movq %rdx, %cr0
#endif
/* Re-arm the debugger's breakpoints/watchpoints before leaving. */
KDI_RESTORE_DEBUGGING_STATE
movq KRS_GREGS(%rdi), %rsp
#if !defined(__xpv)
/* Stage %r13 and the target %cr3 for the KPTI return trampoline. */
movq %gs:CPU_SELF, %r13
addq $CPU_KPTI_DBG, %r13
movq REG_OFF(KDIREG_R13)(%rsp), %rdx
movq %rdx, KPTI_R13(%r13)
movq REG_OFF(KDIREG_CR3)(%rsp), %rdx
movq %rdx, KPTI_TR_CR3(%r13)
movq %r13, REG_OFF(KDIREG_R13)(%rsp)
#endif
KDI_RESTORE_REGS(%rsp)
addq $REG_OFF(KDIREG_RIP), %rsp /* strip down to the iret frame */
#if !defined(__xpv)
jmp tr_iret_kdi
#else
IRET
#endif
SET_SIZE(kdi_resume)
/*
 * kdi_pass_to_kernel: hand a trap back to the kernel's own handlers.
 * Entered from kdi_save_common_state with the cpusave pointer still on
 * the stack and kdi_trap_pass()'s nonzero return value in %rax (a
 * value of 2 means the saved %cr3 must be reinstated first).  Marks the
 * CPU as out of the debugger, then dispatches on the saved trap number;
 * anything unrecognized is re-raised as a debugger-entry trap.
 */
ENTRY_NP(kdi_pass_to_kernel)
popq %rdi /* cpusave */
movq $KDI_CPU_STATE_NONE, KRS_CPU_STATE(%rdi)
movq KRS_GREGS(%rdi), %rsp
cmpq $2, %rax
jne no_restore_cr3
movq REG_OFF(KDIREG_CR3)(%rsp), %r11
movq %r11, %cr3
no_restore_cr3:
movq REG_OFF(KDIREG_TRAPNO)(%rsp), %rdi
cmpq $T_SGLSTP, %rdi
je kdi_pass_dbgtrap
cmpq $T_BPTFLT, %rdi
je kdi_pass_brktrap
cmpq $T_DBGENTR, %rdi
je kdi_pass_invaltrap
/* Unknown trap number: re-enter the debugger. */
int $T_DBGENTR
/*
 * Restore the saved register state, strip the frame back to the iret
 * portion (leaving the stack as the original trap left it), and
 * tail-jump to the kernel's handler `name'.
 */
#define CALL_TRAP_HANDLER(name) \
KDI_RESTORE_REGS(%rsp); \
\
addq $REG_OFF(KDIREG_RIP), %rsp; \
XPV_TRAP_PUSH; \
jmp %cs:name
kdi_pass_dbgtrap:
CALL_TRAP_HANDLER(dbgtrap)
kdi_pass_brktrap:
CALL_TRAP_HANDLER(brktrap)
kdi_pass_invaltrap:
CALL_TRAP_HANDLER(invaltrap)
SET_SIZE(kdi_pass_to_kernel)
/*
 * kdi_reboot: best-effort machine reset.  First try the platform
 * shutdown hook via (*psm_shutdownf)(AD_BOOT, A_SHUTDOWN); if that
 * returns, fall back to the hypervisor shutdown hypercall or the
 * bare-metal reset() routine.  Does not return.
 */
ENTRY_NP(kdi_reboot)
movl $AD_BOOT, %edi
movl $A_SHUTDOWN, %esi
call *psm_shutdownf
#if defined(__xpv)
movl $SHUTDOWN_reboot, %edi
call HYPERVISOR_shutdown
#else
call reset
#endif
/*NOTREACHED*/
SET_SIZE(kdi_reboot)
/*
 * void kdi_cpu_debug_init(void)
 * Load the debugger's breakpoint configuration into this CPU's debug
 * registers (see KDI_RESTORE_DEBUGGING_STATE).
 * NOTE(review): the macro clobbers %r15 -- a callee-saved register
 * under the SysV AMD64 ABI -- yet only %rbx is saved here (and the
 * macro does not touch %rbx at all).  Confirm callers tolerate the
 * %r15 clobber, or whether %r15 should be preserved instead.
 */
ENTRY_NP(kdi_cpu_debug_init)
pushq %rbp
movq %rsp, %rbp
pushq %rbx
KDI_RESTORE_DEBUGGING_STATE
popq %rbx
leave
ret
SET_SIZE(kdi_cpu_debug_init)
/*
 * Debug-register accessor generators:
 *	GETDREG(name, r) defines uint64_t name(void): returns %r in %rax.
 *	SETDREG(name, r) defines void name(uint64_t v): loads %r from
 *	%rdi (SysV arg 0).
 * Instantiated below for %dr0-%dr3 (breakpoint addresses), %dr6
 * (status) and %dr7 (control).
 */
#define GETDREG(name, r) \
ENTRY_NP(name); \
movq r, %rax; \
ret; \
SET_SIZE(name)
#define SETDREG(name, r) \
ENTRY_NP(name); \
movq %rdi, r; \
ret; \
SET_SIZE(name)
GETDREG(kdi_getdr0, %dr0)
GETDREG(kdi_getdr1, %dr1)
GETDREG(kdi_getdr2, %dr2)
GETDREG(kdi_getdr3, %dr3)
GETDREG(kdi_getdr6, %dr6)
GETDREG(kdi_getdr7, %dr7)
SETDREG(kdi_setdr0, %dr0)
SETDREG(kdi_setdr1, %dr1)
SETDREG(kdi_setdr2, %dr2)
SETDREG(kdi_setdr3, %dr3)
SETDREG(kdi_setdr6, %dr6)
SETDREG(kdi_setdr7, %dr7)