sys/arch/amd64/amd64/vmm_machdep.c
100
int svm_vmgexit_sync_host(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1002
vcpu_readregs_vmx(struct vcpu *vcpu, uint64_t regmask, int loadvmcs,
sys/arch/amd64/amd64/vmm_machdep.c
101
int svm_vmgexit_sync_guest(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1015
if (vcpu_reload_vmcs_vmx(vcpu))
sys/arch/amd64/amd64/vmm_machdep.c
102
int svm_handle_vmgexit(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1024
KASSERT(pa == vcpu->vc_control_pa);
sys/arch/amd64/amd64/vmm_machdep.c
1028
gprs[VCPU_REGS_RAX] = vcpu->vc_gueststate.vg_rax;
sys/arch/amd64/amd64/vmm_machdep.c
1029
gprs[VCPU_REGS_RBX] = vcpu->vc_gueststate.vg_rbx;
sys/arch/amd64/amd64/vmm_machdep.c
103
int svm_handle_efercr(struct vcpu *, uint64_t);
sys/arch/amd64/amd64/vmm_machdep.c
1030
gprs[VCPU_REGS_RCX] = vcpu->vc_gueststate.vg_rcx;
sys/arch/amd64/amd64/vmm_machdep.c
1031
gprs[VCPU_REGS_RDX] = vcpu->vc_gueststate.vg_rdx;
sys/arch/amd64/amd64/vmm_machdep.c
1032
gprs[VCPU_REGS_RSI] = vcpu->vc_gueststate.vg_rsi;
sys/arch/amd64/amd64/vmm_machdep.c
1033
gprs[VCPU_REGS_RDI] = vcpu->vc_gueststate.vg_rdi;
sys/arch/amd64/amd64/vmm_machdep.c
1034
gprs[VCPU_REGS_R8] = vcpu->vc_gueststate.vg_r8;
sys/arch/amd64/amd64/vmm_machdep.c
1035
gprs[VCPU_REGS_R9] = vcpu->vc_gueststate.vg_r9;
sys/arch/amd64/amd64/vmm_machdep.c
1036
gprs[VCPU_REGS_R10] = vcpu->vc_gueststate.vg_r10;
sys/arch/amd64/amd64/vmm_machdep.c
1037
gprs[VCPU_REGS_R11] = vcpu->vc_gueststate.vg_r11;
sys/arch/amd64/amd64/vmm_machdep.c
1038
gprs[VCPU_REGS_R12] = vcpu->vc_gueststate.vg_r12;
sys/arch/amd64/amd64/vmm_machdep.c
1039
gprs[VCPU_REGS_R13] = vcpu->vc_gueststate.vg_r13;
sys/arch/amd64/amd64/vmm_machdep.c
104
int svm_get_iflag(struct vcpu *, uint64_t);
sys/arch/amd64/amd64/vmm_machdep.c
1040
gprs[VCPU_REGS_R14] = vcpu->vc_gueststate.vg_r14;
sys/arch/amd64/amd64/vmm_machdep.c
1041
gprs[VCPU_REGS_R15] = vcpu->vc_gueststate.vg_r15;
sys/arch/amd64/amd64/vmm_machdep.c
1042
gprs[VCPU_REGS_RBP] = vcpu->vc_gueststate.vg_rbp;
sys/arch/amd64/amd64/vmm_machdep.c
1043
gprs[VCPU_REGS_RIP] = vcpu->vc_gueststate.vg_rip;
sys/arch/amd64/amd64/vmm_machdep.c
105
int svm_handle_msr(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
106
int vmm_handle_xsetbv(struct vcpu *, uint64_t *);
sys/arch/amd64/amd64/vmm_machdep.c
107
int vmx_handle_xsetbv(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
108
int svm_handle_xsetbv(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1083
crs[VCPU_REGS_CR2] = vcpu->vc_gueststate.vg_cr2;
sys/arch/amd64/amd64/vmm_machdep.c
1084
crs[VCPU_REGS_XCR0] = vcpu->vc_gueststate.vg_xcr0;
sys/arch/amd64/amd64/vmm_machdep.c
109
int vmm_handle_cpuid(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
110
int vmx_handle_rdmsr(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1101
msr_store = (struct vmx_msr_store *)vcpu->vc_vmx_msr_exit_save_va;
sys/arch/amd64/amd64/vmm_machdep.c
111
int vmx_handle_wrmsr(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1110
drs[VCPU_REGS_DR0] = vcpu->vc_gueststate.vg_dr0;
sys/arch/amd64/amd64/vmm_machdep.c
1111
drs[VCPU_REGS_DR1] = vcpu->vc_gueststate.vg_dr1;
sys/arch/amd64/amd64/vmm_machdep.c
1112
drs[VCPU_REGS_DR2] = vcpu->vc_gueststate.vg_dr2;
sys/arch/amd64/amd64/vmm_machdep.c
1113
drs[VCPU_REGS_DR3] = vcpu->vc_gueststate.vg_dr3;
sys/arch/amd64/amd64/vmm_machdep.c
1114
drs[VCPU_REGS_DR6] = vcpu->vc_gueststate.vg_dr6;
sys/arch/amd64/amd64/vmm_machdep.c
112
int vmx_handle_cr0_write(struct vcpu *, uint64_t);
sys/arch/amd64/amd64/vmm_machdep.c
113
int vmx_handle_cr4_write(struct vcpu *, uint64_t);
sys/arch/amd64/amd64/vmm_machdep.c
114
int vmx_handle_cr(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1141
vcpu_readregs_svm(struct vcpu *vcpu, uint64_t regmask,
sys/arch/amd64/amd64/vmm_machdep.c
115
int svm_handle_inout(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1150
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
1154
gprs[VCPU_REGS_RBX] = vcpu->vc_gueststate.vg_rbx;
sys/arch/amd64/amd64/vmm_machdep.c
1155
gprs[VCPU_REGS_RCX] = vcpu->vc_gueststate.vg_rcx;
sys/arch/amd64/amd64/vmm_machdep.c
1156
gprs[VCPU_REGS_RDX] = vcpu->vc_gueststate.vg_rdx;
sys/arch/amd64/amd64/vmm_machdep.c
1157
gprs[VCPU_REGS_RSI] = vcpu->vc_gueststate.vg_rsi;
sys/arch/amd64/amd64/vmm_machdep.c
1158
gprs[VCPU_REGS_RDI] = vcpu->vc_gueststate.vg_rdi;
sys/arch/amd64/amd64/vmm_machdep.c
1159
gprs[VCPU_REGS_R8] = vcpu->vc_gueststate.vg_r8;
sys/arch/amd64/amd64/vmm_machdep.c
116
int vmx_handle_inout(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1160
gprs[VCPU_REGS_R9] = vcpu->vc_gueststate.vg_r9;
sys/arch/amd64/amd64/vmm_machdep.c
1161
gprs[VCPU_REGS_R10] = vcpu->vc_gueststate.vg_r10;
sys/arch/amd64/amd64/vmm_machdep.c
1162
gprs[VCPU_REGS_R11] = vcpu->vc_gueststate.vg_r11;
sys/arch/amd64/amd64/vmm_machdep.c
1163
gprs[VCPU_REGS_R12] = vcpu->vc_gueststate.vg_r12;
sys/arch/amd64/amd64/vmm_machdep.c
1164
gprs[VCPU_REGS_R13] = vcpu->vc_gueststate.vg_r13;
sys/arch/amd64/amd64/vmm_machdep.c
1165
gprs[VCPU_REGS_R14] = vcpu->vc_gueststate.vg_r14;
sys/arch/amd64/amd64/vmm_machdep.c
1166
gprs[VCPU_REGS_R15] = vcpu->vc_gueststate.vg_r15;
sys/arch/amd64/amd64/vmm_machdep.c
1167
gprs[VCPU_REGS_RBP] = vcpu->vc_gueststate.vg_rbp;
sys/arch/amd64/amd64/vmm_machdep.c
117
int svm_handle_hlt(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
118
int vmx_handle_hlt(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
119
int vmm_inject_ud(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
120
int vmm_inject_gp(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
121
int vmm_inject_db(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
122
void vmx_handle_intr(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
123
void vmx_handle_misc_enable_msr(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1240
crs[VCPU_REGS_CR2] = vcpu->vc_gueststate.vg_cr2;
sys/arch/amd64/amd64/vmm_machdep.c
1241
crs[VCPU_REGS_XCR0] = vcpu->vc_gueststate.vg_xcr0;
sys/arch/amd64/amd64/vmm_machdep.c
1254
drs[VCPU_REGS_DR0] = vcpu->vc_gueststate.vg_dr0;
sys/arch/amd64/amd64/vmm_machdep.c
1255
drs[VCPU_REGS_DR1] = vcpu->vc_gueststate.vg_dr1;
sys/arch/amd64/amd64/vmm_machdep.c
1256
drs[VCPU_REGS_DR2] = vcpu->vc_gueststate.vg_dr2;
sys/arch/amd64/amd64/vmm_machdep.c
1257
drs[VCPU_REGS_DR3] = vcpu->vc_gueststate.vg_dr3;
sys/arch/amd64/amd64/vmm_machdep.c
1281
vcpu_writeregs_vmx(struct vcpu *vcpu, uint64_t regmask, int loadvmcs,
sys/arch/amd64/amd64/vmm_machdep.c
129
int vmm_get_guest_cpu_cpl(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1295
if (vcpu_reload_vmcs_vmx(vcpu))
sys/arch/amd64/amd64/vmm_machdep.c
130
int vmm_get_guest_cpu_mode(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1304
KASSERT(pa == vcpu->vc_control_pa);
sys/arch/amd64/amd64/vmm_machdep.c
1308
vcpu->vc_gueststate.vg_rax = gprs[VCPU_REGS_RAX];
sys/arch/amd64/amd64/vmm_machdep.c
1309
vcpu->vc_gueststate.vg_rbx = gprs[VCPU_REGS_RBX];
sys/arch/amd64/amd64/vmm_machdep.c
131
int svm_fault_page(struct vcpu *, paddr_t);
sys/arch/amd64/amd64/vmm_machdep.c
1310
vcpu->vc_gueststate.vg_rcx = gprs[VCPU_REGS_RCX];
sys/arch/amd64/amd64/vmm_machdep.c
1311
vcpu->vc_gueststate.vg_rdx = gprs[VCPU_REGS_RDX];
sys/arch/amd64/amd64/vmm_machdep.c
1312
vcpu->vc_gueststate.vg_rsi = gprs[VCPU_REGS_RSI];
sys/arch/amd64/amd64/vmm_machdep.c
1313
vcpu->vc_gueststate.vg_rdi = gprs[VCPU_REGS_RDI];
sys/arch/amd64/amd64/vmm_machdep.c
1314
vcpu->vc_gueststate.vg_r8 = gprs[VCPU_REGS_R8];
sys/arch/amd64/amd64/vmm_machdep.c
1315
vcpu->vc_gueststate.vg_r9 = gprs[VCPU_REGS_R9];
sys/arch/amd64/amd64/vmm_machdep.c
1316
vcpu->vc_gueststate.vg_r10 = gprs[VCPU_REGS_R10];
sys/arch/amd64/amd64/vmm_machdep.c
1317
vcpu->vc_gueststate.vg_r11 = gprs[VCPU_REGS_R11];
sys/arch/amd64/amd64/vmm_machdep.c
1318
vcpu->vc_gueststate.vg_r12 = gprs[VCPU_REGS_R12];
sys/arch/amd64/amd64/vmm_machdep.c
1319
vcpu->vc_gueststate.vg_r13 = gprs[VCPU_REGS_R13];
sys/arch/amd64/amd64/vmm_machdep.c
132
int vmx_fault_page(struct vcpu *, paddr_t);
sys/arch/amd64/amd64/vmm_machdep.c
1320
vcpu->vc_gueststate.vg_r14 = gprs[VCPU_REGS_R14];
sys/arch/amd64/amd64/vmm_machdep.c
1321
vcpu->vc_gueststate.vg_r15 = gprs[VCPU_REGS_R15];
sys/arch/amd64/amd64/vmm_machdep.c
1322
vcpu->vc_gueststate.vg_rbp = gprs[VCPU_REGS_RBP];
sys/arch/amd64/amd64/vmm_machdep.c
1323
vcpu->vc_gueststate.vg_rip = gprs[VCPU_REGS_RIP];
sys/arch/amd64/amd64/vmm_machdep.c
133
int vmx_handle_np_fault(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
134
int svm_handle_np_fault(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
135
int vmm_alloc_vpid_vcpu(uint16_t *, struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1364
vcpu->vc_gueststate.vg_xcr0 = crs[VCPU_REGS_XCR0];
sys/arch/amd64/amd64/vmm_machdep.c
137
int vmm_alloc_asid(uint16_t *, struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1381
msr_store = (struct vmx_msr_store *)vcpu->vc_vmx_msr_exit_save_va;
sys/arch/amd64/amd64/vmm_machdep.c
1390
vcpu->vc_gueststate.vg_dr0 = drs[VCPU_REGS_DR0];
sys/arch/amd64/amd64/vmm_machdep.c
1391
vcpu->vc_gueststate.vg_dr1 = drs[VCPU_REGS_DR1];
sys/arch/amd64/amd64/vmm_machdep.c
1392
vcpu->vc_gueststate.vg_dr2 = drs[VCPU_REGS_DR2];
sys/arch/amd64/amd64/vmm_machdep.c
1393
vcpu->vc_gueststate.vg_dr3 = drs[VCPU_REGS_DR3];
sys/arch/amd64/amd64/vmm_machdep.c
1394
vcpu->vc_gueststate.vg_dr6 = drs[VCPU_REGS_DR6];
sys/arch/amd64/amd64/vmm_machdep.c
1405
if (vmclear(&vcpu->vc_control_pa))
sys/arch/amd64/amd64/vmm_machdep.c
1407
atomic_swap_uint(&vcpu->vc_vmx_vmcs_state, VMCS_CLEARED);
sys/arch/amd64/amd64/vmm_machdep.c
1427
vcpu_writeregs_svm(struct vcpu *vcpu, uint64_t regmask,
sys/arch/amd64/amd64/vmm_machdep.c
143
void svm_setmsrbr(struct vcpu *, uint32_t);
sys/arch/amd64/amd64/vmm_machdep.c
1436
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
1439
vcpu->vc_gueststate.vg_rax = gprs[VCPU_REGS_RAX];
sys/arch/amd64/amd64/vmm_machdep.c
144
void svm_setmsrbw(struct vcpu *, uint32_t);
sys/arch/amd64/amd64/vmm_machdep.c
1440
vcpu->vc_gueststate.vg_rbx = gprs[VCPU_REGS_RBX];
sys/arch/amd64/amd64/vmm_machdep.c
1441
vcpu->vc_gueststate.vg_rcx = gprs[VCPU_REGS_RCX];
sys/arch/amd64/amd64/vmm_machdep.c
1442
vcpu->vc_gueststate.vg_rdx = gprs[VCPU_REGS_RDX];
sys/arch/amd64/amd64/vmm_machdep.c
1443
vcpu->vc_gueststate.vg_rsi = gprs[VCPU_REGS_RSI];
sys/arch/amd64/amd64/vmm_machdep.c
1444
vcpu->vc_gueststate.vg_rdi = gprs[VCPU_REGS_RDI];
sys/arch/amd64/amd64/vmm_machdep.c
1445
vcpu->vc_gueststate.vg_r8 = gprs[VCPU_REGS_R8];
sys/arch/amd64/amd64/vmm_machdep.c
1446
vcpu->vc_gueststate.vg_r9 = gprs[VCPU_REGS_R9];
sys/arch/amd64/amd64/vmm_machdep.c
1447
vcpu->vc_gueststate.vg_r10 = gprs[VCPU_REGS_R10];
sys/arch/amd64/amd64/vmm_machdep.c
1448
vcpu->vc_gueststate.vg_r11 = gprs[VCPU_REGS_R11];
sys/arch/amd64/amd64/vmm_machdep.c
1449
vcpu->vc_gueststate.vg_r12 = gprs[VCPU_REGS_R12];
sys/arch/amd64/amd64/vmm_machdep.c
145
void svm_setmsrbrw(struct vcpu *, uint32_t);
sys/arch/amd64/amd64/vmm_machdep.c
1450
vcpu->vc_gueststate.vg_r13 = gprs[VCPU_REGS_R13];
sys/arch/amd64/amd64/vmm_machdep.c
1451
vcpu->vc_gueststate.vg_r14 = gprs[VCPU_REGS_R14];
sys/arch/amd64/amd64/vmm_machdep.c
1452
vcpu->vc_gueststate.vg_r15 = gprs[VCPU_REGS_R15];
sys/arch/amd64/amd64/vmm_machdep.c
1453
vcpu->vc_gueststate.vg_rbp = gprs[VCPU_REGS_RBP];
sys/arch/amd64/amd64/vmm_machdep.c
1454
vcpu->vc_gueststate.vg_rip = gprs[VCPU_REGS_RIP];
sys/arch/amd64/amd64/vmm_machdep.c
146
void vmx_setmsrbr(struct vcpu *, uint32_t);
sys/arch/amd64/amd64/vmm_machdep.c
147
void vmx_setmsrbw(struct vcpu *, uint32_t);
sys/arch/amd64/amd64/vmm_machdep.c
148
void vmx_setmsrbrw(struct vcpu *, uint32_t);
sys/arch/amd64/amd64/vmm_machdep.c
149
void svm_set_clean(struct vcpu *, uint32_t);
sys/arch/amd64/amd64/vmm_machdep.c
150
void svm_set_dirty(struct vcpu *, uint32_t);
sys/arch/amd64/amd64/vmm_machdep.c
1513
vcpu->vc_gueststate.vg_cr2 = crs[VCPU_REGS_CR2];
sys/arch/amd64/amd64/vmm_machdep.c
1514
vcpu->vc_gueststate.vg_xcr0 = crs[VCPU_REGS_XCR0];
sys/arch/amd64/amd64/vmm_machdep.c
1527
vcpu->vc_gueststate.vg_dr0 = drs[VCPU_REGS_DR0];
sys/arch/amd64/amd64/vmm_machdep.c
1528
vcpu->vc_gueststate.vg_dr1 = drs[VCPU_REGS_DR1];
sys/arch/amd64/amd64/vmm_machdep.c
1529
vcpu->vc_gueststate.vg_dr2 = drs[VCPU_REGS_DR2];
sys/arch/amd64/amd64/vmm_machdep.c
153
int vmm_gpa_is_valid(struct vcpu *vcpu, paddr_t gpa, size_t obj_size);
sys/arch/amd64/amd64/vmm_machdep.c
1530
vcpu->vc_gueststate.vg_dr3 = drs[VCPU_REGS_DR3];
sys/arch/amd64/amd64/vmm_machdep.c
154
void vmm_init_pvclock(struct vcpu *, paddr_t);
sys/arch/amd64/amd64/vmm_machdep.c
155
int vmm_update_pvclock(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1552
vcpu_reset_regs_svm(struct vcpu *vcpu, struct vcpu_reg_state *vrs)
sys/arch/amd64/amd64/vmm_machdep.c
1557
vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
156
void vmm_pv_wall_clock(struct vcpu *, paddr_t);
sys/arch/amd64/amd64/vmm_machdep.c
1597
if (xsave_mask && !vcpu->vc_seves)
sys/arch/amd64/amd64/vmm_machdep.c
160
static int vmx_remote_vmclear(struct cpu_info*, struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1600
if (vcpu->vc_seves) {
sys/arch/amd64/amd64/vmm_machdep.c
1608
memset((uint8_t *)vcpu->vc_svm_ioio_va, 0xFF, 3 * PAGE_SIZE);
sys/arch/amd64/amd64/vmm_machdep.c
1609
vmcb->v_iopm_pa = (uint64_t)(vcpu->vc_svm_ioio_pa);
sys/arch/amd64/amd64/vmm_machdep.c
1612
memset((uint8_t *)vcpu->vc_msr_bitmap_va, 0xFF, 2 * PAGE_SIZE);
sys/arch/amd64/amd64/vmm_machdep.c
1613
vmcb->v_msrpm_pa = (uint64_t)(vcpu->vc_msr_bitmap_pa);
sys/arch/amd64/amd64/vmm_machdep.c
1614
svm_setmsrbrw(vcpu, MSR_IA32_FEATURE_CONTROL);
sys/arch/amd64/amd64/vmm_machdep.c
1615
svm_setmsrbrw(vcpu, MSR_SYSENTER_CS);
sys/arch/amd64/amd64/vmm_machdep.c
1616
svm_setmsrbrw(vcpu, MSR_SYSENTER_ESP);
sys/arch/amd64/amd64/vmm_machdep.c
1617
svm_setmsrbrw(vcpu, MSR_SYSENTER_EIP);
sys/arch/amd64/amd64/vmm_machdep.c
1618
svm_setmsrbrw(vcpu, MSR_STAR);
sys/arch/amd64/amd64/vmm_machdep.c
1619
svm_setmsrbrw(vcpu, MSR_LSTAR);
sys/arch/amd64/amd64/vmm_machdep.c
1620
svm_setmsrbrw(vcpu, MSR_CSTAR);
sys/arch/amd64/amd64/vmm_machdep.c
1621
svm_setmsrbrw(vcpu, MSR_SFMASK);
sys/arch/amd64/amd64/vmm_machdep.c
1622
svm_setmsrbrw(vcpu, MSR_FSBASE);
sys/arch/amd64/amd64/vmm_machdep.c
1623
svm_setmsrbrw(vcpu, MSR_GSBASE);
sys/arch/amd64/amd64/vmm_machdep.c
1624
svm_setmsrbrw(vcpu, MSR_KERNELGSBASE);
sys/arch/amd64/amd64/vmm_machdep.c
1627
svm_setmsrbrw(vcpu, MSR_SEV_STATUS);
sys/arch/amd64/amd64/vmm_machdep.c
1629
if (vcpu->vc_seves) {
sys/arch/amd64/amd64/vmm_machdep.c
1631
svm_setmsrbrw(vcpu, MSR_SEV_GHCB);
sys/arch/amd64/amd64/vmm_machdep.c
1634
svm_setmsrbr(vcpu, MSR_XSS);
sys/arch/amd64/amd64/vmm_machdep.c
164
void vmx_vcpu_dump_regs(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1641
svm_setmsrbrw(vcpu, MSR_EFER);
sys/arch/amd64/amd64/vmm_machdep.c
1644
svm_setmsrbr(vcpu, MSR_EFER);
sys/arch/amd64/amd64/vmm_machdep.c
1648
svm_setmsrbr(vcpu, MSR_TSC);
sys/arch/amd64/amd64/vmm_machdep.c
165
void vmx_dump_vmcs(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1651
svm_setmsrbr(vcpu, MSR_HWCR);
sys/arch/amd64/amd64/vmm_machdep.c
1652
svm_setmsrbr(vcpu, MSR_PSTATEDEF(0));
sys/arch/amd64/amd64/vmm_machdep.c
1655
vmcb->v_asid = vcpu->vc_vpid;
sys/arch/amd64/amd64/vmm_machdep.c
1671
vmcb->v_n_cr3 = vcpu->vc_parent->vm_pmap->pm_pdirpa;
sys/arch/amd64/amd64/vmm_machdep.c
1674
if (vcpu->vc_sev)
sys/arch/amd64/amd64/vmm_machdep.c
1678
if (vcpu->vc_seves) {
sys/arch/amd64/amd64/vmm_machdep.c
1683
vmcb->v_vmsa_pa = vcpu->vc_svm_vmsa_pa;
sys/arch/amd64/amd64/vmm_machdep.c
1689
if ((ret = vcpu_writeregs_svm(vcpu, VM_RWREGS_ALL, vrs)) != 0)
sys/arch/amd64/amd64/vmm_machdep.c
1693
vcpu->vc_gueststate.vg_xcr0 = XFEATURE_X87 & xsave_mask;
sys/arch/amd64/amd64/vmm_machdep.c
1695
vcpu->vc_parent->vm_pmap->eptp = 0;
sys/arch/amd64/amd64/vmm_machdep.c
1697
ret = vcpu_svm_init_vmsa(vcpu, vrs);
sys/arch/amd64/amd64/vmm_machdep.c
1708
vcpu_svm_init_vmsa(struct vcpu *vcpu, struct vcpu_reg_state *vrs)
sys/arch/amd64/amd64/vmm_machdep.c
1711
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
1714
if (!vcpu->vc_seves)
sys/arch/amd64/amd64/vmm_machdep.c
1717
vmsa = (struct vmsa *)vcpu->vc_svm_vmsa_va;
sys/arch/amd64/amd64/vmm_machdep.c
1740
vmsa->v_xcr0 = vcpu->vc_gueststate.vg_xcr0;
sys/arch/amd64/amd64/vmm_machdep.c
1759
svm_setmsrbr(struct vcpu *vcpu, uint32_t msr)
sys/arch/amd64/amd64/vmm_machdep.c
1764
msrs = (uint8_t *)vcpu->vc_msr_bitmap_va;
sys/arch/amd64/amd64/vmm_machdep.c
1800
svm_setmsrbw(struct vcpu *vcpu, uint32_t msr)
sys/arch/amd64/amd64/vmm_machdep.c
1805
msrs = (uint8_t *)vcpu->vc_msr_bitmap_va;
sys/arch/amd64/amd64/vmm_machdep.c
181
const char *vmm_decode_cpu_mode(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
1841
svm_setmsrbrw(struct vcpu *vcpu, uint32_t msr)
sys/arch/amd64/amd64/vmm_machdep.c
1843
svm_setmsrbr(vcpu, msr);
sys/arch/amd64/amd64/vmm_machdep.c
1844
svm_setmsrbw(vcpu, msr);
sys/arch/amd64/amd64/vmm_machdep.c
1857
vmx_setmsrbr(struct vcpu *vcpu, uint32_t msr)
sys/arch/amd64/amd64/vmm_machdep.c
1862
msrs = (uint8_t *)vcpu->vc_msr_bitmap_va;
sys/arch/amd64/amd64/vmm_machdep.c
1889
vmx_setmsrbw(struct vcpu *vcpu, uint32_t msr)
sys/arch/amd64/amd64/vmm_machdep.c
1894
msrs = (uint8_t *)vcpu->vc_msr_bitmap_va;
sys/arch/amd64/amd64/vmm_machdep.c
1921
vmx_setmsrbrw(struct vcpu *vcpu, uint32_t msr)
sys/arch/amd64/amd64/vmm_machdep.c
1923
vmx_setmsrbr(vcpu, msr);
sys/arch/amd64/amd64/vmm_machdep.c
1924
vmx_setmsrbw(vcpu, msr);
sys/arch/amd64/amd64/vmm_machdep.c
1944
svm_set_clean(struct vcpu *vcpu, uint32_t value)
sys/arch/amd64/amd64/vmm_machdep.c
1952
vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
1971
svm_set_dirty(struct vcpu *vcpu, uint32_t value)
sys/arch/amd64/amd64/vmm_machdep.c
1979
vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
1998
vcpu_reset_regs_vmx(struct vcpu *vcpu, struct vcpu_reg_state *vrs)
sys/arch/amd64/amd64/vmm_machdep.c
2008
rw_assert_wrlock(&vcpu->vc_lock);
sys/arch/amd64/amd64/vmm_machdep.c
2012
if (vcpu_reload_vmcs_vmx(vcpu)) {
sys/arch/amd64/amd64/vmm_machdep.c
2023
KASSERT(pa == vcpu->vc_control_pa);
sys/arch/amd64/amd64/vmm_machdep.c
2027
vcpu->vc_vmx_basic = rdmsr(IA32_VMX_BASIC);
sys/arch/amd64/amd64/vmm_machdep.c
2028
vcpu->vc_vmx_entry_ctls = rdmsr(IA32_VMX_ENTRY_CTLS);
sys/arch/amd64/amd64/vmm_machdep.c
2029
vcpu->vc_vmx_exit_ctls = rdmsr(IA32_VMX_EXIT_CTLS);
sys/arch/amd64/amd64/vmm_machdep.c
2030
vcpu->vc_vmx_pinbased_ctls = rdmsr(IA32_VMX_PINBASED_CTLS);
sys/arch/amd64/amd64/vmm_machdep.c
2031
vcpu->vc_vmx_procbased_ctls = rdmsr(IA32_VMX_PROCBASED_CTLS);
sys/arch/amd64/amd64/vmm_machdep.c
2034
if (vcpu->vc_vmx_basic & IA32_VMX_TRUE_CTLS_AVAIL) {
sys/arch/amd64/amd64/vmm_machdep.c
2035
vcpu->vc_vmx_true_entry_ctls = rdmsr(IA32_VMX_TRUE_ENTRY_CTLS);
sys/arch/amd64/amd64/vmm_machdep.c
2036
vcpu->vc_vmx_true_exit_ctls = rdmsr(IA32_VMX_TRUE_EXIT_CTLS);
sys/arch/amd64/amd64/vmm_machdep.c
2037
vcpu->vc_vmx_true_pinbased_ctls =
sys/arch/amd64/amd64/vmm_machdep.c
2039
vcpu->vc_vmx_true_procbased_ctls =
sys/arch/amd64/amd64/vmm_machdep.c
2044
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
2046
vcpu->vc_vmx_procbased2_ctls = rdmsr(IA32_VMX_PROCBASED2_CTLS);
sys/arch/amd64/amd64/vmm_machdep.c
2059
if (vcpu->vc_vmx_basic & IA32_VMX_TRUE_CTLS_AVAIL) {
sys/arch/amd64/amd64/vmm_machdep.c
2061
ctrlval = vcpu->vc_vmx_true_pinbased_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
2064
ctrlval = vcpu->vc_vmx_pinbased_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
2109
if (vcpu->vc_vmx_basic & IA32_VMX_TRUE_CTLS_AVAIL) {
sys/arch/amd64/amd64/vmm_machdep.c
2111
ctrlval = vcpu->vc_vmx_true_procbased_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
2114
ctrlval = vcpu->vc_vmx_procbased_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
2146
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
2148
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
2151
vcpu->vc_vmx_vpid_enabled = 1;
sys/arch/amd64/amd64/vmm_machdep.c
2155
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
2157
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
2165
ctrlval = vcpu->vc_vmx_procbased2_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
2195
if (vcpu->vc_vmx_basic & IA32_VMX_TRUE_CTLS_AVAIL) {
sys/arch/amd64/amd64/vmm_machdep.c
2197
ctrlval = vcpu->vc_vmx_true_exit_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
2200
ctrlval = vcpu->vc_vmx_exit_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
2239
if (vcpu->vc_vmx_basic & IA32_VMX_TRUE_CTLS_AVAIL) {
sys/arch/amd64/amd64/vmm_machdep.c
2241
ctrlval = vcpu->vc_vmx_true_entry_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
2244
ctrlval = vcpu->vc_vmx_entry_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
2262
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
2264
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
2266
if (vmwrite(VMCS_GUEST_VPID, vcpu->vc_vpid)) {
sys/arch/amd64/amd64/vmm_machdep.c
2314
vcpu->vc_vmx_cr0_fixed1 = want1;
sys/arch/amd64/amd64/vmm_machdep.c
2315
vcpu->vc_vmx_cr0_fixed0 = want0;
sys/arch/amd64/amd64/vmm_machdep.c
2377
msr_store = (struct vmx_msr_store *)vcpu->vc_vmx_msr_exit_load_va;
sys/arch/amd64/amd64/vmm_machdep.c
2396
msr_store = (struct vmx_msr_store *)vcpu->vc_vmx_msr_exit_save_va;
sys/arch/amd64/amd64/vmm_machdep.c
2446
vcpu->vc_vmx_msr_exit_save_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2454
vcpu->vc_vmx_msr_exit_load_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2462
vcpu->vc_vmx_msr_exit_save_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2470
vcpu->vc_msr_bitmap_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2494
ret = vcpu_writeregs_vmx(vcpu, VM_RWREGS_ALL, 0, vrs);
sys/arch/amd64/amd64/vmm_machdep.c
2499
memset((uint8_t *)vcpu->vc_msr_bitmap_va, 0xFF, PAGE_SIZE);
sys/arch/amd64/amd64/vmm_machdep.c
2500
vmx_setmsrbrw(vcpu, MSR_IA32_FEATURE_CONTROL);
sys/arch/amd64/amd64/vmm_machdep.c
2501
vmx_setmsrbrw(vcpu, MSR_SYSENTER_CS);
sys/arch/amd64/amd64/vmm_machdep.c
2502
vmx_setmsrbrw(vcpu, MSR_SYSENTER_ESP);
sys/arch/amd64/amd64/vmm_machdep.c
2503
vmx_setmsrbrw(vcpu, MSR_SYSENTER_EIP);
sys/arch/amd64/amd64/vmm_machdep.c
2504
vmx_setmsrbrw(vcpu, MSR_EFER);
sys/arch/amd64/amd64/vmm_machdep.c
2505
vmx_setmsrbrw(vcpu, MSR_STAR);
sys/arch/amd64/amd64/vmm_machdep.c
2506
vmx_setmsrbrw(vcpu, MSR_LSTAR);
sys/arch/amd64/amd64/vmm_machdep.c
2507
vmx_setmsrbrw(vcpu, MSR_CSTAR);
sys/arch/amd64/amd64/vmm_machdep.c
2508
vmx_setmsrbrw(vcpu, MSR_SFMASK);
sys/arch/amd64/amd64/vmm_machdep.c
2509
vmx_setmsrbrw(vcpu, MSR_FSBASE);
sys/arch/amd64/amd64/vmm_machdep.c
2510
vmx_setmsrbrw(vcpu, MSR_GSBASE);
sys/arch/amd64/amd64/vmm_machdep.c
2511
vmx_setmsrbrw(vcpu, MSR_KERNELGSBASE);
sys/arch/amd64/amd64/vmm_machdep.c
2513
vmx_setmsrbr(vcpu, MSR_MISC_ENABLE);
sys/arch/amd64/amd64/vmm_machdep.c
2514
vmx_setmsrbr(vcpu, MSR_TSC);
sys/arch/amd64/amd64/vmm_machdep.c
2518
vmx_setmsrbrw(vcpu, MSR_S_CET);
sys/arch/amd64/amd64/vmm_machdep.c
2524
vcpu->vc_gueststate.vg_xcr0 = XFEATURE_X87 & xsave_mask;
sys/arch/amd64/amd64/vmm_machdep.c
2527
vcpu->vc_shadow_pat = rdmsr(MSR_CR_PAT);
sys/arch/amd64/amd64/vmm_machdep.c
2530
if (vmclear(&vcpu->vc_control_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2534
atomic_swap_uint(&vcpu->vc_vmx_vmcs_state, VMCS_CLEARED);
sys/arch/amd64/amd64/vmm_machdep.c
2557
vcpu_init_vmx(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
2565
if (vmm_alloc_vpid(&vcpu->vc_vpid))
sys/arch/amd64/amd64/vmm_machdep.c
2569
vcpu->vc_control_va = (vaddr_t)km_alloc(PAGE_SIZE, &kv_page, &kp_zero,
sys/arch/amd64/amd64/vmm_machdep.c
2571
vcpu->vc_vmx_vmcs_state = VMCS_CLEARED;
sys/arch/amd64/amd64/vmm_machdep.c
2573
if (!vcpu->vc_control_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2579
if (!pmap_extract(pmap_kernel(), vcpu->vc_control_va,
sys/arch/amd64/amd64/vmm_machdep.c
2580
(paddr_t *)&vcpu->vc_control_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2586
vcpu->vc_msr_bitmap_va = (vaddr_t)km_alloc(PAGE_SIZE, &kv_page, &kp_zero,
sys/arch/amd64/amd64/vmm_machdep.c
2589
if (!vcpu->vc_msr_bitmap_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2595
if (!pmap_extract(pmap_kernel(), vcpu->vc_msr_bitmap_va,
sys/arch/amd64/amd64/vmm_machdep.c
2596
(paddr_t *)&vcpu->vc_msr_bitmap_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2602
vcpu->vc_vmx_msr_exit_load_va = (vaddr_t)km_alloc(PAGE_SIZE, &kv_page,
sys/arch/amd64/amd64/vmm_machdep.c
2605
if (!vcpu->vc_vmx_msr_exit_load_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2611
if (!pmap_extract(pmap_kernel(), vcpu->vc_vmx_msr_exit_load_va,
sys/arch/amd64/amd64/vmm_machdep.c
2612
&vcpu->vc_vmx_msr_exit_load_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2618
vcpu->vc_vmx_msr_exit_save_va = (vaddr_t)km_alloc(PAGE_SIZE, &kv_page,
sys/arch/amd64/amd64/vmm_machdep.c
2621
if (!vcpu->vc_vmx_msr_exit_save_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2627
if (!pmap_extract(pmap_kernel(), vcpu->vc_vmx_msr_exit_save_va,
sys/arch/amd64/amd64/vmm_machdep.c
2628
&vcpu->vc_vmx_msr_exit_save_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2635
vcpu->vc_vmx_msr_entry_load_va = (vaddr_t)km_alloc(PAGE_SIZE, &kv_page,
sys/arch/amd64/amd64/vmm_machdep.c
2638
if (!vcpu->vc_vmx_msr_entry_load_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2644
if (!pmap_extract(pmap_kernel(), vcpu->vc_vmx_msr_entry_load_va,
sys/arch/amd64/amd64/vmm_machdep.c
2645
&vcpu->vc_vmx_msr_entry_load_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2651
vmcs = (struct vmcs *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
2657
if (vmptrld(&vcpu->vc_control_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2663
eptp = vcpu->vc_parent->vm_pmap->pm_pdirpa;
sys/arch/amd64/amd64/vmm_machdep.c
2687
vcpu->vc_parent->vm_pmap->eptp = eptp;
sys/arch/amd64/amd64/vmm_machdep.c
2763
if (vmclear(&vcpu->vc_control_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2770
vcpu_deinit_vmx(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
2790
vcpu_reset_regs(struct vcpu *vcpu, struct vcpu_reg_state *vrs)
sys/arch/amd64/amd64/vmm_machdep.c
2795
ret = vcpu_reset_regs_vmx(vcpu, vrs);
sys/arch/amd64/amd64/vmm_machdep.c
2797
ret = vcpu_reset_regs_svm(vcpu, vrs);
sys/arch/amd64/amd64/vmm_machdep.c
2822
vcpu_init_svm(struct vcpu *vcpu, struct vm_create_params *vcp)
sys/arch/amd64/amd64/vmm_machdep.c
2827
vcpu->vc_sev = vcp->vcp_sev;
sys/arch/amd64/amd64/vmm_machdep.c
2828
vcpu->vc_seves = vcp->vcp_seves;
sys/arch/amd64/amd64/vmm_machdep.c
2831
if (vmm_alloc_asid(&vcpu->vc_vpid, vcpu))
sys/arch/amd64/amd64/vmm_machdep.c
2835
vcpu->vc_control_va = (vaddr_t)km_alloc(PAGE_SIZE, &kv_page, &kp_zero,
sys/arch/amd64/amd64/vmm_machdep.c
2838
if (!vcpu->vc_control_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2844
if (!pmap_extract(pmap_kernel(), vcpu->vc_control_va,
sys/arch/amd64/amd64/vmm_machdep.c
2845
(paddr_t *)&vcpu->vc_control_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2851
(uint64_t)vcpu->vc_control_va,
sys/arch/amd64/amd64/vmm_machdep.c
2852
(uint64_t)vcpu->vc_control_pa);
sys/arch/amd64/amd64/vmm_machdep.c
2856
vcpu->vc_msr_bitmap_va = (vaddr_t)km_alloc(2 * PAGE_SIZE, &kv_any,
sys/arch/amd64/amd64/vmm_machdep.c
2859
if (!vcpu->vc_msr_bitmap_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2865
if (!pmap_extract(pmap_kernel(), vcpu->vc_msr_bitmap_va,
sys/arch/amd64/amd64/vmm_machdep.c
2866
(paddr_t *)&vcpu->vc_msr_bitmap_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2872
(uint64_t)vcpu->vc_msr_bitmap_va,
sys/arch/amd64/amd64/vmm_machdep.c
2873
(uint64_t)vcpu->vc_msr_bitmap_pa);
sys/arch/amd64/amd64/vmm_machdep.c
2876
vcpu->vc_svm_hsa_va = (vaddr_t)km_alloc(PAGE_SIZE, &kv_page,
sys/arch/amd64/amd64/vmm_machdep.c
2879
if (!vcpu->vc_svm_hsa_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2885
if (!pmap_extract(pmap_kernel(), vcpu->vc_svm_hsa_va,
sys/arch/amd64/amd64/vmm_machdep.c
2886
&vcpu->vc_svm_hsa_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2892
(uint64_t)vcpu->vc_svm_hsa_va,
sys/arch/amd64/amd64/vmm_machdep.c
2893
(uint64_t)vcpu->vc_svm_hsa_pa);
sys/arch/amd64/amd64/vmm_machdep.c
2896
vcpu->vc_svm_ioio_va = (vaddr_t)km_alloc(3 * PAGE_SIZE, &kv_any,
sys/arch/amd64/amd64/vmm_machdep.c
2899
if (!vcpu->vc_svm_ioio_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2905
if (!pmap_extract(pmap_kernel(), vcpu->vc_svm_ioio_va,
sys/arch/amd64/amd64/vmm_machdep.c
2906
&vcpu->vc_svm_ioio_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2912
(uint64_t)vcpu->vc_svm_ioio_va,
sys/arch/amd64/amd64/vmm_machdep.c
2913
(uint64_t)vcpu->vc_svm_ioio_pa);
sys/arch/amd64/amd64/vmm_machdep.c
2915
if (vcpu->vc_seves) {
sys/arch/amd64/amd64/vmm_machdep.c
2917
vcpu->vc_svm_vmsa_va = (vaddr_t)km_alloc(PAGE_SIZE, &kv_page,
sys/arch/amd64/amd64/vmm_machdep.c
2920
if (!vcpu->vc_svm_vmsa_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2926
if (!pmap_extract(pmap_kernel(), vcpu->vc_svm_vmsa_va,
sys/arch/amd64/amd64/vmm_machdep.c
2927
&vcpu->vc_svm_vmsa_pa)) {
sys/arch/amd64/amd64/vmm_machdep.c
2933
(uint64_t)vcpu->vc_svm_vmsa_va,
sys/arch/amd64/amd64/vmm_machdep.c
2934
(uint64_t)vcpu->vc_svm_vmsa_pa);
sys/arch/amd64/amd64/vmm_machdep.c
2939
vcp->vcp_asid[vcpu->vc_id] = vcpu->vc_vpid;
sys/arch/amd64/amd64/vmm_machdep.c
2943
vcpu_deinit_svm(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
2954
vcpu_init(struct vcpu *vcpu, struct vm_create_params *vcp)
sys/arch/amd64/amd64/vmm_machdep.c
2958
vcpu->vc_virt_mode = vmm_softc->mode;
sys/arch/amd64/amd64/vmm_machdep.c
2959
vcpu->vc_state = VCPU_STATE_STOPPED;
sys/arch/amd64/amd64/vmm_machdep.c
2960
vcpu->vc_vpid = 0;
sys/arch/amd64/amd64/vmm_machdep.c
2961
vcpu->vc_pvclock_system_gpa = 0;
sys/arch/amd64/amd64/vmm_machdep.c
2962
vcpu->vc_last_pcpu = NULL;
sys/arch/amd64/amd64/vmm_machdep.c
2964
rw_init(&vcpu->vc_lock, "vcpu");
sys/arch/amd64/amd64/vmm_machdep.c
2967
vcpu->vc_shadow_pat = rdmsr(MSR_CR_PAT);
sys/arch/amd64/amd64/vmm_machdep.c
2970
ret = vcpu_init_vmx(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
2972
ret = vcpu_init_svm(vcpu, vcp);
sys/arch/amd64/amd64/vmm_machdep.c
2988
vcpu_deinit_vmx(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
2990
if (vcpu->vc_control_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2991
km_free((void *)vcpu->vc_control_va, PAGE_SIZE,
sys/arch/amd64/amd64/vmm_machdep.c
2993
vcpu->vc_control_va = 0;
sys/arch/amd64/amd64/vmm_machdep.c
2995
if (vcpu->vc_vmx_msr_exit_save_va) {
sys/arch/amd64/amd64/vmm_machdep.c
2996
km_free((void *)vcpu->vc_vmx_msr_exit_save_va,
sys/arch/amd64/amd64/vmm_machdep.c
2998
vcpu->vc_vmx_msr_exit_save_va = 0;
sys/arch/amd64/amd64/vmm_machdep.c
3000
if (vcpu->vc_vmx_msr_exit_load_va) {
sys/arch/amd64/amd64/vmm_machdep.c
3001
km_free((void *)vcpu->vc_vmx_msr_exit_load_va,
sys/arch/amd64/amd64/vmm_machdep.c
3003
vcpu->vc_vmx_msr_exit_load_va = 0;
sys/arch/amd64/amd64/vmm_machdep.c
3006
if (vcpu->vc_vmx_msr_entry_load_va) {
sys/arch/amd64/amd64/vmm_machdep.c
3007
km_free((void *)vcpu->vc_vmx_msr_entry_load_va,
sys/arch/amd64/amd64/vmm_machdep.c
3009
vcpu->vc_vmx_msr_entry_load_va = 0;
sys/arch/amd64/amd64/vmm_machdep.c
3013
vmm_free_vpid(vcpu->vc_vpid);
sys/arch/amd64/amd64/vmm_machdep.c
3025
vcpu_deinit_svm(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
3027
if (vcpu->vc_control_va) {
sys/arch/amd64/amd64/vmm_machdep.c
3028
km_free((void *)vcpu->vc_control_va, PAGE_SIZE, &kv_page,
sys/arch/amd64/amd64/vmm_machdep.c
3030
vcpu->vc_control_va = 0;
sys/arch/amd64/amd64/vmm_machdep.c
3032
if (vcpu->vc_msr_bitmap_va) {
sys/arch/amd64/amd64/vmm_machdep.c
3033
km_free((void *)vcpu->vc_msr_bitmap_va, 2 * PAGE_SIZE, &kv_any,
sys/arch/amd64/amd64/vmm_machdep.c
3035
vcpu->vc_msr_bitmap_va = 0;
sys/arch/amd64/amd64/vmm_machdep.c
3037
if (vcpu->vc_svm_hsa_va) {
sys/arch/amd64/amd64/vmm_machdep.c
3038
km_free((void *)vcpu->vc_svm_hsa_va, PAGE_SIZE, &kv_page,
sys/arch/amd64/amd64/vmm_machdep.c
3040
vcpu->vc_svm_hsa_va = 0;
sys/arch/amd64/amd64/vmm_machdep.c
3042
if (vcpu->vc_svm_vmsa_va) {
sys/arch/amd64/amd64/vmm_machdep.c
3043
km_free((void *)vcpu->vc_svm_vmsa_va, PAGE_SIZE, &kv_page,
sys/arch/amd64/amd64/vmm_machdep.c
3045
vcpu->vc_svm_vmsa_va = 0;
sys/arch/amd64/amd64/vmm_machdep.c
3047
if (vcpu->vc_svm_ioio_va) {
sys/arch/amd64/amd64/vmm_machdep.c
3048
km_free((void *)vcpu->vc_svm_ioio_va, 3 * PAGE_SIZE, &kv_any,
sys/arch/amd64/amd64/vmm_machdep.c
3050
vcpu->vc_svm_ioio_va = 0;
sys/arch/amd64/amd64/vmm_machdep.c
3053
vmm_free_vpid(vcpu->vc_vpid);
sys/arch/amd64/amd64/vmm_machdep.c
3065
vcpu_deinit(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
3068
vcpu_deinit_vmx(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
3070
vcpu_deinit_svm(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
3087
vcpu_vmx_check_cap(struct vcpu *vcpu, uint32_t msr, uint32_t cap, int set)
sys/arch/amd64/amd64/vmm_machdep.c
3091
if (vcpu->vc_vmx_basic & IA32_VMX_TRUE_CTLS_AVAIL) {
sys/arch/amd64/amd64/vmm_machdep.c
3094
ctl = vcpu->vc_vmx_true_pinbased_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
3097
ctl = vcpu->vc_vmx_true_procbased_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
3100
ctl = vcpu->vc_vmx_procbased2_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
3103
ctl = vcpu->vc_vmx_true_entry_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
3106
ctl = vcpu->vc_vmx_true_exit_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
3114
ctl = vcpu->vc_vmx_pinbased_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
3117
ctl = vcpu->vc_vmx_procbased_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
3120
ctl = vcpu->vc_vmx_procbased2_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
3123
ctl = vcpu->vc_vmx_entry_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
3126
ctl = vcpu->vc_vmx_exit_ctls;
sys/arch/amd64/amd64/vmm_machdep.c
3346
struct vcpu *vcpu;
sys/arch/amd64/amd64/vmm_machdep.c
3357
vcpu = vm_find_vcpu(vm, vrp->vrp_vcpu_id);
sys/arch/amd64/amd64/vmm_machdep.c
3358
if (vcpu == NULL) {
sys/arch/amd64/amd64/vmm_machdep.c
3367
rw_enter_write(&vcpu->vc_lock);
sys/arch/amd64/amd64/vmm_machdep.c
3370
if (atomic_cas_uint(&vcpu->vc_state, old, next) != old) {
sys/arch/amd64/amd64/vmm_machdep.c
3381
ret = copyin(vrp->vrp_exit, &vcpu->vc_exit, sizeof(struct vm_exit));
sys/arch/amd64/amd64/vmm_machdep.c
3385
vcpu->vc_inject.vie_type = vrp->vrp_inject.vie_type;
sys/arch/amd64/amd64/vmm_machdep.c
3386
vcpu->vc_inject.vie_vector = vrp->vrp_inject.vie_vector;
sys/arch/amd64/amd64/vmm_machdep.c
3387
vcpu->vc_inject.vie_errorcode = vrp->vrp_inject.vie_errorcode;
sys/arch/amd64/amd64/vmm_machdep.c
3389
WRITE_ONCE(vcpu->vc_curcpu, curcpu());
sys/arch/amd64/amd64/vmm_machdep.c
3391
if (vcpu->vc_virt_mode == VMM_MODE_EPT) {
sys/arch/amd64/amd64/vmm_machdep.c
3392
vcpu_rv = vcpu_run_vmx(vcpu, vrp);
sys/arch/amd64/amd64/vmm_machdep.c
3393
} else if (vcpu->vc_virt_mode == VMM_MODE_RVI) {
sys/arch/amd64/amd64/vmm_machdep.c
3394
vcpu_rv = vcpu_run_svm(vcpu, vrp);
sys/arch/amd64/amd64/vmm_machdep.c
3396
WRITE_ONCE(vcpu->vc_curcpu, NULL);
sys/arch/amd64/amd64/vmm_machdep.c
3401
: vcpu->vc_gueststate.vg_exit_reason;
sys/arch/amd64/amd64/vmm_machdep.c
3402
vrp->vrp_irqready = vcpu->vc_irqready;
sys/arch/amd64/amd64/vmm_machdep.c
3403
vcpu->vc_state = VCPU_STATE_STOPPED;
sys/arch/amd64/amd64/vmm_machdep.c
3404
ret = copyout(&vcpu->vc_exit, vrp->vrp_exit,
sys/arch/amd64/amd64/vmm_machdep.c
3409
vcpu->vc_state = VCPU_STATE_TERMINATED;
sys/arch/amd64/amd64/vmm_machdep.c
3412
rw_exit_write(&vcpu->vc_lock);
sys/arch/amd64/amd64/vmm_machdep.c
3425
vmm_fpurestore(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
3429
rw_assert_wrlock(&vcpu->vc_lock);
sys/arch/amd64/amd64/vmm_machdep.c
3437
if (vcpu->vc_fpuinited)
sys/arch/amd64/amd64/vmm_machdep.c
3438
xrstor_kern(&vcpu->vc_g_fpu, xsave_mask);
sys/arch/amd64/amd64/vmm_machdep.c
3442
if (xsetbv_user(0, vcpu->vc_gueststate.vg_xcr0)) {
sys/arch/amd64/amd64/vmm_machdep.c
3445
__func__, vcpu->vc_gueststate.vg_xcr0, xsave_mask);
sys/arch/amd64/amd64/vmm_machdep.c
3459
vmm_fpusave(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
3461
rw_assert_wrlock(&vcpu->vc_lock);
sys/arch/amd64/amd64/vmm_machdep.c
3465
vcpu->vc_gueststate.vg_xcr0 = xgetbv(0);
sys/arch/amd64/amd64/vmm_machdep.c
3475
fpusavereset(&vcpu->vc_g_fpu);
sys/arch/amd64/amd64/vmm_machdep.c
3476
vcpu->vc_fpuinited = 1;
sys/arch/amd64/amd64/vmm_machdep.c
348
struct vcpu *vcpu;
sys/arch/amd64/amd64/vmm_machdep.c
3507
vmm_translate_gva(struct vcpu *vcpu, uint64_t va, uint64_t *pa, int mode)
sys/arch/amd64/amd64/vmm_machdep.c
3518
if (vcpu_readregs_vmx(vcpu, VM_RWREGS_ALL, 1, &vrs))
sys/arch/amd64/amd64/vmm_machdep.c
3521
if (vcpu_readregs_svm(vcpu, VM_RWREGS_ALL, &vrs))
sys/arch/amd64/amd64/vmm_machdep.c
3577
if (!pmap_extract(vcpu->vc_parent->vm_pmap, pte_paddr,
sys/arch/amd64/amd64/vmm_machdep.c
3604
if ((vcpu->vc_exit.cpl > 0) && !(pte & PG_u))
sys/arch/amd64/amd64/vmm_machdep.c
362
SLIST_FOREACH(vcpu, &vm->vm_vcpu_list, vc_vcpu_link) {
sys/arch/amd64/amd64/vmm_machdep.c
363
err = rw_enter(&vcpu->vc_lock, RW_WRITE | RW_NOSLEEP);
sys/arch/amd64/amd64/vmm_machdep.c
3650
vcpu_run_vmx(struct vcpu *vcpu, struct vm_run_params *vrp)
sys/arch/amd64/amd64/vmm_machdep.c
3663
rw_assert_wrlock(&vcpu->vc_lock);
sys/arch/amd64/amd64/vmm_machdep.c
3665
if (vcpu_reload_vmcs_vmx(vcpu)) {
sys/arch/amd64/amd64/vmm_machdep.c
3677
vcpu->vc_intr = 1;
sys/arch/amd64/amd64/vmm_machdep.c
3679
vcpu->vc_intr = 0;
sys/arch/amd64/amd64/vmm_machdep.c
368
if (atomic_load_int(&vcpu->vc_vmx_vmcs_state)
sys/arch/amd64/amd64/vmm_machdep.c
3681
switch (vcpu->vc_gueststate.vg_exit_reason) {
sys/arch/amd64/amd64/vmm_machdep.c
3683
if (vcpu->vc_exit.vei.vei_dir == VEI_DIR_IN)
sys/arch/amd64/amd64/vmm_machdep.c
3684
vcpu->vc_gueststate.vg_rax = vcpu->vc_exit.vei.vei_data;
sys/arch/amd64/amd64/vmm_machdep.c
3685
vcpu->vc_gueststate.vg_rip =
sys/arch/amd64/amd64/vmm_machdep.c
3686
vcpu->vc_exit.vrs.vrs_gprs[VCPU_REGS_RIP];
sys/arch/amd64/amd64/vmm_machdep.c
3687
if (vmwrite(VMCS_GUEST_IA32_RIP, vcpu->vc_gueststate.vg_rip)) {
sys/arch/amd64/amd64/vmm_machdep.c
3693
ret = vcpu_writeregs_vmx(vcpu, VM_RWREGS_GPRS, 0,
sys/arch/amd64/amd64/vmm_machdep.c
3694
&vcpu->vc_exit.vrs);
sys/arch/amd64/amd64/vmm_machdep.c
3697
__func__, vcpu->vc_parent->vm_id, vcpu->vc_id);
sys/arch/amd64/amd64/vmm_machdep.c
3702
memset(&vcpu->vc_exit, 0, sizeof(vcpu->vc_exit));
sys/arch/amd64/amd64/vmm_machdep.c
3706
if (vcpu->vc_inject.vie_type == VCPU_INJECT_INTR) {
sys/arch/amd64/amd64/vmm_machdep.c
371
__func__, vcpu->vc_id, vm->vm_id);
sys/arch/amd64/amd64/vmm_machdep.c
3714
if (!(int_st & 0x3) && vcpu->vc_irqready) {
sys/arch/amd64/amd64/vmm_machdep.c
3715
eii = (uint64_t)vcpu->vc_inject.vie_vector;
sys/arch/amd64/amd64/vmm_machdep.c
372
rw_exit_write(&vcpu->vc_lock);
sys/arch/amd64/amd64/vmm_machdep.c
3723
vcpu->vc_inject.vie_type = VCPU_INJECT_NONE;
sys/arch/amd64/amd64/vmm_machdep.c
3725
} else if (!vcpu->vc_intr) {
sys/arch/amd64/amd64/vmm_machdep.c
3743
msr_store = (struct vmx_msr_store *)vcpu->vc_vmx_msr_exit_load_va;
sys/arch/amd64/amd64/vmm_machdep.c
3748
KASSERT(pa == vcpu->vc_control_pa);
sys/arch/amd64/amd64/vmm_machdep.c
3751
vmm_update_pvclock(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
3755
vcpu->vc_last_pcpu = ci;
sys/arch/amd64/amd64/vmm_machdep.c
3759
vcpu->vc_parent->vm_pmap);
sys/arch/amd64/amd64/vmm_machdep.c
3763
vid_ept.vid_eptp = vcpu->vc_parent->vm_pmap->eptp;
sys/arch/amd64/amd64/vmm_machdep.c
377
if (vcpu->vc_last_pcpu != curcpu()) {
sys/arch/amd64/amd64/vmm_machdep.c
379
err = vmx_remote_vmclear(vcpu->vc_last_pcpu,
sys/arch/amd64/amd64/vmm_machdep.c
380
vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
3823
if (vcpu->vc_inject.vie_type == VCPU_INJECT_EX) {
sys/arch/amd64/amd64/vmm_machdep.c
3824
eii = (uint64_t)vcpu->vc_inject.vie_vector;
sys/arch/amd64/amd64/vmm_machdep.c
3827
switch (vcpu->vc_inject.vie_vector) {
sys/arch/amd64/amd64/vmm_machdep.c
384
vcpu->vc_id, vm->vm_id);
sys/arch/amd64/amd64/vmm_machdep.c
3860
if (vcpu->vc_inject.vie_vector == VMM_EX_AC)
sys/arch/amd64/amd64/vmm_machdep.c
3861
vcpu->vc_inject.vie_errorcode = 0;
sys/arch/amd64/amd64/vmm_machdep.c
3870
vcpu->vc_inject.vie_errorcode)) {
sys/arch/amd64/amd64/vmm_machdep.c
3885
vcpu->vc_inject.vie_type = VCPU_INJECT_NONE;
sys/arch/amd64/amd64/vmm_machdep.c
3888
if (vcpu->vc_vmx_vpid_enabled) {
sys/arch/amd64/amd64/vmm_machdep.c
389
if ((err = vmclear(&vcpu->vc_control_pa)))
sys/arch/amd64/amd64/vmm_machdep.c
3890
vid.vid_vpid = vcpu->vc_vpid;
sys/arch/amd64/amd64/vmm_machdep.c
3899
if ((ret = vmm_fpurestore(vcpu))) {
sys/arch/amd64/amd64/vmm_machdep.c
3904
TRACEPOINT(vmm, guest_enter, vcpu, vrp);
sys/arch/amd64/amd64/vmm_machdep.c
3910
if (ci->ci_guest_vcpu != vcpu &&
sys/arch/amd64/amd64/vmm_machdep.c
3913
ci->ci_guest_vcpu = vcpu;
sys/arch/amd64/amd64/vmm_machdep.c
3918
wrpkru(0, vcpu->vc_pkru);
sys/arch/amd64/amd64/vmm_machdep.c
392
vcpu->vc_id, vm->vm_id);
sys/arch/amd64/amd64/vmm_machdep.c
3920
ret = vmx_enter_guest(&vcpu->vc_control_pa,
sys/arch/amd64/amd64/vmm_machdep.c
3921
&vcpu->vc_gueststate,
sys/arch/amd64/amd64/vmm_machdep.c
3922
(vcpu->vc_vmx_vmcs_state == VMCS_LAUNCHED),
sys/arch/amd64/amd64/vmm_machdep.c
3927
vcpu->vc_pkru = rdpkru(0);
sys/arch/amd64/amd64/vmm_machdep.c
393
atomic_swap_uint(&vcpu->vc_vmx_vmcs_state,
sys/arch/amd64/amd64/vmm_machdep.c
3945
vmm_fpusave(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
3948
atomic_swap_uint(&vcpu->vc_vmx_vmcs_state, VMCS_LAUNCHED);
sys/arch/amd64/amd64/vmm_machdep.c
3954
&vcpu->vc_gueststate.vg_rip, &exit_reason);
sys/arch/amd64/amd64/vmm_machdep.c
3966
vcpu->vc_gueststate.vg_exit_reason = exit_reason;
sys/arch/amd64/amd64/vmm_machdep.c
3967
TRACEPOINT(vmm, guest_exit, vcpu, vrp, exit_reason);
sys/arch/amd64/amd64/vmm_machdep.c
397
rw_exit_write(&vcpu->vc_lock);
sys/arch/amd64/amd64/vmm_machdep.c
3971
&vcpu->vc_gueststate.vg_rflags)) {
sys/arch/amd64/amd64/vmm_machdep.c
3982
ret = vmx_handle_exit(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
3984
if (vcpu->vc_gueststate.vg_rflags & PSL_I)
sys/arch/amd64/amd64/vmm_machdep.c
3985
vcpu->vc_irqready = 1;
sys/arch/amd64/amd64/vmm_machdep.c
3987
vcpu->vc_irqready = 0;
sys/arch/amd64/amd64/vmm_machdep.c
3993
if (vcpu->vc_irqready == 0 && vcpu->vc_intr) {
sys/arch/amd64/amd64/vmm_machdep.c
401
vcpu->vc_id, vm->vm_id);
sys/arch/amd64/amd64/vmm_machdep.c
4014
if (ret || vcpu_must_stop(vcpu))
sys/arch/amd64/amd64/vmm_machdep.c
4017
if (vcpu->vc_intr && vcpu->vc_irqready) {
sys/arch/amd64/amd64/vmm_machdep.c
4037
(vcpu->vc_vmx_vmcs_state == VMCS_LAUNCHED
sys/arch/amd64/amd64/vmm_machdep.c
4043
(vcpu->vc_vmx_vmcs_state == VMCS_LAUNCHED
sys/arch/amd64/amd64/vmm_machdep.c
4049
(vcpu->vc_vmx_vmcs_state == VMCS_LAUNCHED
sys/arch/amd64/amd64/vmm_machdep.c
4064
vmx_vcpu_dump_regs(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4065
dump_vcpu(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4070
vcpu->vc_last_pcpu = curcpu();
sys/arch/amd64/amd64/vmm_machdep.c
4073
if (vcpu_readregs_vmx(vcpu, VM_RWREGS_ALL, 0, &vcpu->vc_exit.vrs))
sys/arch/amd64/amd64/vmm_machdep.c
4075
vcpu->vc_exit.cpl = vmm_get_guest_cpu_cpl(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4088
vmx_handle_intr(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
4122
svm_handle_hlt(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
4124
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
4128
vcpu->vc_gueststate.vg_rip += 1;
sys/arch/amd64/amd64/vmm_machdep.c
4130
if (!svm_get_iflag(vcpu, rflags)) {
sys/arch/amd64/amd64/vmm_machdep.c
4157
vmx_handle_hlt(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
4183
vcpu->vc_gueststate.vg_rip += insn_length;
sys/arch/amd64/amd64/vmm_machdep.c
4214
svm_handle_exit(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
4218
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
4221
exit_reason = vcpu->vc_gueststate.vg_exit_reason;
sys/arch/amd64/amd64/vmm_machdep.c
4222
rflags = vcpu->vc_gueststate.vg_rflags;
sys/arch/amd64/amd64/vmm_machdep.c
4226
if (!svm_get_iflag(vcpu, rflags)) {
sys/arch/amd64/amd64/vmm_machdep.c
4240
svm_set_dirty(vcpu, SVM_CLEANBITS_TPR | SVM_CLEANBITS_I);
sys/arch/amd64/amd64/vmm_machdep.c
4252
ret = svm_handle_np_fault(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4255
ret = vmm_handle_cpuid(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4259
ret = svm_handle_msr(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4263
ret = svm_handle_xsetbv(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4267
if (svm_handle_inout(vcpu) == 0)
sys/arch/amd64/amd64/vmm_machdep.c
4271
ret = svm_handle_hlt(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4286
ret = vmm_inject_ud(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4292
ret = svm_handle_efercr(vcpu, exit_reason);
sys/arch/amd64/amd64/vmm_machdep.c
4296
ret = svm_handle_vmgexit(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4299
guest_cpl = vmm_get_guest_cpu_cpl(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4301
vcpu->vc_gueststate.vg_rax == HVCALL_FORCED_ABORT)
sys/arch/amd64/amd64/vmm_machdep.c
4304
ret = vmm_inject_ud(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4309
exit_reason, (uint64_t)vcpu->vc_control_pa);
sys/arch/amd64/amd64/vmm_machdep.c
4314
vmcb->v_rip = vcpu->vc_gueststate.vg_rip;
sys/arch/amd64/amd64/vmm_machdep.c
4317
if (vmm_inject_db(vcpu)) {
sys/arch/amd64/amd64/vmm_machdep.c
4327
svm_set_dirty(vcpu, SVM_CLEANBITS_CR);
sys/arch/amd64/amd64/vmm_machdep.c
4336
svm_vmgexit_sync_host(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
4338
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
4343
if (!vcpu->vc_seves)
sys/arch/amd64/amd64/vmm_machdep.c
4346
if (vcpu->vc_svm_ghcb_va == 0)
sys/arch/amd64/amd64/vmm_machdep.c
4349
ghcb = (struct ghcb_sa *)vcpu->vc_svm_ghcb_va;
sys/arch/amd64/amd64/vmm_machdep.c
4395
vmcb->v_exitcode = vcpu->vc_gueststate.vg_exit_reason =
sys/arch/amd64/amd64/vmm_machdep.c
4401
vmcb->v_rax = vcpu->vc_gueststate.vg_rax = ghcb->v_rax;
sys/arch/amd64/amd64/vmm_machdep.c
4403
vcpu->vc_gueststate.vg_rbx = ghcb->v_rbx;
sys/arch/amd64/amd64/vmm_machdep.c
4405
vcpu->vc_gueststate.vg_rcx = ghcb->v_rcx;
sys/arch/amd64/amd64/vmm_machdep.c
4407
vcpu->vc_gueststate.vg_rdx = ghcb->v_rdx;
sys/arch/amd64/amd64/vmm_machdep.c
4416
svm_vmgexit_sync_guest(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
4423
if (!vcpu->vc_seves)
sys/arch/amd64/amd64/vmm_machdep.c
4426
if (vcpu->vc_svm_ghcb_va == 0)
sys/arch/amd64/amd64/vmm_machdep.c
4429
ghcb = (struct ghcb_sa *)vcpu->vc_svm_ghcb_va;
sys/arch/amd64/amd64/vmm_machdep.c
4471
ghcb->v_rax = vcpu->vc_gueststate.vg_rax;
sys/arch/amd64/amd64/vmm_machdep.c
4473
ghcb->v_rbx = vcpu->vc_gueststate.vg_rbx;
sys/arch/amd64/amd64/vmm_machdep.c
4475
ghcb->v_rcx = vcpu->vc_gueststate.vg_rcx;
sys/arch/amd64/amd64/vmm_machdep.c
4477
ghcb->v_rdx = vcpu->vc_gueststate.vg_rdx;
sys/arch/amd64/amd64/vmm_machdep.c
4494
svm_handle_vmgexit(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
4496
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
4497
struct vm *vm = vcpu->vc_parent;
sys/arch/amd64/amd64/vmm_machdep.c
4509
vcpu->vc_svm_ghcb_va = (vaddr_t)PMAP_DIRECT_MAP(ghcb_hpa);
sys/arch/amd64/amd64/vmm_machdep.c
4527
vcpu->vc_gueststate.vg_rax = 0;
sys/arch/amd64/amd64/vmm_machdep.c
4528
vcpu->vc_gueststate.vg_rbx = 0;
sys/arch/amd64/amd64/vmm_machdep.c
4529
vcpu->vc_gueststate.vg_rcx = 0;
sys/arch/amd64/amd64/vmm_machdep.c
4530
vcpu->vc_gueststate.vg_rdx = 0;
sys/arch/amd64/amd64/vmm_machdep.c
4531
error = vmm_handle_cpuid(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4540
result = vcpu->vc_gueststate.vg_rbx;
sys/arch/amd64/amd64/vmm_machdep.c
4543
result = vcpu->vc_gueststate.vg_rcx;
sys/arch/amd64/amd64/vmm_machdep.c
4546
result = vcpu->vc_gueststate.vg_rdx;
sys/arch/amd64/amd64/vmm_machdep.c
4561
ghcb = (struct ghcb_sa *)vcpu->vc_svm_ghcb_va;
sys/arch/amd64/amd64/vmm_machdep.c
4562
if (svm_vmgexit_sync_host(vcpu)) {
sys/arch/amd64/amd64/vmm_machdep.c
4571
error = vmm_handle_cpuid(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4572
vmcb->v_rip = vcpu->vc_gueststate.vg_rip;
sys/arch/amd64/amd64/vmm_machdep.c
4573
vcpu->vc_gueststate.vg_rax = vmcb->v_rax;
sys/arch/amd64/amd64/vmm_machdep.c
4577
if (svm_handle_inout(vcpu) == 0)
sys/arch/amd64/amd64/vmm_machdep.c
4581
error = svm_handle_msr(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4582
vmcb->v_rip = vcpu->vc_gueststate.vg_rip;
sys/arch/amd64/amd64/vmm_machdep.c
4586
error = vmm_inject_ud(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4595
error = svm_vmgexit_sync_guest(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4609
svm_handle_efercr(struct vcpu *vcpu, uint64_t exit_reason)
sys/arch/amd64/amd64/vmm_machdep.c
4611
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
4638
svm_get_iflag(struct vcpu *vcpu, uint64_t rflags)
sys/arch/amd64/amd64/vmm_machdep.c
4640
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
4642
if (vcpu->vc_seves)
sys/arch/amd64/amd64/vmm_machdep.c
4654
vmx_handle_exit(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
4660
exit_reason = vcpu->vc_gueststate.vg_exit_reason;
sys/arch/amd64/amd64/vmm_machdep.c
4661
rflags = vcpu->vc_gueststate.vg_rflags;
sys/arch/amd64/amd64/vmm_machdep.c
4676
ret = vmx_handle_np_fault(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4679
ret = vmm_handle_cpuid(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4683
if (vmx_handle_inout(vcpu) == 0)
sys/arch/amd64/amd64/vmm_machdep.c
4687
vmx_handle_intr(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4691
ret = vmx_handle_cr(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4695
ret = vmx_handle_hlt(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4699
ret = vmx_handle_rdmsr(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4703
ret = vmx_handle_wrmsr(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4707
ret = vmx_handle_xsetbv(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4724
ret = vmm_inject_ud(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4730
vcpu->vc_parent->vm_id, vcpu->vc_id);
sys/arch/amd64/amd64/vmm_machdep.c
4731
vmx_vcpu_dump_regs(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4732
dump_vcpu(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4733
vmx_dump_vmcs(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4739
guest_cpl = vmm_get_guest_cpu_cpl(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4741
vcpu->vc_gueststate.vg_rax == HVCALL_FORCED_ABORT)
sys/arch/amd64/amd64/vmm_machdep.c
4744
ret = vmm_inject_ud(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
4757
vcpu->vc_gueststate.vg_rip)) {
sys/arch/amd64/amd64/vmm_machdep.c
4780
if (vmm_inject_db(vcpu)) {
sys/arch/amd64/amd64/vmm_machdep.c
4803
vmm_inject_gp(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
4806
vcpu->vc_gueststate.vg_rip);
sys/arch/amd64/amd64/vmm_machdep.c
4807
vcpu->vc_inject.vie_vector = VMM_EX_GP;
sys/arch/amd64/amd64/vmm_machdep.c
4808
vcpu->vc_inject.vie_type = VCPU_INJECT_EX;
sys/arch/amd64/amd64/vmm_machdep.c
4809
vcpu->vc_inject.vie_errorcode = 0;
sys/arch/amd64/amd64/vmm_machdep.c
4826
vmm_inject_ud(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
4829
vcpu->vc_gueststate.vg_rip);
sys/arch/amd64/amd64/vmm_machdep.c
4830
vcpu->vc_inject.vie_vector = VMM_EX_UD;
sys/arch/amd64/amd64/vmm_machdep.c
4831
vcpu->vc_inject.vie_type = VCPU_INJECT_EX;
sys/arch/amd64/amd64/vmm_machdep.c
4832
vcpu->vc_inject.vie_errorcode = 0;
sys/arch/amd64/amd64/vmm_machdep.c
4849
vmm_inject_db(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
4852
vcpu->vc_gueststate.vg_rip);
sys/arch/amd64/amd64/vmm_machdep.c
4853
vcpu->vc_inject.vie_vector = VMM_EX_DB;
sys/arch/amd64/amd64/vmm_machdep.c
4854
vcpu->vc_inject.vie_type = VCPU_INJECT_EX;
sys/arch/amd64/amd64/vmm_machdep.c
4855
vcpu->vc_inject.vie_errorcode = 0;
sys/arch/amd64/amd64/vmm_machdep.c
486
struct vcpu *vcpu;
sys/arch/amd64/amd64/vmm_machdep.c
499
vcpu = vm_find_vcpu(vm, vip->vip_vcpu_id);
sys/arch/amd64/amd64/vmm_machdep.c
4995
svm_fault_page(struct vcpu *vcpu, paddr_t gpa)
sys/arch/amd64/amd64/vmm_machdep.c
5002
hva = vmm_translate_gpa(vcpu->vc_parent, pa);
sys/arch/amd64/amd64/vmm_machdep.c
501
if (vcpu == NULL) {
sys/arch/amd64/amd64/vmm_machdep.c
5029
ret = pmap_enter(vcpu->vc_parent->vm_pmap, pa, hpa,
sys/arch/amd64/amd64/vmm_machdep.c
5046
svm_handle_np_fault(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
5050
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
5051
struct vm_exit_eptviolation *vee = &vcpu->vc_exit.vee;
sys/arch/amd64/amd64/vmm_machdep.c
5058
gpa_memtype = vmm_get_guest_memtype(vcpu->vc_parent, gpa);
sys/arch/amd64/amd64/vmm_machdep.c
506
vcpu->vc_intr = vip->vip_intr;
sys/arch/amd64/amd64/vmm_machdep.c
5062
ret = svm_fault_page(vcpu, gpa);
sys/arch/amd64/amd64/vmm_machdep.c
508
ci = READ_ONCE(vcpu->vc_curcpu);
sys/arch/amd64/amd64/vmm_machdep.c
5100
vmx_fault_page(struct vcpu *vcpu, paddr_t gpa)
sys/arch/amd64/amd64/vmm_machdep.c
5113
vcpu->vc_exit.vee.vee_fault_type = VEE_FAULT_PROTECT;
sys/arch/amd64/amd64/vmm_machdep.c
5116
vcpu->vc_exit.vee.vee_fault_type = VEE_FAULT_HANDLED;
sys/arch/amd64/amd64/vmm_machdep.c
5120
hva = vmm_translate_gpa(vcpu->vc_parent, pa);
sys/arch/amd64/amd64/vmm_machdep.c
5130
vcpu->vc_last_pcpu = curcpu(); /* uvm_fault may sleep. */
sys/arch/amd64/amd64/vmm_machdep.c
5138
if (vcpu_reload_vmcs_vmx(vcpu)) {
sys/arch/amd64/amd64/vmm_machdep.c
5152
ret = pmap_enter(vcpu->vc_parent->vm_pmap, pa, hpa,
sys/arch/amd64/amd64/vmm_machdep.c
5169
vmx_handle_np_fault(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
5173
struct vm_exit_eptviolation *vee = &vcpu->vc_exit.vee;
sys/arch/amd64/amd64/vmm_machdep.c
5182
gpa_memtype = vmm_get_guest_memtype(vcpu->vc_parent, gpa);
sys/arch/amd64/amd64/vmm_machdep.c
5186
ret = vmx_fault_page(vcpu, gpa);
sys/arch/amd64/amd64/vmm_machdep.c
5226
vmm_get_guest_cpu_cpl(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
5232
mode = vmm_get_guest_cpu_mode(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
5241
vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
5264
vmm_get_guest_cpu_mode(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
5272
vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
5283
(struct vmx_msr_store *)vcpu->vc_vmx_msr_exit_save_va;
sys/arch/amd64/amd64/vmm_machdep.c
5324
svm_handle_inout(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
5327
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
5334
vcpu->vc_exit.vei.vei_dir = VEI_DIR_IN;
sys/arch/amd64/amd64/vmm_machdep.c
5336
vcpu->vc_exit.vei.vei_dir = VEI_DIR_OUT;
sys/arch/amd64/amd64/vmm_machdep.c
5338
vcpu->vc_exit.vei.vei_string = (exit_qual & 0x4) >> 2;
sys/arch/amd64/amd64/vmm_machdep.c
5340
vcpu->vc_exit.vei.vei_rep = (exit_qual & 0x8) >> 3;
sys/arch/amd64/amd64/vmm_machdep.c
5344
vcpu->vc_exit.vei.vei_size = 1;
sys/arch/amd64/amd64/vmm_machdep.c
5346
vcpu->vc_exit.vei.vei_size = 2;
sys/arch/amd64/amd64/vmm_machdep.c
5348
vcpu->vc_exit.vei.vei_size = 4;
sys/arch/amd64/amd64/vmm_machdep.c
5351
vcpu->vc_exit.vei.vei_port = (exit_qual & 0xFFFF0000) >> 16;
sys/arch/amd64/amd64/vmm_machdep.c
5353
vcpu->vc_exit.vei.vei_data = vmcb->v_rax;
sys/arch/amd64/amd64/vmm_machdep.c
5355
vcpu->vc_exit.vei.vei_insn_len = (uint8_t)insn_length;
sys/arch/amd64/amd64/vmm_machdep.c
5357
TRACEPOINT(vmm, inout, vcpu, vcpu->vc_exit.vei.vei_port,
sys/arch/amd64/amd64/vmm_machdep.c
5358
vcpu->vc_exit.vei.vei_dir, vcpu->vc_exit.vei.vei_data);
sys/arch/amd64/amd64/vmm_machdep.c
537
struct vcpu *vcpu;
sys/arch/amd64/amd64/vmm_machdep.c
5376
vmx_handle_inout(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
5391
vcpu->vc_exit.vei.vei_size = (exit_qual & 0x7) + 1;
sys/arch/amd64/amd64/vmm_machdep.c
5394
vcpu->vc_exit.vei.vei_dir = VEI_DIR_IN;
sys/arch/amd64/amd64/vmm_machdep.c
5396
vcpu->vc_exit.vei.vei_dir = VEI_DIR_OUT;
sys/arch/amd64/amd64/vmm_machdep.c
5398
vcpu->vc_exit.vei.vei_string = (exit_qual & 0x10) >> 4;
sys/arch/amd64/amd64/vmm_machdep.c
5400
vcpu->vc_exit.vei.vei_rep = (exit_qual & 0x20) >> 5;
sys/arch/amd64/amd64/vmm_machdep.c
5402
vcpu->vc_exit.vei.vei_encoding = (exit_qual & 0x40) >> 6;
sys/arch/amd64/amd64/vmm_machdep.c
5404
vcpu->vc_exit.vei.vei_port = (exit_qual & 0xFFFF0000) >> 16;
sys/arch/amd64/amd64/vmm_machdep.c
5406
vcpu->vc_exit.vei.vei_data = (uint32_t)vcpu->vc_gueststate.vg_rax;
sys/arch/amd64/amd64/vmm_machdep.c
5408
vcpu->vc_exit.vei.vei_insn_len = (uint8_t)insn_length;
sys/arch/amd64/amd64/vmm_machdep.c
5410
TRACEPOINT(vmm, inout, vcpu, vcpu->vc_exit.vei.vei_port,
sys/arch/amd64/amd64/vmm_machdep.c
5411
vcpu->vc_exit.vei.vei_dir, vcpu->vc_exit.vei.vei_data);
sys/arch/amd64/amd64/vmm_machdep.c
5431
vmx_load_pdptes(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
5443
if (!pmap_extract(vcpu->vc_parent->vm_pmap, (vaddr_t)cr3,
sys/arch/amd64/amd64/vmm_machdep.c
547
vcpu = vm_find_vcpu(vm, vpp->vpp_vcpu_id);
sys/arch/amd64/amd64/vmm_machdep.c
5472
vcpu->vc_last_pcpu = curcpu();
sys/arch/amd64/amd64/vmm_machdep.c
5475
if (vcpu_reload_vmcs_vmx(vcpu)) {
sys/arch/amd64/amd64/vmm_machdep.c
549
if (vcpu == NULL) {
sys/arch/amd64/amd64/vmm_machdep.c
5518
vcpu->vc_last_pcpu = curcpu();
sys/arch/amd64/amd64/vmm_machdep.c
5520
if (vcpu_reload_vmcs_vmx(vcpu)) {
sys/arch/amd64/amd64/vmm_machdep.c
5543
vmx_handle_cr0_write(struct vcpu *vcpu, uint64_t r)
sys/arch/amd64/amd64/vmm_machdep.c
5551
mask = vcpu->vc_vmx_cr0_fixed1;
sys/arch/amd64/amd64/vmm_machdep.c
5556
vcpu->vc_vmx_cr0_fixed1, r);
sys/arch/amd64/amd64/vmm_machdep.c
5557
vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
556
vpp->vpp_pvclock_version = vcpu->vc_pvclock_version;
sys/arch/amd64/amd64/vmm_machdep.c
5562
mask = vcpu->vc_vmx_cr0_fixed0;
sys/arch/amd64/amd64/vmm_machdep.c
5567
vcpu->vc_vmx_cr0_fixed0, r);
sys/arch/amd64/amd64/vmm_machdep.c
5568
vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
5575
vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
5582
vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
5589
vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
559
vcpu->vc_pvclock_system_gpa;
sys/arch/amd64/amd64/vmm_machdep.c
5609
if (vcpu->vc_vmx_vpid_enabled) {
sys/arch/amd64/amd64/vmm_machdep.c
5610
vid.vid_vpid = vcpu->vc_vpid;
sys/arch/amd64/amd64/vmm_machdep.c
5619
msr_store = (struct vmx_msr_store *)vcpu->vc_vmx_msr_exit_save_va;
sys/arch/amd64/amd64/vmm_machdep.c
562
vcpu->vc_pvclock_version = vpp->vpp_pvclock_version;
sys/arch/amd64/amd64/vmm_machdep.c
564
vmm_init_pvclock(vcpu, vpp->vpp_pvclock_system_gpa);
sys/arch/amd64/amd64/vmm_machdep.c
5643
ret = vmx_load_pdptes(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
5670
vmx_handle_cr4_write(struct vcpu *vcpu, uint64_t r)
sys/arch/amd64/amd64/vmm_machdep.c
5682
vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
5694
vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
5715
vmx_handle_cr(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
5746
case 0: r = vcpu->vc_gueststate.vg_rax; break;
sys/arch/amd64/amd64/vmm_machdep.c
5747
case 1: r = vcpu->vc_gueststate.vg_rcx; break;
sys/arch/amd64/amd64/vmm_machdep.c
5748
case 2: r = vcpu->vc_gueststate.vg_rdx; break;
sys/arch/amd64/amd64/vmm_machdep.c
5749
case 3: r = vcpu->vc_gueststate.vg_rbx; break;
sys/arch/amd64/amd64/vmm_machdep.c
5756
case 5: r = vcpu->vc_gueststate.vg_rbp; break;
sys/arch/amd64/amd64/vmm_machdep.c
5757
case 6: r = vcpu->vc_gueststate.vg_rsi; break;
sys/arch/amd64/amd64/vmm_machdep.c
5758
case 7: r = vcpu->vc_gueststate.vg_rdi; break;
sys/arch/amd64/amd64/vmm_machdep.c
5759
case 8: r = vcpu->vc_gueststate.vg_r8; break;
sys/arch/amd64/amd64/vmm_machdep.c
5760
case 9: r = vcpu->vc_gueststate.vg_r9; break;
sys/arch/amd64/amd64/vmm_machdep.c
5761
case 10: r = vcpu->vc_gueststate.vg_r10; break;
sys/arch/amd64/amd64/vmm_machdep.c
5762
case 11: r = vcpu->vc_gueststate.vg_r11; break;
sys/arch/amd64/amd64/vmm_machdep.c
5763
case 12: r = vcpu->vc_gueststate.vg_r12; break;
sys/arch/amd64/amd64/vmm_machdep.c
5764
case 13: r = vcpu->vc_gueststate.vg_r13; break;
sys/arch/amd64/amd64/vmm_machdep.c
5765
case 14: r = vcpu->vc_gueststate.vg_r14; break;
sys/arch/amd64/amd64/vmm_machdep.c
5766
case 15: r = vcpu->vc_gueststate.vg_r15; break;
sys/arch/amd64/amd64/vmm_machdep.c
5769
__func__, crnum, vcpu->vc_gueststate.vg_rip, r);
sys/arch/amd64/amd64/vmm_machdep.c
5773
vmx_handle_cr0_write(vcpu, r);
sys/arch/amd64/amd64/vmm_machdep.c
5776
vmx_handle_cr4_write(vcpu, r);
sys/arch/amd64/amd64/vmm_machdep.c
5781
vcpu->vc_gueststate.vg_rip);
sys/arch/amd64/amd64/vmm_machdep.c
5785
vcpu->vc_gueststate.vg_rip);
sys/arch/amd64/amd64/vmm_machdep.c
5789
vcpu->vc_gueststate.vg_rip);
sys/arch/amd64/amd64/vmm_machdep.c
5793
vcpu->vc_gueststate.vg_rip);
sys/arch/amd64/amd64/vmm_machdep.c
5796
vcpu->vc_gueststate.vg_rip += insn_length;
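
The switch in vmx_handle_cr() maps the 4-bit register operand of a MOV-to-CR exit onto the saved guest GPRs in the standard x86 encoding order. Case 4 (%rsp) is absent from the hits, presumably because guest %rsp is kept in the VMCS rather than in vc_gueststate. The same mapping, table-driven, against a hypothetical GPR struct:

    #include <stddef.h>
    #include <stdint.h>

    /* Saved guest GPRs, mirroring the vg_* fields used above. */
    struct guest_gprs {
        uint64_t rax, rcx, rdx, rbx, rsp, rbp, rsi, rdi;
        uint64_t r8, r9, r10, r11, r12, r13, r14, r15;
    };

    /*
     * Register numbering in the exit qualification follows the usual
     * x86 encoding: 0=rax 1=rcx 2=rdx 3=rbx 4=rsp 5=rbp 6=rsi 7=rdi
     * 8-15=r8-r15, matching the switch cases above.
     */
    static uint64_t *
    gpr_by_num(struct guest_gprs *g, unsigned n)
    {
        uint64_t *map[16] = {
            &g->rax, &g->rcx, &g->rdx, &g->rbx,
            &g->rsp, &g->rbp, &g->rsi, &g->rdi,
            &g->r8,  &g->r9,  &g->r10, &g->r11,
            &g->r12, &g->r13, &g->r14, &g->r15,
        };
        return (n < 16) ? map[n] : NULL;
    }

    int
    main(void)
    {
        struct guest_gprs g = { .rcx = 0x80000011 };

        /* "mov %rcx, %cr0" selects register number 1. */
        return *gpr_by_num(&g, 1) != 0x80000011;
    }
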
sys/arch/amd64/amd64/vmm_machdep.c
5818
vmx_handle_rdmsr(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
5836
rax = &vcpu->vc_gueststate.vg_rax;
sys/arch/amd64/amd64/vmm_machdep.c
5837
rcx = &vcpu->vc_gueststate.vg_rcx;
sys/arch/amd64/amd64/vmm_machdep.c
5838
rdx = &vcpu->vc_gueststate.vg_rdx;
sys/arch/amd64/amd64/vmm_machdep.c
5848
*rax = (vcpu->vc_shadow_pat & 0xFFFFFFFFULL);
sys/arch/amd64/amd64/vmm_machdep.c
5849
*rdx = (vcpu->vc_shadow_pat >> 32);
sys/arch/amd64/amd64/vmm_machdep.c
5855
ret = vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
5859
vcpu->vc_gueststate.vg_rip += insn_length;
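
rdmsr returns its result in %edx:%eax, so the shadow-PAT hits at 5848-5849 (and again at 6159-6160) split the 64-bit value into halves, and the wrmsr paths recombine them. The round trip as plain arithmetic, using the architectural default PAT value as the example:

    #include <assert.h>
    #include <stdint.h>

    int
    main(void)
    {
        uint64_t pat = 0x0007040600070406ULL;   /* power-on PAT value */
        uint32_t eax = pat & 0xFFFFFFFFULL;     /* low half -> %eax */
        uint32_t edx = pat >> 32;               /* high half -> %edx */

        /* wrmsr side: recombine %edx:%eax into one 64-bit value. */
        assert((((uint64_t)edx << 32) | eax) == pat);
        return 0;
    }
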
sys/arch/amd64/amd64/vmm_machdep.c
5877
vmx_handle_xsetbv(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
5894
rax = &vcpu->vc_gueststate.vg_rax;
sys/arch/amd64/amd64/vmm_machdep.c
5896
ret = vmm_handle_xsetbv(vcpu, rax);
sys/arch/amd64/amd64/vmm_machdep.c
5898
vcpu->vc_gueststate.vg_rip += insn_length;
sys/arch/amd64/amd64/vmm_machdep.c
5916
svm_handle_xsetbv(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
5920
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
5927
ret = vmm_handle_xsetbv(vcpu, rax);
sys/arch/amd64/amd64/vmm_machdep.c
5929
vcpu->vc_gueststate.vg_rip += insn_length;
sys/arch/amd64/amd64/vmm_machdep.c
593
struct vcpu *vcpu;
sys/arch/amd64/amd64/vmm_machdep.c
5949
vmm_handle_xsetbv(struct vcpu *vcpu, uint64_t *rax)
sys/arch/amd64/amd64/vmm_machdep.c
5953
rcx = &vcpu->vc_gueststate.vg_rcx;
sys/arch/amd64/amd64/vmm_machdep.c
5954
rdx = &vcpu->vc_gueststate.vg_rdx;
sys/arch/amd64/amd64/vmm_machdep.c
5956
if (vmm_get_guest_cpu_cpl(vcpu) != 0) {
sys/arch/amd64/amd64/vmm_machdep.c
5958
return (vmm_inject_gp(vcpu));
sys/arch/amd64/amd64/vmm_machdep.c
5964
return (vmm_inject_gp(vcpu));
sys/arch/amd64/amd64/vmm_machdep.c
5975
return (vmm_inject_gp(vcpu));
sys/arch/amd64/amd64/vmm_machdep.c
5978
vcpu->vc_gueststate.vg_xcr0 = val;
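
vmm_handle_xsetbv() reflects the architectural XSETBV rules: three distinct #GP injections guard the update of vg_xcr0, and only CPL 0 may reach it. A sketch of the usual validation, with an assumed order of checks and an illustrative supported-feature mask:

    #include <stdint.h>

    #define XFEATURE_X87    0x1ULL
    #define SUPPORTED_XCR0  0x7ULL      /* illustrative: x87|SSE|AVX */

    /* Returns 0 on success, -1 where the kernel would inject #GP. */
    static int
    check_xsetbv(int cpl, uint64_t ecx, uint32_t eax, uint32_t edx,
        uint64_t *xcr0)
    {
        uint64_t val = ((uint64_t)edx << 32) | eax;

        if (cpl != 0)
            return -1;                  /* #GP: not ring 0 */
        if (ecx != 0)
            return -1;                  /* #GP: only XCR0 exists */
        if ((val & XFEATURE_X87) == 0 || (val & ~SUPPORTED_XCR0))
            return -1;                  /* #GP: invalid feature set */
        *xcr0 = val;
        return 0;
    }

    int
    main(void)
    {
        uint64_t xcr0 = XFEATURE_X87;

        return check_xsetbv(0, 0, 0x7, 0, &xcr0);  /* x87|SSE|AVX */
    }
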
sys/arch/amd64/amd64/vmm_machdep.c
5995
vmx_handle_misc_enable_msr(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
6000
rax = &vcpu->vc_gueststate.vg_rax;
sys/arch/amd64/amd64/vmm_machdep.c
6001
rdx = &vcpu->vc_gueststate.vg_rdx;
sys/arch/amd64/amd64/vmm_machdep.c
6002
msr_store = (struct vmx_msr_store *)vcpu->vc_vmx_msr_exit_save_va;
sys/arch/amd64/amd64/vmm_machdep.c
6026
vmx_handle_wrmsr(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
604
vcpu = vm_find_vcpu(vm, vrwp->vrwp_vcpu_id);
sys/arch/amd64/amd64/vmm_machdep.c
6043
rax = &vcpu->vc_gueststate.vg_rax;
sys/arch/amd64/amd64/vmm_machdep.c
6044
rcx = &vcpu->vc_gueststate.vg_rcx;
sys/arch/amd64/amd64/vmm_machdep.c
6045
rdx = &vcpu->vc_gueststate.vg_rdx;
sys/arch/amd64/amd64/vmm_machdep.c
6051
ret = vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
6054
vcpu->vc_shadow_pat = val;
sys/arch/amd64/amd64/vmm_machdep.c
6057
vmx_handle_misc_enable_msr(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
606
if (vcpu == NULL) {
sys/arch/amd64/amd64/vmm_machdep.c
6066
ret = vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
6069
vmm_init_pvclock(vcpu,
sys/arch/amd64/amd64/vmm_machdep.c
6073
vmm_pv_wall_clock(vcpu,
sys/arch/amd64/amd64/vmm_machdep.c
6087
vcpu->vc_gueststate.vg_rip += insn_length;
sys/arch/amd64/amd64/vmm_machdep.c
6104
svm_handle_msr(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
6108
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
611
rw_enter_write(&vcpu->vc_lock);
sys/arch/amd64/amd64/vmm_machdep.c
6115
rcx = &vcpu->vc_gueststate.vg_rcx;
sys/arch/amd64/amd64/vmm_machdep.c
6116
rdx = &vcpu->vc_gueststate.vg_rdx;
sys/arch/amd64/amd64/vmm_machdep.c
6125
ret = vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
6128
vcpu->vc_shadow_pat = val;
sys/arch/amd64/amd64/vmm_machdep.c
6134
vmm_init_pvclock(vcpu,
sys/arch/amd64/amd64/vmm_machdep.c
6138
vmm_pv_wall_clock(vcpu,
sys/arch/amd64/amd64/vmm_machdep.c
614
vcpu_readregs_vmx(vcpu, vrwp->vrwp_mask, 1, vrs) :
sys/arch/amd64/amd64/vmm_machdep.c
615
vcpu_writeregs_vmx(vcpu, vrwp->vrwp_mask, 1, vrs);
sys/arch/amd64/amd64/vmm_machdep.c
6159
*rax = (vcpu->vc_shadow_pat & 0xFFFFFFFFULL);
sys/arch/amd64/amd64/vmm_machdep.c
6160
*rdx = (vcpu->vc_shadow_pat >> 32);
sys/arch/amd64/amd64/vmm_machdep.c
6174
ret = vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
6179
vcpu->vc_gueststate.vg_rip += insn_length;
sys/arch/amd64/amd64/vmm_machdep.c
618
vcpu_readregs_svm(vcpu, vrwp->vrwp_mask, vrs) :
sys/arch/amd64/amd64/vmm_machdep.c
6186
vmm_handle_cpuid_0xd(struct vcpu *vcpu, uint32_t subleaf, uint64_t *rax,
sys/arch/amd64/amd64/vmm_machdep.c
6189
uint64_t xcr0 = vcpu->vc_gueststate.vg_xcr0;
sys/arch/amd64/amd64/vmm_machdep.c
619
vcpu_writeregs_svm(vcpu, vrwp->vrwp_mask, vrs);
sys/arch/amd64/amd64/vmm_machdep.c
624
rw_exit_write(&vcpu->vc_lock);
sys/arch/amd64/amd64/vmm_machdep.c
6251
vcpu->vc_gueststate.vg_rbx = ebx;
sys/arch/amd64/amd64/vmm_machdep.c
6252
vcpu->vc_gueststate.vg_rcx = ecx;
sys/arch/amd64/amd64/vmm_machdep.c
6253
vcpu->vc_gueststate.vg_rdx = edx;
sys/arch/amd64/amd64/vmm_machdep.c
6269
vmm_handle_cpuid(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
6295
rax = &vcpu->vc_gueststate.vg_rax;
sys/arch/amd64/amd64/vmm_machdep.c
6303
(struct vmx_msr_store *)vcpu->vc_vmx_msr_exit_save_va;
sys/arch/amd64/amd64/vmm_machdep.c
6310
vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
6315
rbx = &vcpu->vc_gueststate.vg_rbx;
sys/arch/amd64/amd64/vmm_machdep.c
6316
rcx = &vcpu->vc_gueststate.vg_rcx;
sys/arch/amd64/amd64/vmm_machdep.c
6317
rdx = &vcpu->vc_gueststate.vg_rdx;
sys/arch/amd64/amd64/vmm_machdep.c
6318
vcpu->vc_gueststate.vg_rip += insn_length;
sys/arch/amd64/amd64/vmm_machdep.c
6341
vcpu->vc_gueststate.vg_rip - insn_length,
sys/arch/amd64/amd64/vmm_machdep.c
6363
*rbx |= (vcpu->vc_id & 0xFF) << 24;
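
The hit at 6363 patches CPUID EBX bits 31:24, which carry the initial local APIC ID, so each guest CPU reports its own vcpu id. In isolation (this assumes the host's bits 31:24 were masked off beforehand, as the |= requires):

    #include <stdint.h>

    int
    main(void)
    {
        uint32_t ebx = 0x00100800;      /* leaf-1 EBX, APIC id cleared */
        uint32_t vc_id = 3;             /* this vcpu's id */

        ebx |= (vc_id & 0xFF) << 24;    /* EBX[31:24] = initial APIC ID */
        return (ebx >> 24) != 3;
    }
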
sys/arch/amd64/amd64/vmm_machdep.c
6466
vmm_handle_cpuid_0xd(vcpu, subleaf, rax, eax, ebx, ecx, edx);
sys/arch/amd64/amd64/vmm_machdep.c
6638
vcpu_run_svm(struct vcpu *vcpu, struct vm_run_params *vrp)
sys/arch/amd64/amd64/vmm_machdep.c
6645
struct vmcb *vmcb = (struct vmcb *)vcpu->vc_control_va;
sys/arch/amd64/amd64/vmm_machdep.c
6648
vcpu->vc_intr = 1;
sys/arch/amd64/amd64/vmm_machdep.c
6650
vcpu->vc_intr = 0;
sys/arch/amd64/amd64/vmm_machdep.c
6658
switch (vcpu->vc_gueststate.vg_exit_reason) {
sys/arch/amd64/amd64/vmm_machdep.c
6660
if (vcpu->vc_exit.vei.vei_dir == VEI_DIR_IN) {
sys/arch/amd64/amd64/vmm_machdep.c
6661
vcpu->vc_gueststate.vg_rax =
sys/arch/amd64/amd64/vmm_machdep.c
6662
vcpu->vc_exit.vei.vei_data;
sys/arch/amd64/amd64/vmm_machdep.c
6663
vmcb->v_rax = vcpu->vc_gueststate.vg_rax;
sys/arch/amd64/amd64/vmm_machdep.c
6665
vcpu->vc_gueststate.vg_rip =
sys/arch/amd64/amd64/vmm_machdep.c
6666
vcpu->vc_exit.vrs.vrs_gprs[VCPU_REGS_RIP];
sys/arch/amd64/amd64/vmm_machdep.c
6667
vmcb->v_rip = vcpu->vc_gueststate.vg_rip;
sys/arch/amd64/amd64/vmm_machdep.c
6668
if (svm_vmgexit_sync_guest(vcpu))
sys/arch/amd64/amd64/vmm_machdep.c
6672
ret = vcpu_writeregs_svm(vcpu, VM_RWREGS_GPRS,
sys/arch/amd64/amd64/vmm_machdep.c
6673
&vcpu->vc_exit.vrs);
sys/arch/amd64/amd64/vmm_machdep.c
6677
vcpu->vc_parent->vm_id, vcpu->vc_id);
sys/arch/amd64/amd64/vmm_machdep.c
6682
memset(&vcpu->vc_exit, 0, sizeof(vcpu->vc_exit));
sys/arch/amd64/amd64/vmm_machdep.c
6685
vmm_update_pvclock(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
6695
if (ci != vcpu->vc_last_pcpu) {
sys/arch/amd64/amd64/vmm_machdep.c
6707
svm_set_dirty(vcpu, SVM_CLEANBITS_ALL);
sys/arch/amd64/amd64/vmm_machdep.c
6710
vcpu->vc_last_pcpu = ci;
sys/arch/amd64/amd64/vmm_machdep.c
6720
if (vcpu->vc_inject.vie_type == VCPU_INJECT_INTR &&
sys/arch/amd64/amd64/vmm_machdep.c
6721
vcpu->vc_irqready) {
sys/arch/amd64/amd64/vmm_machdep.c
6722
vmcb->v_eventinj = vcpu->vc_inject.vie_vector |
sys/arch/amd64/amd64/vmm_machdep.c
6724
vcpu->vc_inject.vie_type = VCPU_INJECT_NONE;
sys/arch/amd64/amd64/vmm_machdep.c
6728
if (vcpu->vc_inject.vie_type == VCPU_INJECT_EX) {
sys/arch/amd64/amd64/vmm_machdep.c
6729
vmcb->v_eventinj = vcpu->vc_inject.vie_vector;
sys/arch/amd64/amd64/vmm_machdep.c
6732
switch (vcpu->vc_inject.vie_vector) {
sys/arch/amd64/amd64/vmm_machdep.c
6747
vcpu->vc_inject.vie_errorcode = 0;
sys/arch/amd64/amd64/vmm_machdep.c
6762
vcpu->vc_inject.vie_errorcode << 32;
sys/arch/amd64/amd64/vmm_machdep.c
6767
__func__, vcpu->vc_inject.vie_vector);
sys/arch/amd64/amd64/vmm_machdep.c
6775
vcpu->vc_inject.vie_type = VCPU_INJECT_NONE;
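
The injection hits in vcpu_run_svm() compose the VMCB EVENTINJ field, with the error code going into the high 32 bits (the hit at 6762). A sketch assuming the AMD APM layout, vector in bits 7:0, type in bits 10:8, error-code-valid in bit 11, valid in bit 31; the constants here are illustrative:

    #include <stdint.h>

    #define EVENTINJ_TYPE_INTR   (0ULL << 8)   /* external interrupt */
    #define EVENTINJ_TYPE_EXCP   (3ULL << 8)   /* exception */
    #define EVENTINJ_EV          (1ULL << 11)  /* error code valid */
    #define EVENTINJ_VALID       (1ULL << 31)

    static uint64_t
    eventinj_exception(uint8_t vector, int has_errcode, uint32_t errcode)
    {
        uint64_t ev = vector | EVENTINJ_TYPE_EXCP | EVENTINJ_VALID;

        if (has_errcode)
            ev |= EVENTINJ_EV | ((uint64_t)errcode << 32);
        return ev;
    }

    int
    main(void)
    {
        /* #GP(0), as vmm_inject_gp() would queue it. */
        return eventinj_exception(13, 1, 0) !=
            (13ULL | (3ULL << 8) | (1ULL << 11) | (1ULL << 31));
    }
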
sys/arch/amd64/amd64/vmm_machdep.c
6778
TRACEPOINT(vmm, guest_enter, vcpu, vrp);
sys/arch/amd64/amd64/vmm_machdep.c
6783
if ((ret = vmm_fpurestore(vcpu))) {
sys/arch/amd64/amd64/vmm_machdep.c
6792
if (ci->ci_guest_vcpu != vcpu &&
sys/arch/amd64/amd64/vmm_machdep.c
6795
ci->ci_guest_vcpu = vcpu;
sys/arch/amd64/amd64/vmm_machdep.c
6800
wrpkru(0, vcpu->vc_pkru);
sys/arch/amd64/amd64/vmm_machdep.c
6803
wrmsr(MSR_AMD_VM_HSAVE_PA, vcpu->vc_svm_hsa_pa);
sys/arch/amd64/amd64/vmm_machdep.c
6805
if (vcpu->vc_seves) {
sys/arch/amd64/amd64/vmm_machdep.c
6806
ret = svm_seves_enter_guest(vcpu->vc_control_pa,
sys/arch/amd64/amd64/vmm_machdep.c
6807
vcpu->vc_svm_hsa_va + SVM_HSA_OFFSET, &gdt);
sys/arch/amd64/amd64/vmm_machdep.c
6809
ret = svm_enter_guest(vcpu->vc_control_pa,
sys/arch/amd64/amd64/vmm_machdep.c
6810
&vcpu->vc_gueststate, &gdt);
sys/arch/amd64/amd64/vmm_machdep.c
6815
vcpu->vc_pkru = rdpkru(0);
sys/arch/amd64/amd64/vmm_machdep.c
6824
vmm_fpusave(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
6832
vcpu->vc_gueststate.vg_rip = vmcb->v_rip;
sys/arch/amd64/amd64/vmm_machdep.c
6834
svm_set_clean(vcpu, SVM_CLEANBITS_ALL);
sys/arch/amd64/amd64/vmm_machdep.c
6839
vcpu->vc_gueststate.vg_exit_reason = exit_reason;
sys/arch/amd64/amd64/vmm_machdep.c
6840
TRACEPOINT(vmm, guest_exit, vcpu, vrp, exit_reason);
sys/arch/amd64/amd64/vmm_machdep.c
6842
vcpu->vc_gueststate.vg_rflags = vmcb->v_rflags;
sys/arch/amd64/amd64/vmm_machdep.c
6848
ret = svm_handle_exit(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
6850
if (svm_get_iflag(vcpu, vcpu->vc_gueststate.vg_rflags))
sys/arch/amd64/amd64/vmm_machdep.c
6851
vcpu->vc_irqready = 1;
sys/arch/amd64/amd64/vmm_machdep.c
6853
vcpu->vc_irqready = 0;
sys/arch/amd64/amd64/vmm_machdep.c
6859
if (vcpu->vc_irqready == 0 && vcpu->vc_intr) {
sys/arch/amd64/amd64/vmm_machdep.c
6864
svm_set_dirty(vcpu, SVM_CLEANBITS_TPR |
sys/arch/amd64/amd64/vmm_machdep.c
6872
if (ret || vcpu_must_stop(vcpu))
sys/arch/amd64/amd64/vmm_machdep.c
6875
if (vcpu->vc_intr && vcpu->vc_irqready) {
sys/arch/amd64/amd64/vmm_machdep.c
6893
if (vcpu_readregs_svm(vcpu, VM_RWREGS_ALL, &vcpu->vc_exit.vrs))
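
The run-loop hits around 6850-6875 implement the interrupt window: RFLAGS.IF from the last exit sets vc_irqready, injection happens only when the guest can accept it, and a pending interrupt with IF clear makes the loop arm a re-exit (on SVM via the TPR/VINTR state marked dirty at 6864). The decision reduced to a pure function, names illustrative:

    enum irq_action {
        IRQ_INJECT,     /* pending and IF set: inject now */
        IRQ_WINDOW,     /* pending but IF clear: request a window exit */
        IRQ_NONE,       /* nothing pending */
    };

    static enum irq_action
    irq_decide(int intr_pending, int guest_if)
    {
        if (!intr_pending)
            return IRQ_NONE;
        return guest_if ? IRQ_INJECT : IRQ_WINDOW;
    }

    int
    main(void)
    {
        /* Pending interrupt, guest running with interrupts disabled. */
        return irq_decide(1, 0) != IRQ_WINDOW;
    }
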
sys/arch/amd64/amd64/vmm_machdep.c
6914
vmm_alloc_vpid_vcpu(uint16_t *vpid, struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
6921
if (vcpu == NULL || vcpu->vc_seves || amd64_min_noes_asid == 0)
sys/arch/amd64/amd64/vmm_machdep.c
6954
vmm_alloc_asid(uint16_t *asid, struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
6956
return vmm_alloc_vpid_vcpu(asid, vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
6997
vmm_gpa_is_valid(struct vcpu *vcpu, paddr_t gpa, size_t obj_size)
sys/arch/amd64/amd64/vmm_machdep.c
6999
struct vm *vm = vcpu->vc_parent;
sys/arch/amd64/amd64/vmm_machdep.c
7015
vmm_init_pvclock(struct vcpu *vcpu, paddr_t gpa)
sys/arch/amd64/amd64/vmm_machdep.c
7018
if (!vmm_gpa_is_valid(vcpu, pvclock_gpa,
sys/arch/amd64/amd64/vmm_machdep.c
7021
vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
7028
vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
7032
vcpu->vc_pvclock_system_gpa = gpa;
sys/arch/amd64/amd64/vmm_machdep.c
7034
vcpu->vc_pvclock_system_tsc_mul =
sys/arch/amd64/amd64/vmm_machdep.c
7037
vcpu->vc_pvclock_system_tsc_mul = 0;
sys/arch/amd64/amd64/vmm_machdep.c
7038
vmm_update_pvclock(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
7042
vmm_update_pvclock(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
7046
struct vm *vm = vcpu->vc_parent;
sys/arch/amd64/amd64/vmm_machdep.c
7049
if (vcpu->vc_pvclock_system_gpa & PVCLOCK_SYSTEM_TIME_ENABLE) {
sys/arch/amd64/amd64/vmm_machdep.c
7050
pvclock_gpa = vcpu->vc_pvclock_system_gpa & 0xFFFFFFFFFFFFFFF0;
sys/arch/amd64/amd64/vmm_machdep.c
7057
(++vcpu->vc_pvclock_version << 1) | 0x1;
sys/arch/amd64/amd64/vmm_machdep.c
7065
vcpu->vc_pvclock_system_tsc_mul;
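
vmm_update_pvclock() follows the pvclock seqlock convention: the hit at 7057 makes the guest-visible version odd before rewriting the time fields, and it goes even again once they are consistent, so a reader retries until it observes a stable even version. The guest-side loop, sketched against a hypothetical time-info layout (the real structure also carries the TSC multiplier seen at 7065):

    #include <stdint.h>

    /* Shape of the shared page; field names are illustrative only. */
    struct pv_time {
        volatile uint32_t version;
        volatile uint64_t tsc_timestamp;
        volatile uint64_t system_time;
    };

    static uint64_t
    pv_read_time(struct pv_time *ti)
    {
        uint32_t v;
        uint64_t t;

        do {
            v = ti->version;        /* retry while odd (update in */
            t = ti->system_time;    /* progress) or if it changed */
        } while ((v & 1) || v != ti->version);
        return t;
    }

    int
    main(void)
    {
        struct pv_time ti = { .version = 4, .system_time = 12345 };

        return pv_read_time(&ti) != 12345;
    }
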
sys/arch/amd64/amd64/vmm_machdep.c
7075
vmm_pv_wall_clock(struct vcpu *vcpu, paddr_t gpa)
sys/arch/amd64/amd64/vmm_machdep.c
7079
struct vm *vm = vcpu->vc_parent;
sys/arch/amd64/amd64/vmm_machdep.c
7082
if (!vmm_gpa_is_valid(vcpu, gpa, sizeof(struct pvclock_wall_clock)))
sys/arch/amd64/amd64/vmm_machdep.c
7100
vmm_inject_gp(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
7410
struct vcpu *vcpu;
sys/arch/amd64/amd64/vmm_machdep.c
7417
vcpu = vm_find_vcpu(vm, vcpuid);
sys/arch/amd64/amd64/vmm_machdep.c
7418
if (vcpu == NULL || !vcpu->vc_seves) {
sys/arch/amd64/amd64/vmm_machdep.c
7424
*vmsapa = vcpu->vc_svm_vmsa_pa;
sys/arch/amd64/amd64/vmm_machdep.c
7438
dump_vcpu(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
7440
printf("vcpu @ %p\n", vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
7441
printf(" parent vm @ %p\n", vcpu->vc_parent);
sys/arch/amd64/amd64/vmm_machdep.c
7443
if (vcpu->vc_virt_mode == VMM_MODE_EPT) {
sys/arch/amd64/amd64/vmm_machdep.c
7446
vcpu->vc_vmx_pinbased_ctls);
sys/arch/amd64/amd64/vmm_machdep.c
7448
vcpu->vc_vmx_true_pinbased_ctls);
sys/arch/amd64/amd64/vmm_machdep.c
7449
CTRL_DUMP(vcpu, PINBASED, EXTERNAL_INT_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7450
CTRL_DUMP(vcpu, PINBASED, NMI_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7451
CTRL_DUMP(vcpu, PINBASED, VIRTUAL_NMIS);
sys/arch/amd64/amd64/vmm_machdep.c
7452
CTRL_DUMP(vcpu, PINBASED, ACTIVATE_VMX_PREEMPTION_TIMER);
sys/arch/amd64/amd64/vmm_machdep.c
7453
CTRL_DUMP(vcpu, PINBASED, PROCESS_POSTED_INTERRUPTS);
sys/arch/amd64/amd64/vmm_machdep.c
7455
vcpu->vc_vmx_procbased_ctls);
sys/arch/amd64/amd64/vmm_machdep.c
7457
vcpu->vc_vmx_true_procbased_ctls);
sys/arch/amd64/amd64/vmm_machdep.c
7458
CTRL_DUMP(vcpu, PROCBASED, INTERRUPT_WINDOW_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7459
CTRL_DUMP(vcpu, PROCBASED, USE_TSC_OFFSETTING);
sys/arch/amd64/amd64/vmm_machdep.c
7460
CTRL_DUMP(vcpu, PROCBASED, HLT_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7461
CTRL_DUMP(vcpu, PROCBASED, INVLPG_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7462
CTRL_DUMP(vcpu, PROCBASED, MWAIT_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7463
CTRL_DUMP(vcpu, PROCBASED, RDPMC_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7464
CTRL_DUMP(vcpu, PROCBASED, RDTSC_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7465
CTRL_DUMP(vcpu, PROCBASED, CR3_LOAD_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7466
CTRL_DUMP(vcpu, PROCBASED, CR3_STORE_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7467
CTRL_DUMP(vcpu, PROCBASED, CR8_LOAD_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7468
CTRL_DUMP(vcpu, PROCBASED, CR8_STORE_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7469
CTRL_DUMP(vcpu, PROCBASED, USE_TPR_SHADOW);
sys/arch/amd64/amd64/vmm_machdep.c
7470
CTRL_DUMP(vcpu, PROCBASED, NMI_WINDOW_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7471
CTRL_DUMP(vcpu, PROCBASED, MOV_DR_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7472
CTRL_DUMP(vcpu, PROCBASED, UNCONDITIONAL_IO_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7473
CTRL_DUMP(vcpu, PROCBASED, USE_IO_BITMAPS);
sys/arch/amd64/amd64/vmm_machdep.c
7474
CTRL_DUMP(vcpu, PROCBASED, MONITOR_TRAP_FLAG);
sys/arch/amd64/amd64/vmm_machdep.c
7475
CTRL_DUMP(vcpu, PROCBASED, USE_MSR_BITMAPS);
sys/arch/amd64/amd64/vmm_machdep.c
7476
CTRL_DUMP(vcpu, PROCBASED, MONITOR_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7477
CTRL_DUMP(vcpu, PROCBASED, PAUSE_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7478
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7481
vcpu->vc_vmx_procbased2_ctls);
sys/arch/amd64/amd64/vmm_machdep.c
7482
CTRL_DUMP(vcpu, PROCBASED2, VIRTUALIZE_APIC);
sys/arch/amd64/amd64/vmm_machdep.c
7483
CTRL_DUMP(vcpu, PROCBASED2, ENABLE_EPT);
sys/arch/amd64/amd64/vmm_machdep.c
7484
CTRL_DUMP(vcpu, PROCBASED2, DESCRIPTOR_TABLE_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7485
CTRL_DUMP(vcpu, PROCBASED2, ENABLE_RDTSCP);
sys/arch/amd64/amd64/vmm_machdep.c
7486
CTRL_DUMP(vcpu, PROCBASED2, VIRTUALIZE_X2APIC_MODE);
sys/arch/amd64/amd64/vmm_machdep.c
7487
CTRL_DUMP(vcpu, PROCBASED2, ENABLE_VPID);
sys/arch/amd64/amd64/vmm_machdep.c
7488
CTRL_DUMP(vcpu, PROCBASED2, WBINVD_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7489
CTRL_DUMP(vcpu, PROCBASED2, UNRESTRICTED_GUEST);
sys/arch/amd64/amd64/vmm_machdep.c
7490
CTRL_DUMP(vcpu, PROCBASED2,
sys/arch/amd64/amd64/vmm_machdep.c
7492
CTRL_DUMP(vcpu, PROCBASED2,
sys/arch/amd64/amd64/vmm_machdep.c
7494
CTRL_DUMP(vcpu, PROCBASED2, PAUSE_LOOP_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7495
CTRL_DUMP(vcpu, PROCBASED2, RDRAND_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7496
CTRL_DUMP(vcpu, PROCBASED2, ENABLE_INVPCID);
sys/arch/amd64/amd64/vmm_machdep.c
7497
CTRL_DUMP(vcpu, PROCBASED2, ENABLE_VM_FUNCTIONS);
sys/arch/amd64/amd64/vmm_machdep.c
7498
CTRL_DUMP(vcpu, PROCBASED2, VMCS_SHADOWING);
sys/arch/amd64/amd64/vmm_machdep.c
7499
CTRL_DUMP(vcpu, PROCBASED2, ENABLE_ENCLS_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7500
CTRL_DUMP(vcpu, PROCBASED2, RDSEED_EXITING);
sys/arch/amd64/amd64/vmm_machdep.c
7501
CTRL_DUMP(vcpu, PROCBASED2, ENABLE_PML);
sys/arch/amd64/amd64/vmm_machdep.c
7502
CTRL_DUMP(vcpu, PROCBASED2, EPT_VIOLATION_VE);
sys/arch/amd64/amd64/vmm_machdep.c
7503
CTRL_DUMP(vcpu, PROCBASED2, CONCEAL_VMX_FROM_PT);
sys/arch/amd64/amd64/vmm_machdep.c
7504
CTRL_DUMP(vcpu, PROCBASED2, ENABLE_XSAVES_XRSTORS);
sys/arch/amd64/amd64/vmm_machdep.c
7505
CTRL_DUMP(vcpu, PROCBASED2, ENABLE_TSC_SCALING);
sys/arch/amd64/amd64/vmm_machdep.c
7508
vcpu->vc_vmx_entry_ctls);
sys/arch/amd64/amd64/vmm_machdep.c
7510
vcpu->vc_vmx_true_entry_ctls);
sys/arch/amd64/amd64/vmm_machdep.c
7511
CTRL_DUMP(vcpu, ENTRY, LOAD_DEBUG_CONTROLS);
sys/arch/amd64/amd64/vmm_machdep.c
7512
CTRL_DUMP(vcpu, ENTRY, IA32E_MODE_GUEST);
sys/arch/amd64/amd64/vmm_machdep.c
7513
CTRL_DUMP(vcpu, ENTRY, ENTRY_TO_SMM);
sys/arch/amd64/amd64/vmm_machdep.c
7514
CTRL_DUMP(vcpu, ENTRY, DEACTIVATE_DUAL_MONITOR_TREATMENT);
sys/arch/amd64/amd64/vmm_machdep.c
7515
CTRL_DUMP(vcpu, ENTRY, LOAD_IA32_PERF_GLOBAL_CTRL_ON_ENTRY);
sys/arch/amd64/amd64/vmm_machdep.c
7516
CTRL_DUMP(vcpu, ENTRY, LOAD_IA32_PAT_ON_ENTRY);
sys/arch/amd64/amd64/vmm_machdep.c
7517
CTRL_DUMP(vcpu, ENTRY, LOAD_IA32_EFER_ON_ENTRY);
sys/arch/amd64/amd64/vmm_machdep.c
7518
CTRL_DUMP(vcpu, ENTRY, LOAD_IA32_BNDCFGS_ON_ENTRY);
sys/arch/amd64/amd64/vmm_machdep.c
7519
CTRL_DUMP(vcpu, ENTRY, CONCEAL_VM_ENTRIES_FROM_PT);
sys/arch/amd64/amd64/vmm_machdep.c
7521
vcpu->vc_vmx_exit_ctls);
sys/arch/amd64/amd64/vmm_machdep.c
7523
vcpu->vc_vmx_true_exit_ctls);
sys/arch/amd64/amd64/vmm_machdep.c
7524
CTRL_DUMP(vcpu, EXIT, SAVE_DEBUG_CONTROLS);
sys/arch/amd64/amd64/vmm_machdep.c
7525
CTRL_DUMP(vcpu, EXIT, HOST_SPACE_ADDRESS_SIZE);
sys/arch/amd64/amd64/vmm_machdep.c
7526
CTRL_DUMP(vcpu, EXIT, LOAD_IA32_PERF_GLOBAL_CTRL_ON_EXIT);
sys/arch/amd64/amd64/vmm_machdep.c
7527
CTRL_DUMP(vcpu, EXIT, ACKNOWLEDGE_INTERRUPT_ON_EXIT);
sys/arch/amd64/amd64/vmm_machdep.c
7528
CTRL_DUMP(vcpu, EXIT, SAVE_IA32_PAT_ON_EXIT);
sys/arch/amd64/amd64/vmm_machdep.c
7529
CTRL_DUMP(vcpu, EXIT, LOAD_IA32_PAT_ON_EXIT);
sys/arch/amd64/amd64/vmm_machdep.c
7530
CTRL_DUMP(vcpu, EXIT, SAVE_IA32_EFER_ON_EXIT);
sys/arch/amd64/amd64/vmm_machdep.c
7531
CTRL_DUMP(vcpu, EXIT, LOAD_IA32_EFER_ON_EXIT);
sys/arch/amd64/amd64/vmm_machdep.c
7532
CTRL_DUMP(vcpu, EXIT, SAVE_VMX_PREEMPTION_TIMER);
sys/arch/amd64/amd64/vmm_machdep.c
7533
CTRL_DUMP(vcpu, EXIT, CLEAR_IA32_BNDCFGS_ON_EXIT);
sys/arch/amd64/amd64/vmm_machdep.c
7534
CTRL_DUMP(vcpu, EXIT, CONCEAL_VM_EXITS_FROM_PT);
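
The CTRL_DUMP runs in dump_vcpu() suggest a token-pasting macro that asks vcpu_vmx_check_cap() whether each VMX control can be set and cleared. The real definition is not among the hits; a guess at its shape, with stubbed capability data:

    #include <stdint.h>
    #include <stdio.h>

    /* MSR number and control bit, as an illustrative pair. */
    #define IA32_VMX_PINBASED_CTLS  0x481
    #define IA32_VMX_NMI_EXITING    (1U << 3)

    struct vcpu { uint32_t settable; };

    /* Stub for vcpu_vmx_check_cap(): can bit be set (1) / cleared (0)? */
    static int
    check_cap(struct vcpu *v, uint32_t msr, uint32_t bit, int set)
    {
        (void)msr;
        return set ? (v->settable & bit) != 0 : 1;
    }

    /* Guessed shape of CTRL_DUMP: paste type and control into names. */
    #define CTRL_DUMP(v, type, ctl)                                     \
        printf("  %s: Can set:%s Can clear:%s\n", #ctl,                 \
            check_cap(v, IA32_VMX_##type##_CTLS, IA32_VMX_##ctl, 1) ?   \
                "Yes" : "No",                                           \
            check_cap(v, IA32_VMX_##type##_CTLS, IA32_VMX_##ctl, 0) ?   \
                "Yes" : "No")

    int
    main(void)
    {
        struct vcpu v = { .settable = IA32_VMX_NMI_EXITING };

        CTRL_DUMP(&v, PINBASED, NMI_EXITING);
        return 0;
    }
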
sys/arch/amd64/amd64/vmm_machdep.c
7582
vmx_dump_vmcs(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
7591
(vcpu->vc_vmx_vmcs_state == VMCS_LAUNCHED) ? "Yes" : "No");
sys/arch/amd64/amd64/vmm_machdep.c
7605
has_sec = vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7609
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7615
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PINBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7622
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7641
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7647
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7668
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7684
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7694
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
77
int vcpu_readregs_vmx(struct vcpu *, uint64_t, int, struct vcpu_reg_state *);
sys/arch/amd64/amd64/vmm_machdep.c
7702
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7710
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PINBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7718
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7725
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7732
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7748
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7758
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7765
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7772
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7779
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7786
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7799
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_ENTRY_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
78
int vcpu_readregs_svm(struct vcpu *, uint64_t, struct vcpu_reg_state *);
sys/arch/amd64/amd64/vmm_machdep.c
7801
vcpu_vmx_check_cap(vcpu, IA32_VMX_EXIT_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7808
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_ENTRY_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7810
vcpu_vmx_check_cap(vcpu, IA32_VMX_EXIT_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7817
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_ENTRY_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7825
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7838
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_ENTRY_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7840
vcpu_vmx_check_cap(vcpu, IA32_VMX_EXIT_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7847
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_EXIT_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7854
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_EXIT_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7861
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_EXIT_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7891
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
79
int vcpu_writeregs_vmx(struct vcpu *, uint64_t, int, struct vcpu_reg_state *);
sys/arch/amd64/amd64/vmm_machdep.c
7900
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PROCBASED2_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
7970
if (vcpu_vmx_check_cap(vcpu, IA32_VMX_PINBASED_CTLS,
sys/arch/amd64/amd64/vmm_machdep.c
80
int vcpu_writeregs_svm(struct vcpu *, uint64_t, struct vcpu_reg_state *);
sys/arch/amd64/amd64/vmm_machdep.c
8091
vmx_vcpu_dump_regs(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
8098
DPRINTF("vcpu @ %p in %s mode\n", vcpu, vmm_decode_cpu_mode(vcpu));
sys/arch/amd64/amd64/vmm_machdep.c
8099
i = vmm_get_guest_cpu_cpl(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
81
int vcpu_reset_regs(struct vcpu *, struct vcpu_reg_state *);
sys/arch/amd64/amd64/vmm_machdep.c
8105
vcpu->vc_gueststate.vg_rax, vcpu->vc_gueststate.vg_rbx,
sys/arch/amd64/amd64/vmm_machdep.c
8106
vcpu->vc_gueststate.vg_rcx);
sys/arch/amd64/amd64/vmm_machdep.c
8108
vcpu->vc_gueststate.vg_rdx, vcpu->vc_gueststate.vg_rbp,
sys/arch/amd64/amd64/vmm_machdep.c
8109
vcpu->vc_gueststate.vg_rdi);
sys/arch/amd64/amd64/vmm_machdep.c
8111
vcpu->vc_gueststate.vg_rsi, vcpu->vc_gueststate.vg_r8,
sys/arch/amd64/amd64/vmm_machdep.c
8112
vcpu->vc_gueststate.vg_r9);
sys/arch/amd64/amd64/vmm_machdep.c
8114
vcpu->vc_gueststate.vg_r10, vcpu->vc_gueststate.vg_r11,
sys/arch/amd64/amd64/vmm_machdep.c
8115
vcpu->vc_gueststate.vg_r12);
sys/arch/amd64/amd64/vmm_machdep.c
8117
vcpu->vc_gueststate.vg_r13, vcpu->vc_gueststate.vg_r14,
sys/arch/amd64/amd64/vmm_machdep.c
8118
vcpu->vc_gueststate.vg_r15);
sys/arch/amd64/amd64/vmm_machdep.c
8120
DPRINTF(" rip=0x%016llx rsp=", vcpu->vc_gueststate.vg_rip);
sys/arch/amd64/amd64/vmm_machdep.c
8142
DPRINTF(" cr2=0x%016llx\n", vcpu->vc_gueststate.vg_cr2);
sys/arch/amd64/amd64/vmm_machdep.c
82
int vcpu_reset_regs_vmx(struct vcpu *, struct vcpu_reg_state *);
sys/arch/amd64/amd64/vmm_machdep.c
83
int vcpu_reset_regs_svm(struct vcpu *, struct vcpu_reg_state *);
sys/arch/amd64/amd64/vmm_machdep.c
8395
(uint64_t)vcpu->vc_vmx_msr_exit_save_va,
sys/arch/amd64/amd64/vmm_machdep.c
8396
(uint64_t)vcpu->vc_vmx_msr_exit_save_pa);
sys/arch/amd64/amd64/vmm_machdep.c
8398
msr_store = (struct vmx_msr_store *)vcpu->vc_vmx_msr_exit_save_va;
sys/arch/amd64/amd64/vmm_machdep.c
84
int vcpu_svm_init_vmsa(struct vcpu *, struct vcpu_reg_state *);
sys/arch/amd64/amd64/vmm_machdep.c
85
int vcpu_reload_vmcs_vmx(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
86
int vcpu_init(struct vcpu *, struct vm_create_params *);
sys/arch/amd64/amd64/vmm_machdep.c
87
int vcpu_init_vmx(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
874
vmx_remote_vmclear(struct cpu_info *ci, struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
88
int vcpu_init_svm(struct vcpu *, struct vm_create_params *);
sys/arch/amd64/amd64/vmm_machdep.c
881
atomic_swap_ulong(&ci->ci_vmcs_pa, vcpu->vc_control_pa);
sys/arch/amd64/amd64/vmm_machdep.c
8856
vmm_decode_cpu_mode(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
8858
int mode = vmm_get_guest_cpu_mode(vcpu);
sys/arch/amd64/amd64/vmm_machdep.c
89
int vcpu_run_vmx(struct vcpu *, struct vm_run_params *);
sys/arch/amd64/amd64/vmm_machdep.c
894
atomic_swap_uint(&vcpu->vc_vmx_vmcs_state, VMCS_CLEARED);
sys/arch/amd64/amd64/vmm_machdep.c
90
int vcpu_run_svm(struct vcpu *, struct vm_run_params *);
sys/arch/amd64/amd64/vmm_machdep.c
91
void vcpu_deinit(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
92
void vcpu_deinit_svm(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
93
void vcpu_deinit_vmx(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
94
int vcpu_vmx_check_cap(struct vcpu *, uint32_t, uint32_t, int);
sys/arch/amd64/amd64/vmm_machdep.c
955
vcpu_reload_vmcs_vmx(struct vcpu *vcpu)
sys/arch/amd64/amd64/vmm_machdep.c
959
rw_assert_wrlock(&vcpu->vc_lock);
sys/arch/amd64/amd64/vmm_machdep.c
962
last_ci = vcpu->vc_last_pcpu;
sys/arch/amd64/amd64/vmm_machdep.c
966
if (vmclear(&vcpu->vc_control_pa))
sys/arch/amd64/amd64/vmm_machdep.c
968
atomic_swap_uint(&vcpu->vc_vmx_vmcs_state, VMCS_CLEARED);
sys/arch/amd64/amd64/vmm_machdep.c
97
int vmx_load_pdptes(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
972
if (vmx_remote_vmclear(last_ci, vcpu))
sys/arch/amd64/amd64/vmm_machdep.c
974
KASSERT(vcpu->vc_vmx_vmcs_state == VMCS_CLEARED);
sys/arch/amd64/amd64/vmm_machdep.c
978
if (vmptrld(&vcpu->vc_control_pa)) {
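
vcpu_reload_vmcs_vmx() handles a vcpu that migrated between host CPUs: an active VMCS must be vmclear'd on the CPU where it was last loaded, locally or via the remote-clear path at 874, before vmptrld activates it here. A model with the hardware operations stubbed out; everything below is illustrative:

    #include <stdint.h>

    /* Stubs standing in for the VMX instructions; always succeed. */
    static int vmclear(uint64_t *pa) { (void)pa; return 0; }
    static int vmptrld(uint64_t *pa) { (void)pa; return 0; }
    static int remote_vmclear(int cpu, uint64_t *pa)
    { (void)cpu; (void)pa; return 0; }

    static int
    reload_vmcs(uint64_t *vmcs_pa, int last_cpu, int cur_cpu)
    {
        if (last_cpu == cur_cpu) {
            if (vmclear(vmcs_pa))               /* clear in place */
                return -1;
        } else {
            if (remote_vmclear(last_cpu, vmcs_pa)) /* IPI the old CPU */
                return -1;
        }
        return vmptrld(vmcs_pa) ? -1 : 0;       /* load on this CPU */
    }

    int
    main(void)
    {
        uint64_t pa = 0x1000;

        return reload_vmcs(&pa, 2, 0);  /* vcpu last ran on CPU 2 */
    }
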
sys/arch/amd64/amd64/vmm_machdep.c
98
int vmx_handle_exit(struct vcpu *);
sys/arch/amd64/amd64/vmm_machdep.c
99
int svm_handle_exit(struct vcpu *);
sys/arch/amd64/include/cpu.h
110
struct vcpu;
sys/arch/amd64/include/cpu.h
246
struct vcpu *ci_guest_vcpu; /* [o] last vcpu resumed */
sys/arch/amd64/include/vmmvar.h
1037
SLIST_HEAD(vcpu_head, vcpu);
sys/arch/amd64/include/vmmvar.h
1065
int vcpu_init(struct vcpu *, struct vm_create_params *);
sys/arch/amd64/include/vmmvar.h
1066
void vcpu_deinit(struct vcpu *);
sys/arch/amd64/include/vmmvar.h
1068
int vcpu_reset_regs(struct vcpu *, struct vcpu_reg_state *);
sys/arch/amd64/include/vmmvar.h
968
SLIST_ENTRY(vcpu) vc_vcpu_link; /* [V] */
sys/dev/pv/xen.c
822
ebv.vcpu = 0;
sys/dev/pv/xen.c
825
sc->sc_dev.dv_xname, ebv.port, ebv.vcpu);
sys/dev/pv/xen.c
845
es.port, es.vcpu);
sys/dev/pv/xenreg.h
495
uint32_t vcpu; /* VCPU to which this channel is bound. */
sys/dev/pv/xenreg.h
524
uint32_t vcpu;
sys/dev/vmm/vmm.c
327
struct vcpu *
sys/dev/vmm/vmm.c
330
struct vcpu *vcpu;
sys/dev/vmm/vmm.c
335
SLIST_FOREACH(vcpu, &vm->vm_vcpu_list, vc_vcpu_link) {
sys/dev/vmm/vmm.c
336
if (vcpu->vc_id == id)
sys/dev/vmm/vmm.c
337
return (vcpu);
sys/dev/vmm/vmm.c
361
struct vcpu *vcpu;
sys/dev/vmm/vmm.c
458
vcpu = pool_get(&vcpu_pool, PR_WAITOK | PR_ZERO);
sys/dev/vmm/vmm.c
460
vcpu->vc_parent = vm;
sys/dev/vmm/vmm.c
461
vcpu->vc_id = vm->vm_vcpu_ct;
sys/dev/vmm/vmm.c
464
if ((ret = vcpu_init(vcpu, vcp)) != 0) {
sys/dev/vmm/vmm.c
466
pool_put(&vcpu_pool, vcpu);
sys/dev/vmm/vmm.c
470
SLIST_INSERT_HEAD(&vm->vm_vcpu_list, vcpu, vc_vcpu_link);
sys/dev/vmm/vmm.c
580
struct vcpu *vcpu, *tmp;
sys/dev/vmm/vmm.c
587
SLIST_FOREACH_SAFE(vcpu, &vm->vm_vcpu_list, vc_vcpu_link, tmp) {
sys/dev/vmm/vmm.c
588
SLIST_REMOVE(&vm->vm_vcpu_list, vcpu, vcpu, vc_vcpu_link);
sys/dev/vmm/vmm.c
589
vcpu_deinit(vcpu);
sys/dev/vmm/vmm.c
590
pool_put(&vcpu_pool, vcpu);
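
The vmm.c hits trace the machine-independent vcpu lifecycle: pool-allocated, linked into the vm's singly-linked list, looked up by id, and torn down with the _SAFE iterator so the element being visited may be removed. A userland reduction using the BSD <sys/queue.h> macros (pool_get/pool_put replaced by malloc/free):

    #include <sys/queue.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct vcpu {
        uint32_t vc_id;
        SLIST_ENTRY(vcpu) vc_vcpu_link;
    };
    SLIST_HEAD(vcpu_head, vcpu);

    static struct vcpu *
    vm_find_vcpu(struct vcpu_head *list, uint32_t id)
    {
        struct vcpu *vcpu;

        SLIST_FOREACH(vcpu, list, vc_vcpu_link)
            if (vcpu->vc_id == id)
                return vcpu;
        return NULL;
    }

    int
    main(void)
    {
        struct vcpu_head list = SLIST_HEAD_INITIALIZER(list);
        struct vcpu *vcpu, *tmp;
        uint32_t i;

        for (i = 0; i < 4; i++) {       /* stand-in for pool_get() */
            vcpu = calloc(1, sizeof(*vcpu));
            vcpu->vc_id = i;
            SLIST_INSERT_HEAD(&list, vcpu, vc_vcpu_link);
        }
        printf("found vcpu %u\n", vm_find_vcpu(&list, 2)->vc_id);

        /* _SAFE allows removing the element we stand on. */
        SLIST_FOREACH_SAFE(vcpu, &list, vc_vcpu_link, tmp) {
            SLIST_REMOVE(&list, vcpu, vcpu, vc_vcpu_link);
            free(vcpu);                 /* stand-in for pool_put() */
        }
        return 0;
    }
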
sys/dev/vmm/vmm.c
640
struct vcpu *vcpu;
sys/dev/vmm/vmm.c
677
SLIST_FOREACH(vcpu, &vm->vm_vcpu_list,
sys/dev/vmm/vmm.c
679
if (vcpu->vc_id == j)
sys/dev/vmm/vmm.c
681
vcpu->vc_state;
sys/dev/vmm/vmm.c
76
pool_init(&vcpu_pool, sizeof(struct vcpu), 64, IPL_MPFLOOR, PR_WAITOK,
sys/dev/vmm/vmm.c
780
struct vcpu *vcpu;
sys/dev/vmm/vmm.c
793
vcpu = vm_find_vcpu(vm, vrp->vrp_vcpu_id);
sys/dev/vmm/vmm.c
795
if (vcpu == NULL) {
sys/dev/vmm/vmm.c
802
rw_enter_write(&vcpu->vc_lock);
sys/dev/vmm/vmm.c
803
if (vcpu->vc_state != VCPU_STATE_STOPPED)
sys/dev/vmm/vmm.c
806
if (vcpu_reset_regs(vcpu, &vrp->vrp_init_state)) {
sys/dev/vmm/vmm.c
809
dump_vcpu(vcpu);
sys/dev/vmm/vmm.c
814
rw_exit_write(&vcpu->vc_lock);
sys/dev/vmm/vmm.c
837
vcpu_must_stop(struct vcpu *vcpu)
sys/dev/vmm/vmm.c
841
if (vcpu->vc_state == VCPU_STATE_REQTERM)
sys/dev/vmm/vmm.h
255
struct vcpu *vm_find_vcpu(struct vm *, uint32_t);
sys/dev/vmm/vmm.h
263
int vcpu_must_stop(struct vcpu *);
sys/dev/vmm/vmm.h
268
void dump_vcpu(struct vcpu *);
usr.sbin/ldomctl/config.c
2812
primary_num_cpus = domain->vcpu;
usr.sbin/ldomctl/config.c
2816
num_cpus += (domain->vcpu * domain->vcpu_stride);
usr.sbin/ldomctl/config.c
2863
for (i = 0; i < domain->vcpu; i++)
usr.sbin/ldomctl/ldomctl.h
185
uint64_t vcpu, vcpu_stride;
usr.sbin/ldomctl/parse.y
108
%type <v.vcpu_opts> vcpu
usr.sbin/ldomctl/parse.y
139
if (domain->vcpu == 0) {
usr.sbin/ldomctl/parse.y
169
domainopts : VCPU vcpu {
usr.sbin/ldomctl/parse.y
170
if (domain->vcpu) {
usr.sbin/ldomctl/parse.y
174
domain->vcpu = $2.count;
usr.sbin/ldomctl/parse.y
291
vcpu : STRING {
usr.sbin/vmd/vionet.c
1550
msg.vcpu = 0; /* XXX: smp */
usr.sbin/vmd/vionet.c
1572
msg.vcpu = 0; /* XXX: smp */
usr.sbin/vmd/virtio.c
1747
vcpu_assert_irq(vmm_id, msg->vcpu, msg->irq);
usr.sbin/vmd/virtio.c
1749
vcpu_deassert_irq(vmm_id, msg->vcpu, msg->irq);
usr.sbin/vmd/virtio.c
1848
vcpu_assert_irq(dev->vmm_id, msg.vcpu, msg.irq);
usr.sbin/vmd/virtio.c
1850
vcpu_deassert_irq(dev->vmm_id, msg.vcpu, msg.irq);
usr.sbin/vmd/virtio.c
1862
virtio_assert_irq(struct virtio_dev *dev, int vcpu)
usr.sbin/vmd/virtio.c
1869
msg.vcpu = vcpu;
usr.sbin/vmd/virtio.c
1880
virtio_deassert_irq(struct virtio_dev *dev, int vcpu)
usr.sbin/vmd/virtio.c
1887
msg.vcpu = vcpu;
usr.sbin/vmd/virtio.h
140
uint8_t vcpu; /* VCPU id */