sys/amd64/include/vmm.h:503: _Static_assert(sizeof(struct vie) == 64, "ABI");
sys/amd64/include/vmm.h:504: _Static_assert(__offsetof(struct vie, disp_bytes) == 22, "ABI");
sys/amd64/include/vmm.h:505: _Static_assert(__offsetof(struct vie, scale) == 24, "ABI");
sys/amd64/include/vmm.h:506: _Static_assert(__offsetof(struct vie, base_register) == 28, "ABI");
sys/amd64/include/vmm.h:594: struct vie vie;
sys/amd64/include/vmm_instruction_emul.h:79: int vmm_emulate_instruction(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/include/vmm_instruction_emul.h:109: uint64_t rip, int inst_length, struct vie *vie,
sys/amd64/include/vmm_instruction_emul.h:131: void vie_restart(struct vie *vie);
sys/amd64/include/vmm_instruction_emul.h:132: void vie_init(struct vie *vie, const char *inst_bytes, int inst_length);
sys/amd64/include/vmm_instruction_emul.h:148: enum vm_cpu_mode cpu_mode, int csd, struct vie *vie);
sys/amd64/include/vmm_instruction_emul.h:155: struct vie *vie);
sys/amd64/vmm/amd/svm.c:932: vie_init(&vmexit->u.inst_emul.vie, inst_bytes, inst_len);
sys/amd64/vmm/intel/vmx.c:2112: vie_init(&vmexit->u.inst_emul.vie, NULL, 0);
sys/amd64/vmm/vmm.c:883: struct vie *vie;
sys/amd64/vmm/vmm.c:901: vie = &vme->u.inst_emul.vie;
sys/amd64/vmm/vmm.c:908: if (vie->num_valid == 0) {
sys/amd64/vmm/vmm.c:910: VIE_INST_SIZE, vie, &fault);
sys/amd64/vmm/vmm.c:920: if (vmm_decode_instruction(vcpu, gla, cpu_mode, cs_d, vie) != 0) {
sys/amd64/vmm/vmm.c:930: vme->inst_length = vie->num_processed;
sys/amd64/vmm/vmm.c:931: vcpu->nextrip += vie->num_processed;
sys/amd64/vmm/vmm.c:950: error = vmm_emulate_instruction(vcpu, gpa, vie, paging, mread, mwrite,
sys/amd64/vmm/vmm_instruction_emul.c:293: vie_calc_bytereg(struct vie *vie, enum vm_reg_name *reg, int *lhbr)
sys/amd64/vmm/vmm_instruction_emul.c:296: *reg = gpr_map[vie->reg];
sys/amd64/vmm/vmm_instruction_emul.c:310: if (!vie->rex_present) {
sys/amd64/vmm/vmm_instruction_emul.c:311: if (vie->reg & 0x4) {
sys/amd64/vmm/vmm_instruction_emul.c:313: *reg = gpr_map[vie->reg & 0x3];
sys/amd64/vmm/vmm_instruction_emul.c:319: vie_read_bytereg(struct vcpu *vcpu, struct vie *vie, uint8_t *rval)
sys/amd64/vmm/vmm_instruction_emul.c:325: vie_calc_bytereg(vie, &reg, &lhbr);
sys/amd64/vmm/vmm_instruction_emul.c:340: vie_write_bytereg(struct vcpu *vcpu, struct vie *vie, uint8_t byte)
sys/amd64/vmm/vmm_instruction_emul.c:346: vie_calc_bytereg(vie, &reg, &lhbr);
sys/amd64/vmm/vmm_instruction_emul.c:502: emulate_mov(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:510: size = vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:513: switch (vie->op.op_byte) {
sys/amd64/vmm/vmm_instruction_emul.c:521: error = vie_read_bytereg(vcpu, vie, &byte);
sys/amd64/vmm/vmm_instruction_emul.c:532: reg = gpr_map[vie->reg];
sys/amd64/vmm/vmm_instruction_emul.c:548: error = vie_write_bytereg(vcpu, vie, val);
sys/amd64/vmm/vmm_instruction_emul.c:559: reg = gpr_map[vie->reg];
sys/amd64/vmm/vmm_instruction_emul.c:596: error = memwrite(vcpu, gpa, vie->immediate, size, arg);
sys/amd64/vmm/vmm_instruction_emul.c:605: val = vie->immediate & size2mask[size];
sys/amd64/vmm/vmm_instruction_emul.c:616: emulate_movx(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:623: size = vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:626: switch (vie->op.op_byte) {
sys/amd64/vmm/vmm_instruction_emul.c:643: reg = gpr_map[vie->reg];
sys/amd64/vmm/vmm_instruction_emul.c:663: reg = gpr_map[vie->reg];
sys/amd64/vmm/vmm_instruction_emul.c:686: reg = gpr_map[vie->reg];
sys/amd64/vmm/vmm_instruction_emul.c:704: get_gla(struct vcpu *vcpu, struct vie *vie __unused,
sys/amd64/vmm/vmm_instruction_emul.c:757: emulate_movs(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:770: opsize = (vie->op.op_byte == 0xA4) ? 1 : vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:781: repeat = vie->repz_present | vie->repnz_present;
sys/amd64/vmm/vmm_instruction_emul.c:791: if ((rcx & vie_size2mask(vie->addrsize)) == 0) {
sys/amd64/vmm/vmm_instruction_emul.c:813: seg = vie->segment_override ? vie->segment_register : VM_REG_GUEST_DS;
sys/amd64/vmm/vmm_instruction_emul.c:814: error = get_gla(vcpu, vie, paging, opsize, vie->addrsize,
sys/amd64/vmm/vmm_instruction_emul.c:839: error = get_gla(vcpu, vie, paging, opsize, vie->addrsize,
sys/amd64/vmm/vmm_instruction_emul.c:913: vie->addrsize);
sys/amd64/vmm/vmm_instruction_emul.c:917: vie->addrsize);
sys/amd64/vmm/vmm_instruction_emul.c:923: rcx, vie->addrsize);
sys/amd64/vmm/vmm_instruction_emul.c:929: if ((rcx & vie_size2mask(vie->addrsize)) != 0)
sys/amd64/vmm/vmm_instruction_emul.c:939: emulate_stos(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:947: opsize = (vie->op.op_byte == 0xAA) ? 1 : vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:948: repeat = vie->repz_present | vie->repnz_present;
sys/amd64/vmm/vmm_instruction_emul.c:958: if ((rcx & vie_size2mask(vie->addrsize)) == 0)
sys/amd64/vmm/vmm_instruction_emul.c:981: vie->addrsize);
sys/amd64/vmm/vmm_instruction_emul.c:987: rcx, vie->addrsize);
sys/amd64/vmm/vmm_instruction_emul.c:993: if ((rcx & vie_size2mask(vie->addrsize)) != 0)
sys/amd64/vmm/vmm_instruction_emul.c:1001: emulate_and(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1008: size = vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:1011: switch (vie->op.op_byte) {
sys/amd64/vmm/vmm_instruction_emul.c:1023: reg = gpr_map[vie->reg];
sys/amd64/vmm/vmm_instruction_emul.c:1061: result = val1 & vie->immediate;
sys/amd64/vmm/vmm_instruction_emul.c:1089: emulate_or(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1096: size = vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:1099: switch (vie->op.op_byte) {
sys/amd64/vmm/vmm_instruction_emul.c:1111: reg = gpr_map[vie->reg];
sys/amd64/vmm/vmm_instruction_emul.c:1149: result = val1 | vie->immediate;
sys/amd64/vmm/vmm_instruction_emul.c:1177: emulate_cmp(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1184: size = vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:1185: switch (vie->op.op_byte) {
sys/amd64/vmm/vmm_instruction_emul.c:1204: reg = gpr_map[vie->reg];
sys/amd64/vmm/vmm_instruction_emul.c:1214: if (vie->op.op_byte == 0x3B) {
sys/amd64/vmm/vmm_instruction_emul.c:1245: if (vie->op.op_byte == 0x80)
sys/amd64/vmm/vmm_instruction_emul.c:1253: rflags2 = getcc(size, op1, vie->immediate);
sys/amd64/vmm/vmm_instruction_emul.c:1269: emulate_test(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1275: size = vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:1278: switch (vie->op.op_byte) {
sys/amd64/vmm/vmm_instruction_emul.c:1296: if ((vie->reg & 7) != 0)
sys/amd64/vmm/vmm_instruction_emul.c:1303: rflags2 = getandflags(size, op1, vie->immediate);
sys/amd64/vmm/vmm_instruction_emul.c:1324: emulate_bextr(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1332: size = vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:1355: error = vie_read_register(vcpu, gpr_map[vie->vex_reg], &src2);
sys/amd64/vmm/vmm_instruction_emul.c:1384: error = vie_update_register(vcpu, gpr_map[vie->reg], dst, size);
sys/amd64/vmm/vmm_instruction_emul.c:1401: emulate_add(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1408: size = vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:1411: switch (vie->op.op_byte) {
sys/amd64/vmm/vmm_instruction_emul.c:1422: reg = gpr_map[vie->reg];
sys/amd64/vmm/vmm_instruction_emul.c:1457: emulate_sub(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1464: size = vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:1467: switch (vie->op.op_byte) {
sys/amd64/vmm/vmm_instruction_emul.c:1478: reg = gpr_map[vie->reg];
sys/amd64/vmm/vmm_instruction_emul.c:1513: emulate_stack_op(struct vcpu *vcpu, uint64_t mmio_gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1527: size = vie->opsize;
sys/amd64/vmm/vmm_instruction_emul.c:1528: pushop = (vie->op.op_type == VIE_OP_TYPE_PUSH) ? 1 : 0;
sys/amd64/vmm/vmm_instruction_emul.c:1544: size = vie->opsize_override ? 2 : 8;
sys/amd64/vmm/vmm_instruction_emul.c:1615: emulate_push(struct vcpu *vcpu, uint64_t mmio_gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1627: if ((vie->reg & 7) != 6)
sys/amd64/vmm/vmm_instruction_emul.c:1630: error = emulate_stack_op(vcpu, mmio_gpa, vie, paging, memread,
sys/amd64/vmm/vmm_instruction_emul.c:1636: emulate_pop(struct vcpu *vcpu, uint64_t mmio_gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1648: if ((vie->reg & 7) != 0)
sys/amd64/vmm/vmm_instruction_emul.c:1651: error = emulate_stack_op(vcpu, mmio_gpa, vie, paging, memread,
sys/amd64/vmm/vmm_instruction_emul.c:1657: emulate_group1(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1663: switch (vie->reg & 7) {
sys/amd64/vmm/vmm_instruction_emul.c:1665: error = emulate_or(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1669: error = emulate_and(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1673: error = emulate_cmp(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1685: emulate_bittest(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1698: if ((vie->reg & 7) != 4)
sys/amd64/vmm/vmm_instruction_emul.c:1704: error = memread(vcpu, gpa, &val, vie->opsize, memarg);
sys/amd64/vmm/vmm_instruction_emul.c:1712: bitmask = vie->opsize * 8 - 1;
sys/amd64/vmm/vmm_instruction_emul.c:1713: bitoff = vie->immediate & bitmask;
sys/amd64/vmm/vmm_instruction_emul.c:1728: emulate_twob_group15(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1735: switch (vie->reg & 7) {
sys/amd64/vmm/vmm_instruction_emul.c:1737: if (vie->mod == 0x3) {
sys/amd64/vmm/vmm_instruction_emul.c:1760: vmm_emulate_instruction(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:1766: if (!vie->decoded)
sys/amd64/vmm/vmm_instruction_emul.c:1769: switch (vie->op.op_type) {
sys/amd64/vmm/vmm_instruction_emul.c:1771: error = emulate_group1(vcpu, gpa, vie, paging, memread,
sys/amd64/vmm/vmm_instruction_emul.c:1775: error = emulate_pop(vcpu, gpa, vie, paging, memread,
sys/amd64/vmm/vmm_instruction_emul.c:1779: error = emulate_push(vcpu, gpa, vie, paging, memread,
sys/amd64/vmm/vmm_instruction_emul.c:1783: error = emulate_cmp(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1787: error = emulate_mov(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1792: error = emulate_movx(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1796: error = emulate_movs(vcpu, gpa, vie, paging, memread,
sys/amd64/vmm/vmm_instruction_emul.c:1800: error = emulate_stos(vcpu, gpa, vie, paging, memread,
sys/amd64/vmm/vmm_instruction_emul.c:1804: error = emulate_and(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1808: error = emulate_or(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1812: error = emulate_sub(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1816: error = emulate_bittest(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1820: error = emulate_twob_group15(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1824: error = emulate_add(vcpu, gpa, vie, memread,
sys/amd64/vmm/vmm_instruction_emul.c:1828: error = emulate_test(vcpu, gpa, vie,
sys/amd64/vmm/vmm_instruction_emul.c:1832: error = emulate_bextr(vcpu, gpa, vie, paging,
sys/amd64/vmm/vmm_instruction_emul.c:1997: vie_restart(struct vie *vie)
sys/amd64/vmm/vmm_instruction_emul.c:2000: offsetof(struct vie, inst) < offsetof(struct vie, vie_startzero) &&
sys/amd64/vmm/vmm_instruction_emul.c:2001: offsetof(struct vie, num_valid) < offsetof(struct vie, vie_startzero),
sys/amd64/vmm/vmm_instruction_emul.c:2004: memset((char *)vie + offsetof(struct vie, vie_startzero), 0,
sys/amd64/vmm/vmm_instruction_emul.c:2005: sizeof(*vie) - offsetof(struct vie, vie_startzero));
sys/amd64/vmm/vmm_instruction_emul.c:2007: vie->base_register = VM_REG_LAST;
sys/amd64/vmm/vmm_instruction_emul.c:2008: vie->index_register = VM_REG_LAST;
sys/amd64/vmm/vmm_instruction_emul.c:2009: vie->segment_register = VM_REG_LAST;
sys/amd64/vmm/vmm_instruction_emul.c:2013: vie_init(struct vie *vie, const char *inst_bytes, int inst_length)
sys/amd64/vmm/vmm_instruction_emul.c:2018: vie_restart(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2019: memset(vie->inst, 0, sizeof(vie->inst));
sys/amd64/vmm/vmm_instruction_emul.c:2021: memcpy(vie->inst, inst_bytes, inst_length);
sys/amd64/vmm/vmm_instruction_emul.c:2022: vie->num_valid = inst_length;
sys/amd64/vmm/vmm_instruction_emul.c:2285: uint64_t rip, int inst_length, struct vie *vie, int *faultptr)
sys/amd64/vmm/vmm_instruction_emul.c:2299: vm_copyin(copyinfo, vie->inst, inst_length);
sys/amd64/vmm/vmm_instruction_emul.c:2301: vie->num_valid = inst_length;
sys/amd64/vmm/vmm_instruction_emul.c:2307: vie_peek(struct vie *vie, uint8_t *x)
sys/amd64/vmm/vmm_instruction_emul.c:2310: if (vie->num_processed < vie->num_valid) {
sys/amd64/vmm/vmm_instruction_emul.c:2311: *x = vie->inst[vie->num_processed];
sys/amd64/vmm/vmm_instruction_emul.c:2318: vie_advance(struct vie *vie)
sys/amd64/vmm/vmm_instruction_emul.c:2321: vie->num_processed++;
sys/amd64/vmm/vmm_instruction_emul.c:2354: decode_prefixes(struct vie *vie, enum vm_cpu_mode cpu_mode, int cs_d)
sys/amd64/vmm/vmm_instruction_emul.c:2359: if (vie_peek(vie, &x))
sys/amd64/vmm/vmm_instruction_emul.c:2363: vie->opsize_override = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2365: vie->addrsize_override = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2367: vie->repz_present = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2369: vie->repnz_present = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2370: else if (segment_override(x, &vie->segment_register))
sys/amd64/vmm/vmm_instruction_emul.c:2371: vie->segment_override = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2375: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2387: vie->rex_present = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2388: vie->rex_w = x & 0x8 ? 1 : 0;
sys/amd64/vmm/vmm_instruction_emul.c:2389: vie->rex_r = x & 0x4 ? 1 : 0;
sys/amd64/vmm/vmm_instruction_emul.c:2390: vie->rex_x = x & 0x2 ? 1 : 0;
sys/amd64/vmm/vmm_instruction_emul.c:2391: vie->rex_b = x & 0x1 ? 1 : 0;
sys/amd64/vmm/vmm_instruction_emul.c:2392: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2403: vie->vex_present = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2405: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2406: if (vie_peek(vie, &x))
sys/amd64/vmm/vmm_instruction_emul.c:2413: vie->rex_r = x & 0x80 ? 0 : 1;
sys/amd64/vmm/vmm_instruction_emul.c:2414: vie->rex_x = x & 0x40 ? 0 : 1;
sys/amd64/vmm/vmm_instruction_emul.c:2415: vie->rex_b = x & 0x20 ? 0 : 1;
sys/amd64/vmm/vmm_instruction_emul.c:2433: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2434: if (vie_peek(vie, &x))
sys/amd64/vmm/vmm_instruction_emul.c:2438: vie->rex_w = x & 0x80 ? 1 : 0;
sys/amd64/vmm/vmm_instruction_emul.c:2440: vie->vex_reg = ((~(unsigned)x & 0x78u) >> 3);
sys/amd64/vmm/vmm_instruction_emul.c:2441: vie->vex_l = !!(x & 0x4);
sys/amd64/vmm/vmm_instruction_emul.c:2442: vie->vex_pp = (x & 0x3);
sys/amd64/vmm/vmm_instruction_emul.c:2445: switch (vie->vex_pp) {
sys/amd64/vmm/vmm_instruction_emul.c:2447: vie->opsize_override = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2450: vie->repz_present = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2453: vie->repnz_present = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2457: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2460: if (vie_peek(vie, &x))
sys/amd64/vmm/vmm_instruction_emul.c:2463: vie->op = optab[x];
sys/amd64/vmm/vmm_instruction_emul.c:2464: if (vie->op.op_type == VIE_OP_TYPE_NONE)
sys/amd64/vmm/vmm_instruction_emul.c:2467: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2478: vie->addrsize = vie->addrsize_override ? 4 : 8;
sys/amd64/vmm/vmm_instruction_emul.c:2479: if (vie->rex_w)
sys/amd64/vmm/vmm_instruction_emul.c:2480: vie->opsize = 8;
sys/amd64/vmm/vmm_instruction_emul.c:2481: else if (vie->opsize_override)
sys/amd64/vmm/vmm_instruction_emul.c:2482: vie->opsize = 2;
sys/amd64/vmm/vmm_instruction_emul.c:2484: vie->opsize = 4;
sys/amd64/vmm/vmm_instruction_emul.c:2487: vie->addrsize = vie->addrsize_override ? 2 : 4;
sys/amd64/vmm/vmm_instruction_emul.c:2488: vie->opsize = vie->opsize_override ? 2 : 4;
sys/amd64/vmm/vmm_instruction_emul.c:2491: vie->addrsize = vie->addrsize_override ? 4 : 2;
sys/amd64/vmm/vmm_instruction_emul.c:2492: vie->opsize = vie->opsize_override ? 4 : 2;
sys/amd64/vmm/vmm_instruction_emul.c:2498: decode_two_byte_opcode(struct vie *vie)
sys/amd64/vmm/vmm_instruction_emul.c:2502: if (vie_peek(vie, &x))
sys/amd64/vmm/vmm_instruction_emul.c:2505: vie->op = two_byte_opcodes[x];
sys/amd64/vmm/vmm_instruction_emul.c:2507: if (vie->op.op_type == VIE_OP_TYPE_NONE)
sys/amd64/vmm/vmm_instruction_emul.c:2510: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2515: decode_opcode(struct vie *vie)
sys/amd64/vmm/vmm_instruction_emul.c:2519: if (vie_peek(vie, &x))
sys/amd64/vmm/vmm_instruction_emul.c:2523: if (vie->op.op_type != VIE_OP_TYPE_NONE)
sys/amd64/vmm/vmm_instruction_emul.c:2526: vie->op = one_byte_opcodes[x];
sys/amd64/vmm/vmm_instruction_emul.c:2528: if (vie->op.op_type == VIE_OP_TYPE_NONE)
sys/amd64/vmm/vmm_instruction_emul.c:2531: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2533: if (vie->op.op_type == VIE_OP_TYPE_TWO_BYTE)
sys/amd64/vmm/vmm_instruction_emul.c:2534: return (decode_two_byte_opcode(vie));
sys/amd64/vmm/vmm_instruction_emul.c:2540: decode_modrm(struct vie *vie, enum vm_cpu_mode cpu_mode)
sys/amd64/vmm/vmm_instruction_emul.c:2544: if (vie->op.op_flags & VIE_OP_F_NO_MODRM)
sys/amd64/vmm/vmm_instruction_emul.c:2550: if (vie_peek(vie, &x))
sys/amd64/vmm/vmm_instruction_emul.c:2553: vie->mod = (x >> 6) & 0x3;
sys/amd64/vmm/vmm_instruction_emul.c:2554: vie->rm = (x >> 0) & 0x7;
sys/amd64/vmm/vmm_instruction_emul.c:2555: vie->reg = (x >> 3) & 0x7;
sys/amd64/vmm/vmm_instruction_emul.c:2562: if (vie->mod == VIE_MOD_DIRECT)
sys/amd64/vmm/vmm_instruction_emul.c:2565: if ((vie->mod == VIE_MOD_INDIRECT && vie->rm == VIE_RM_DISP32) ||
sys/amd64/vmm/vmm_instruction_emul.c:2566: (vie->mod != VIE_MOD_DIRECT && vie->rm == VIE_RM_SIB)) {
sys/amd64/vmm/vmm_instruction_emul.c:2580: vie->rm |= (vie->rex_b << 3);
sys/amd64/vmm/vmm_instruction_emul.c:2583: vie->reg |= (vie->rex_r << 3);
sys/amd64/vmm/vmm_instruction_emul.c:2586: if (vie->mod != VIE_MOD_DIRECT && vie->rm == VIE_RM_SIB)
sys/amd64/vmm/vmm_instruction_emul.c:2589: vie->base_register = gpr_map[vie->rm];
sys/amd64/vmm/vmm_instruction_emul.c:2591: switch (vie->mod) {
sys/amd64/vmm/vmm_instruction_emul.c:2593: vie->disp_bytes = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2596: vie->disp_bytes = 4;
sys/amd64/vmm/vmm_instruction_emul.c:2599: if (vie->rm == VIE_RM_DISP32) {
sys/amd64/vmm/vmm_instruction_emul.c:2600: vie->disp_bytes = 4;
sys/amd64/vmm/vmm_instruction_emul.c:2609: vie->base_register = VM_REG_GUEST_RIP;
sys/amd64/vmm/vmm_instruction_emul.c:2611: vie->base_register = VM_REG_LAST;
sys/amd64/vmm/vmm_instruction_emul.c:2617: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2623: decode_sib(struct vie *vie)
sys/amd64/vmm/vmm_instruction_emul.c:2628: if (vie->mod == VIE_MOD_DIRECT || vie->rm != VIE_RM_SIB)
sys/amd64/vmm/vmm_instruction_emul.c:2631: if (vie_peek(vie, &x))
sys/amd64/vmm/vmm_instruction_emul.c:2635: vie->ss = (x >> 6) & 0x3;
sys/amd64/vmm/vmm_instruction_emul.c:2636: vie->index = (x >> 3) & 0x7;
sys/amd64/vmm/vmm_instruction_emul.c:2637: vie->base = (x >> 0) & 0x7;
sys/amd64/vmm/vmm_instruction_emul.c:2640: vie->index |= vie->rex_x << 3;
sys/amd64/vmm/vmm_instruction_emul.c:2641: vie->base |= vie->rex_b << 3;
sys/amd64/vmm/vmm_instruction_emul.c:2643: switch (vie->mod) {
sys/amd64/vmm/vmm_instruction_emul.c:2645: vie->disp_bytes = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2648: vie->disp_bytes = 4;
sys/amd64/vmm/vmm_instruction_emul.c:2652: if (vie->mod == VIE_MOD_INDIRECT &&
sys/amd64/vmm/vmm_instruction_emul.c:2653: (vie->base == 5 || vie->base == 13)) {
sys/amd64/vmm/vmm_instruction_emul.c:2662: vie->disp_bytes = 4;
sys/amd64/vmm/vmm_instruction_emul.c:2664: vie->base_register = gpr_map[vie->base];
sys/amd64/vmm/vmm_instruction_emul.c:2674: if (vie->index != 4)
sys/amd64/vmm/vmm_instruction_emul.c:2675: vie->index_register = gpr_map[vie->index];
sys/amd64/vmm/vmm_instruction_emul.c:2678: if (vie->index_register < VM_REG_LAST)
sys/amd64/vmm/vmm_instruction_emul.c:2679: vie->scale = 1 << vie->ss;
sys/amd64/vmm/vmm_instruction_emul.c:2681: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2687: decode_displacement(struct vie *vie)
sys/amd64/vmm/vmm_instruction_emul.c:2698: if ((n = vie->disp_bytes) == 0)
sys/amd64/vmm/vmm_instruction_emul.c:2705: if (vie_peek(vie, &x))
sys/amd64/vmm/vmm_instruction_emul.c:2709: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2713: vie->displacement = u.signed8; /* sign-extended */
sys/amd64/vmm/vmm_instruction_emul.c:2715: vie->displacement = u.signed32; /* sign-extended */
sys/amd64/vmm/vmm_instruction_emul.c:2721: decode_immediate(struct vie *vie)
sys/amd64/vmm/vmm_instruction_emul.c:2733: if (vie->op.op_flags & VIE_OP_F_IMM) {
sys/amd64/vmm/vmm_instruction_emul.c:2741: if (vie->opsize == 4 || vie->opsize == 8)
sys/amd64/vmm/vmm_instruction_emul.c:2742: vie->imm_bytes = 4;
sys/amd64/vmm/vmm_instruction_emul.c:2744: vie->imm_bytes = 2;
sys/amd64/vmm/vmm_instruction_emul.c:2745: } else if (vie->op.op_flags & VIE_OP_F_IMM8) {
sys/amd64/vmm/vmm_instruction_emul.c:2746: vie->imm_bytes = 1;
sys/amd64/vmm/vmm_instruction_emul.c:2749: if ((n = vie->imm_bytes) == 0)
sys/amd64/vmm/vmm_instruction_emul.c:2756: if (vie_peek(vie, &x))
sys/amd64/vmm/vmm_instruction_emul.c:2760: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2765: vie->immediate = u.signed8;
sys/amd64/vmm/vmm_instruction_emul.c:2767: vie->immediate = u.signed16;
sys/amd64/vmm/vmm_instruction_emul.c:2769: vie->immediate = u.signed32;
sys/amd64/vmm/vmm_instruction_emul.c:2775: decode_moffset(struct vie *vie)
sys/amd64/vmm/vmm_instruction_emul.c:2784: if ((vie->op.op_flags & VIE_OP_F_MOFFSET) == 0)
sys/amd64/vmm/vmm_instruction_emul.c:2791: n = vie->addrsize;
sys/amd64/vmm/vmm_instruction_emul.c:2796: if (vie_peek(vie, &x))
sys/amd64/vmm/vmm_instruction_emul.c:2800: vie_advance(vie);
sys/amd64/vmm/vmm_instruction_emul.c:2802: vie->displacement = u.u64;
sys/amd64/vmm/vmm_instruction_emul.c:2812: verify_gla(struct vcpu *vcpu, uint64_t gla, struct vie *vie,
sys/amd64/vmm/vmm_instruction_emul.c:2825: if (vie->base_register != VM_REG_LAST) {
sys/amd64/vmm/vmm_instruction_emul.c:2826: error = vm_get_register(vcpu, vie->base_register, &base);
sys/amd64/vmm/vmm_instruction_emul.c:2829: error, vie->base_register);
sys/amd64/vmm/vmm_instruction_emul.c:2837: if (vie->base_register == VM_REG_GUEST_RIP)
sys/amd64/vmm/vmm_instruction_emul.c:2838: base += vie->num_processed;
sys/amd64/vmm/vmm_instruction_emul.c:2842: if (vie->index_register != VM_REG_LAST) {
sys/amd64/vmm/vmm_instruction_emul.c:2843: error = vm_get_register(vcpu, vie->index_register, &idx);
sys/amd64/vmm/vmm_instruction_emul.c:2846: error, vie->index_register);
sys/amd64/vmm/vmm_instruction_emul.c:2864: if (vie->segment_override)
sys/amd64/vmm/vmm_instruction_emul.c:2865: seg = vie->segment_register;
sys/amd64/vmm/vmm_instruction_emul.c:2866: else if (vie->base_register == VM_REG_GUEST_RSP ||
sys/amd64/vmm/vmm_instruction_emul.c:2867: vie->base_register == VM_REG_GUEST_RBP)
sys/amd64/vmm/vmm_instruction_emul.c:2879: vie->segment_register);
sys/amd64/vmm/vmm_instruction_emul.c:2885: gla2 = segbase + base + vie->scale * idx + vie->displacement;
sys/amd64/vmm/vmm_instruction_emul.c:2886: gla2 &= size2mask[vie->addrsize];
sys/amd64/vmm/vmm_instruction_emul.c:2891: segbase, base, vie->scale, idx, vie->displacement,
sys/amd64/vmm/vmm_instruction_emul.c:2903: enum vm_cpu_mode cpu_mode, int cs_d, struct vie *vie)
sys/amd64/vmm/vmm_instruction_emul.c:2905: vmm_decode_instruction(enum vm_cpu_mode cpu_mode, int cs_d, struct vie *vie)
sys/amd64/vmm/vmm_instruction_emul.c:2909: if (decode_prefixes(vie, cpu_mode, cs_d))
sys/amd64/vmm/vmm_instruction_emul.c:2912: if (decode_opcode(vie))
sys/amd64/vmm/vmm_instruction_emul.c:2915: if (decode_modrm(vie, cpu_mode))
sys/amd64/vmm/vmm_instruction_emul.c:2918: if (decode_sib(vie))
sys/amd64/vmm/vmm_instruction_emul.c:2921: if (decode_displacement(vie))
sys/amd64/vmm/vmm_instruction_emul.c:2924: if (decode_immediate(vie))
sys/amd64/vmm/vmm_instruction_emul.c:2927: if (decode_moffset(vie))
sys/amd64/vmm/vmm_instruction_emul.c:2931: if ((vie->op.op_flags & VIE_OP_F_NO_GLA_VERIFICATION) == 0) {
sys/amd64/vmm/vmm_instruction_emul.c:2932: if (verify_gla(vcpu, gla, vie, cpu_mode))
sys/amd64/vmm/vmm_instruction_emul.c:2937: vie->decoded = 1; /* success */
sys/amd64/vmm/vmm_ioport.c:152: struct vie vie;
sys/amd64/vmm/vmm_ioport.c:160: vie_init(&vie, NULL, 0);
sys/amd64/vmm/vmm_ioport.c:162: vme->rip + vme->u.inout_str.cs_base, VIE_INST_SIZE, &vie, &fault);
sys/amd64/vmm/vmm_ioport.c:167: vme->u.inout_str.cs_d, &vie);
sys/amd64/vmm/vmm_ioport.c:169: if (err || vie.op.op_type != VIE_OP_TYPE_OUTS)
sys/amd64/vmm/vmm_ioport.c:171: if (vie.segment_override)
sys/amd64/vmm/vmm_ioport.c:172: *segment = vie.segment_register;
sys/arm64/include/vmm.h:286: struct vie vie;
sys/arm64/include/vmm_instruction_emul.h:34: struct vie;
sys/arm64/include/vmm_instruction_emul.h:63: int vmm_emulate_instruction(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/arm64/vmm/vmm.c:651: struct vie *vie;
sys/arm64/vmm/vmm.c:664: vie = &vme->u.inst_emul.vie;
sys/arm64/vmm/vmm.c:680: error = vmm_emulate_instruction(vcpu, fault_ipa, vie, paging,
sys/arm64/vmm/vmm_arm64.c:618: struct vie *vie;
sys/arm64/vmm/vmm_arm64.c:633: vie = &vme_ret->u.inst_emul.vie;
sys/arm64/vmm/vmm_arm64.c:634: vie->access_size = 1 << esr_sas;
sys/arm64/vmm/vmm_arm64.c:635: vie->sign_extend = (esr_iss & ISS_DATA_SSE) ? 1 : 0;
sys/arm64/vmm/vmm_arm64.c:636: vie->dir = (esr_iss & ISS_DATA_WnR) ? VM_DIR_WRITE : VM_DIR_READ;
sys/arm64/vmm/vmm_arm64.c:637: vie->reg = reg_num;
sys/arm64/vmm/vmm_instruction_emul.c:55: vmm_emulate_instruction(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/arm64/vmm/vmm_instruction_emul.c:62: if (vie->dir == VM_DIR_READ) {
sys/arm64/vmm/vmm_instruction_emul.c:63: error = memread(vcpu, gpa, &val, vie->access_size, memarg);
sys/arm64/vmm/vmm_instruction_emul.c:66: error = vm_set_register(vcpu, vie->reg, val);
sys/arm64/vmm/vmm_instruction_emul.c:68: error = vm_get_register(vcpu, vie->reg, &val);
sys/arm64/vmm/vmm_instruction_emul.c:72: if (vie->access_size < 8)
sys/arm64/vmm/vmm_instruction_emul.c:73: val &= (1ul << (vie->access_size * 8)) - 1;
sys/arm64/vmm/vmm_instruction_emul.c:74: error = memwrite(vcpu, gpa, val, vie->access_size, memarg);
sys/riscv/include/vmm.h:237: struct vie vie;
sys/riscv/include/vmm_instruction_emul.h:33: struct vie;
sys/riscv/include/vmm_instruction_emul.h:63: int vmm_emulate_instruction(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/riscv/vmm/vmm.c:325: struct vie *vie;
sys/riscv/vmm/vmm.c:338: vie = &vme->u.inst_emul.vie;
sys/riscv/vmm/vmm.c:354: error = vmm_emulate_instruction(vcpu, fault_ipa, vie, paging,
sys/riscv/vmm/vmm_instruction_emul.c:60: vmm_emulate_instruction(struct vcpu *vcpu, uint64_t gpa, struct vie *vie,
sys/riscv/vmm/vmm_instruction_emul.c:67: if (vie->dir == VM_DIR_READ) {
sys/riscv/vmm/vmm_instruction_emul.c:68: error = memread(vcpu, gpa, &val, vie->access_size, memarg);
sys/riscv/vmm/vmm_instruction_emul.c:71: if ((vie->sign_extend == 0) && (vie->access_size < 8))
sys/riscv/vmm/vmm_instruction_emul.c:72: val &= (1ul << (vie->access_size * 8)) - 1;
sys/riscv/vmm/vmm_instruction_emul.c:73: error = vm_set_register(vcpu, vie->reg, val);
sys/riscv/vmm/vmm_instruction_emul.c:75: error = vm_get_register(vcpu, vie->reg, &val);
sys/riscv/vmm/vmm_instruction_emul.c:79: if (vie->access_size < 8)
sys/riscv/vmm/vmm_instruction_emul.c:80: val &= (1ul << (vie->access_size * 8)) - 1;
sys/riscv/vmm/vmm_instruction_emul.c:81: error = memwrite(vcpu, gpa, val, vie->access_size, memarg);
sys/riscv/vmm/vmm_riscv.c:331: struct vie *vie;
sys/riscv/vmm/vmm_riscv.c:438: vie = &vme_ret->u.inst_emul.vie;
sys/riscv/vmm/vmm_riscv.c:439: vie->dir = direction;
sys/riscv/vmm/vmm_riscv.c:440: vie->reg = reg_num;
sys/riscv/vmm/vmm_riscv.c:441: vie->sign_extend = sign_extend;
sys/riscv/vmm/vmm_riscv.c:442: vie->access_size = access_size;
usr.sbin/bhyve/aarch64/vmexit.c:65: struct vie *vie;
usr.sbin/bhyve/aarch64/vmexit.c:69: vie = &vme->u.inst_emul.vie;
usr.sbin/bhyve/aarch64/vmexit.c:71: err = emulate_mem(vcpu, vme->u.inst_emul.gpa, vie,
usr.sbin/bhyve/amd64/vmexit.c:353: struct vie *vie;
usr.sbin/bhyve/amd64/vmexit.c:359: vie = &vme->u.inst_emul.vie;
usr.sbin/bhyve/amd64/vmexit.c:360: if (!vie->decoded) {
usr.sbin/bhyve/amd64/vmexit.c:367: vie_restart(vie);
usr.sbin/bhyve/amd64/vmexit.c:370: if (vmm_decode_instruction(mode, cs_d, vie) != 0)
usr.sbin/bhyve/amd64/vmexit.c:373: vme->rip + vie->num_processed) != 0)
usr.sbin/bhyve/amd64/vmexit.c:377: err = emulate_mem(vcpu, vme->u.inst_emul.gpa, vie,
usr.sbin/bhyve/amd64/vmexit.c:391: for (i = 0; i < vie->num_valid; i++)
usr.sbin/bhyve/amd64/vmexit.c:392: fprintf(stderr, "%02x", vie->inst[i]);
usr.sbin/bhyve/mem.c:237: struct vie *vie;
usr.sbin/bhyve/mem.c:248: return (vmm_emulate_instruction(vcpu, paddr, ema->vie, ema->paging,
usr.sbin/bhyve/mem.c:253: emulate_mem(struct vcpu *vcpu, uint64_t paddr, struct vie *vie,
usr.sbin/bhyve/mem.c:258: ema.vie = vie;
usr.sbin/bhyve/mem.h:54: int emulate_mem(struct vcpu *vcpu, uint64_t paddr, struct vie *vie,
usr.sbin/bhyve/riscv/vmexit.c:79: struct vie *vie;
usr.sbin/bhyve/riscv/vmexit.c:83: vie = &vme->u.inst_emul.vie;
usr.sbin/bhyve/riscv/vmexit.c:85: err = emulate_mem(vcpu, vme->u.inst_emul.gpa, vie,