Symbol: MSR_EFER (x86 Extended Feature Enable Register, MSR 0xC0000080; defined in arch/x86/include/asm/msr-index.h)
arch/x86/hyperv/hv_crash.c:176   hv_wrmsr(MSR_EFER, hv_crash_ctxt.efer);
arch/x86/hyperv/hv_crash.c:221   ctxt->efer = __rdmsr(MSR_EFER);
arch/x86/hyperv/hv_vtl.c:165     input->vp_context.efer = native_rdmsrq(MSR_EFER);
arch/x86/hyperv/ivm.c:331        vmsa->efer = native_read_msr(MSR_EFER);
arch/x86/kernel/acpi/sleep.c:85  if (!rdmsr_safe(MSR_EFER,
arch/x86/kernel/acpi/sleep.c:88  !wrmsr_safe(MSR_EFER,
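The sleep.c hits use the fault-tolerant accessors, which split the 64-bit MSR into 32-bit halves and return nonzero if the access faults. A minimal sketch of the probe-and-write-back pattern used there, assuming kernel context with <asm/msr.h>:

    u32 lo, hi;

    /* Both helpers return 0 on success; a faulting access is tolerated. */
    if (!rdmsr_safe(MSR_EFER, &lo, &hi) &&
        !wrmsr_safe(MSR_EFER, lo, hi)) {
            /* EFER is readable and writable; safe to restore on wakeup. */
    }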
arch/x86/kernel/cpu/amd.c:1154   WARN_ON_ONCE(msr_set_bit(MSR_EFER, _EFER_AUTOIBRS) < 0);
arch/x86/kernel/cpu/amd.c:1161   msr_set_bit(MSR_EFER, _EFER_TCE);
arch/x86/kernel/cpu/bugs.c:2248  msr_set_bit(MSR_EFER, _EFER_AUTOIBRS);
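The amd.c and bugs.c call sites use msr_set_bit(), a read-modify-write helper that returns a negative value on a faulting access and otherwise reports whether the bit had to be flipped. A rough open-coded equivalent of msr_set_bit(MSR_EFER, _EFER_AUTOIBRS), sketched here for illustration only:

    u64 efer;

    /* Read EFER, set the Automatic IBRS bit only if it is not set yet. */
    rdmsrq(MSR_EFER, efer);
    if (!(efer & EFER_AUTOIBRS))
            wrmsrq(MSR_EFER, efer | EFER_AUTOIBRS);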
arch/x86/kvm/emulate.c:738       ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
arch/x86/kvm/emulate.c:1452      ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
arch/x86/kvm/emulate.c:1498      if (ctxt->ops->get_msr(ctxt, MSR_EFER, &efer))
arch/x86/kvm/emulate.c:1657      ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
arch/x86/kvm/emulate.c:2363      ops->get_msr(ctxt, MSR_EFER, &efer);
arch/x86/kvm/emulate.c:2414      ops->get_msr(ctxt, MSR_EFER, &efer);
arch/x86/kvm/emulate.c:3865      ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
arch/x86/kvm/smm.c:536           if (__kvm_emulate_msr_write(vcpu, MSR_EFER, smstate->efer & ~EFER_LMA))
arch/x86/kvm/smm.c:631           __kvm_emulate_msr_write(vcpu, MSR_EFER, efer);
arch/x86/kvm/svm/sev.c:4558      svm_disable_intercept_for_msr(vcpu, MSR_EFER, MSR_TYPE_RW);
arch/x86/kvm/svm/svm.c:485       rdmsrq(MSR_EFER, efer);
arch/x86/kvm/svm/svm.c:492       wrmsrq(MSR_EFER, efer & ~EFER_SVME);
arch/x86/kvm/svm/svm.c:521       rdmsrq(MSR_EFER, efer);
arch/x86/kvm/svm/svm.c:531       wrmsrq(MSR_EFER, efer | EFER_SVME);
arch/x86/kvm/svm/svm.c:2703      msr_info.index = MSR_EFER;
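The four accesses around svm.c lines 485-531 are the EFER.SVME handshake in KVM's per-CPU hardware enable/disable paths: enable refuses to proceed if SVME is already set, disable clears it again. A condensed sketch of that pattern:

    u64 efer;

    /* enable: fail if some other agent already owns SVM on this CPU */
    rdmsrq(MSR_EFER, efer);
    if (efer & EFER_SVME)
            return -EBUSY;
    wrmsrq(MSR_EFER, efer | EFER_SVME);

    /* disable: drop SVME again */
    rdmsrq(MSR_EFER, efer);
    wrmsrq(MSR_EFER, efer & ~EFER_SVME);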
arch/x86/kvm/vmx/nested.c:4924   if (vmx->msr_autoload.guest.val[i].index == MSR_EFER)
arch/x86/kvm/vmx/nested.c:4928   efer_msr = vmx_find_uret_msr(vmx, MSR_EFER);
arch/x86/kvm/vmx/tdx.c:2119      case MSR_EFER:
arch/x86/kvm/vmx/tdx.c:2153      return index == MSR_IA32_APICBASE || index == MSR_EFER ||
arch/x86/kvm/vmx/vmx.c:1062      case MSR_EFER:
arch/x86/kvm/vmx/vmx.c:1120      case MSR_EFER:
arch/x86/kvm/vmx/vmx.c:1186      add_atomic_switch_msr(vmx, MSR_EFER, guest_efer, kvm_host.efer);
arch/x86/kvm/vmx/vmx.c:1188      clear_atomic_switch_msr(vmx, MSR_EFER);
arch/x86/kvm/vmx/vmx.c:1192      i = kvm_find_user_return_msr(MSR_EFER);
arch/x86/kvm/vmx/vmx.c:1196      clear_atomic_switch_msr(vmx, MSR_EFER);
arch/x86/kvm/vmx/vmx.c:2017      vmx_setup_uret_msr(vmx, MSR_EFER, update_transition_efer(vmx));
arch/x86/kvm/vmx/vmx.c:2142      case MSR_EFER:
arch/x86/kvm/vmx/vmx.c:2340      case MSR_EFER:
arch/x86/kvm/vmx/vmx.c:3314      if (!vmx_find_uret_msr(vmx, MSR_EFER))
arch/x86/kvm/vmx/vmx.c:6638      efer_slot = vmx_find_loadstore_msr_slot(&vmx->msr_autoload.guest, MSR_EFER);
arch/x86/kvm/vmx/vmx.c:8656      MSR_EFER, MSR_TSC_AUX, MSR_STAR,
arch/x86/kvm/x86.c:4001          case MSR_EFER:
arch/x86/kvm/x86.c:4488          case MSR_EFER:
arch/x86/kvm/x86.c:10124         rdmsrq_safe(MSR_EFER, &kvm_host.efer);
arch/x86/power/cpu.c:119         rdmsrq(MSR_EFER, ctxt->efer);
arch/x86/power/cpu.c:212         wrmsrq(MSR_EFER, ctxt->efer);
arch/x86/realmode/init.c:151     rdmsrq(MSR_EFER, efer);
tools/testing/selftests/kvm/lib/x86/svm.c:96    efer = rdmsr(MSR_EFER);
tools/testing/selftests/kvm/lib/x86/svm.c:97    wrmsr(MSR_EFER, efer | EFER_SVME);
tools/testing/selftests/kvm/lib/x86/svm.c:111   save->efer = rdmsr(MSR_EFER);
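In the selftest SVM library the guest opts in to SVM by setting EFER.SVME directly, then snapshots EFER into the VMCB save area, using the selftest rdmsr()/wrmsr() wrappers. The pattern, condensed:

    uint64_t efer;

    efer = rdmsr(MSR_EFER);
    wrmsr(MSR_EFER, efer | EFER_SVME);   /* opt in to SVM */
    /* ... remaining VMCB setup ... */
    save->efer = rdmsr(MSR_EFER);        /* snapshot into the VMCB save area */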
tools/testing/selftests/kvm/lib/x86/vmx.c:275            vmwrite(HOST_IA32_EFER, rdmsr(MSR_EFER));
tools/testing/selftests/kvm/x86/nested_set_state_test.c:253   uint64_t old_efer = vcpu_get_msr(vcpu, MSR_EFER);
tools/testing/selftests/kvm/x86/nested_set_state_test.c:255   vcpu_set_msr(vcpu, MSR_EFER, old_efer | EFER_SVME);
tools/testing/selftests/kvm/x86/nested_set_state_test.c:260   uint64_t old_efer = vcpu_get_msr(vcpu, MSR_EFER);
tools/testing/selftests/kvm/x86/nested_set_state_test.c:262   vcpu_set_msr(vcpu, MSR_EFER, old_efer & ~EFER_SVME);
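nested_set_state_test.c toggles EFER.SVME from the host side through the selftest vCPU MSR accessors, preserving the other EFER bits. Condensed:

    uint64_t old_efer = vcpu_get_msr(vcpu, MSR_EFER);

    vcpu_set_msr(vcpu, MSR_EFER, old_efer | EFER_SVME);    /* turn SVME on  */
    vcpu_set_msr(vcpu, MSR_EFER, old_efer & ~EFER_SVME);   /* turn SVME off */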
tools/testing/selftests/kvm/x86/sev_smoke_test.c:34      guest_sev_test_msr(MSR_EFER);