X86_CR0_PG
| X86_CR0_NW | X86_CR0_CD | X86_CR0_PG))
X86_CR0_PG)
/*
 * CR0 bits grouped as CD | NW | PG.  NOTE(review): name suggests these are
 * the CR0 bits whose modification requires reloading the guest PDPTEs in
 * PAE paging mode — confirm against the kvm_set_cr0() caller.
 */
#define X86_CR0_PDPTR_BITS (X86_CR0_CD | X86_CR0_NW | X86_CR0_PG)
/*
 * CR0 bits grouped as PG | WP.  NOTE(review): name indicates these are the
 * CR0 bits that feed into the KVM MMU role computation (paging enable and
 * write-protect) — verify against the BUILD_MMU_ROLE_*_ACCESSOR users.
 */
#define KVM_MMU_CR0_ROLE_BITS (X86_CR0_PG | X86_CR0_WP)
BUILD_MMU_ROLE_REGS_ACCESSOR(cr0, pg, X86_CR0_PG);
cr0 = vcpu->arch.cr0 & ~(X86_CR0_PE | X86_CR0_EM | X86_CR0_TS | X86_CR0_PG);
kvm_set_cr0(vcpu, cr0 & ~(X86_CR0_PG | X86_CR0_PE));
if (!(save->cr0 & X86_CR0_PG) ||
if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) {
kvm_init_shadow_npt_mmu(vcpu, X86_CR0_PG, svm->vmcb01.ptr->save.cr4,
if (!is_paging(vcpu) && (cr0 & X86_CR0_PG)) {
if (is_paging(vcpu) && !(cr0 & X86_CR0_PG)) {
hcr0 |= X86_CR0_PG | X86_CR0_WP;
if (CC((vmcs12->guest_cr0 & (X86_CR0_PG | X86_CR0_PE)) == X86_CR0_PG))
CC(ia32e && !(vmcs12->guest_cr0 & X86_CR0_PG)))
CC(((vmcs12->guest_cr0 & X86_CR0_PG) &&
/*
 * CR0 bits grouped as PE | PG | NE.  NOTE(review): name indicates the CR0
 * bits that must be set for VMXON to be legal (protected mode, paging,
 * numeric error reporting) — confirm against the SDM's CR0 fixed-bit
 * requirements for VMX operation.
 */
#define VMXON_CR0_ALWAYSON (X86_CR0_PE | X86_CR0_PG | X86_CR0_NE)
fixed0 &= ~(X86_CR0_PE | X86_CR0_PG);
(KVM_VM_CR0_ALWAYS_ON_UNRESTRICTED_GUEST | X86_CR0_PG | X86_CR0_PE)
old_cr0_pg = kvm_read_cr0_bits(vcpu, X86_CR0_PG);
if (!old_cr0_pg && (cr0 & X86_CR0_PG))
else if (old_cr0_pg && !(cr0 & X86_CR0_PG))
if (!(cr0 & X86_CR0_PG)) {
if ((old_cr0_pg ^ cr0) & X86_CR0_PG)
if (!(old_cr0_pg & X86_CR0_PG) && (cr0 & X86_CR0_PG))
if ((cr0 & X86_CR0_PG) && !(cr0 & X86_CR0_PE))
if (!(cr0 & X86_CR0_PG))
if ((cr0 ^ old_cr0) & X86_CR0_PG) {
if (!(cr0 & X86_CR0_PG))
(cr0 & X86_CR0_PG)) {
if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) &&
if (!(cr0 & X86_CR0_PG) &&
if ((sregs->efer & EFER_LME) && (sregs->cr0 & X86_CR0_PG)) {
bool pae = (sregs2->cr0 & X86_CR0_PG) && (sregs2->cr4 & X86_CR4_PAE) &&
if (old_cr0 & X86_CR0_PG) {
return likely(kvm_is_cr0_bit_set(vcpu, X86_CR0_PG));
sregs.cr0 = X86_CR0_PE | X86_CR0_NE | X86_CR0_PG;
TEST_INVALID_CR_BIT(vcpu, cr0, sregs, X86_CR0_PG);