X86_CR4_SMEP
| X86_CR4_OSXSAVE | X86_CR4_SMEP | X86_CR4_FSGSBASE \
cr4_set_bits(X86_CR4_SMEP);
static const unsigned long cr4_pinned_mask = X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_UMIP |
/*
 * CR4 bits whose modification requires flushing the TLB.
 * NOTE(review): inferred from the macro name — confirm against the
 * call site that tests these bits (e.g. the CR4 post-set handler).
 */
#define X86_CR4_TLBFLUSH_BITS (X86_CR4_PGE | X86_CR4_PCIDE | X86_CR4_PAE | X86_CR4_SMEP)
/*
 * CR4 bits whose modification requires reloading the guest PDPTRs
 * (PAE page-directory-pointer entries).
 * NOTE(review): inferred from the macro name — verify at the call site.
 */
#define X86_CR4_PDPTR_BITS (X86_CR4_PGE | X86_CR4_PSE | X86_CR4_PAE | X86_CR4_SMEP)
X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_PKE)
/*
 * Presumably expands to a helper that reads the SMEP bit out of the
 * captured CR4 register state (macro defined elsewhere in this file) —
 * TODO confirm against the BUILD_MMU_ROLE_REGS_ACCESSOR definition.
 */
BUILD_MMU_ROLE_REGS_ACCESSOR(cr4, smep, X86_CR4_SMEP);
cr4 &= ~(X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_PKE);
smep = kvm_is_cr4_bit_set(vcpu, X86_CR4_SMEP);
hw_cr4 &= ~(X86_CR4_SMEP | X86_CR4_SMAP | X86_CR4_PKE);
cr4_fixed1_update(X86_CR4_SMEP, ebx, feature_bit(SMEP));
((cr4 & X86_CR4_SMEP) && !(old_cr4 & X86_CR4_SMEP)))
__reserved_bits |= X86_CR4_SMEP; \
(__read_cr4() & X86_CR4_SMEP))
if ((cr4 & X86_CR4_SMEP) != X86_CR4_SMEP) {
cr4 &= ~(X86_CR4_SMEP);
cr4 |= X86_CR4_SMEP;
if (native_read_cr4() & X86_CR4_SMEP) {
cr4 |= X86_CR4_SMEP;
TEST_INVALID_CR_BIT(vcpu, cr4, sregs, X86_CR4_SMEP);
cr4 |= X86_CR4_SMEP;