ESR_ELx_FSC
#define DISR_EL1_ESR_MASK (ESR_ELx_AET | ESR_ELx_EA | ESR_ELx_FSC)
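All of the lines collected here use ESR_ELx_FSC, the kernel's mask for the fault status code held in the low six bits of an ESR_ELx syndrome; the define above combines it with ESR_ELx_AET and ESR_ELx_EA so that only the SError-related bits of DISR_EL1 survive. A minimal standalone sketch of that masking, with the constant values hand-copied here as assumptions rather than taken from the listing:

#include <stdint.h>
#include <stdio.h>

/* Assumed values, written out in the style of arch/arm64/include/asm/esr.h. */
#define ESR_ELx_FSC	0x3fUL		/* DFSC/IFSC, syndrome bits [5:0] */
#define ESR_ELx_EA	(1UL << 9)	/* external abort type */
#define ESR_ELx_AET	(0x7UL << 10)	/* asynchronous error type */

#define DISR_EL1_ESR_MASK (ESR_ELx_AET | ESR_ELx_EA | ESR_ELx_FSC)

int main(void)
{
	uint64_t disr = 0x80000011UL;	/* hypothetical DISR_EL1 value */

	/* Keep only the SError syndrome bits: AET, EA and the FSC. */
	printf("masked syndrome: %#llx\n",
	       (unsigned long long)(disr & DISR_EL1_ESR_MASK));
	return 0;
}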
esr = esr & ESR_ELx_FSC;
esr = esr & ESR_ELx_FSC;
esr = esr & ESR_ELx_FSC;
esr = esr & ESR_ELx_FSC;
esr &= ESR_ELx_FSC;
esr = esr & ESR_ELx_FSC;
esr = esr & ESR_ELx_FSC;
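The run of `esr = esr & ESR_ELx_FSC;` lines above comes from small classification helpers that first reduce the syndrome to just the fault status code and then test it against a range. A sketch of that pattern; the helper name and the 0x04 translation-fault base value are assumptions in the style of the esr_fsc_is_*() helpers, not lifted from the listing:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ESR_ELx_FSC		0x3fUL	/* assumed: syndrome bits [5:0] */
#define ESR_ELx_FSC_FAULT	0x04UL	/* assumed: level 0 translation fault */

/* Hypothetical predicate: true for a translation fault at any of levels 0..3. */
static bool fsc_is_translation_fault(uint64_t esr)
{
	esr = esr & ESR_ELx_FSC;	/* drop everything but the fault status code */
	return esr >= ESR_ELx_FSC_FAULT && esr <= ESR_ELx_FSC_FAULT + 3;
}

int main(void)
{
	printf("%d\n", fsc_is_translation_fault(0x92000007));	/* level 3 fault -> 1 */
	return 0;
}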
return kvm_vcpu_get_esr(vcpu) & (ESR_ELx_CM | ESR_ELx_WNR | ESR_ELx_FSC);
return kvm_vcpu_get_esr(vcpu) & ESR_ELx_FSC;
if ((esr & ESR_ELx_FSC) != ESR_ELx_FSC_SERROR) {
u64 fsc = esr & ESR_ELx_FSC;
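Other callers extract the code and compare it against one specific encoding, as in the ESR_ELx_FSC_SERROR check above. A standalone version of that test, assuming the usual 0x11 value for ESR_ELx_FSC_SERROR:

#include <stdint.h>
#include <stdio.h>

#define ESR_ELx_FSC		0x3fUL	/* assumed: syndrome bits [5:0] */
#define ESR_ELx_FSC_SERROR	0x11UL	/* assumed: SError fault status code */

int main(void)
{
	uint64_t esr = 0x11;			/* hypothetical syndrome value */
	uint64_t fsc = esr & ESR_ELx_FSC;

	if (fsc != ESR_ELx_FSC_SERROR)
		printf("unexpected FSC %#llx\n", (unsigned long long)fsc);
	else
		printf("FSC reports an SError\n");
	return 0;
}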
FIELD_PREP(ESR_ELx_FSC, ESR_ELx_FSC_EXCL_ATOMIC) |
esr &= ~ESR_ELx_FSC;
run->arm_nisv.esr_iss = esr & ~(u64)ESR_ELx_FSC;
esr &= ~ESR_ELx_FSC;
esr |= FIELD_PREP(ESR_ELx_FSC,
esr &= ~ESR_ELx_FSC;
esr |= FIELD_PREP(ESR_ELx_FSC, vt->wr.fst);
esr = kvm_vcpu_get_esr(vcpu) & ~ESR_ELx_FSC;
result->esr |= (kvm_vcpu_get_esr(vcpu) & ~ESR_ELx_FSC);
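The FIELD_PREP() pairs above all follow the same two steps: clear the old fault status code, then OR in a replacement. Because ESR_ELx_FSC sits at bits [5:0], FIELD_PREP(ESR_ELx_FSC, x) reduces to masking x with 0x3f; the sketch below uses a local stand-in macro for that, so it is a simplification rather than the kernel's bitfield.h helper:

#include <stdint.h>
#include <stdio.h>

#define ESR_ELx_FSC		0x3fUL	/* assumed: syndrome bits [5:0] */
#define ESR_ELx_FSC_EXTABT	0x10UL	/* assumed: synchronous external abort */

/* Stand-in for FIELD_PREP(ESR_ELx_FSC, val); valid only because the
 * mask starts at bit 0, so no shift is needed. */
#define FSC_PREP(val)	((uint64_t)(val) & ESR_ELx_FSC)

int main(void)
{
	uint64_t esr = 0x93c08047UL;		/* hypothetical data abort syndrome */

	esr &= ~ESR_ELx_FSC;			/* clear the old fault status code */
	esr |= FSC_PREP(ESR_ELx_FSC_EXTABT);	/* report an external abort instead */

	printf("rewritten esr: %#llx\n", (unsigned long long)esr);
	return 0;
}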
pr_alert(" FSC = 0x%02lx: %s\n", (esr & ESR_ELx_FSC),
unsigned long fsc = esr & ESR_ELx_FSC;
return fault_info + (esr & ESR_ELx_FSC);
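The pr_alert() and fault_info lines use the masked code directly, once for printing and once as a table index, which is safe because the result is always in the range 0..63. A hedged sketch of that lookup; the table entries below are illustrative, not the kernel's fault_info[] strings:

#include <stdint.h>
#include <stdio.h>

#define ESR_ELx_FSC	0x3fUL		/* assumed: syndrome bits [5:0] */

/* Illustrative entries only; the real table covers all 64 codes. */
static const char *const fault_name[64] = {
	[0x04] = "level 0 translation fault",
	[0x10] = "synchronous external abort",
};

int main(void)
{
	uint64_t esr = 0x92000010UL;	/* hypothetical data abort syndrome */
	unsigned long fsc = esr & ESR_ELx_FSC;

	/* Masking first guarantees fsc is in 0..63, so it is a safe index. */
	printf(" FSC = 0x%02lx: %s\n", fsc,
	       fault_name[fsc] ? fault_name[fsc] : "unknown");
	return 0;
}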
GUEST_ASSERT_EQ((esr & ESR_ELx_FSC), ESR_ELx_FSC_SEA_TTW(3));
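The selftest assertion above expects a synchronous external abort taken on a level 3 translation table walk. Assuming ESR_ELx_FSC_SEA_TTW(n) expands to 0x14 + n as in recent esr.h, the equivalent standalone check looks like this:

#include <assert.h>
#include <stdint.h>

#define ESR_ELx_FSC		0x3fUL		/* assumed: syndrome bits [5:0] */
#define ESR_ELx_FSC_SEA_TTW(n)	(0x14UL + (n))	/* assumed: external abort on a level n walk */

int main(void)
{
	uint64_t esr = 0x97UL;		/* hypothetical abort syndrome */

	/* Same check as the GUEST_ASSERT_EQ() above: a synchronous external
	 * abort on a level 3 translation table walk. */
	assert((esr & ESR_ELx_FSC) == ESR_ELx_FSC_SEA_TTW(3));
	return 0;
}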