ARCH_PERFMON_EVENTSEL_ENABLE
/*
 * AMD_MERGE_EVENT with the architectural counter-enable bit
 * (ARCH_PERFMON_EVENTSEL_ENABLE) ORed in, so the merge event is
 * programmed and counting in a single event-select value.
 * NOTE(review): AMD_MERGE_EVENT is defined elsewhere — presumably the
 * AMD large-increment merged-counter event encoding; confirm at its
 * definition site.
 */
#define AMD_MERGE_EVENT_ENABLE (AMD_MERGE_EVENT | ARCH_PERFMON_EVENTSEL_ENABLE)
__x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
wrmsrq(hwc->config_base, (hwc->config | ARCH_PERFMON_EVENTSEL_ENABLE));
ARCH_PERFMON_EVENTSEL_ENABLE);
if (val & ARCH_PERFMON_EVENTSEL_ENABLE) {
if (!(val & ARCH_PERFMON_EVENTSEL_ENABLE))
val &= ~ARCH_PERFMON_EVENTSEL_ENABLE;
__x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
ARCH_PERFMON_EVENTSEL_ENABLE);
u64 enable_mask = ARCH_PERFMON_EVENTSEL_ENABLE;
event->hw.config | ARCH_PERFMON_EVENTSEL_ENABLE;
arr[idx].host &= ~ARCH_PERFMON_EVENTSEL_ENABLE;
arr[idx].guest &= ~ARCH_PERFMON_EVENTSEL_ENABLE;
__x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
val &= ~ARCH_PERFMON_EVENTSEL_ENABLE;
val |= ARCH_PERFMON_EVENTSEL_ENABLE;
val &= ~ARCH_PERFMON_EVENTSEL_ENABLE;
val |= ARCH_PERFMON_EVENTSEL_ENABLE;
__x86_pmu_enable_event(hwc, ARCH_PERFMON_EVENTSEL_ENABLE);
pmc->eventsel_hw &= ~ARCH_PERFMON_EVENTSEL_ENABLE;
ARCH_PERFMON_EVENTSEL_ENABLE;
return pmc->eventsel & ARCH_PERFMON_EVENTSEL_ENABLE;
ARCH_PERFMON_EVENTSEL_ENABLE |
wrmsr(MSR_P6_EVNTSEL0, ARCH_PERFMON_EVENTSEL_ENABLE |
wrmsr(MSR_P6_EVNTSEL1, ARCH_PERFMON_EVENTSEL_ENABLE |
wrmsr(MSR_K7_EVNTSEL0, ARCH_PERFMON_EVENTSEL_ENABLE |
wrmsr(MSR_K7_EVNTSEL1, ARCH_PERFMON_EVENTSEL_ENABLE |
wrmsr(MSR_P6_EVNTSEL0 + 0, ARCH_PERFMON_EVENTSEL_ENABLE |
wrmsr(MSR_P6_EVNTSEL0 + 1, ARCH_PERFMON_EVENTSEL_ENABLE |
wrmsr(MSR_P6_EVNTSEL0 + 2, ARCH_PERFMON_EVENTSEL_ENABLE |
wrmsr(MSR_K7_EVNTSEL0, ARCH_PERFMON_EVENTSEL_ENABLE |
wrmsr(MSR_K7_EVNTSEL1, ARCH_PERFMON_EVENTSEL_ENABLE |
wrmsr(MSR_K7_EVNTSEL2, ARCH_PERFMON_EVENTSEL_ENABLE |