DPCPU_GET
return (DPCPU_GET(vcpu));
tramp = DPCPU_GET(intr_tramp);
lic = &DPCPU_GET(linux_idr_cache);
lic = &DPCPU_GET(linux_idr_cache);
} else if ((il = idr_preload_dequeue_locked(&DPCPU_GET(linux_idr_cache))) != NULL) {
record = &DPCPU_GET(linux_epoch_record[type]);
record = &DPCPU_GET(linux_epoch_record[type]);
MPASS((record = &DPCPU_GET(linux_epoch_record[type])) &&
tw = &DPCPU_GET(tasklet_worker);
if ((count = DPCPU_GET(nmi_counter))) {
v = DPCPU_GET(vcpu_info);
vcpu_time = xen_fetch_vcpu_time(DPCPU_GET(vcpu_info));
uptime = xen_fetch_vcpu_time(DPCPU_GET(vcpu_info));
xen_fetch_vcpu_time(DPCPU_GET(vcpu_info));
pcpu->last_processed = xen_fetch_vcpu_time(DPCPU_GET(vcpu_info));
struct vcpu_info *vcpu = DPCPU_GET(vcpu_info);
if (__predict_false(DPCPU_GET(epoch_cb_count)))
to_sbt = DPCPU_GET(hardclocktime);
to_sbt = DPCPU_GET(hardclocktime);
return (DPCPU_GET(vcpu));
/*
 * Bump a per-CPU scheduler statistics counter.
 *
 * DPCPU_GET(var) yields the current CPU's instance of the dynamic
 * per-CPU variable 'var' as an lvalue, so the increment is local to
 * the executing CPU (no atomics needed; counters are best-effort).
 *
 * Wrapped in do { } while (0) so the macro behaves as a single
 * statement: the previous form ended with a bare ';' inside the
 * expansion, which breaks `if (cond) SCHED_STAT_INC(x); else ...`
 * and left stray empty statements at ordinary call sites.
 */
#define SCHED_STAT_INC(var) do { DPCPU_GET(var)++; } while (0)
if (DPCPU_GET(vcpu_info) != NULL) {
ipi_handle = DPCPU_GET(ipi_handle);
++*DPCPU_GET(pintrcnt);