atomic_load_acq_int
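The lines below are call sites of FreeBSD's atomic_load_acq_int(9), which loads a u_int with acquire semantics: memory operations after the load cannot be reordered before it. The most common idiom in the listing is a flag handshake, as in the aps_ready, arg->done, and status->done hits: a writer publishes data and then sets the flag with a release store, and readers spin on an acquire load until the flag is nonzero. A minimal sketch of that pairing, with illustrative names not taken from any file below:

/*
 * Hypothetical publish/wait handshake.  The release store in publish()
 * pairs with the acquire load in wait_ready(), so once "ready" reads
 * nonzero the earlier store to "payload" is guaranteed visible.
 */
#include <sys/types.h>
#include <machine/atomic.h>
#include <machine/cpu.h>

static volatile u_int ready;
static int payload;

static void
publish(void)
{
	payload = 42;				/* plain store, ordered by the release below */
	atomic_store_rel_int(&ready, 1);	/* release: payload first, then the flag */
}

static int
wait_ready(void)
{
	while (atomic_load_acq_int(&ready) == 0)
		cpu_spinwait();			/* e.g. PAUSE on x86 while spinning */
	return (payload);			/* safe to read after the acquire load */
}
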
if (atomic_load_acq_int(&loc->monetary_locale_changed) != 0) {
if (atomic_load_acq_int(&loc->numeric_locale_changed) != 0) {
if (atomic_load_acq_int(&aux_once))
while ((seq = atomic_load_acq_int(&tsc_ref->tsc_seq)) != 0) {
while (atomic_load_acq_int(&aps_ready) == 0)
#define atomic_load_acq_32 atomic_load_acq_int
val = atomic_load_acq_int(&irrptr[idx]);
*data = atomic_load_acq_int(reg + i);
while (!atomic_load_acq_int(&aps_ready)) {
while (atomic_load_acq_int(&arg->done) == 0)
while (atomic_load_acq_int(&arg->arrived) != mp_ncpus - 1)
if ((atomic_load_acq_int(&acpi_tasks[i].at_flag) &
pending = atomic_load_acq_int(&sc->ec_sci_pend);
#define ECORE_ATOMIC_READ(a) atomic_load_acq_int((volatile int *)a)
if (atomic_load_acq_int(&e->refcnt) == 0) {
l2e_state(e), atomic_load_acq_int(&e->refcnt),
if (!atomic_load_acq_int(&d->nfree))
if (atomic_load_acq_int(&e->refcnt) == 0)
for (e = d->l2tab; atomic_load_acq_int(&e->refcnt); ++e)
atomic_load_acq_int(&t->tids_in_use));
if (atomic_load_acq_int(&e->refcnt) == 1) {
smt_state(e), atomic_load_acq_int(&e->refcnt));
if (atomic_load_acq_int(&e->refcnt) == 0) {
if (atomic_load_acq_int(&e->refcnt))
while (atomic_load_acq_int(&dcons_close_refs) > 0)
(atomic_load_acq_int(&(a)->counter))
#define atomic_read(p) atomic_load_acq_int(p)
while (tmo_loops-- && atomic_load_acq_int(&msg->poll_done) == 0) {
while ((seq = atomic_load_acq_int(&tsc_ref->tsc_seq)) != 0) { \
if (atomic_load_acq_int(&txr->sc->recovery_mode))
if (atomic_load_acq_int(&rxr->sc->recovery_mode))
if (atomic_load_acq_int(&que->sc->recovery_mode))
if (atomic_load_acq_int(&sc->recovery_mode))
return (atomic_load_acq_int(&oct->instr_queue[q_no]->instr_pending) >=
pkts_pend = atomic_load_acq_int(&droq->pkts_pending);
int32_t istate = (int32_t)atomic_load_acq_int(state_ptr);
refcount = atomic_load_acq_int(oct->adapter_refcount);
if (atomic_load_acq_int(&oct->status) >= LIO_DEV_RUNNING) {
atomic_load_acq_int(&oct->status));
if (atomic_load_acq_int(&iq->instr_pending))
pkt_count = atomic_load_acq_int(&droq->pkts_pending);
if (atomic_load_acq_int(&droq->pkts_pending))
pending = atomic_load_acq_int(
if (atomic_load_acq_int(&oct->status) == LIO_DEV_RUNNING) {
if (atomic_load_acq_int(&iq->instr_pending) >=
if (atomic_load_acq_int(&iq->instr_pending)) {
if (atomic_load_acq_int(&iq->instr_pending))
if (!(atomic_load_acq_int(&oct->status) == LIO_DEV_IN_RESET))
if (!(atomic_load_acq_int(&lio->ifstate) &
if (atomic_load_acq_int(&lio->ifstate) & LIO_IFSTATE_RUNNING)
if (atomic_load_acq_int(&lio->ifstate) & LIO_IFSTATE_REGISTERED)
switch (atomic_load_acq_int(&oct->status)) {
if (atomic_load_acq_int(&oct_dev->status) == LIO_DEV_CORE_OK) {
return (atomic_load_acq_int(&lio->ifstate) & state_flag);
(atomic_load_acq_int(&lio->ifstate) | state_flag));
(atomic_load_acq_int(&lio->ifstate) &
pcount = atomic_load_acq_int(
if (!(atomic_load_acq_int(&lio->ifstate) & LIO_IFSTATE_RUNNING) ||
gen = atomic_load_acq_int(&cp->clbr_gen);
#define mpi3mr_atomic_read(v) atomic_load_acq_int(&(v)->val)
#define mrsas_atomic_read(v) atomic_load_acq_int(&(v)->val)
children = atomic_load_acq_int(&parent->bio_children);
while (!atomic_load_acq_int(&status->done)) {
atomic_load_acq_int(&fxent_nactpools) > 1 &&
if (atomic_load_acq_int(&fxent_timer_ready) != 0) {
lev = atomic_load_acq_int(&sc->grab_level) - 1;
while (sc_cnputc_logtail != atomic_load_acq_int(&sc_cnputc_loghead)) {
if (atomic_load_acq_int(&sc_cnputc_loghead) -
while (!atomic_load_acq_int(&status->done)) {
if (atomic_load_acq_int(&pkt->p_state) != WG_PACKET_CRYPTED)
timeout = atomic_load_acq_int(&g_raid3_timeout);
while (atomic_load_acq_int(&aps_ready) == 0)
#define atomic_load_acq_32 atomic_load_acq_int
atomic_load_acq_int((const volatile u_int *)(p))
switch (atomic_load_acq_int(&state->action)) {
if (atomic_load_acq_int(&state->action))
if (atomic_load_acq_int(&ithd->it_need) == 0 &&
atomic_load_acq_int(&info->state) != KCOV_STATE_RUNNING)
if (atomic_load_acq_int(&bc->__count) == 0) {
gen = atomic_load_acq_int(&th->th_generation);
pps->capgen = atomic_load_acq_int(&th->th_generation);
gen = atomic_load_acq_int(&th->th_generation);
gen = atomic_load_acq_int(&th->th_generation);
gen = atomic_load_acq_int(&th->th_generation);
atomic_load_acq_int(&rtc_generation);
atomic_load_acq_int(&epoch->e_in_use) == 0))
while (atomic_load_acq_int(&smp_rv_waiters[3]) < ncpus)
s_rd_seq = atomic_load_acq_int(&s->s_rd_seq);
s_wr_seq = atomic_load_acq_int(&s->s_wr.seq);
state = atomic_load_acq_int(&ktls_init_state);
state = atomic_load_acq_int(&splice_init_state);
atomic_load_acq_int(&zb->zb_header->bzh_user_gen))
count = atomic_load_acq_int(&grp->il_inpcnt);
count = atomic_load_acq_int(&grp->il_inpcnt);
#define atomic_load_acq_32 atomic_load_acq_int
return ((u_long)atomic_load_acq_int((const volatile u_int *)addr));
#define atomic_load_acq_ptr atomic_load_acq_int
if (atomic_load_acq_int(&smp_started))
return (atomic_load_acq_int(seqcp));
return (atomic_load_acq_int(seqcp) & ~SEQC_MOD);
(pq->pq_cnt > 0 || atomic_load_acq_int(&swapdev_enabled))) {
if (atomic_load_acq_int(&swapdev_enabled))
while (atomic_load_acq_int(&smp_started) == 0)
oldval = atomic_load_acq_int(ss->p);
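A second recurring idiom above is the generation (seqlock-style) reader: the th_generation, tsc_ref->tsc_seq, rtc_generation, and seqc hits all read a generation word with acquire semantics, copy the protected fields, and retry if the generation was zero (writer in progress) or changed during the copy. A sketch of that loop, modeled on the timecounter reader in kern_tc.c; everything except atomic_load_acq_int and atomic_thread_fence_acq is an illustrative name:

#include <sys/types.h>
#include <machine/atomic.h>

static volatile u_int snap_gen;		/* held at 0 while a writer updates */
static uint64_t snap_a, snap_b;

static uint64_t
read_snapshot(void)
{
	uint64_t a, b;
	u_int gen;

	do {
		gen = atomic_load_acq_int(&snap_gen);	/* acquire: copies follow */
		a = snap_a;
		b = snap_b;
		atomic_thread_fence_acq();	/* copies complete before the re-check */
	} while (gen == 0 || gen != snap_gen);
	return (a + b);
}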
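Finally, the #define hits (atomic_read, ECORE_ATOMIC_READ, mpi3mr_atomic_read, mrsas_atomic_read, and the atomic_load_acq_32/atomic_load_acq_ptr aliases) are compatibility shims: drivers ported from Linux wrap the FreeBSD acquire load behind their native spelling, and 32-bit platforms define the _32 and _ptr variants in terms of the _int one. A hypothetical shim in the same shape; note that it parenthesizes its argument, which the ECORE_ATOMIC_READ hit above does not:

#include <sys/types.h>
#include <machine/atomic.h>

typedef struct { volatile u_int val; } drv_atomic_t;	/* illustrative type */

/* Parenthesize (v) so expression arguments expand safely. */
#define	drv_atomic_read(v)	atomic_load_acq_int(&(v)->val)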