arch/alpha/kernel/smp.c
641
if (atomic_read(&mm->mm_users) <= 1) {
arch/alpha/kernel/smp.c
688
if (atomic_read(&mm->mm_users) <= 1) {
arch/alpha/kernel/smp.c
742
if (atomic_read(&mm->mm_users) <= 1) {
arch/alpha/mm/tlbflush.c
97
if (atomic_read(&mm->mm_users) <= 1) {
arch/arc/mm/tlb.c
185
if (atomic_read(&mm->mm_users) == 0)
arch/arm/include/asm/mmu_context.h
30
unlikely(atomic_read(&mm->context.vmalloc_seq) !=
arch/arm/include/asm/mmu_context.h
31
atomic_read(&init_mm.context.vmalloc_seq)))
arch/arm/kernel/ftrace.c
234
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/arm/kernel/machine_kexec.c
120
while ((atomic_read(&waiting_for_crash_ipi) > 0) && msecs) {
arch/arm/kernel/machine_kexec.c
124
if (atomic_read(&waiting_for_crash_ipi) > 0)
arch/arm/mach-exynos/pm.c
210
if (atomic_read(&cpu1_wakeup))
arch/arm/mach-exynos/pm.c
253
!atomic_read(&cpu1_wakeup))
arch/arm/mach-exynos/pm.c
256
if (!atomic_read(&cpu1_wakeup))
arch/arm/mach-exynos/pm.c
260
while (!atomic_read(&cpu1_wakeup)) {
arch/arm/mm/ioremap.c
159
} while (seq != atomic_read(&init_mm.context.vmalloc_seq));
arch/arm64/include/asm/pgtable.h
421
if (mm != current->active_mm && atomic_read(&mm->mm_users) <= 1)
arch/arm64/kernel/ftrace.c
533
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/arm64/kernel/ftrace.c
558
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/arm64/kernel/patching.c
217
while (atomic_read(&pp->cpu_count) <= num_online_cpus())
arch/arm64/kvm/arm.c
618
(atomic_read(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vlpi_count) ||
arch/arm64/kvm/arm.c
834
atomic_read(&kvm->online_vcpus) == 1)
arch/arm64/kvm/guest.c
1005
if (write && atomic_read(&kvm->nr_memslots_dirty_logging)) {
arch/arm64/kvm/hyp/vhe/switch.c
431
atomic_read(&vcpu->kvm->arch.vncr_map_count))
arch/arm64/kvm/nested.c
1192
if (!WARN_ON(atomic_read(&mmu->refcnt)))
arch/arm64/kvm/nested.c
729
if (atomic_read(&s2_mmu->refcnt) == 0)
arch/arm64/kvm/nested.c
732
BUG_ON(atomic_read(&s2_mmu->refcnt)); /* We have struct MMUs to spare */
arch/arm64/kvm/nested.c
91
num_mmus = atomic_read(&kvm->online_vcpus) * S2_MMU_PER_VCPU;
arch/arm64/kvm/vgic/vgic-debug.c
82
int nr_cpus = atomic_read(&kvm->online_vcpus);
arch/arm64/kvm/vgic/vgic-init.c
119
if (kvm->created_vcpus != atomic_read(&kvm->online_vcpus))
arch/arm64/kvm/vgic/vgic-init.c
138
if (atomic_read(&kvm->online_vcpus) > kvm->max_vcpus) {
arch/arm64/kvm/vgic/vgic-init.c
397
if (kvm->created_vcpus != atomic_read(&kvm->online_vcpus))
arch/arm64/kvm/vgic/vgic-mmio-v2.c
124
int nr_vcpus = atomic_read(&source_vcpu->kvm->online_vcpus);
arch/arm64/kvm/vgic/vgic-mmio-v2.c
185
u8 cpu_mask = GENMASK(atomic_read(&vcpu->kvm->online_vcpus) - 1, 0);
arch/arm64/kvm/vgic/vgic-mmio-v2.c
38
value |= (atomic_read(&vcpu->kvm->online_vcpus) - 1) << 5;
arch/arm64/kvm/vgic/vgic-mmio-v3.c
269
return atomic_read(&vgic_cpu->ctlr) == GICR_CTLR_ENABLE_LPIS;
arch/arm64/kvm/vgic/vgic-mmio-v3.c
278
val = atomic_read(&vgic_cpu->ctlr);
arch/arm64/kvm/vgic/vgic-mmio-v3.c
541
return !!atomic_read(&vcpu->arch.vgic_cpu.syncr_busy);
arch/arm64/kvm/vgic/vgic-mmio-v3.c
892
int nr_vcpus = atomic_read(&kvm->online_vcpus);
arch/arm64/kvm/vgic/vgic-v3.c
64
atomic_read(&vcpu->kvm->arch.vgic.active_spis))
arch/arm64/kvm/vgic/vgic-v4.c
257
nr_vcpus = atomic_read(&kvm->online_vcpus);
arch/arm64/kvm/vgic/vgic.h
398
return atomic_read(&kvm->online_vcpus) * KVM_VGIC_V3_REDIST_SIZE;
arch/csky/kernel/ftrace.c
153
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/csky/kernel/ftrace.c
216
while (atomic_read(&param->cpu_count) <= num_online_cpus())
arch/csky/kernel/probes/kprobes.c
38
while (atomic_read(&param->cpu_count) <= num_online_cpus())
arch/hexagon/kernel/kgdb.c
157
if (atomic_read(&kgdb_active) != -1) {
arch/loongarch/include/asm/pgtable.h
452
atomic_read(&mm->tlb_flush_pending))
arch/loongarch/include/asm/qspinlock.h
27
val = atomic_read(&lock->val);
arch/loongarch/kernel/ftrace.c
58
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/loongarch/kernel/ftrace_dyn.c
230
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/loongarch/kernel/ftrace_dyn.c
248
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/loongarch/kernel/irq.c
51
seq_printf(p, "%*s: %10u\n", prec, "ERR", atomic_read(&irq_err_count));
arch/loongarch/kernel/kgdb.c
252
if (atomic_read(&kgdb_active) != -1)
arch/loongarch/kernel/kgdb.c
258
if (atomic_read(&kgdb_setting_breakpoint))
arch/loongarch/kernel/machine_kexec.c
140
while (!atomic_read(&kexec_ready_to_reboot))
arch/loongarch/kernel/machine_kexec.c
174
while (!atomic_read(&kexec_ready_to_reboot))
arch/loongarch/kernel/perf_event.c
558
if (atomic_read(&active_events) == 0) {
arch/loongarch/kernel/smp.c
740
if (atomic_read(&mm->mm_users) == 0)
arch/loongarch/kernel/smp.c
745
if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) {
arch/loongarch/kernel/smp.c
778
if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) {
arch/loongarch/kernel/smp.c
825
if ((atomic_read(&vma->vm_mm->mm_users) != 1) || (current->mm != vma->vm_mm)) {
arch/m68k/amiga/chipram.c
118
unsigned long n = atomic_read(&chipavail);
arch/m68k/kernel/irq.c
37
seq_printf(p, "%*s: %10u\n", prec, "ERR", atomic_read(&irq_err_count));
arch/microblaze/kernel/ftrace.c
32
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/mips/cavium-octeon/setup.c
95
while (!atomic_read(&kexec_ready_to_reboot))
arch/mips/kernel/crash.c
47
while (!atomic_read(&kexec_ready_to_reboot))
arch/mips/kernel/ftrace.c
333
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/mips/kernel/irq.c
44
seq_printf(p, "%*s: %10u\n", prec, "ERR", atomic_read(&irq_err_count));
arch/mips/kernel/kgdb.c
306
if (atomic_read(&kgdb_active) != -1)
arch/mips/kernel/kgdb.c
312
if (atomic_read(&kgdb_setting_breakpoint))
arch/mips/kernel/machine_kexec.c
127
while (!atomic_read(&kexec_ready_to_reboot))
arch/mips/kernel/perf_event_mipsxx.c
675
if (atomic_read(&active_events) == 0)
arch/mips/kernel/pm-cps.c
89
while (atomic_read(a) < online)
arch/mips/kernel/pm-cps.c
97
while (atomic_read(a) > online)
arch/mips/kernel/smp.c
561
if (atomic_read(&mm->mm_users) == 0)
arch/mips/kernel/smp.c
571
} else if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) {
arch/mips/kernel/smp.c
621
} else if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) {
arch/mips/kernel/smp.c
688
} else if ((atomic_read(&vma->vm_mm->mm_users) != 1) ||
arch/mips/kernel/spinlock_test.c
67
while (atomic_read(&s->enter_wait))
arch/mips/kernel/spinlock_test.c
74
while (atomic_read(&s->start_wait))
arch/mips/kernel/spinlock_test.c
86
while (atomic_read(&s->exit_wait))
arch/mips/kernel/sync-r4k.c
116
while (atomic_read(&start_count) != cpus - 1)
arch/mips/kernel/sync-r4k.c
126
while (atomic_read(&stop_count) != cpus-1)
arch/mips/kernel/sync-r4k.c
167
if (atomic_read(&test_runs) > 0)
arch/mips/kernel/sync-r4k.c
192
while (atomic_read(&start_count) != cpus)
arch/mips/kernel/sync-r4k.c
210
while (atomic_read(&stop_count) != cpus)
arch/mips/kernel/sync-r4k.c
223
if (!atomic_read(&test_runs)) {
arch/mips/math-emu/dsemul.c
180
fr_idx = atomic_read(&current->thread.bd_emu_frame);
arch/mips/math-emu/dsemul.c
246
fr_idx = atomic_read(&current->thread.bd_emu_frame);
arch/mips/sgi-ip27/ip27-nmi.c
223
while (atomic_read(&nmied_cpus) != num_online_cpus());
arch/openrisc/kernel/sync-timer.c
104
while (atomic_read(&count_count_start) != 2)
arch/openrisc/kernel/sync-timer.c
114
while (atomic_read(&count_count_stop) != 2)
arch/openrisc/kernel/sync-timer.c
53
while (atomic_read(&count_count_start) != 1)
arch/openrisc/kernel/sync-timer.c
74
while (atomic_read(&count_count_stop) != 1)
arch/parisc/include/asm/mmu_context.h
21
BUG_ON(atomic_read(&mm->mm_users) != 1);
arch/parisc/kernel/ftrace.c
43
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/parisc/kernel/toc.c
85
if (atomic_read(&kgdb_active) != -1)
arch/powerpc/include/asm/eeh.h
112
return pe ? !!atomic_read(&pe->pass_dev_cnt) : false;
arch/powerpc/include/asm/kvm_book3s_64.h
528
if (atomic_read(&kvm->arch.hpte_mod_interest))
arch/powerpc/include/asm/membarrier.h
16
likely(!(atomic_read(&next->membarrier_state) &
arch/powerpc/include/asm/mmu_context.h
127
VM_WARN_ON_ONCE(atomic_read(&mm->context.active_cpus) <= 0);
arch/powerpc/include/asm/tlb.h
62
if (atomic_read(&mm->context.active_cpus) > 1)
arch/powerpc/kernel/fadump.c
736
while ((atomic_read(&cpus_in_fadump) < ncpus) && (--msecs > 0))
arch/powerpc/kernel/process.c
1301
atomic_read(&new->mm->context.vas_windows)))
arch/powerpc/kernel/time.c
558
if (atomic_read(&ppc_n_lost_interrupts) != 0)
arch/powerpc/kernel/trace/ftrace.c
687
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/powerpc/kernel/trace/ftrace_64_pg.c
796
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/powerpc/kexec/crash.c
145
while ((atomic_read(&cpus_in_crash) < ncpus) && (--msecs > 0))
arch/powerpc/kexec/crash.c
150
if (atomic_read(&cpus_in_crash) >= ncpus) {
arch/powerpc/kexec/crash.c
156
ncpus - atomic_read(&cpus_in_crash));
arch/powerpc/kexec/crash.c
187
while (atomic_read(&cpus_in_crash) < ncpus)
arch/powerpc/kvm/book3s_64_mmu_hv.c
138
if (atomic_read(&kvm->arch.vcpus_running)) {
arch/powerpc/kvm/book3s_64_mmu_hv.c
1823
if (atomic_read(&kvm->arch.vcpus_running)) {
arch/powerpc/kvm/book3s_64_mmu_hv.c
998
return atomic_read(&kvm->arch.vcpus_running) != 0;
arch/powerpc/kvm/book3s_hv.c
3940
int n_online = atomic_read(&vc->online_count);
arch/powerpc/kvm/book3s_hv.c
6295
if (atomic_read(&kvm->arch.vcpus_running)) {
arch/powerpc/kvm/book3s_hv.c
6425
if (atomic_read(&kvm->arch.vcpus_running)) {
arch/powerpc/kvm/book3s_hv_builtin.c
162
return atomic_read(&hv_vm_count) != 0;
arch/powerpc/kvm/book3s_xive.c
219
if (atomic_read(&q->pending_count)) {
arch/powerpc/kvm/book3s_xive.c
223
WARN_ON(p > atomic_read(&q->count));
arch/powerpc/mm/book3s64/hash_pgtable.c
490
while (atomic_read(&parms->cpu_counter) > 1)
arch/powerpc/mm/book3s64/pgtable.c
473
BUG_ON(atomic_read(&ptdesc->pt_frag_refcount) <= 0);
arch/powerpc/mm/book3s64/radix_hugetlbpage.c
59
atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_pgtable.c
1574
atomic_read(&mm->context.copros) > 0) {
arch/powerpc/mm/book3s64/radix_pgtable.c
1609
(atomic_read(&mm->context.copros) > 0))
arch/powerpc/mm/book3s64/radix_tlb.c
1059
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
1270
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
1348
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
380
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
513
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
567
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
654
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
767
int active_cpus = atomic_read(&mm->context.active_cpus);
arch/powerpc/mm/book3s64/radix_tlb.c
788
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
805
if (atomic_read(&mm->mm_users) <= 1 && current->mm == mm) {
arch/powerpc/mm/book3s64/radix_tlb.c
857
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
895
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
937
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/pgtable-frag.c
123
BUG_ON(atomic_read(&ptdesc->pt_frag_refcount) <= 0);
arch/powerpc/perf/8xx-pmu.c
42
ctr = atomic_read(&instruction_counter);
arch/powerpc/perf/8xx-pmu.c
44
} while (ctr != atomic_read(&instruction_counter));
arch/powerpc/perf/core-book3s.c
2183
if (atomic_read(&num_events) == 0 &&
arch/powerpc/perf/core-fsl-emb.c
209
if (atomic_read(&num_events)) {
arch/powerpc/perf/core-fsl-emb.c
558
if (atomic_read(&num_events) == 0 &&
arch/powerpc/platforms/85xx/smp.c
453
while ( (atomic_read(&kexec_down_cpus) != (num_cpus - 1)) &&
arch/powerpc/platforms/cell/spufs/inode.c
328
if ((cbe_spu_info[node].n_spus - atomic_read(
arch/powerpc/platforms/cell/spufs/sched.c
1074
atomic_read(&nr_spu_contexts),
arch/powerpc/platforms/cell/spufs/sched.c
400
if (atomic_read(&ctx->gang->aff_sched_count) == 0)
arch/powerpc/platforms/powermac/backlight.c
136
if (atomic_read(&kernel_backlight_disabled))
arch/powerpc/platforms/powermac/backlight.c
144
if (atomic_read(&kernel_backlight_disabled))
arch/powerpc/platforms/powermac/backlight.c
69
if (atomic_read(&kernel_backlight_disabled))
arch/powerpc/platforms/powermac/backlight.c
97
if (atomic_read(&kernel_backlight_disabled))
arch/powerpc/platforms/powernv/idle.c
647
if (atomic_read(&local_paca->dont_stop)) {
arch/powerpc/platforms/powernv/vas-window.c
1292
if (!window->tx_win && atomic_read(&window->num_txwins) != 0) {
arch/powerpc/platforms/pseries/vas-sysfs.c
61
return sprintf(buf, "%d\n", atomic_read(&caps->_name)); \
arch/powerpc/platforms/pseries/vas.c
1015
atomic_read(&caps->nr_used_credits),
arch/powerpc/platforms/pseries/vas.c
224
while (atomic_read(&txwin->pending_faults)) {
arch/powerpc/platforms/pseries/vas.c
363
atomic_read(&cop_feat_caps->nr_total_credits)) {
arch/powerpc/platforms/pseries/vas.c
868
old_nr_creds = atomic_read(&caps->nr_total_credits);
arch/powerpc/platforms/pseries/vas.c
997
old_nr_creds = atomic_read(&caps->nr_total_credits);
arch/powerpc/platforms/pseries/vio.c
952
return sprintf(buf, "%d\n", atomic_read(&viodev->cmo.allocs_failed));
arch/powerpc/sysdev/xive/common.c
190
if (atomic_read(&q->pending_count)) {
arch/powerpc/sysdev/xive/common.c
193
WARN_ON(p > atomic_read(&q->count));
arch/powerpc/xmon/xmon.c
882
if (!bp->enabled && atomic_read(&bp->ref_count) == 0) {
arch/riscv/include/asm/membarrier.h
17
likely(!(atomic_read(&next->membarrier_state) &
arch/riscv/include/asm/pgtable.h
364
atomic_read(&mm->tlb_flush_pending))
arch/riscv/kernel/ftrace.c
236
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/riscv/kernel/ftrace.c
258
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/riscv/kernel/kgdb.c
315
if (atomic_read(&kgdb_setting_breakpoint))
arch/riscv/kernel/patch.c
268
while (atomic_read(&patch->cpu_count) <= num_online_cpus())
arch/riscv/kernel/sbi.c
369
return atomic_read(&req.error);
arch/riscv/kernel/smp.c
323
while ((atomic_read(&waiting_for_crash_ipi) > 0) && timeout--)
arch/riscv/kernel/smp.c
326
if (atomic_read(&waiting_for_crash_ipi) > 0)
arch/riscv/kernel/smp.c
333
return (atomic_read(&waiting_for_crash_ipi) > 0);
arch/riscv/kvm/aia_device.c
240
if (kvm->created_vcpus != atomic_read(&kvm->online_vcpus))
arch/riscv/kvm/aia_device.c
323
nr_vcpus = atomic_read(&dev->kvm->online_vcpus);
arch/riscv/kvm/aia_device.c
394
nr_vcpus = atomic_read(&dev->kvm->online_vcpus);
arch/riscv/kvm/aia_device.c
460
nr_vcpus = atomic_read(&dev->kvm->online_vcpus);
arch/riscv/kvm/vcpu_sbi_v01.c
52
hmask = (1UL << atomic_read(&kvm->online_vcpus)) - 1;
arch/riscv/kvm/vcpu_sbi_v01.c
74
hmask = (1UL << atomic_read(&kvm->online_vcpus)) - 1;
arch/s390/crypto/paes_s390.c
1064
if (!atomic_read(&ctx->via_engine_ctr)) {
arch/s390/crypto/paes_s390.c
1497
if (!atomic_read(&ctx->via_engine_ctr)) {
arch/s390/crypto/paes_s390.c
480
if (!atomic_read(&ctx->via_engine_ctr)) {
arch/s390/crypto/paes_s390.c
746
if (!atomic_read(&ctx->via_engine_ctr)) {
arch/s390/crypto/phmac_s390.c
617
if (!atomic_read(&tfm_ctx->via_engine_ctr)) {
arch/s390/crypto/phmac_s390.c
655
if (!atomic_read(&tfm_ctx->via_engine_ctr)) {
arch/s390/crypto/phmac_s390.c
698
if (!atomic_read(&tfm_ctx->via_engine_ctr)) {
arch/s390/crypto/phmac_s390.c
704
!atomic_read(&tfm_ctx->via_engine_ctr)) {
arch/s390/include/asm/mmu_context.h
104
while (atomic_read(&mm->context.flush_count))
arch/s390/include/asm/pgtable.h
550
if (unlikely(atomic_read(&mm->context.protected_count)))
arch/s390/kernel/ftrace.c
274
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/s390/kernel/perf_cpum_cf.c
1051
if (!atomic_read(&cpuhw->ctr_set[i]))
arch/s390/kernel/perf_cpum_cf.c
1443
if (atomic_read(&p.cpus_ack) != cpumask_weight(mask)) {
arch/s390/kernel/time.c
292
sw0 = atomic_read(sw_ptr);
arch/s390/kernel/time.c
294
sw1 = atomic_read(sw_ptr);
arch/s390/kernel/time.c
342
rc = (atomic_read(sw_ptr) & 0x80000000U) != 0;
arch/s390/kernel/time.c
525
while (atomic_read(&sync->cpus) != 0)
arch/s390/kernel/topology.c
384
if (atomic_read(&topology_poll) > 60)
arch/s390/kvm/interrupt.c
1902
online_vcpus = atomic_read(&kvm->online_vcpus);
arch/s390/kvm/interrupt.c
3049
int vcpu_idx, online_vcpus = atomic_read(&kvm->online_vcpus);
arch/s390/kvm/kvm-s390.c
3812
return atomic_read(&vcpu->arch.sie_block->prog20) &
arch/s390/kvm/kvm-s390.c
4479
cpuflags = atomic_read(&vcpu->arch.sie_block->cpuflags);
arch/s390/kvm/kvm-s390.c
5100
online_vcpus = atomic_read(&vcpu->kvm->online_vcpus);
arch/s390/kvm/kvm-s390.c
5156
online_vcpus = atomic_read(&vcpu->kvm->online_vcpus);
arch/s390/kvm/kvm-s390.h
103
return (atomic_read(&vcpu->arch.sie_block->cpuflags) & flags) == flags;
arch/s390/kvm/priv.c
827
cpus = atomic_read(&vcpu->kvm->online_vcpus);
arch/s390/kvm/pv.c
70
return atomic_read(&mm->context.protected_count) > 1;
arch/s390/kvm/vsie.c
107
return !(atomic_read(&vsie_page->scb_s.prog20) & PROG_REQUEST);
arch/s390/kvm/vsie.c
116
cpuflags = atomic_read(&vsie_page->scb_o->cpuflags);
arch/s390/kvm/vsie.c
1201
if (!(atomic_read(&scb_o->cpuflags) & CPUSTAT_STOP_INT))
arch/s390/kvm/vsie.c
126
int newflags, cpuflags = atomic_read(&scb_o->cpuflags);
arch/s390/kvm/vsie.c
1439
nr_vcpus = atomic_read(&kvm->online_vcpus);
arch/s390/kvm/vsie.c
512
if (!(atomic_read(&scb_s->cpuflags) & CPUSTAT_KSS))
arch/s390/kvm/vsie.c
517
if (!(atomic_read(&scb_s->cpuflags) & CPUSTAT_SM))
arch/sh/kernel/ftrace.c
102
int old = atomic_read(&nmi_running);
arch/sh/kernel/ftrace.c
159
if (!atomic_read(&nmi_running))
arch/sh/kernel/ftrace.c
164
} while (atomic_read(&nmi_running));
arch/sh/kernel/ftrace.c
325
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/sh/kernel/irq.c
51
seq_printf(p, "%*s: %10u\n", prec, "ERR", atomic_read(&irq_err_count));
arch/sh/kernel/perf_event.c
120
if (atomic_read(&num_events) == 0 &&
arch/sh/kernel/smp.c
366
if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) {
arch/sh/kernel/smp.c
398
if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) {
arch/sh/kernel/smp.c
441
if ((atomic_read(&vma->vm_mm->mm_users) != 1) ||
arch/sparc/kernel/ftrace.c
125
if (unlikely(atomic_read(&current->tracing_graph_pause)))
arch/sparc/kernel/kgdb_64.c
128
if (atomic_read(&kgdb_active) != -1)
arch/sparc/kernel/nmi.c
172
if (!atomic_read(&nmi_active))
arch/sparc/kernel/nmi.c
198
if (!atomic_read(&nmi_active)) {
arch/sparc/kernel/nmi.c
292
if (atomic_read(&nmi_active) == -1) {
arch/sparc/kernel/nmi.c
313
if (atomic_read(&nmi_active) == -1)
arch/sparc/kernel/nmi.c
57
if (atomic_read(&nmi_active)) {
arch/sparc/kernel/perf_event.c
1177
if (atomic_read(&active_events) == 0) {
arch/sparc/kernel/perf_event.c
1178
if (atomic_read(&nmi_active) > 0) {
arch/sparc/kernel/perf_event.c
1180
BUG_ON(atomic_read(&nmi_active) != 0);
arch/sparc/kernel/perf_event.c
1190
if (atomic_read(&nmi_active) == 0)
arch/sparc/kernel/perf_event.c
1425
if (atomic_read(&nmi_active) < 0)
arch/sparc/kernel/perf_event.c
1624
if (!atomic_read(&active_events))
arch/sparc/kernel/smp_64.c
1157
while (atomic_read(&smp_capture_registry) != ncpus)
arch/sparc/kernel/traps_64.c
2024
if ((cnt = atomic_read(ocnt)) != 0) {
arch/sparc/mm/init_64.c
557
atomic_read(&dcpage_flushes));
arch/sparc/mm/init_64.c
560
atomic_read(&dcpage_flushes_xcall));
arch/sparc/mm/srmmu.c
1664
if (atomic_read(&mm->mm_users) == 1 && current->active_mm == mm)
arch/um/drivers/port_kern.c
111
if (atomic_read(&port->wait_count) == 0) {
arch/um/drivers/vector_kern.c
268
return atomic_read(&qi->queue_depth);
arch/um/drivers/vector_kern.c
282
return atomic_read(&qi->queue_depth);
arch/um/drivers/vector_kern.c
335
queue_depth = atomic_read(&qi->queue_depth);
arch/um/drivers/vector_kern.c
407
while (atomic_read(&qi->queue_depth) > 0) {
arch/um/drivers/vector_kern.c
409
send_len = atomic_read(&qi->queue_depth);
arch/um/drivers/vector_kern.c
460
return atomic_read(&qi->queue_depth);
arch/um/drivers/vector_kern.c
661
queue_depth = atomic_read(&qi->queue_depth);
arch/um/kernel/tlb.c
212
if (atomic_read(&current->mm->mm_users) == 0)
arch/x86/events/core.c
1810
if (!atomic_read(&active_events))
arch/x86/events/core.c
419
if (atomic_read(&pmc_refcount) == 0) {
arch/x86/events/core.c
463
if (i != what && atomic_read(&x86_pmu.lbr_exclusive[i]))
arch/x86/events/intel/core.c
3860
if (!atomic_read(&era->ref) || era->config == reg->config) {
arch/x86/events/intel/uncore.c
201
if (!atomic_read(&er->ref) ||
arch/x86/events/intel/uncore_nhmex.c
1006
if (!atomic_read(&er->ref) || er->config == reg1->config) {
arch/x86/events/intel/uncore_nhmex.c
1017
if (!__BITS_VALUE(atomic_read(&er->ref), idx - 2, 8) ||
arch/x86/events/intel/uncore_nhmex.c
1025
if (!atomic_read(&er->ref) ||
arch/x86/events/intel/uncore_nhmex.c
565
if (!atomic_read(&er->ref) || er->config == config) {
arch/x86/events/intel/uncore_nhmex.c
592
if (__BITS_VALUE(atomic_read(&er->ref), idx, 8)) {
arch/x86/events/intel/uncore_nhmex.c
599
if (!atomic_read(&er->ref) || !((er->config ^ config) & mask)) {
arch/x86/events/intel/uncore_snbep.c
1081
if (!__BITS_VALUE(atomic_read(&er->ref), idx, 8) ||
arch/x86/events/intel/uncore_snbep.c
954
if (!__BITS_VALUE(atomic_read(&er->ref), i, 6) ||
arch/x86/hyperv/hv_crash.c
300
while (atomic_read(&crash_cpus_wait) < num_online_cpus() && msecs--)
arch/x86/include/asm/paravirt-spinlock.h
135
val = atomic_read(&lock->val);
arch/x86/include/asm/pgtable.h
979
atomic_read(&mm->tlb_flush_pending))
arch/x86/include/asm/qspinlock.h
28
val |= atomic_read(&lock->val) & ~_Q_PENDING_MASK;
arch/x86/kernel/apic/apic.c
359
rsvd = atomic_read(&eilvt_offsets[offset]);
arch/x86/kernel/cpu/mce/core.c
1053
if (atomic_read(&mce_panicked))
arch/x86/kernel/cpu/mce/core.c
1263
while (atomic_read(&mce_executing) <= num_online_cpus()) {
arch/x86/kernel/cpu/mce/core.c
1277
while (atomic_read(&mce_executing) != 0) {
arch/x86/kernel/cpu/microcode/core.c
418
atomic_read(&late_cpus_in) - 1);
arch/x86/kernel/cpu/microcode/core.c
449
pr_err_once("load: %d CPUs timed out\n", atomic_read(&late_cpus_in) - 1);
arch/x86/kernel/cpu/microcode/core.c
486
if (atomic_read(&offline_in_nmi) == nr_offl)
arch/x86/kernel/cpu/microcode/core.c
651
nr_offl - atomic_read(&offline_in_nmi));
arch/x86/kernel/ftrace.c
630
if (atomic_read(&current->tracing_graph_pause))
arch/x86/kernel/irq.c
173
seq_printf(p, "%*s: %10u\n", prec, "ERR", atomic_read(&irq_err_count));
arch/x86/kernel/irq.c
175
seq_printf(p, "%*s: %10u\n", prec, "MIS", atomic_read(&irq_mis_count));
arch/x86/kernel/irq.c
254
u64 sum = atomic_read(&irq_err_count);
arch/x86/kernel/kgdb.c
502
if (atomic_read(&kgdb_active) != -1) {
arch/x86/kernel/kgdb.c
533
if (atomic_read(&kgdb_cpu_doing_single_step) != -1) {
arch/x86/kernel/reboot.c
942
while ((atomic_read(&waiting_for_crash_ipi) > 0) && msecs) {
arch/x86/kernel/smp.c
124
if (raw_smp_processor_id() == atomic_read(&stopping_cpu))
arch/x86/kernel/tboot.c
324
while (atomic_read((atomic_t *)&tboot->num_in_wfs) != num_aps &&
arch/x86/kernel/tboot.c
333
return !(atomic_read((atomic_t *)&tboot->num_in_wfs) == num_aps);
arch/x86/kernel/tboot.c
340
if (tboot_wait_for_aps(atomic_read(&ap_wfs_count)))
arch/x86/kernel/tsc_sync.c
371
while (atomic_read(&start_count) != cpus - 1)
arch/x86/kernel/tsc_sync.c
381
while (atomic_read(&stop_count) != cpus-1)
arch/x86/kernel/tsc_sync.c
425
if (atomic_read(&test_runs) > 0)
arch/x86/kernel/tsc_sync.c
464
while (atomic_read(&start_count) != cpus)
arch/x86/kernel/tsc_sync.c
482
while (atomic_read(&stop_count) != cpus)
arch/x86/kernel/tsc_sync.c
495
if (!atomic_read(&test_runs))
arch/x86/kvm/hyperv.c
1352
if (atomic_read(&kvm->online_vcpus) < 2)
arch/x86/kvm/hyperv.c
1798
bool has_mismatch = atomic_read(&hv->num_mismatched_vp_indexes);
arch/x86/kvm/hyperv.c
239
gsi = atomic_read(&synic->sint_to_gsi[sint]);
arch/x86/kvm/i8254.c
248
if (atomic_read(&ps->reinject) && !atomic_xchg(&ps->irq_ack, 0))
arch/x86/kvm/i8254.c
263
if (atomic_read(&kvm->arch.vapics_in_nmi_mode) > 0)
arch/x86/kvm/i8254.c
273
if (atomic_read(&ps->reinject))
arch/x86/kvm/i8254.c
296
if (atomic_read(&ps->reinject) == reinject)
arch/x86/kvm/lapic.c
2028
if (atomic_read(&apic->lapic_timer.pending))
arch/x86/kvm/lapic.c
2278
if (atomic_read(&ktimer->pending)) {
arch/x86/kvm/lapic.c
2298
if (!apic_lvtt_period(apic) && atomic_read(&ktimer->pending))
arch/x86/kvm/lapic.c
2312
if (!apic_lvtt_period(apic) && atomic_read(&apic->lapic_timer.pending))
arch/x86/kvm/lapic.c
2999
return atomic_read(&apic->lapic_timer.pending);
arch/x86/kvm/lapic.c
3148
if (atomic_read(&apic->lapic_timer.pending) > 0) {
arch/x86/kvm/mmu/mmu.c
6185
return atomic_read(&sp->write_flooding_count) >= 3;
arch/x86/kvm/mmu/mmu.c
7718
if (!atomic_read(&kvm->nr_memslots_dirty_logging))
arch/x86/kvm/svm/sev.c
2050
if (src->created_vcpus != atomic_read(&src->online_vcpus) ||
arch/x86/kvm/svm/sev.c
2051
dst->created_vcpus != atomic_read(&dst->online_vcpus))
arch/x86/kvm/svm/sev.c
2057
if (atomic_read(&src->online_vcpus) != atomic_read(&dst->online_vcpus))
arch/x86/kvm/svm/svm.c
1350
if (atomic_read(&srso_nr_vms))
arch/x86/kvm/vmx/tdx.c
2674
if (kvm->created_vcpus != atomic_read(&kvm->online_vcpus)) {
arch/x86/kvm/vmx/tdx.c
3313
if (!atomic_read(&nr_configured_hkid))
arch/x86/kvm/vmx/vmx.c
4863
if (!enable_pml || !atomic_read(&vcpu->kvm->nr_memslots_dirty_logging))
arch/x86/kvm/vmx/vmx.c
8474
if (atomic_read(&vcpu->kvm->nr_memslots_dirty_logging))
arch/x86/kvm/x86.c
13585
nr_slots = atomic_read(&kvm->nr_memslots_dirty_logging);
arch/x86/kvm/x86.c
14049
return atomic_read(&kvm->arch.noncoherent_dma_count);
arch/x86/kvm/x86.c
2623
atomic_read(&vcpu->kvm->online_vcpus)) &&
arch/x86/kvm/x86.c
2637
atomic_read(&vcpu->kvm->online_vcpus),
arch/x86/kvm/x86.c
3127
atomic_read(&kvm->online_vcpus));
arch/x86/kvm/x86.c
9974
atomic_read(&kvm_guest_has_master_clock) != 0)
arch/x86/kvm/xen.c
110
if (atomic_read(&vcpu->arch.xen.timer_pending) > 0) {
arch/x86/kvm/xen.c
132
if (atomic_read(&vcpu->arch.xen.timer_pending))
arch/x86/kvm/xen.h
106
return atomic_read(&vcpu->arch.xen.timer_pending);
arch/x86/mm/mmio-mod.c
76
return atomic_read(&mmiotrace_enabled);
arch/x86/mm/tlb.c
757
atomic_read(&mm->context.perf_rdpmc_allowed))) {
arch/x86/platform/uv/uv_nmi.c
524
nmi = atomic_read(&hub_nmi->in_nmi);
arch/x86/platform/uv/uv_nmi.c
552
nmi = atomic_read(&hub_nmi->in_nmi);
arch/x86/platform/uv/uv_nmi.c
562
nmi = atomic_read(&uv_in_nmi);
arch/x86/platform/uv/uv_nmi.c
584
if (cpu == atomic_read(&hub_nmi->cpu_owner)) {
arch/x86/platform/uv/uv_nmi.c
702
atomic_read(&uv_nmi_cpus_in_nmi), num_online_cpus());
arch/x86/platform/uv/uv_nmi.c
770
while (atomic_read(&uv_nmi_cpus_in_nmi) > 0)
arch/x86/platform/uv/uv_nmi.c
774
while (atomic_read(&uv_nmi_slave_continue))
arch/x86/platform/uv/uv_nmi.c
783
int in = atomic_read(&uv_nmi_cpus_in_nmi);
arch/x86/platform/uv/uv_nmi.c
789
while (!atomic_read(&uv_nmi_slave_continue))
arch/x86/platform/uv/uv_nmi.c
805
atomic_read(&uv_nmi_cpus_in_nmi), cpu);
arch/x86/platform/uv/uv_nmi.c
823
while (!atomic_read(&uv_nmi_slave_continue))
arch/x86/platform/uv/uv_nmi.c
860
while (atomic_read(&uv_nmi_kexec_failed) == 0) {
arch/x86/platform/uv/uv_nmi.c
917
sig = atomic_read(&uv_nmi_slave_continue);
arch/x86/platform/uv/uv_nmi.c
953
master = (atomic_read(&uv_nmi_cpu) == cpu);
arch/x86/xen/spinlock.c
48
if (atomic_read(nest_cnt) == 1 && xen_test_irq_pending(irq)) {
arch/xtensa/kernel/jump_label.c
47
while (atomic_read(&patch->cpu_count) <= num_online_cpus())
block/bdev.c
1133
if (atomic_read(&bdev->bd_openers) == 1)
block/bdev.c
1297
if (!atomic_read(&bdev->bd_openers)) {
block/bdev.c
748
if (!atomic_read(&bdev->bd_openers))
block/bdev.c
778
if (!atomic_read(&part->bd_openers)) {
block/bfq-iosched.c
5967
if (atomic_read(&bic->icq.ioc->active_ref) == 0 ||
block/bio.c
1728
BUG_ON(atomic_read(&bio->__bi_remaining) <= 0);
block/bio.c
836
BUG_ON(!atomic_read(&bio->__bi_cnt));
block/blk-cgroup.c
1215
if (blkcg_debug_stats && atomic_read(&blkg->use_delay)) {
block/blk-cgroup.c
1217
atomic_read(&blkg->use_delay),
block/blk-cgroup.c
1892
if (atomic_read(&blkg->use_delay) < 0)
block/blk-cgroup.c
1912
int cur_use = atomic_read(&blkg->use_delay);
block/blk-cgroup.c
1954
int use_delay = atomic_read(&blkg->use_delay);
block/blk-cgroup.c
2091
if (WARN_ON_ONCE(atomic_read(&blkg->use_delay) < 0))
block/blk-cgroup.c
2256
if (atomic_read(&blkcg->congestion_count)) {
block/blk-cgroup.h
375
if (WARN_ON_ONCE(atomic_read(&blkg->use_delay) < 0))
block/blk-cgroup.h
383
int old = atomic_read(&blkg->use_delay);
block/blk-cgroup.h
418
int old = atomic_read(&blkg->use_delay);
block/blk-cgroup.h
435
int old = atomic_read(&blkg->use_delay);
block/blk-crypto-profile.c
390
if (WARN_ON_ONCE(atomic_read(&slot->slot_refs) != 0)) {
block/blk-iocost.c
1181
ioc_gen = atomic_read(&ioc->hweight_gen);
block/blk-iocost.c
1326
iocg->hweight_gen = atomic_read(&ioc->hweight_gen) - 1;
block/blk-iocost.c
2304
!atomic_read(&iocg_to_blkg(iocg)->use_delay) &&
block/blk-iolatency.c
1014
atomic_read(&parent->child_lat.scale_cookie));
block/blk-iolatency.c
292
unsigned use_delay = atomic_read(&lat_to_blkg(iolat)->use_delay);
block/blk-iolatency.c
335
unsigned long old = atomic_read(&lat_info->scale_cookie);
block/blk-iolatency.c
404
unsigned int our_cookie = atomic_read(&iolat->scale_cookie);
block/blk-iolatency.c
413
cur_cookie = atomic_read(&lat_info->scale_cookie);
block/blk-iolatency.c
544
atomic_read(&lat_info->scale_cookie) == DEFAULT_SCALE_COOKIE)
block/blk-iolatency.c
679
cookie = atomic_read(&lat_info->scale_cookie);
block/blk-iolatency.c
743
enabled = atomic_read(&blkiolat->enable_cnt);
block/blk-mq-debugfs.c
77
seq_printf(m, "%d\n", atomic_read(&q->pm_only));
block/blk-mq.h
351
return atomic_read(&hctx->queue->nr_active_requests_shared_tags);
block/blk-mq.h
352
return atomic_read(&hctx->nr_active);
block/blk-rq-qos.c
11
unsigned int cur = atomic_read(v);
block/blk-wbt.c
295
ret += atomic_read(&rwb->rq_wait[i].inflight);
block/blk-wbt.c
878
atomic_read(&rwb->rq_wait[i].inflight));
block/blk-zoned.c
1098
if (atomic_read(&disk->nr_zone_wplugs)) {
block/blk-zoned.c
1954
WARN_ON_ONCE(atomic_read(&disk->nr_zone_wplugs));
block/blk-zoned.c
571
if (!atomic_read(&disk->nr_zone_wplugs))
block/blk.h
645
((unsigned int) atomic_read(&(req->ref)) + 127u <= 127u)
block/blk.h
665
return atomic_read(&req->ref);
block/mq-deadline.c
253
return stats->inserted - atomic_read(&stats->completed);
block/mq-deadline.c
520
stats->dispatched, atomic_read(&stats->completed));
block/mq-deadline.c
903
atomic_read(&stats->completed);
block/partitions/core.c
470
if (atomic_read(&part->bd_openers))
block/partitions/core.c
674
WARN_ON_ONCE(atomic_read(&part->bd_openers));
crypto/af_alg.c
135
unsigned int nokey = atomic_read(&ask->nokey_refcnt);
crypto/af_alg.c
192
if (atomic_read(&ask->refcnt))
crypto/af_alg.c
366
if (atomic_read(&ask->refcnt) != atomic_read(&ask->nokey_refcnt))
crypto/algif_aead.c
294
if (!atomic_read(&ask->nokey_refcnt))
crypto/algif_hash.c
304
if (!atomic_read(&ask->nokey_refcnt))
crypto/algif_skcipher.c
280
if (!atomic_read(&ask->nokey_refcnt))
crypto/jitterentropy-testing.c
102
data->jent_testing_rb[((u32)atomic_read(&data->rb_writer)) &
crypto/jitterentropy-testing.c
116
return ((((u32)atomic_read(&data->rb_writer)) &
crypto/jitterentropy-testing.c
130
u32 writer = (u32)atomic_read(&data->rb_writer);
crypto/jitterentropy-testing.c
80
if (!atomic_read(&data->jent_testing_enabled) && (*boot != 1))
crypto/jitterentropy-testing.c
90
if (((u32)atomic_read(&data->rb_writer)) >
crypto/jitterentropy-testing.c
98
if (atomic_read(&data->rb_writer) == 1)
drivers/accel/habanalabs/common/command_buffer.c
356
*usage_cnt = atomic_read(&cb->cs_cnt);
drivers/accel/habanalabs/common/debugfs.c
125
atomic_read(&cb->buf->mmap), atomic_read(&cb->cs_cnt));
drivers/accel/habanalabs/common/device.c
1086
atomic_read(&hdev->kernel_queues[cpu_q_id].ci),
drivers/accel/habanalabs/common/device.c
1087
atomic_read(&hdev->kernel_queues[cpu_q_id].ci) & pq_pi_mask,
drivers/accel/habanalabs/common/device.c
572
dmabuf_export_cnt = atomic_read(&hdev->dmabuf_export_cnt);
drivers/accel/habanalabs/common/hw_queue.c
28
return atomic_read(ci) & ((queue_len << 1) - 1);
drivers/accel/habanalabs/common/memory.c
1338
if (atomic_read(&phys_pg_pack->mapping_cnt) == 0) {
drivers/accel/habanalabs/common/memory.c
368
if (atomic_read(&phys_pg_pack->mapping_cnt) > 0) {
drivers/accel/habanalabs/gaudi/gaudi.c
7286
le32_to_cpu(sync_err->pi), le32_to_cpu(sync_err->ci), q->pi, atomic_read(&q->ci));
drivers/accel/habanalabs/gaudi2/gaudi2.c
9874
q->pi, atomic_read(&q->ci));
drivers/accel/habanalabs/gaudi2/gaudi2.c
9950
le32_to_cpu(sync_err->pi), le32_to_cpu(sync_err->ci), q->pi, atomic_read(&q->ci));
drivers/accel/habanalabs/goya/goya.c
4472
le32_to_cpu(sync_err->pi), le32_to_cpu(sync_err->ci), q->pi, atomic_read(&q->ci));
drivers/accel/ivpu/ivpu_debugfs.c
110
seq_printf(s, "%d\n", atomic_read(&vdev->pm->reset_counter));
drivers/accel/ivpu/ivpu_debugfs.c
118
seq_printf(s, "%d\n", atomic_read(&vdev->pm->reset_pending));
drivers/accel/ivpu/ivpu_debugfs.c
126
seq_printf(s, "%d\n", atomic_read(&vdev->hw->firewall_irq_counter));
drivers/accel/ivpu/ivpu_drv.c
383
drm_WARN_ON(&vdev->drm, atomic_read(&vdev->job_timeout_counter));
drivers/accel/ivpu/ivpu_hw_ip.c
1117
atomic_read(&vdev->hw->firewall_irq_counter));
drivers/accel/ivpu/ivpu_ipc.c
438
if (atomic_read(&ipc->rx_msg_count) > IPC_MAX_RX_MSG) {
drivers/accel/ivpu/ivpu_ipc.c
540
drm_WARN_ON(&vdev->drm, atomic_read(&ipc->rx_msg_count) > 0);
drivers/accel/ivpu/ivpu_ipc.c
578
drm_WARN_ON(&vdev->drm, atomic_read(&ipc->rx_msg_count) > 0);
drivers/accel/ivpu/ivpu_pm.c
164
ivpu_err(vdev, "Recovering the NPU (reset #%d)\n", atomic_read(&vdev->pm->reset_counter));
drivers/accel/qaic/qaic_timesync.c
141
if (atomic_read(&mqtsdev->buff_in_use)) {
drivers/accel/rocket/rocket_job.c
114
if (atomic_read(&core->reset.pending))
drivers/accel/rocket/rocket_job.c
353
if (!atomic_read(&core->reset.pending))
drivers/accel/rocket/rocket_job.c
528
if (atomic_read(&core->sched.credit_count))
drivers/acpi/apei/ghes.c
1108
count = atomic_read(&cache->count);
drivers/android/binder.c
1534
if (thread->is_dead && !atomic_read(&thread->tmp_ref)) {
drivers/android/binder.c
6501
atomic_read(&thread->tmp_ref));
drivers/android/binder.c
6749
int temp = atomic_read(&stats->bc[i]);
drivers/android/binder.c
6759
int temp = atomic_read(&stats->br[i]);
drivers/android/binder.c
6771
int created = atomic_read(&stats->obj_created[i]);
drivers/android/binder.c
6772
int deleted = atomic_read(&stats->obj_deleted[i]);
drivers/android/binder.c
6968
unsigned int log_cur = atomic_read(&log->cur);
drivers/atm/idt77252.c
2544
while (atomic_read(&vc->scq->used) > 0) {
drivers/atm/idt77252.c
2548
card->name, atomic_read(&vc->scq->used));
drivers/atm/idt77252.c
713
entries = atomic_read(&scq->used);
drivers/atm/idt77252.c
768
TXPRINTK("%d entries in SCQ used (push).\n", atomic_read(&scq->used));
drivers/atm/idt77252.c
771
card->name, atomic_read(&scq->used),
drivers/atm/idt77252.c
798
card->name, atomic_read(&scq->used), scq->next);
drivers/atm/iphase.c
2832
printk("section_bip: %d\n", atomic_read(&stats->section_bip));
drivers/atm/iphase.c
2833
printk("line_bip : %d\n", atomic_read(&stats->line_bip));
drivers/atm/iphase.c
2834
printk("path_bip : %d\n", atomic_read(&stats->path_bip));
drivers/atm/iphase.c
2835
printk("line_febe : %d\n", atomic_read(&stats->line_febe));
drivers/atm/iphase.c
2836
printk("path_febe : %d\n", atomic_read(&stats->path_febe));
drivers/atm/iphase.c
2837
printk("corr_hcs : %d\n", atomic_read(&stats->corr_hcs));
drivers/atm/iphase.c
2838
printk("uncorr_hcs : %d\n", atomic_read(&stats->uncorr_hcs));
drivers/atm/iphase.c
2839
printk("tx_cells : %d\n", atomic_read(&stats->tx_cells));
drivers/atm/iphase.c
2840
printk("rx_cells : %d\n", atomic_read(&stats->rx_cells));
drivers/atm/iphase.c
3043
if (atomic_read(&vcc->stats->tx) % 20 == 0) {
drivers/atm/suni.c
53
if (atomic_read(&stats->s) < 0) atomic_set(&stats->s,INT_MAX);
drivers/auxdisplay/panel.c
1084
if (!atomic_read(&keypad_available)) {
drivers/base/dd.c
821
int local_probe_count = atomic_read(&probe_count);
drivers/base/dd.c
837
wait_event(probe_waitqueue, atomic_read(&probe_count) == 0);
drivers/base/dd.c
889
int trigger_count = atomic_read(&deferred_trigger_count);
drivers/base/dd.c
900
if (trigger_count != atomic_read(&deferred_trigger_count) &&
drivers/base/power/runtime.c
1239
atomic_read(&dev->power.child_count) > 0)) {
drivers/base/power/runtime.c
1579
atomic_read(&dev->power.child_count) > 0)
drivers/base/power/runtime.c
2113
return atomic_read(&dev->power.usage_count) <= 1 &&
drivers/base/power/runtime.c
2114
(atomic_read(&dev->power.child_count) == 0 ||
drivers/base/power/runtime.c
279
else if (atomic_read(&dev->power.usage_count))
drivers/base/power/runtime.c
281
else if (!dev->power.ignore_children && atomic_read(&dev->power.child_count))
drivers/base/power/runtime.c
336
atomic_read(&supplier->power.usage_count) > 0)
drivers/base/power/sysfs.c
556
return sysfs_emit(buf, "%d\n", atomic_read(&dev->power.usage_count));
drivers/base/power/sysfs.c
565
0 : atomic_read(&dev->power.child_count));
drivers/base/power/wakeup.c
50
unsigned int comb = atomic_read(&combined_event_count);
drivers/base/power/wakeup.c
891
return ret || atomic_read(&pm_abort_suspend) > 0;
drivers/base/test/test_async_driver_probe.c
243
if (atomic_read(&async_completed) != async_id) {
drivers/base/test/test_async_driver_probe.c
247
} else if (!atomic_read(&errors) && !atomic_read(&warnings)) {
drivers/base/test/test_async_driver_probe.c
275
atomic_read(&errors), atomic_read(&warnings));
drivers/base/test/test_async_driver_probe.c
32
if (atomic_read(&timeout)) {
drivers/block/aoe/aoecmd.c
180
if (skb && atomic_read(&skb_shinfo(skb)->dataref) == 1) {
drivers/block/aoe/aoecmd.c
234
if (atomic_read(&skb_shinfo(skb)->dataref) != 1) {
drivers/block/aoe/aoedev.c
423
while (atomic_read(&skb_shinfo(skb)->dataref) != 1 && i-- > 0)
drivers/block/drbd/drbd_actlog.c
181
D_ASSERT(device, atomic_read(&device->md_io.in_use) == 1);
drivers/block/drbd/drbd_actlog.c
249
D_ASSERT(device, atomic_read(&device->local_cnt) > 0);
drivers/block/drbd/drbd_actlog.c
268
D_ASSERT(device, atomic_read(&device->local_cnt) > 0);
drivers/block/drbd/drbd_actlog.c
650
D_ASSERT(device, atomic_read(&device->local_cnt));
drivers/block/drbd/drbd_bitmap.c
1194
if (atomic_read(&ctx->in_flight))
drivers/block/drbd/drbd_debugfs.c
142
if (atomic_read(&tmp.in_use)) {
drivers/block/drbd/drbd_debugfs.c
166
int n = atomic_read(&device->ap_actlog_cnt);
drivers/block/drbd/drbd_debugfs.c
203
in_flight = atomic_read(&ctx->in_flight);
drivers/block/drbd/drbd_int.h
2126
if (atomic_read(&device->suspend_cnt))
drivers/block/drbd/drbd_int.h
2139
if (atomic_read(&device->ap_bio_cnt) > mxb)
drivers/block/drbd/drbd_main.c
2231
expect(device, atomic_read(&req->completion_ref) == 0) &&
drivers/block/drbd/drbd_main.c
2238
req, atomic_read(&req->completion_ref),
drivers/block/drbd/drbd_main.c
2621
if (atomic_read(&connection->current_epoch->epoch_size) != 0)
drivers/block/drbd/drbd_main.c
2622
drbd_err(connection, "epoch_size:%d\n", atomic_read(&connection->current_epoch->epoch_size));
drivers/block/drbd/drbd_main.c
3433
int cnt = atomic_read(&device->ap_bio_cnt);
drivers/block/drbd/drbd_main.c
3501
if (flags == BM_LOCKED_CHANGE_ALLOWED || atomic_read(&device->ap_bio_cnt) == 0) {
drivers/block/drbd/drbd_nl.c
1564
if (atomic_read(&device->ap_bio_cnt))
drivers/block/drbd/drbd_nl.c
1977
wait_event(device->misc_wait, !atomic_read(&device->ap_pending_cnt) || drbd_suspended(device));
drivers/block/drbd/drbd_nl.c
3403
s->dev_upper_pending = atomic_read(&device->ap_bio_cnt);
drivers/block/drbd/drbd_nl.c
3404
s->dev_lower_pending = atomic_read(&device->local_cnt);
drivers/block/drbd/drbd_nl.c
3650
s->peer_dev_pending = atomic_read(&device->ap_pending_cnt) +
drivers/block/drbd/drbd_nl.c
3651
atomic_read(&device->rs_pending_cnt);
drivers/block/drbd/drbd_nl.c
3652
s->peer_dev_unacked = atomic_read(&device->unacked_cnt);
drivers/block/drbd/drbd_nl.c
3849
nla_put_u32(skb, T_ap_bio_cnt, atomic_read(&device->ap_bio_cnt)) ||
drivers/block/drbd/drbd_nl.c
3850
nla_put_u32(skb, T_ap_pending_cnt, atomic_read(&device->ap_pending_cnt)) ||
drivers/block/drbd/drbd_nl.c
3851
nla_put_u32(skb, T_rs_pending_cnt, atomic_read(&device->rs_pending_cnt)))
drivers/block/drbd/drbd_nl.c
779
wait_event(device->misc_wait, atomic_read(&device->ap_pending_cnt) == 0);
drivers/block/drbd/drbd_nl.c
977
wait_event(device->misc_wait, !atomic_read(&device->ap_bio_cnt));
drivers/block/drbd/drbd_proc.c
293
atomic_read(&device->local_cnt),
drivers/block/drbd/drbd_proc.c
294
atomic_read(&device->ap_pending_cnt) +
drivers/block/drbd/drbd_proc.c
295
atomic_read(&device->rs_pending_cnt),
drivers/block/drbd/drbd_proc.c
296
atomic_read(&device->unacked_cnt),
drivers/block/drbd/drbd_proc.c
297
atomic_read(&device->ap_bio_cnt),
drivers/block/drbd/drbd_proc.c
318
seq_printf(seq, "\tblocked on activity log: %d\n", atomic_read(&device->ap_actlog_cnt));
drivers/block/drbd/drbd_receiver.c
1183
epoch_size = atomic_read(&epoch->epoch_size);
drivers/block/drbd/drbd_receiver.c
1198
atomic_read(&epoch->active) == 0 &&
drivers/block/drbd/drbd_receiver.c
125
if (atomic_read(&device->pp_in_use) >= mxb)
drivers/block/drbd/drbd_receiver.c
1607
if (atomic_read(&connection->current_epoch->epoch_size)) {
drivers/block/drbd/drbd_receiver.c
1625
if (atomic_read(&connection->current_epoch->epoch_size)) {
drivers/block/drbd/drbd_receiver.c
231
D_ASSERT(device, atomic_read(&peer_req->pending_bios) == 0);
drivers/block/drbd/drbd_receiver.c
2588
atomic_read(&device->rs_sect_ev);
drivers/block/drbd/drbd_receiver.c
2590
if (atomic_read(&device->ap_actlog_cnt)
drivers/block/drbd/drbd_receiver.c
5039
i = atomic_read(&device->pp_in_use_by_net);
drivers/block/drbd/drbd_receiver.c
5042
i = atomic_read(&device->pp_in_use);
drivers/block/drbd/drbd_receiver.c
5647
atomic_read(&device->ap_in_flight) == 0 &&
drivers/block/drbd/drbd_req.c
1011
atomic_read(&device->ap_in_flight) >= nc->cong_fill) {
drivers/block/drbd/drbd_req.c
1361
req->epoch = atomic_read(&first_peer_device(device)->connection->current_tle_nr);
drivers/block/drbd/drbd_req.c
244
req->epoch == atomic_read(&first_peer_device(device)->connection->current_tle_nr))
drivers/block/drbd/drbd_req.c
78
atomic_read(&req->completion_ref) ||
drivers/block/drbd/drbd_req.c
82
s, atomic_read(&req->completion_ref));
drivers/block/drbd/drbd_req.c
925
return atomic_read(&device->local_cnt) >
drivers/block/drbd/drbd_req.c
926
atomic_read(&device->ap_pending_cnt) + atomic_read(&device->rs_pending_cnt);
drivers/block/drbd/drbd_worker.c
1689
if (atomic_read(&device->unacked_cnt) || atomic_read(&device->rs_pending_cnt)) {
drivers/block/drbd/drbd_worker.c
2115
atomic_read(&connection->current_tle_nr) !=
drivers/block/floppy.c
2859
if (WARN(atomic_read(&usage_count) == 0,
drivers/block/floppy.c
4183
if (WARN(atomic_read(&usage_count) == 0,
drivers/block/floppy.c
4749
if (atomic_read(&usage_count))
drivers/block/floppy.c
4999
if (atomic_read(&usage_count))
drivers/block/floppy.c
888
if (WARN(atomic_read(&usage_count) == 0,
drivers/block/mtip32xx/mtip32xx.c
2510
} while (atomic_read(&dd->irq_workers_active) != 0 &&
drivers/block/mtip32xx/mtip32xx.c
2513
if (atomic_read(&dd->irq_workers_active) != 0)
drivers/block/mtip32xx/mtip32xx.c
3868
} while (atomic_read(&dd->irq_workers_active) != 0 &&
drivers/block/mtip32xx/mtip32xx.c
3871
if (atomic_read(&dd->irq_workers_active) != 0) {
drivers/block/mtip32xx/mtip32xx.c
751
WARN_ON_ONCE(atomic_read(&dd->irq_workers_active) != 0);
drivers/block/nbd.c
1110
atomic_read(&config->live_connections) > 0,
drivers/block/nbd.c
1556
atomic_read(&config->recv_threads) == 0);
drivers/block/nbd.c
483
atomic_read(&config->live_connections),
drivers/block/rnbd/rnbd-clt.c
1077
if (atomic_read(&sess->busy)) {
drivers/block/rnbd/rnbd-clt.c
287
} while (atomic_read(&sess->busy) == 0 && requeued);
drivers/block/xen-blkback/blkback.c
1025
if (atomic_read(&ring->inflight) == 0)
drivers/block/xen-blkback/blkback.c
1030
if (!atomic_read(&blkif->drain))
drivers/block/xen-blkback/blkback.c
697
if (atomic_dec_and_test(&ring->inflight) && atomic_read(&blkif->drain)) {
drivers/block/xen-blkback/xenbus.c
289
if (atomic_read(&ring->inflight) > 0) {
drivers/block/xen-blkback/xenbus.c
321
BUG_ON(atomic_read(&ring->persistent_gnt_in_use) != 0);
drivers/block/zram/zram_drv.c
1126
while (atomic_read(&wb_ctl->num_inflight) > 0) {
drivers/block/zram/zram_drv.c
838
WARN_ON(atomic_read(&wb_ctl->num_inflight));
drivers/block/zram/zram_drv.c
995
while (atomic_read(&wb_ctl->num_inflight) > 0) {
drivers/bluetooth/bcm203x.c
145
if (atomic_read(&data->shutdown))
drivers/bluetooth/bfusb.c
160
while ((atomic_read(&data->pending_tx) < BFUSB_MAX_BULK_TX) &&
drivers/bluetooth/hci_vhci.c
82
if (atomic_read(&data->initialized))
drivers/bus/bt1-apb.c
246
return scnprintf(buf, PAGE_SIZE, "%d\n", atomic_read(&apb->count));
drivers/bus/bt1-axi.c
181
return scnprintf(buf, PAGE_SIZE, "%d\n", atomic_read(&axi->count));
drivers/bus/mhi/host/debugfs.c
33
atomic_read(&mhi_cntrl->dev_wake),
drivers/bus/mhi/host/debugfs.c
34
atomic_read(&mhi_cntrl->pending_pkts));
drivers/bus/mhi/host/pm.c
354
if (unlikely(atomic_read(&mhi_cntrl->pending_pkts) ||
drivers/bus/mhi/host/pm.c
355
atomic_read(&mhi_cntrl->dev_wake))) {
drivers/bus/mhi/host/pm.c
358
atomic_read(&mhi_cntrl->pending_pkts),
drivers/bus/mhi/host/pm.c
359
atomic_read(&mhi_cntrl->dev_wake));
drivers/bus/mhi/host/pm.c
548
WARN_ON(atomic_read(&mhi_cntrl->dev_wake));
drivers/bus/mhi/host/pm.c
549
WARN_ON(atomic_read(&mhi_cntrl->pending_pkts));
drivers/bus/mhi/host/pm.c
705
WARN_ON(atomic_read(&mhi_cntrl->dev_wake));
drivers/bus/mhi/host/pm.c
706
WARN_ON(atomic_read(&mhi_cntrl->pending_pkts));
drivers/bus/mhi/host/pm.c
881
if (atomic_read(&mhi_cntrl->dev_wake) ||
drivers/bus/mhi/host/pm.c
882
atomic_read(&mhi_cntrl->pending_pkts))
drivers/bus/mhi/host/pm.c
908
if (atomic_read(&mhi_cntrl->dev_wake) ||
drivers/bus/mhi/host/pm.c
909
atomic_read(&mhi_cntrl->pending_pkts)) {
drivers/char/agp/backend.c
77
if (atomic_read(&bridge->agp_in_use))
drivers/char/agp/generic.c
229
cur_memory = atomic_read(&bridge->current_memory_agp);
drivers/char/agp/generic.c
383
info->current_memory = atomic_read(&bridge->current_memory_agp);
drivers/char/apm-emulation.c
509
if (atomic_read(&userspace_notification_inhibit))
drivers/char/apm-emulation.c
547
atomic_read(&suspend_acks_pending) == 0,
drivers/char/ipmi/ipmb_dev_int.c
188
if (atomic_read(&ipmb_dev->request_queue_len))
drivers/char/ipmi/ipmb_dev_int.c
207
if (atomic_read(&ipmb_dev->request_queue_len) >=
drivers/char/ipmi/ipmi_msghandler.c
3562
return sysfs_emit(buf, "%d\n", atomic_read(&intf->nr_users));
drivers/char/ipmi/ipmi_msghandler.c
3577
count += atomic_read(&user->nr_msgs);
drivers/char/ipmi/ipmi_msghandler.c
5184
if (atomic_read(&stop_operation))
drivers/char/ipmi/ipmi_msghandler.c
5190
if (atomic_read(&stop_operation))
drivers/char/ipmi/ipmi_msghandler.c
5195
if (atomic_read(&intf->event_waiters)) {
drivers/char/ipmi/ipmi_msghandler.c
5218
if (atomic_read(&stop_operation))
drivers/char/ipmi/ipmi_msghandler.c
5352
while (atomic_read(&panic_done_count) != 0)
drivers/char/ipmi/ipmi_msghandler.c
5682
count = atomic_read(&smi_msg_inuse_count);
drivers/char/ipmi/ipmi_msghandler.c
5685
count = atomic_read(&recv_msg_inuse_count);
drivers/char/ipmi/ipmi_msghandler.c
670
((unsigned int) atomic_read(&(intf)->stats[IPMI_STAT_ ## stat]))
drivers/char/ipmi/ipmi_poweroff.c
155
while (atomic_read(&dummy_count) > 0) {
drivers/char/ipmi/ipmi_si_intf.c
1045
if (atomic_read(&smi_info->need_watch)) {
drivers/char/ipmi/ipmi_si_intf.c
257
((unsigned int) atomic_read(&(smi)->stats[SI_STAT_ ## stat]))
drivers/char/ipmi/ipmi_si_intf.c
859
&& (atomic_read(&smi_info->req_events))) {
drivers/char/ipmi/ipmi_ssif.c
295
((unsigned int) atomic_read(&(ssif)->stats[SSIF_STAT_ ## stat]))
drivers/char/ipmi/ipmi_watchdog.c
1045
while (atomic_read(&msg_tofree))
drivers/char/ppdev.c
615
ret = atomic_read(&pp->irqc);
drivers/char/ppdev.c
777
if (atomic_read(&pp->irqc))
drivers/clk/renesas/rzg2l-cpg.c
1386
if (criticals && criticals == atomic_read(&mstop->usecnt))
drivers/clk/renesas/rzg2l-cpg.c
1392
if (!atomic_read(&mstop->usecnt))
drivers/clk/renesas/rzg2l-cpg.c
1397
if (!atomic_read(&mstop->usecnt))
drivers/clk/renesas/rzg2l-cpg.c
1429
__clk_get_enable_count(hw->clk), atomic_read(&clk->mstop->usecnt),
drivers/clk/renesas/rzv2h-cpg.c
1116
if (!atomic_read(&mstop[i]))
drivers/clk/renesas/rzv2h-cpg.c
1137
if (!atomic_read(&mstop[i]) ||
drivers/clk/renesas/rzv2h-cpg.c
1330
if (atomic_read(&mstop[i]))
drivers/clocksource/arm_arch_timer.c
556
return atomic_read(&timer_unstable_counter_workaround_in_use);
drivers/clocksource/timer-ti-dm.c
1033
if (unlikely(!timer || !atomic_read(&timer->enabled))) {
drivers/clocksource/timer-ti-dm.c
1048
if (unlikely(!timer || !atomic_read(&timer->enabled)))
drivers/clocksource/timer-ti-dm.c
1061
if (unlikely(!timer || !atomic_read(&timer->enabled))) {
drivers/clocksource/timer-ti-dm.c
1113
if (unlikely(!timer || !atomic_read(&timer->enabled))) {
drivers/clocksource/timer-ti-dm.c
335
!atomic_read(&timer->enabled))
drivers/clocksource/timer-ti-dm.c
343
!atomic_read(&timer->enabled))
drivers/connector/cn_proc.c
124
if (atomic_read(&proc_event_num_listeners) < 1)
drivers/connector/cn_proc.c
153
if (atomic_read(&proc_event_num_listeners) < 1)
drivers/connector/cn_proc.c
178
if (atomic_read(&proc_event_num_listeners) < 1)
drivers/connector/cn_proc.c
215
if (atomic_read(&proc_event_num_listeners) < 1)
drivers/connector/cn_proc.c
239
if (atomic_read(&proc_event_num_listeners) < 1)
drivers/connector/cn_proc.c
271
if (atomic_read(&proc_event_num_listeners) < 1)
drivers/connector/cn_proc.c
297
if (atomic_read(&proc_event_num_listeners) < 1)
drivers/connector/cn_proc.c
330
if (atomic_read(&proc_event_num_listeners) < 1)
drivers/connector/cn_proc.c
372
if (atomic_read(&proc_event_num_listeners) < 1)
drivers/connector/cn_queue.c
139
while (atomic_read(&dev->refcnt)) {
drivers/connector/cn_queue.c
141
dev->name, atomic_read(&dev->refcnt));
drivers/counter/ti-ecap-capture.c
293
*val = atomic_read(&ecap_dev->nb_ovf);
drivers/cpuidle/coupled.c
156
while (atomic_read(a) < n)
drivers/cpuidle/coupled.c
164
while (atomic_read(a) > n)
drivers/cpuidle/coupled.c
246
int r = atomic_read(&coupled->ready_waiting_counts) >> WAITING_BITS;
drivers/cpuidle/coupled.c
258
int r = atomic_read(&coupled->ready_waiting_counts) >> WAITING_BITS;
drivers/cpuidle/coupled.c
270
int w = atomic_read(&coupled->ready_waiting_counts) & WAITING_MASK;
drivers/cpuidle/coupled.c
282
int w = atomic_read(&coupled->ready_waiting_counts) & WAITING_MASK;
drivers/cpuidle/cpuidle-tegra.c
154
if (atomic_read(&tegra_abort_flag)) {
drivers/crypto/atmel-ecc.c
221
tfm_cnt = atomic_read(&i2c_priv->tfm_count);
drivers/crypto/atmel-ecc.c
350
if (atomic_read(&i2c_priv->tfm_count)) {
drivers/crypto/atmel-sha204a.c
194
if (atomic_read(&i2c_priv->tfm_count)) {
drivers/crypto/bcm/util.c
376
atomic_read(&ipriv->session_count));
drivers/crypto/bcm/util.c
379
atomic_read(&ipriv->stream_count));
drivers/crypto/bcm/util.c
382
atomic_read(&ipriv->setkey_cnt[SPU_OP_CIPHER]));
drivers/crypto/bcm/util.c
385
atomic_read(&ipriv->op_counts[SPU_OP_CIPHER]));
drivers/crypto/bcm/util.c
388
op_cnt = atomic_read(&ipriv->cipher_cnt[alg][mode]);
drivers/crypto/bcm/util.c
399
atomic_read(&ipriv->op_counts[SPU_OP_HASH]));
drivers/crypto/bcm/util.c
401
op_cnt = atomic_read(&ipriv->hash_cnt[alg]);
drivers/crypto/bcm/util.c
411
atomic_read(&ipriv->setkey_cnt[SPU_OP_HMAC]));
drivers/crypto/bcm/util.c
414
atomic_read(&ipriv->op_counts[SPU_OP_HMAC]));
drivers/crypto/bcm/util.c
416
op_cnt = atomic_read(&ipriv->hmac_cnt[alg]);
drivers/crypto/bcm/util.c
426
atomic_read(&ipriv->setkey_cnt[SPU_OP_AEAD]));
drivers/crypto/bcm/util.c
430
atomic_read(&ipriv->op_counts[SPU_OP_AEAD]));
drivers/crypto/bcm/util.c
432
op_cnt = atomic_read(&ipriv->aead_cnt[alg]);
drivers/crypto/bcm/util.c
448
atomic_read(&ipriv->mb_no_spc));
drivers/crypto/bcm/util.c
451
atomic_read(&ipriv->mb_send_fail));
drivers/crypto/bcm/util.c
454
atomic_read(&ipriv->bad_icv));
drivers/crypto/caam/jr.c
201
if (atomic_read(&jrpriv->tfm_count)) {
drivers/crypto/caam/jr.c
363
tfm_cnt = atomic_read(&jrpriv->tfm_count);
drivers/crypto/cavium/nitrox/nitrox_dev.h
294
return atomic_read(&ndev->state) == __NDEV_READY;
drivers/crypto/cavium/nitrox/nitrox_dev.h
299
return atomic_read(&vfdev->state) == __NDEV_READY;
drivers/crypto/cavium/nitrox/nitrox_reqmgr.c
324
if (!atomic_read(&cmdq->backlog_count))
drivers/crypto/cavium/nitrox/nitrox_reqmgr.c
546
budget = atomic_read(&cmdq->pending_count);
drivers/crypto/cavium/nitrox/nitrox_reqmgr.c
553
if (atomic_read(&sr->status) != REQ_POSTED)
drivers/crypto/cavium/nitrox/nitrox_reqmgr.c
605
if (atomic_read(&cmdq->backlog_count))
drivers/crypto/chelsio/chcr_core.c
232
if (atomic_read(&dev->inflight) != 0) {
drivers/crypto/chelsio/chcr_core.c
258
if (!atomic_read(&drv_data.dev_count))
drivers/crypto/chelsio/chcr_core.c
56
if (atomic_read(&dev->inflight)) {
drivers/crypto/chelsio/chcr_core.c
60
atomic_read(&dev->inflight));
drivers/crypto/chelsio/chcr_core.c
65
atomic_read(&dev->inflight));
drivers/crypto/hisilicon/debugfs.c
1085
val = atomic_read(&qm->status.flags);
drivers/crypto/hisilicon/debugfs.c
528
if (unlikely(atomic_read(&qm->status.flags) == QM_STOP)) {
drivers/crypto/hisilicon/qm.c
1030
if (unlikely(atomic_read(&qp->qp_status.flags) == QP_STOP))
drivers/crypto/hisilicon/qm.c
2037
if (unlikely(atomic_read(&qp->qp_status.used) == qp->sq_depth - 1))
drivers/crypto/hisilicon/qm.c
2089
if (atomic_read(&qm->status.flags) == QM_STOP) {
drivers/crypto/hisilicon/qm.c
2242
if (atomic_read(&qm->status.flags) == QM_STOP) {
drivers/crypto/hisilicon/qm.c
2286
int qp_used = atomic_read(&qp->qp_status.used);
drivers/crypto/hisilicon/qm.c
2394
if (atomic_read(&qp->qp_status.flags) != QP_START) {
drivers/crypto/hisilicon/qm.c
2409
if (unlikely(qp->is_resetting && atomic_read(&qp->qp_status.used)))
drivers/crypto/hisilicon/qm.c
2451
if (unlikely(atomic_read(&qp->qp_status.flags) == QP_STOP ||
drivers/crypto/hisilicon/qm.c
2452
atomic_read(&qp->qm->status.flags) == QM_STOP ||
drivers/crypto/hisilicon/qm.c
3422
if (atomic_read(&qp->qp_status.flags) == QP_STOP &&
drivers/crypto/hisilicon/qm.c
3447
if (atomic_read(&qp->qp_status.flags) == QP_START) {
drivers/crypto/hisilicon/qm.c
3511
if (atomic_read(&qm->status.flags) == QM_STOP)
drivers/crypto/hisilicon/sec/sec_drv.c
847
return !atomic_read(&msg_ring->used);
drivers/crypto/hisilicon/sec/sec_drv.c
867
if (write == read && atomic_read(&msg_ring->used) == SEC_QUEUE_LEN) {
drivers/crypto/hisilicon/sec/sec_drv.c
889
return SEC_QUEUE_LEN - atomic_read(&msg_ring->used) >= num;
drivers/crypto/hisilicon/sec2/sec_crypto.c
209
if (atomic_read(&qp_ctx->qp->qp_status.used) == qp_ctx->qp->sq_depth - 1)
drivers/crypto/hisilicon/sec2/sec_crypto.c
213
if (atomic_read(&qp_ctx->qp->qp_status.used) == qp_ctx->qp->sq_depth - 1) {
drivers/crypto/intel/ixp4xx/ixp4xx_crypto.c
1111
if (atomic_read(&ctx->configuring))
drivers/crypto/intel/ixp4xx/ixp4xx_crypto.c
997
if (atomic_read(&ctx->configuring))
drivers/crypto/intel/qat/qat_common/adf_dev_mgr.c
374
return atomic_read(&accel_dev->ref_count) != 0;
drivers/crypto/intel/qat/qat_common/adf_sysfs_ras_counters.h
16
atomic_read(&(ras_errors).counter[ERR])
drivers/crypto/intel/qat/qat_common/adf_telemetry.c
169
if (!atomic_read(&telemetry->state)) {
drivers/crypto/intel/qat/qat_common/adf_telemetry.c
317
if (atomic_read(&accel_dev->telemetry->state))
drivers/crypto/intel/qat/qat_common/adf_tl_debugfs.c
249
if (atomic_read(&telemetry->state) > 1) {
drivers/crypto/intel/qat/qat_common/adf_tl_debugfs.c
404
if (!atomic_read(&telemetry->state)) {
drivers/crypto/intel/qat/qat_common/adf_tl_debugfs.c
463
seq_printf(s, "%d\n", atomic_read(&accel_dev->telemetry->state));
drivers/crypto/intel/qat/qat_common/adf_tl_debugfs.c
509
if (atomic_read(&telemetry->state)) {
drivers/crypto/intel/qat/qat_common/adf_tl_debugfs.c
555
curr_state = atomic_read(&telemetry->state);
drivers/crypto/intel/qat/qat_common/adf_tl_debugfs.c
615
if (!atomic_read(&telemetry->state)) {
drivers/crypto/intel/qat/qat_common/adf_tl_debugfs.c
735
if (atomic_read(&telemetry->state))
drivers/crypto/intel/qat/qat_common/adf_transport.c
85
return atomic_read(ring->inflights) > ring->threshold;
drivers/crypto/intel/qat/qat_common/qat_compression.c
34
for (i = 0; i < atomic_read(&inst->refctr); i++)
drivers/crypto/intel/qat/qat_common/qat_compression.c
66
ctr = atomic_read(&tmp_dev->ref_count);
drivers/crypto/intel/qat/qat_common/qat_compression.c
98
ctr = atomic_read(&tmp_inst->refctr);
drivers/crypto/intel/qat/qat_common/qat_crypto.c
30
for (i = 0; i < atomic_read(&inst->refctr); i++)
drivers/crypto/intel/qat/qat_common/qat_crypto.c
64
ctr = atomic_read(&tmp_dev->ref_count);
drivers/crypto/intel/qat/qat_common/qat_crypto.c
91
ctr = atomic_read(&tmp_inst->refctr);
drivers/crypto/marvell/cesa/cesa.h
748
u32 load = atomic_read(&engine->load);
drivers/crypto/marvell/octeontx/otx_cptvf_algs.c
1552
count = atomic_read(&se_devices.count);
drivers/crypto/marvell/octeontx/otx_cptvf_algs.c
1563
if (atomic_read(&se_devices.count) == num_devices &&
drivers/crypto/marvell/octeontx/otx_cptvf_algs.c
1579
count = atomic_read(&ae_devices.count);
drivers/crypto/marvell/octeontx/otx_cptvf_algs.c
1612
count = atomic_read(&dev_tbl->count);
drivers/crypto/marvell/octeontx/otx_cptvf_algs.c
74
count = atomic_read(&se_devices.count);
drivers/crypto/marvell/octeontx2/otx2_cptlf.h
423
return atomic_read(&lfs->state) == OTX2_CPTLF_STARTED;
drivers/crypto/marvell/octeontx2/otx2_cptvf_algs.c
1640
count = atomic_read(&se_devices.count);
drivers/crypto/marvell/octeontx2/otx2_cptvf_algs.c
1650
if (atomic_read(&se_devices.count) == num_devices &&
drivers/crypto/marvell/octeontx2/otx2_cptvf_algs.c
1678
count = atomic_read(&dev_tbl->count);
drivers/crypto/marvell/octeontx2/otx2_cptvf_algs.c
63
count = atomic_read(&se_devices.count);
drivers/crypto/virtio/virtio_crypto_mgr.c
162
ctr = atomic_read(&tmp_dev->ref_count);
drivers/cxl/core/port.c
459
if (atomic_read(&cxlrd->region_id) >= 0)
drivers/cxl/core/port.c
460
memregion_free(atomic_read(&cxlrd->region_id));
drivers/cxl/core/region.c
2408
int id = atomic_read(&cxlrd->region_id);
drivers/cxl/core/region.c
2637
return sysfs_emit(buf, "region%u\n", atomic_read(&cxlrd->region_id));
drivers/cxl/core/region.c
3907
atomic_read(&cxlrd->region_id));
drivers/cxl/core/suspend.c
11
return atomic_read(&mem_active) != 0;
drivers/dibs/dibs_loopback.c
296
if (atomic_read(&ldev->dmb_cnt))
drivers/dibs/dibs_loopback.c
297
wait_event(ldev->ldev_release, !atomic_read(&ldev->dmb_cnt));
drivers/dma-buf/st-dma-fence-chain.c
471
!atomic_read(&data.children),
drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c
1052
int count = atomic_read(&chan->descs_allocated);
drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c
1095
int count = atomic_read(&chan->descs_allocated);
drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c
346
atomic_read(&chan->descs_allocated));
drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c
561
axi_chan_name(chan), atomic_read(&chan->descs_allocated));
drivers/dma/mediatek/mtk-hsdma.c
428
reserved = min_t(u16, num_sgs, atomic_read(&pc->nr_free));
drivers/dma/mediatek/mtk-hsdma.c
628
if (atomic_read(&pc->nr_free) >= MTK_DMA_SIZE - 1)
drivers/dma/qcom/hidma_dbg.c
30
seq_printf(s, "allocated=%d\n", atomic_read(&tre->allocated));
drivers/dma/qcom/hidma_dbg.c
70
atomic_read(&lldev->pending_tre_count));
drivers/dma/qcom/hidma_ll.c
126
if (atomic_read(&tre->allocated) != true) {
drivers/dma/qcom/hidma_ll.c
306
while (atomic_read(&lldev->pending_tre_count)) {
drivers/dma/qcom/hidma_ll.c
612
if (atomic_read(&tre->allocated) != true) {
drivers/dma/sun6i-dma.c
578
if (!atomic_read(&sdev->tasklet_shutdown))
drivers/edac/edac_pci_sysfs.c
61
return sprintf(data, "%u\n", atomic_read(&pci->counters.pe_count));
drivers/edac/edac_pci_sysfs.c
655
before_count = atomic_read(&pci_parity_count);
drivers/edac/edac_pci_sysfs.c
667
if (before_count != atomic_read(&pci_parity_count))
drivers/edac/edac_pci_sysfs.c
67
return sprintf(data, "%u\n", atomic_read(&pci->counters.npe_count));
drivers/edac/thunderx_edac.c
424
while (!atomic_read(&lmc->ecc_int) && timeout--) {
drivers/firewire/core-card.c
477
atomic_read(&root_device->state) == FW_DEVICE_RUNNING;
drivers/firewire/core-device.c
1093
atomic_read(&device->state) == FW_DEVICE_INITIALIZING) {
drivers/firewire/core-device.c
1306
atomic_read(&device->state) == FW_DEVICE_INITIALIZING) {
drivers/firewire/core-device.c
1409
if (atomic_read(&device->state) == FW_DEVICE_RUNNING) {
drivers/firewire/nosy.c
146
atomic_read(&buffer->size) > 0) ||
drivers/firewire/nosy.c
150
if (atomic_read(&buffer->size) == 0)
drivers/firewire/nosy.c
192
atomic_read(&buffer->size) + sizeof(struct packet) + length) {
drivers/firewire/nosy.c
330
if (atomic_read(&client->buffer.size) > 0)
drivers/firmware/arm_scmi/driver.c
3467
return atomic_read(&info->dbg->counters[XFERS_INFLIGHT]);
drivers/firmware/efi/efi.c
794
for (i = 0; i < atomic_read(&rsv->count); i++) {
drivers/firmware/stratix10-svc.c
1135
atomic_read(&actrl->common_achan_refcount) > 0) {
drivers/firmware/stratix10-svc.c
1161
atomic_read(&actrl->common_achan_refcount) == 0) {
drivers/firmware/stratix10-svc.c
1209
if (atomic_read(&actrl->common_achan_refcount) == 0) {
drivers/gpib/common/gpib_os.c
1322
if (atomic_read(&desc->descriptor_busy)) {
drivers/gpib/common/gpib_os.c
144
if (atomic_read(&pseudo_irq->active))
drivers/gpib/common/gpib_os.c
596
if (atomic_read(&priv->holding_mutex))
drivers/gpib/common/iblib.c
158
!atomic_read(&board->stuck_srq) &&
drivers/gpib/common/iblib.c
554
if (atomic_read(&desc->io_in_progress))
drivers/gpio/gpio-mpsse.c
417
while ((irq_enabled = atomic_read(&priv->irq_enabled)) &&
drivers/gpio/gpio-mpsse.c
418
!atomic_read(&my_worker->cancelled)) {
drivers/gpio/gpio-mpsse.c
426
irq_type[offset] = atomic_read(&priv->irq_type[offset]);
drivers/gpio/gpio-pca953x.c
1397
if (atomic_read(&chip->wakeup_path))
drivers/gpio/gpio-pca953x.c
1410
if (!atomic_read(&chip->wakeup_path)) {
drivers/gpio/gpio-rcar.c
608
if (atomic_read(&p->wakeup_path))
drivers/gpio/gpio-virtuser.c
659
*val = atomic_read(&ld->irq_count);
drivers/gpio/gpiolib-cdev.c
2637
if (atomic_read(&cdev->watch_abi_version) == 2)
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
63
if (atomic_read(&p->ctx->guilty)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
336
ctx->reset_counter = atomic_read(&mgr->adev->gpu_reset_counter);
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
563
reset_counter = atomic_read(&adev->gpu_reset_counter);
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
599
if (ctx->reset_counter != atomic_read(&adev->gpu_reset_counter))
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
605
if (atomic_read(&ctx->guilty))
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
618
ce_count = atomic_read(&con->ras_ce_count);
drivers/gpu/drm/amd/amdgpu/amdgpu_ctx.c
619
ue_count = atomic_read(&con->ras_ue_count);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1796
last_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1862
if (preempt_seq <= atomic_read(&drv->last_seq)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c
1930
if (atomic_read(&ring->fence_drv.last_seq) !=
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
2391
return atomic_read(&dev->open_count) == 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
6433
atomic_read(
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
6444
atomic_read(&tmp_adev->gpu_reset_counter));
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
7354
return atomic_read(&adev->reset_domain->in_gpu_reset);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
227
last_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
358
emitted -= atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
376
last_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
443
amdgpu_fence_write(ring, atomic_read(&ring->fence_drv.last_seq));
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
87
seq = atomic_read(&drv->last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
906
atomic_read(&ring->fence_drv.last_seq));
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
954
*val = atomic_read(&adev->reset_domain->reset_res);
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
2325
if (!fences && !atomic_read(&adev->gfx.total_submission_cnt)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
1491
req_nps_mode = atomic_read(&hive->requested_nps_mode);
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
174
atomic_read(&adev->gpu_reset_counter);
drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c
706
return !!atomic_read(&src->enabled_types[type]);
drivers/gpu/drm/amd/amdgpu/amdgpu_job.c
124
job->base.sched->name, atomic_read(&ring->fence_drv.last_seq),
drivers/gpu/drm/amd/amdgpu/amdgpu_jpeg.c
124
if (!fences && !atomic_read(&adev->jpeg.total_submission_cnt)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
1293
ui32 = atomic_read(&adev->vram_lost_counter);
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
2872
hive_ras_recovery = atomic_read(&hive->ras_recovery);
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
2876
if (ras && (atomic_read(&ras->in_recovery) || hive_ras_recovery))
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
3721
atomic_read(&con->page_retirement_req_cnt));
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
3730
poison_creation_count = atomic_read(&con->poison_creation_count);
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
3739
} while (atomic_read(&con->poison_creation_count) &&
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
3740
!atomic_read(&con->poison_consumption_count));
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.c
5093
hive_ras_recovery = atomic_read(&hive->ras_recovery);
drivers/gpu/drm/amd/amdgpu/amdgpu_ras.h
935
return !!atomic_read(&amdgpu_ras_in_intr);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
474
last_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring_mux.c
99
last_seq = atomic_read(&e->ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_userq.c
135
if (atomic_read(&uq_mgr->userq_count[ring_type]) > 0 &&
drivers/gpu/drm/amd/amdgpu/amdgpu_userq.c
277
if (!IS_ERR_OR_NULL(mapping) && atomic_read(&mapping->bo_va->userq_va_mapped))
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
1379
if (atomic_read(&adev->uvd.handles[i]))
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
433
if (atomic_read(&adev->uvd.handles[i]))
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
527
uint32_t handle = atomic_read(&adev->uvd.handles[i]);
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
855
if (atomic_read(&adev->uvd.handles[i]) == handle) {
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
879
if (atomic_read(&adev->uvd.handles[i]) == handle) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
311
if (atomic_read(&adev->vce.handles[i]))
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
439
uint32_t handle = atomic_read(&adev->vce.handles[i]);
drivers/gpu/drm/amd/amdgpu/amdgpu_vce.c
749
if (atomic_read(&p->adev->vce.handles[i]) == handle) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
479
unlikely(atomic_read(&vcn_inst->dpg_enc_submission_cnt)))
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
490
if (!fences && !atomic_read(&vcn_inst->total_submission_cnt)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vcn.c
528
if (fences || atomic_read(&vcn_inst->dpg_enc_submission_cnt))
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
1419
enable = !!atomic_read(&adev->vm_manager.num_prt_users);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
1988
if (unlikely(atomic_read(&bo_va->userq_va_mapped) > 0)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
563
uint64_t result = (u64)atomic_read(&adev->vram_lost_counter) << 32;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c
861
atomic_read(&adev->gpu_reset_counter);
drivers/gpu/drm/amd/amdgpu/amdgpu_xcp.c
498
total_ref_cnt = atomic_read(&xcp_mgr->xcp[i].ref_cnt);
drivers/gpu/drm/amd/amdgpu/amdgpu_xgmi.c
1680
num_devs = atomic_read(&hive->number_devices);
drivers/gpu/drm/amd/amdgpu/amdgpu_xgmi.c
1710
if (atomic_read(&hive->requested_nps_mode) ==
drivers/gpu/drm/amd/amdgpu/amdgpu_xgmi.c
622
sprintf(node, "node%d", atomic_read(&hive->number_devices));
drivers/gpu/drm/amd/amdgpu/amdgpu_xgmi.c
666
sprintf(node, "node%d", atomic_read(&hive->number_devices));
drivers/gpu/drm/amd/amdgpu/amdgpu_xgmi.c
840
atomic_read(&hive->number_devices),
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
135
unlikely(atomic_read(&v->dpg_enc_submission_cnt)))
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
148
if (!fences && !atomic_read(&adev->vcn.inst[0].total_submission_cnt)) {
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c
189
if (fences || atomic_read(&v->dpg_enc_submission_cnt))
drivers/gpu/drm/amd/amdgpu/vcn_v4_0_3.c
1651
wait_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdgpu/vcn_v5_0_1.c
1314
wait_seq = atomic_read(&ring->fence_drv.last_seq);
drivers/gpu/drm/amd/amdkfd/kfd_device.c
1474
if (atomic_read(&node->kfd->compute_profile))
drivers/gpu/drm/amd/amdkfd/kfd_device.c
1522
if (!!atomic_read(&kfd->kfd_processes_count)) {
drivers/gpu/drm/amd/amdkfd/kfd_events.c
1253
int reset_cause = atomic_read(&dev->sram_ecc_flag) ?
drivers/gpu/drm/amd/amdkfd/kfd_int_process_v11.c
228
if (atomic_read(&p->poison)) {
drivers/gpu/drm/amd/amdkfd/kfd_int_process_v12_1.c
194
if (atomic_read(&p->poison)) {
drivers/gpu/drm/amd/amdkfd/kfd_process.c
1329
if (atomic_read(&p->debugged_process_count) > 0) {
drivers/gpu/drm/amd/amdkfd/kfd_process.c
1339
if (atomic_read(&p->debugged_process_count) == 0)
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1083
atomic_set(&new->queue_refcount, atomic_read(&old->queue_refcount));
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1897
evicted_ranges = atomic_read(&svms->evicted_ranges);
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1917
evicted_ranges = atomic_read(&svms->evicted_ranges);
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1920
invalid = atomic_read(&prange->invalid);
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
2105
atomic_set(&new->queue_refcount, atomic_read(&old->queue_refcount));
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
2547
if (atomic_read(&prange->queue_refcount)) {
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
3065
if (atomic_read(&svms->drain_pagefaults)) {
drivers/gpu/drm/amd/pm/amdgpu_dpm.c
85
if (atomic_read(&adev->pm.pwr_state[block_type]) == pwr_state &&
drivers/gpu/drm/amd/pm/amdgpu_pm.c
1591
atomic_read(&adev->throttling_logging_enabled) ? "enabled" : "disabled",
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
267
if (atomic_read(&power_gate->vcn_gated[inst]) ^ enable)
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
290
if (atomic_read(&power_gate->jpeg_gated) ^ enable)
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
310
if (atomic_read(&power_gate->vpe_gated) ^ enable)
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
3178
if (!atomic_read(&smu->smu_power.power_gate.vcn_gated[i])) {
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
330
if (atomic_read(&power_gate->isp_gated) ^ enable)
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
353
if (atomic_read(&power_gate->umsch_mm_gated) ^ enable)
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
841
vcn_gate[i] = atomic_read(&power_gate->vcn_gated[i]);
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
844
jpeg_gate = atomic_read(&power_gate->jpeg_gated);
drivers/gpu/drm/amd/pm/swsmu/smu11/smu_v11_0.c
1463
if (!atomic_read(&adev->throttling_logging_enabled))
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0.c
1323
if (!atomic_read(&adev->throttling_logging_enabled))
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0_6_ppt.c
1243
throttler_status = atomic_read(&power_context->throttle_status);
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0_6_ppt.c
1821
if (!atomic_read(&adev->throttling_logging_enabled))
drivers/gpu/drm/amd/ras/ras_mgr/amdgpu_ras_sys.c
211
return !!atomic_read(&amdgpu_ras_in_intr);
drivers/gpu/drm/amd/ras/rascore/ras_process.c
174
umc_event_count = atomic_read(&ras_proc->umc_interrupt_count);
drivers/gpu/drm/amd/ras/rascore/ras_process.c
181
} while (atomic_read(&ras_proc->umc_interrupt_count));
drivers/gpu/drm/amd/ras/rascore/ras_process.c
203
atomic_read(&ras_proc->ras_interrupt_req));
drivers/gpu/drm/arm/hdlcd_drv.c
202
seq_printf(m, "underrun : %d\n", atomic_read(&hdlcd->buffer_underrun_count));
drivers/gpu/drm/arm/hdlcd_drv.c
203
seq_printf(m, "dma_end : %d\n", atomic_read(&hdlcd->dma_end_count));
drivers/gpu/drm/arm/hdlcd_drv.c
204
seq_printf(m, "bus_error: %d\n", atomic_read(&hdlcd->bus_error_count));
drivers/gpu/drm/arm/hdlcd_drv.c
205
seq_printf(m, "vsync : %d\n", atomic_read(&hdlcd->vsync_count));
drivers/gpu/drm/arm/malidp_drv.c
187
atomic_read(&malidp->config_valid) == MALIDP_CONFIG_VALID_DONE,
drivers/gpu/drm/arm/malidp_hw.c
1337
if ((atomic_read(&malidp->config_valid) != MALIDP_CONFIG_START) ||
drivers/gpu/drm/display/drm_dp_aux_dev.c
292
wait_var_event(&aux_dev->usecount, !atomic_read(&aux_dev->usecount));
drivers/gpu/drm/drm_file.c
245
atomic_read(&dev->open_count));
drivers/gpu/drm/drm_file.c
436
drm_dbg_core(dev, "open_count = %d\n", atomic_read(&dev->open_count));
drivers/gpu/drm/drm_pagemap_util.c
354
unsigned long count = atomic_read(&shrinker->num_dpagemaps);
drivers/gpu/drm/drm_pagemap_util.c
401
drm_WARN_ON(shrinker->drm, !!atomic_read(&shrinker->num_dpagemaps));
drivers/gpu/drm/drm_vblank.c
1262
if (drm_WARN_ON(dev, atomic_read(&vblank->refcount) == 0))
drivers/gpu/drm/drm_vblank.c
1510
if (atomic_read(&vblank->refcount) != 0 || !vblank->config.offdelay_ms)
drivers/gpu/drm/drm_vblank.c
1958
!atomic_read(&vblank->refcount));
drivers/gpu/drm/drm_vblank.c
506
if (atomic_read(&vblank->refcount) == 0 && vblank->enabled) {
drivers/gpu/drm/etnaviv/etnaviv_gem.h
76
return atomic_read(&etnaviv_obj->gpu_active) != 0;
drivers/gpu/drm/etnaviv/etnaviv_gpu.c
1990
if (atomic_read(&gpu->sched.credit_count))
drivers/gpu/drm/exynos/exynos7_drm_decon.c
142
!atomic_read(&ctx->wait_vsync_event),
drivers/gpu/drm/exynos/exynos7_drm_decon.c
613
if (atomic_read(&ctx->wait_vsync_event)) {
drivers/gpu/drm/exynos/exynos_drm_fimd.c
1038
if (atomic_read(&ctx->wait_vsync_event)) {
drivers/gpu/drm/exynos/exynos_drm_fimd.c
1102
if (atomic_read(&ctx->wait_vsync_event)) {
drivers/gpu/drm/exynos/exynos_drm_fimd.c
333
!atomic_read(&ctx->wait_vsync_event),
drivers/gpu/drm/exynos/exynos_drm_fimd.c
999
if (atomic_read(&ctx->triggering))
drivers/gpu/drm/gma500/mmu.c
103
if (atomic_read(&driver->needs_tlbflush))
drivers/gpu/drm/gma500/mmu.c
72
if (atomic_read(&driver->needs_tlbflush) || force) {
drivers/gpu/drm/i915/display/intel_display_reset.c
35
if (atomic_read(&display->restore.pending_fb_pin)) {
drivers/gpu/drm/i915/display/intel_dp_hdcp.c
48
#define C (hdcp->cp_irq_count_cached != atomic_read(&hdcp->cp_irq_count))
drivers/gpu/drm/i915/display/intel_dp_hdcp.c
540
hdcp->cp_irq_count_cached = atomic_read(&hdcp->cp_irq_count);
drivers/gpu/drm/i915/display/intel_fb.c
2165
if (!atomic_read(&front->bits))
drivers/gpu/drm/i915/display/intel_frontbuffer.c
207
drm_WARN_ON(front->display->drm, atomic_read(&front->bits));
drivers/gpu/drm/i915/display/intel_frontbuffer.c
247
!(atomic_read(&old->bits) & frontbuffer_bits));
drivers/gpu/drm/i915/display/intel_frontbuffer.c
253
atomic_read(&new->bits) & frontbuffer_bits);
drivers/gpu/drm/i915/display/intel_frontbuffer.h
125
frontbuffer_bits = atomic_read(&front->bits);
drivers/gpu/drm/i915/display/intel_frontbuffer.h
97
frontbuffer_bits = atomic_read(&front->bits);
drivers/gpu/drm/i915/gem/i915_gem_context.c
2291
return atomic_read(&file_priv->ban_score) >= I915_CLIENT_SCORE_BANNED;
drivers/gpu/drm/i915/gem/i915_gem_context.c
2660
args->batch_active = atomic_read(&ctx->guilty_count);
drivers/gpu/drm/i915/gem/i915_gem_context.c
2661
args->batch_pending = atomic_read(&ctx->active_count);
drivers/gpu/drm/i915/gem/i915_gem_object.c
285
GEM_BUG_ON(!atomic_read(&vma->open_count));
drivers/gpu/drm/i915/gem/i915_gem_object.c
308
GEM_BUG_ON(!atomic_read(&i915->mm.free_count));
drivers/gpu/drm/i915/gem/i915_gem_object.c
592
int pin_count = atomic_read(&obj->mm.pages_pin_count);
drivers/gpu/drm/i915/gem/i915_gem_object.c
603
if (atomic_read(&vma->pages_count))
drivers/gpu/drm/i915/gem/i915_gem_object.h
675
return atomic_read(&obj->mm.pages_pin_count);
drivers/gpu/drm/i915/gem/selftests/i915_gem_phys.c
49
if (!atomic_read(&obj->mm.pages_pin_count)) {
drivers/gpu/drm/i915/gt/gen6_ppgtt.c
309
if (!pt || atomic_read(&pt->used))
drivers/gpu/drm/i915/gt/gen6_ppgtt.c
350
if (!atomic_read(&ppgtt->pin_count)) {
drivers/gpu/drm/i915/gt/gen6_ppgtt.c
423
GEM_BUG_ON(!atomic_read(&ppgtt->pin_count));
drivers/gpu/drm/i915/gt/gen6_ppgtt.c
92
GEM_BUG_ON(count > atomic_read(&pt->used));
drivers/gpu/drm/i915/gt/gen8_ppgtt.c
251
idx, len, atomic_read(px_used(pd)));
drivers/gpu/drm/i915/gt/gen8_ppgtt.c
252
GEM_BUG_ON(!len || len >= atomic_read(px_used(pd)));
drivers/gpu/drm/i915/gt/gen8_ppgtt.c
280
atomic_read(&pt->used));
drivers/gpu/drm/i915/gt/gen8_ppgtt.c
281
GEM_BUG_ON(!count || count >= atomic_read(&pt->used));
drivers/gpu/drm/i915/gt/gen8_ppgtt.c
334
idx, len, atomic_read(px_used(pd)));
drivers/gpu/drm/i915/gt/gen8_ppgtt.c
338
GEM_BUG_ON(!atomic_read(px_used(pd))); /* Must be pinned! */
drivers/gpu/drm/i915/gt/gen8_ppgtt.c
372
GEM_BUG_ON(!atomic_read(&pt->used));
drivers/gpu/drm/i915/gt/gen8_ppgtt.c
379
atomic_read(&pt->used));
drivers/gpu/drm/i915/gt/gen8_ppgtt.c
383
GEM_BUG_ON(atomic_read(&pt->used) > NALLOC * I915_PDES);
drivers/gpu/drm/i915/gt/intel_breadcrumbs.c
273
if (READ_ONCE(b->irq_armed) && !atomic_read(&b->active))
drivers/gpu/drm/i915/gt/intel_breadcrumbs.c
490
while (atomic_read(&b->signaler_active))
drivers/gpu/drm/i915/gt/intel_context.h
124
return atomic_read(&ce->pin_count);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2150
str_enabled_disabled(!atomic_read(&engine->sched_engine->tasklet.count)),
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2392
drm_printf(m, "\tAwake? %d\n", atomic_read(&engine->wakeref.count));
drivers/gpu/drm/i915/gt/intel_engine_pm.c
210
GEM_BUG_ON(atomic_read(&ce->timeline->active_count) < 0);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
2968
atomic_read(&engine->sched_engine->tasklet.count));
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3253
atomic_read(&engine->sched_engine->tasklet.count));
drivers/gpu/drm/i915/gt/intel_ggtt.c
1635
atomic_read(&vma->flags) & I915_VMA_BIND_MASK;
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c
305
GEM_BUG_ON(atomic_read(&fence->pin_count));
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c
352
if (atomic_read(&fence->pin_count))
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c
388
GEM_BUG_ON(atomic_read(&fence->pin_count));
drivers/gpu/drm/i915/gt/intel_ggtt_fencing.c
468
count += !atomic_read(&fence->pin_count);
drivers/gpu/drm/i915/gt/intel_gt_pm.c
32
int count = atomic_read(&gt->user_wakeref);
drivers/gpu/drm/i915/gt/intel_gt_pm.c
41
GEM_BUG_ON(count > atomic_read(&gt->wakeref.count));
drivers/gpu/drm/i915/gt/intel_gt_pm_debugfs.c
494
atomic_read(&rps->num_waiters));
drivers/gpu/drm/i915/gt/intel_gt_requests.c
150
GEM_BUG_ON(!atomic_read(&tl->active_count));
drivers/gpu/drm/i915/gt/intel_gt_requests.c
187
GEM_BUG_ON(atomic_read(&tl->active_count));
drivers/gpu/drm/i915/gt/intel_ppgtt.c
105
GEM_BUG_ON(atomic_read(px_used(pd)) > NALLOC * I915_PDES);
drivers/gpu/drm/i915/gt/intel_ppgtt.c
117
GEM_BUG_ON(atomic_read(px_used(pd)) == 0);
drivers/gpu/drm/i915/gt/intel_reset.c
105
ctx->name, atomic_read(&ctx->guilty_count));
drivers/gpu/drm/i915/gt/intel_reset.c
59
atomic_read(&file_priv->ban_score));
drivers/gpu/drm/i915/gt/intel_ring.c
31
GEM_BUG_ON(!atomic_read(&ring->pin_count));
drivers/gpu/drm/i915/gt/intel_rps.c
1831
client_boost = atomic_read(&rps->num_waiters);
drivers/gpu/drm/i915/gt/intel_rps.c
981
boost = atomic_read(&rps->num_waiters);
drivers/gpu/drm/i915/gt/intel_timeline.c
188
GEM_BUG_ON(!atomic_read(&tl->pin_count));
drivers/gpu/drm/i915/gt/intel_timeline.c
228
GEM_BUG_ON(!atomic_read(&tl->pin_count));
drivers/gpu/drm/i915/gt/intel_timeline.c
283
GEM_BUG_ON(!atomic_read(&tl->active_count));
drivers/gpu/drm/i915/gt/intel_timeline.c
302
GEM_BUG_ON(!atomic_read(&tl->pin_count));
drivers/gpu/drm/i915/gt/intel_timeline.c
384
GEM_BUG_ON(!atomic_read(&tl->pin_count));
drivers/gpu/drm/i915/gt/intel_timeline.c
397
GEM_BUG_ON(atomic_read(&timeline->pin_count));
drivers/gpu/drm/i915/gt/intel_timeline.c
435
GEM_BUG_ON(!atomic_read(&tl->active_count));
drivers/gpu/drm/i915/gt/intel_timeline.c
481
GEM_BUG_ON(atomic_read(&tl->active_count));
drivers/gpu/drm/i915/gt/mock_engine.c
34
GEM_BUG_ON(!atomic_read(&tl->pin_count));
drivers/gpu/drm/i915/gt/selftest_context.c
296
atomic_read(&engine->wakeref.count));
drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c
1372
atomic_read(&ct->ctbs.send.space) * 4);
drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c
1378
atomic_read(&ct->ctbs.recv.space) * 4);
drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c
506
GEM_BUG_ON(atomic_read(&ctb->space) < len + GUC_CTB_HDR_LEN);
drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c
580
atomic_read(&ct->ctbs.send.space) * 4);
drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c
584
atomic_read(&ct->ctbs.recv.space) * 4);
drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c
603
return !g2h_len_dw || atomic_read(&ctb->space) >= g2h_len_dw;
drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c
628
if (atomic_read(&ctb->space) >= len_dw)
drivers/gpu/drm/i915/gt/uc/intel_guc_slpc.c
267
if (atomic_read(&slpc->num_waiters)) {
drivers/gpu/drm/i915/gt/uc/intel_guc_slpc.c
821
if (atomic_read(&slpc->num_waiters)) {
drivers/gpu/drm/i915/gt/uc/intel_guc_slpc.c
894
atomic_read(&slpc->num_waiters));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
2077
outstanding = atomic_read(&guc->outstanding_submission_g2h);
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
2335
GEM_BUG_ON(atomic_read(&cn->guc_id.ref));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
2393
GEM_BUG_ON(atomic_read(&ce->guc_id.ref));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
2442
GEM_BUG_ON(atomic_read(&ce->guc_id.ref) < 0);
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
2451
!atomic_read(&ce->guc_id.ref))
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5505
atomic_read(&guc->outstanding_submission_g2h));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5507
atomic_read(&sched_engine->tasklet.count));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5559
atomic_read(&ce->pin_count));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5561
atomic_read(&ce->guc_id.ref));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
654
if (!atomic_read(wait_var))
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
663
if (!atomic_read(wait_var))
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
717
GEM_BUG_ON(!atomic_read(&ce->guc_id.ref));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
861
GEM_BUG_ON(!atomic_read(&ce->guc_id.ref));
drivers/gpu/drm/i915/gt/uc/intel_uc.c
658
GEM_WARN_ON(atomic_read(&guc->outstanding_submission_g2h));
drivers/gpu/drm/i915/gvt/cmd_parser.c
3175
GEM_BUG_ON(atomic_read(&ce->pin_count) < 0);
drivers/gpu/drm/i915/gvt/gtt.c
1892
if (GEM_WARN_ON(atomic_read(&mm->pincount)))
drivers/gpu/drm/i915/gvt/gtt.c
1964
if (atomic_read(&mm->pincount))
drivers/gpu/drm/i915/gvt/gtt.c
867
int v = atomic_read(&spt->refcount);
drivers/gpu/drm/i915/gvt/gtt.c
875
int v = atomic_read(&spt->refcount);
drivers/gpu/drm/i915/gvt/scheduler.c
1094
!atomic_read(&workload->shadow_ctx_active));
drivers/gpu/drm/i915/gvt/scheduler.c
1243
if (atomic_read(&s->running_workload_num)) {
drivers/gpu/drm/i915/gvt/scheduler.c
1247
!atomic_read(&s->running_workload_num));
drivers/gpu/drm/i915/gvt/scheduler.c
79
if (WARN_ON(!atomic_read(&workload->shadow_mm->pincount)))
drivers/gpu/drm/i915/gvt/vgpu.c
189
if (atomic_read(&vgpu->submission.running_workload_num)) {
drivers/gpu/drm/i915/i915_active.c
101
if (!atomic_read(&ref->count)) /* after the last dec */
drivers/gpu/drm/i915/i915_active.c
181
GEM_BUG_ON(!atomic_read(&ref->count));
drivers/gpu/drm/i915/i915_active.c
191
GEM_BUG_ON(!atomic_read(&ref->count));
drivers/gpu/drm/i915/i915_active.c
742
GEM_BUG_ON(atomic_read(&ref->count));
drivers/gpu/drm/i915/i915_active.h
195
GEM_BUG_ON(!atomic_read(&ref->count));
drivers/gpu/drm/i915/i915_active.h
202
return !atomic_read(&ref->count);
drivers/gpu/drm/i915/i915_debugfs.c
279
atomic_read(&i915->mm.free_count),
drivers/gpu/drm/i915/i915_debugfs.c
386
atomic_read(&rps->num_waiters));
drivers/gpu/drm/i915/i915_debugfs.c
419
atomic_read(&dev_priv->drm.dev->power.usage_count));
drivers/gpu/drm/i915/i915_debugfs.c
447
atomic_read(&to_gt(i915)->wakeref.count),
drivers/gpu/drm/i915/i915_driver.c
1659
drm_WARN_ON_ONCE(&dev_priv->drm, atomic_read(&rpm->wakeref_count));
drivers/gpu/drm/i915/i915_gem.c
1113
while (atomic_read(&i915->mm.free_count)) {
drivers/gpu/drm/i915/i915_gem.c
1310
GEM_BUG_ON(atomic_read(&dev_priv->mm.free_count));
drivers/gpu/drm/i915/i915_gpu_error.c
1474
e->guilty = atomic_read(&ctx->guilty_count);
drivers/gpu/drm/i915/i915_gpu_error.c
1475
e->active = atomic_read(&ctx->active_count);
drivers/gpu/drm/i915/i915_gpu_error.c
2066
error->wakelock = atomic_read(&i915->runtime_pm.wakeref_count);
drivers/gpu/drm/i915/i915_gpu_error.h
246
return atomic_read(&error->reset_count);
drivers/gpu/drm/i915/i915_gpu_error.h
252
return atomic_read(&error->reset_engine_count[engine->class]);
drivers/gpu/drm/i915/i915_sw_fence.c
230
pending = atomic_read(&fence->pending);
drivers/gpu/drm/i915/i915_sw_fence.h
103
return atomic_read(&fence->pending) <= 0;
drivers/gpu/drm/i915/i915_sw_fence.h
108
return atomic_read(&fence->pending) < 0;
drivers/gpu/drm/i915/i915_sw_fence_work.h
61
if (atomic_read(&f->chain.pending) <= 1)
drivers/gpu/drm/i915/i915_switcheroo.c
58
atomic_read(&i915->drm.open_count) == 0;
drivers/gpu/drm/i915/i915_tasklet.h
30
return !atomic_read(&t->count);
drivers/gpu/drm/i915/i915_trace.h
411
__entry->pin_count = atomic_read(&ce->pin_count);
drivers/gpu/drm/i915/i915_vma.c
1399
GEM_BUG_ON(atomic_read(&vma->pages_count) < count);
drivers/gpu/drm/i915/i915_vma.c
1427
count = atomic_read(&vma->pages_count);
drivers/gpu/drm/i915/i915_vma.c
1538
bound = atomic_read(&vma->flags);
drivers/gpu/drm/i915/i915_vma.c
1758
GEM_BUG_ON(!atomic_read(&vma->open_count));
drivers/gpu/drm/i915/i915_vma.c
503
vma_flags = atomic_read(&vma->flags);
drivers/gpu/drm/i915/i915_vma.c
512
GEM_BUG_ON(!atomic_read(&vma->pages_count));
drivers/gpu/drm/i915/i915_vma.c
946
bound = atomic_read(&vma->flags);
drivers/gpu/drm/i915/i915_vma.h
300
return atomic_read(&vma->flags) & I915_VMA_PIN_MASK;
drivers/gpu/drm/i915/i915_vma.h
329
return atomic_read(&vma->flags) & where;
drivers/gpu/drm/i915/i915_vma.h
388
GEM_BUG_ON(atomic_read(&vma->fence->pin_count) <= 0);
drivers/gpu/drm/i915/i915_vma_resource.c
244
if (atomic_read(&vma_res->chain.pending) <= 1) {
drivers/gpu/drm/i915/intel_runtime_pm.c
518
int count = atomic_read(&rpm->wakeref_count);
drivers/gpu/drm/i915/intel_runtime_pm.h
132
__assert_rpm_raw_wakeref_held(rpm, atomic_read(&rpm->wakeref_count));
drivers/gpu/drm/i915/intel_runtime_pm.h
138
__assert_rpm_wakelock_held(rpm, atomic_read(&rpm->wakeref_count));
drivers/gpu/drm/i915/intel_wakeref.c
29
if (!atomic_read(&wf->count)) {
drivers/gpu/drm/i915/intel_wakeref.c
45
INTEL_WAKEREF_BUG_ON(atomic_read(&wf->count) <= 0);
drivers/gpu/drm/i915/intel_wakeref.c
59
INTEL_WAKEREF_BUG_ON(atomic_read(&wf->count) <= 0);
drivers/gpu/drm/i915/intel_wakeref.h
112
INTEL_WAKEREF_BUG_ON(atomic_read(&wf->count) <= 0);
drivers/gpu/drm/i915/intel_wakeref.h
157
INTEL_WAKEREF_BUG_ON(atomic_read(&wf->count) <= 0);
drivers/gpu/drm/i915/intel_wakeref.h
252
INTEL_WAKEREF_BUG_ON(atomic_read(&wf->count));
drivers/gpu/drm/i915/selftests/i915_active.c
130
if (atomic_read(&active->base.count) != count) {
drivers/gpu/drm/i915/selftests/i915_active.c
132
atomic_read(&active->base.count), count);
drivers/gpu/drm/i915/selftests/i915_active.c
284
drm_printf(m, "\tcount: %d\n", atomic_read(&ref->count));
drivers/gpu/drm/i915/selftests/i915_gem_gtt.c
1449
GEM_BUG_ON(atomic_read(&vma->pages_count));
drivers/gpu/drm/i915/selftests/i915_request.c
1548
!atomic_read(&i915->selftest.counter),
drivers/gpu/drm/i915/selftests/i915_request.c
410
atomic_read(&wait->pending), count,
drivers/gpu/drm/imagination/pvr_queue.c
1093
if (atomic_read(&job->ctx->faulty))
drivers/gpu/drm/imagination/pvr_queue.c
583
if (!atomic_read(&queue->in_flight_job_count))
drivers/gpu/drm/imagination/pvr_queue.c
778
*queue->timeline_ufo.value = atomic_read(&queue->job_fence_ctx.seqno);
drivers/gpu/drm/imagination/pvr_queue.c
840
WARN_ON(atomic_read(&queue->in_flight_job_count) != job_count);
drivers/gpu/drm/lima/lima_device.c
517
if (atomic_read(&ldev->pipe[i].base.credit_count))
drivers/gpu/drm/msm/adreno/a5xx_gpu.h
171
int preempt_state = atomic_read(&a5xx_gpu->preempt_state);
drivers/gpu/drm/msm/adreno/a6xx_gpu.h
295
int preempt_state = atomic_read(&a6xx_gpu->preempt_state);
drivers/gpu/drm/msm/disp/dpu1/dpu_crtc.c
1027
if (!atomic_read(&dpu_crtc->frame_pending)) {
drivers/gpu/drm/msm/disp/dpu1/dpu_crtc.c
1244
atomic_read(&dpu_crtc->frame_pending));
drivers/gpu/drm/msm/disp/dpu1/dpu_crtc.c
1249
if (atomic_read(&dpu_crtc->frame_pending)) {
drivers/gpu/drm/msm/disp/dpu1/dpu_crtc.c
1251
atomic_read(&dpu_crtc->frame_pending));
drivers/gpu/drm/msm/disp/dpu1/dpu_crtc.c
678
if (atomic_read(&dpu_crtc->frame_pending) < 1) {
drivers/gpu/drm/msm/disp/dpu1/dpu_crtc.h
239
return crtc ? atomic_read(&to_dpu_crtc(crtc)->frame_pending) : -EINVAL;
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
1502
if (atomic_read(&phy_enc->underrun_cnt) == 1)
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
1506
atomic_read(&phy_enc->underrun_cnt));
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
1736
atomic_read(info->atomic_cnt) == 0, jiffies);
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
1743
atomic_read(info->atomic_cnt));
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
1745
} while (atomic_read(info->atomic_cnt) && (rc == 0) &&
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
2508
atomic_read(&phys->vsync_cnt),
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
2509
atomic_read(&phys->underrun_cnt),
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
2510
atomic_read(&dpu_enc->frame_done_timeout_cnt));
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
492
atomic_read(wait_info->atomic_cnt));
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
508
atomic_read(wait_info->atomic_cnt));
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
519
atomic_read(wait_info->atomic_cnt));
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
526
atomic_read(wait_info->atomic_cnt));
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
540
return phys ? atomic_read(&phys->vsync_cnt) : 0;
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder_phys_cmd.c
191
atomic_read(&phys_enc->pending_kickoff_cnt),
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder_phys_cmd.c
201
atomic_read(&phys_enc->pending_kickoff_cnt));
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder_phys_cmd.c
588
atomic_read(&phys_enc->pending_kickoff_cnt));
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder_phys_cmd.c
607
atomic_read(&phys_enc->pending_kickoff_cnt));
drivers/gpu/drm/msm/disp/dpu1/dpu_encoder_phys_vid.c
346
atomic_read(&phys_enc->pending_kickoff_cnt);
drivers/gpu/drm/msm/disp/dpu1/dpu_hw_interrupts.c
741
irq_count = atomic_read(&irq_entry->count);
drivers/gpu/drm/msm/msm_gem_vma.c
1214
atomic_read(&vm->prealloc_throttle.in_flight) <= 1024);
drivers/gpu/drm/msm/msm_kms.c
172
if (atomic_read(&kms->fault_snapshot_capture) == 0) {
drivers/gpu/drm/nouveau/nouveau_chan.c
59
if (unlikely(!atomic_read(&chan->killed)))
drivers/gpu/drm/nouveau/nouveau_chan.c
68
if (likely(chan && chan->fence && !atomic_read(&chan->killed))) {
drivers/gpu/drm/nouveau/nouveau_exec.c
186
if (unlikely(!atomic_read(&chan->killed)))
drivers/gpu/drm/nouveau/nouveau_exec.c
379
if (unlikely(atomic_read(&chan->killed)))
drivers/gpu/drm/nouveau/nouveau_gem.c
776
if (unlikely(atomic_read(&chan->killed)))
drivers/gpu/drm/nouveau/nouveau_vga.c
76
return atomic_read(&drm->dev->open_count) == 0;
drivers/gpu/drm/nouveau/nvkm/core/event.c
183
if (atomic_read(&ntfy->allowed))
drivers/gpu/drm/nouveau/nvkm/core/event.c
68
if (atomic_read(&ntfy->allowed) != ntfy->running) {
drivers/gpu/drm/nouveau/nvkm/core/intr.c
200
if (atomic_read(&inth->allowed)) {
drivers/gpu/drm/nouveau/nvkm/core/uevent.c
89
if (atomic_read(&uevent->allowed))
drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c
221
CHAN_TRACE(chan, "block %d", atomic_read(&chan->blocked));
drivers/gpu/drm/nouveau/nvkm/engine/fifo/chan.c
254
CHAN_TRACE(chan, "allow %d", atomic_read(&chan->blocked));
drivers/gpu/drm/nouveau/nvkm/engine/fifo/runl.c
108
reset = atomic_read(&cgrp->rc) == NVKM_CGRP_RC_RUNNING;
drivers/gpu/drm/nouveau/nvkm/engine/fifo/uchan.c
290
if (atomic_read(&chan->errored))
drivers/gpu/drm/nouveau/nvkm/engine/sec2/base.c
55
if (atomic_read(&sec2->initmsg) == 1) {
drivers/gpu/drm/nouveau/nvkm/engine/sec2/gp102.c
162
if (unlikely(atomic_read(&sec2->initmsg) == 0)) {
drivers/gpu/drm/nouveau/nvkm/engine/sec2/gp102.c
171
if (atomic_read(&sec2->initmsg) > 0) {
drivers/gpu/drm/nouveau/nvkm/engine/sec2/gp102.c
181
if (atomic_read(&sec2->running)) {
drivers/gpu/drm/nouveau/nvkm/engine/sw/nv04.c
53
args->v0.ref = atomic_read(&chan->ref);
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
341
wait_event(clk->wait, !atomic_read(&clk->waiting));
drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c
231
if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR]))
drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c
576
if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR]))
drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c
189
if (!atomic_read(&vmm->engref[i]))
drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmtu102.c
33
if (atomic_read(&vmm->engref[NVKM_SUBDEV_BAR]))
drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c
44
if (atomic_read(&bo->gpu_usecount))
drivers/gpu/drm/panfrost/panfrost_gpu.c
100
if (drm_WARN_ON(&pfdev->base, atomic_read(&pfdev->cycle_counter.use_count) != 0))
drivers/gpu/drm/panfrost/panfrost_job.c
256
if (!atomic_read(&pfdev->reset.pending)) {
drivers/gpu/drm/panfrost/panfrost_job.c
614
} else if (!atomic_read(&pfdev->reset.pending)) {
drivers/gpu/drm/panfrost/panfrost_job.c
665
if (!atomic_read(&pfdev->reset.pending))
drivers/gpu/drm/panfrost/panfrost_job.c
977
if (atomic_read(&js->queue[i].sched.credit_count))
drivers/gpu/drm/panfrost/panfrost_mmu.c
301
if (!atomic_read(&lru_mmu->as_count))
drivers/gpu/drm/panfrost/panfrost_mmu.c
336
WARN_ON(atomic_read(&mmu->as_count) < 0);
drivers/gpu/drm/panthor/panthor_device.c
157
if (atomic_read(&ptdev->pm.state) != PANTHOR_DEVICE_PM_STATE_ACTIVE)
drivers/gpu/drm/panthor/panthor_device.c
424
active = atomic_read(&ptdev->pm.state) == PANTHOR_DEVICE_PM_STATE_ACTIVE;
drivers/gpu/drm/panthor/panthor_device.c
507
if (atomic_read(&ptdev->pm.state) != PANTHOR_DEVICE_PM_STATE_SUSPENDED)
drivers/gpu/drm/panthor/panthor_device.c
531
if (atomic_read(&ptdev->reset.pending)) {
drivers/gpu/drm/panthor/panthor_device.c
585
if (atomic_read(&ptdev->pm.state) != PANTHOR_DEVICE_PM_STATE_ACTIVE)
drivers/gpu/drm/panthor/panthor_device.h
289
atomic_read(&ptdev->pm.state) == PANTHOR_DEVICE_PM_STATE_ACTIVE)
drivers/gpu/drm/panthor/panthor_device.h
300
return atomic_read(&ptdev->reset.pending) != 0;
drivers/gpu/drm/panthor/panthor_device.h
413
if (atomic_read(&pirq->suspended)) \
drivers/gpu/drm/panthor/panthor_device.h
438
if (!atomic_read(&pirq->suspended)) \
drivers/gpu/drm/panthor/panthor_heap.c
631
return atomic_read(&pool->size);
drivers/gpu/drm/panthor/panthor_sched.c
2690
if (atomic_read(&sched->reset.in_progress))
drivers/gpu/drm/panthor/panthor_sched.c
3409
drm_WARN_ON(&ptdev->base, atomic_read(&sched->reset.in_progress));
drivers/gpu/drm/panthor/panthor_sched.c
3748
if (atomic_read(&sched->reset.in_progress)) {
drivers/gpu/drm/panthor/panthor_sched.c
3794
} else if (!atomic_read(&sched->reset.in_progress)) {
drivers/gpu/drm/panthor/panthor_sched.c
747
if (!atomic_read(&(sched)->reset.in_progress) && \
drivers/gpu/drm/panthor/panthor_sched.c
763
if (!atomic_read(&sched->reset.in_progress) && \
drivers/gpu/drm/qxl/qxl_cmd.c
288
irq_num = atomic_read(&qdev->irq_received_io_cmd);
drivers/gpu/drm/qxl/qxl_cmd.c
292
atomic_read(&qdev->irq_received_io_cmd) > irq_num, 5*HZ);
drivers/gpu/drm/qxl/qxl_cmd.c
295
atomic_read(&qdev->irq_received_io_cmd) > irq_num, 5*HZ);
drivers/gpu/drm/qxl/qxl_cmd.c
299
irq_num = atomic_read(&qdev->irq_received_io_cmd);
drivers/gpu/drm/qxl/qxl_cmd.c
305
atomic_read(&qdev->irq_received_io_cmd) > irq_num, 5*HZ);
drivers/gpu/drm/qxl/qxl_cmd.c
308
atomic_read(&qdev->irq_received_io_cmd) > irq_num, 5*HZ);
drivers/gpu/drm/qxl/qxl_debugfs.c
45
seq_printf(m, "%d\n", atomic_read(&qdev->irq_received));
drivers/gpu/drm/qxl/qxl_debugfs.c
46
seq_printf(m, "%d\n", atomic_read(&qdev->irq_received_display));
drivers/gpu/drm/qxl/qxl_debugfs.c
47
seq_printf(m, "%d\n", atomic_read(&qdev->irq_received_cursor));
drivers/gpu/drm/qxl/qxl_debugfs.c
48
seq_printf(m, "%d\n", atomic_read(&qdev->irq_received_io_cmd));
drivers/gpu/drm/qxl/qxl_kms.c
307
atomic_read(&qdev->release_count) == 0,
drivers/gpu/drm/radeon/cik.c
7061
if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
drivers/gpu/drm/radeon/cik.c
7065
if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
drivers/gpu/drm/radeon/cik.c
7108
if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
drivers/gpu/drm/radeon/cik.c
7152
if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
drivers/gpu/drm/radeon/cik.c
7157
if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
drivers/gpu/drm/radeon/cik.c
7163
atomic_read(&rdev->irq.pflip[0])) {
drivers/gpu/drm/radeon/cik.c
7168
atomic_read(&rdev->irq.pflip[1])) {
drivers/gpu/drm/radeon/cik.c
7173
atomic_read(&rdev->irq.pflip[2])) {
drivers/gpu/drm/radeon/cik.c
7178
atomic_read(&rdev->irq.pflip[3])) {
drivers/gpu/drm/radeon/cik.c
7183
atomic_read(&rdev->irq.pflip[4])) {
drivers/gpu/drm/radeon/cik.c
7188
atomic_read(&rdev->irq.pflip[5])) {
drivers/gpu/drm/radeon/cik.c
7592
if (atomic_read(&rdev->irq.pflip[0]))
drivers/gpu/drm/radeon/cik.c
7622
if (atomic_read(&rdev->irq.pflip[1]))
drivers/gpu/drm/radeon/cik.c
7652
if (atomic_read(&rdev->irq.pflip[2]))
drivers/gpu/drm/radeon/cik.c
7682
if (atomic_read(&rdev->irq.pflip[3]))
drivers/gpu/drm/radeon/cik.c
7712
if (atomic_read(&rdev->irq.pflip[4]))
drivers/gpu/drm/radeon/cik.c
7742
if (atomic_read(&rdev->irq.pflip[5]))
drivers/gpu/drm/radeon/evergreen.c
4529
if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
drivers/gpu/drm/radeon/evergreen.c
4533
if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
drivers/gpu/drm/radeon/evergreen.c
4537
if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
drivers/gpu/drm/radeon/evergreen.c
4542
if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
drivers/gpu/drm/radeon/evergreen.c
4549
if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
drivers/gpu/drm/radeon/evergreen.c
4556
if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
drivers/gpu/drm/radeon/evergreen.c
4586
atomic_read(&rdev->irq.pflip[i]), "vblank", i);
drivers/gpu/drm/radeon/evergreen.c
4768
if (atomic_read(&rdev->irq.pflip[crtc_idx])) {
drivers/gpu/drm/radeon/r100.c
732
if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
drivers/gpu/drm/radeon/r100.c
736
atomic_read(&rdev->irq.pflip[0])) {
drivers/gpu/drm/radeon/r100.c
740
atomic_read(&rdev->irq.pflip[1])) {
drivers/gpu/drm/radeon/r100.c
805
if (atomic_read(&rdev->irq.pflip[0]))
drivers/gpu/drm/radeon/r100.c
814
if (atomic_read(&rdev->irq.pflip[1]))
drivers/gpu/drm/radeon/r600.c
3819
if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
drivers/gpu/drm/radeon/r600.c
3825
if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
drivers/gpu/drm/radeon/r600.c
3831
atomic_read(&rdev->irq.pflip[0])) {
drivers/gpu/drm/radeon/r600.c
3836
atomic_read(&rdev->irq.pflip[1])) {
drivers/gpu/drm/radeon/r600.c
4143
if (atomic_read(&rdev->irq.pflip[0]))
drivers/gpu/drm/radeon/r600.c
4173
if (atomic_read(&rdev->irq.pflip[1]))
drivers/gpu/drm/radeon/radeon_device.c
1257
return atomic_read(&dev->open_count) == 0;
drivers/gpu/drm/radeon/radeon_kms.c
603
*value = atomic_read(&rdev->gpu_reset_counter);
drivers/gpu/drm/radeon/radeon_ring.c
260
if (rptr != atomic_read(&ring->last_rptr)) {
drivers/gpu/drm/radeon/radeon_uvd.c
259
uint32_t handle = atomic_read(&rdev->uvd.handles[i]);
drivers/gpu/drm/radeon/radeon_uvd.c
333
uint32_t handle = atomic_read(&rdev->uvd.handles[i]);
drivers/gpu/drm/radeon/radeon_uvd.c
508
if (atomic_read(&p->rdev->uvd.handles[i]) == handle) {
drivers/gpu/drm/radeon/radeon_uvd.c
534
if (atomic_read(&p->rdev->uvd.handles[i]) == handle) {
drivers/gpu/drm/radeon/radeon_uvd.c
849
if (!atomic_read(&rdev->uvd.handles[i]))
drivers/gpu/drm/radeon/radeon_vce.c
205
if (atomic_read(&rdev->vce.handles[i]))
drivers/gpu/drm/radeon/radeon_vce.c
320
uint32_t handle = atomic_read(&rdev->vce.handles[i]);
drivers/gpu/drm/radeon/radeon_vce.c
528
if (atomic_read(&p->rdev->vce.handles[i]) == handle) {
drivers/gpu/drm/radeon/rs600.c
686
if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
drivers/gpu/drm/radeon/rs600.c
690
atomic_read(&rdev->irq.pflip[0])) {
drivers/gpu/drm/radeon/rs600.c
694
atomic_read(&rdev->irq.pflip[1])) {
drivers/gpu/drm/radeon/rs600.c
804
if (atomic_read(&rdev->irq.pflip[0]))
drivers/gpu/drm/radeon/rs600.c
813
if (atomic_read(&rdev->irq.pflip[1]))
drivers/gpu/drm/radeon/si.c
6061
if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
drivers/gpu/drm/radeon/si.c
6065
if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
drivers/gpu/drm/radeon/si.c
6069
if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
drivers/gpu/drm/radeon/si.c
6073
if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
drivers/gpu/drm/radeon/si.c
6078
if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
drivers/gpu/drm/radeon/si.c
6101
atomic_read(&rdev->irq.pflip[i]), "vblank", i);
drivers/gpu/drm/radeon/si.c
6284
if (atomic_read(&rdev->irq.pflip[crtc_idx])) {
drivers/gpu/drm/scheduler/gpu_scheduler_trace.h
73
__entry->hw_job_count = atomic_read(
drivers/gpu/drm/scheduler/sched_entity.c
479
if (entity->guilty && atomic_read(entity->guilty))
drivers/gpu/drm/scheduler/sched_main.c
101
atomic_read(&sched->credit_count),
drivers/gpu/drm/scheduler/sched_main.c
1193
num_score = atomic_read(sched->score);
drivers/gpu/drm/scheduler/sched_main.c
747
if (!found_guilty && atomic_read(&s_job->karma) > sched->hang_limit) {
drivers/gpu/drm/vc4/vc4_hvs.c
236
drm_printf(&p, "%d\n", atomic_read(&vc4->underrun));
drivers/gpu/drm/virtio/virtgpu_ioctl.c
424
atomic_read(&cache_ent->is_valid), 5 * HZ);
drivers/gpu/drm/virtio/virtgpu_vq.c
515
if (!atomic_read(&vgdev->pending_commands))
drivers/gpu/drm/virtio/virtgpu_vq.c
529
if (!atomic_read(&vgdev->pending_commands))
drivers/gpu/drm/vmwgfx/vmwgfx_cmd.c
521
*seqno = atomic_read(&dev_priv->marker_seq);
drivers/gpu/drm/vmwgfx/vmwgfx_drv.c
1518
if (atomic_read(&dev_priv->num_fifo_resources) != 0) {
drivers/gpu/drm/vmwgfx/vmwgfx_irq.c
147
ret = ((atomic_read(&dev_priv->marker_seq) - seqno)
drivers/gpu/drm/vmwgfx/vmwgfx_irq.c
189
signal_seq = atomic_read(&dev_priv->marker_seq);
drivers/gpu/drm/vmwgfx/vmwgfx_msg.c
767
if (pid == (pid_t)atomic_read(&dev_priv->mksstat_kern_pids[slot]))
drivers/gpu/drm/vmwgfx/vmwgfx_msg.c
840
const pid_t pid0 = (pid_t)atomic_read(&dev_priv->mksstat_user_pids[i]);
drivers/gpu/drm/vmwgfx/vmwgfx_msg.c
870
const pid_t pid0 = (pid_t)atomic_read(&dev_priv->mksstat_kern_pids[i]);
drivers/gpu/drm/vmwgfx/vmwgfx_validation.c
506
if (atomic_read(&vbo->cpu_writers))
drivers/gpu/drm/vmwgfx/vmwgfx_vkms.c
500
WARN_ON(atomic_read(&du->vkms.atomic_lock) != VMW_VKMS_LOCK_MODESET);
drivers/gpu/drm/vmwgfx/vmwgfx_vkms.c
563
total_delay, ret, atomic_read(&du->vkms.atomic_lock));
drivers/gpu/drm/xe/display/xe_display.c
527
return atomic_read(&xe->irq.enabled);
drivers/gpu/drm/xe/tests/xe_guc_g2g_test.c
223
while (atomic_read(&xe->g2g_test_count)) {
drivers/gpu/drm/xe/tests/xe_guc_g2g_test.c
230
KUNIT_ASSERT_EQ_MSG(test, 0, atomic_read(&xe->g2g_test_count),
drivers/gpu/drm/xe/xe_device.c
844
if (atomic_read(&xe->wedged.flag))
drivers/gpu/drm/xe/xe_device.h
192
return atomic_read(&xe->wedged.flag);
drivers/gpu/drm/xe/xe_drm_client.c
338
!atomic_read(&xef->exec_queue.pending_removal));
drivers/gpu/drm/xe/xe_exec.c
158
if (atomic_read(&q->job_cnt) >= XE_MAX_JOB_COUNT_PER_EXEC_QUEUE) {
drivers/gpu/drm/xe/xe_exec_queue.c
477
xe_assert(gt_to_xe(q->gt), atomic_read(&q->job_cnt) == 0);
drivers/gpu/drm/xe/xe_guc_pc.c
152
!atomic_read(&pc->flush_freq_limit),
drivers/gpu/drm/xe/xe_guc_pc.c
987
if (!atomic_read(&pc->flush_freq_limit))
drivers/gpu/drm/xe/xe_guc_submit.c
109
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_PENDING_ENABLE;
drivers/gpu/drm/xe/xe_guc_submit.c
1207
return atomic_read(&guc->submission_state.stopped);
drivers/gpu/drm/xe/xe_guc_submit.c
124
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_PENDING_DISABLE;
drivers/gpu/drm/xe/xe_guc_submit.c
139
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_DESTROYED;
drivers/gpu/drm/xe/xe_guc_submit.c
154
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_BANNED;
drivers/gpu/drm/xe/xe_guc_submit.c
164
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_SUSPENDED;
drivers/gpu/drm/xe/xe_guc_submit.c
179
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_RESET;
drivers/gpu/drm/xe/xe_guc_submit.c
189
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_KILLED;
drivers/gpu/drm/xe/xe_guc_submit.c
199
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_WEDGED;
drivers/gpu/drm/xe/xe_guc_submit.c
209
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_PENDING_RESUME;
drivers/gpu/drm/xe/xe_guc_submit.c
224
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_IDLE_SKIP_SUSPEND;
drivers/gpu/drm/xe/xe_guc_submit.c
239
return (atomic_read(&q->guc->state) &
drivers/gpu/drm/xe/xe_guc_submit.c
2857
atomic_read(&q->guc->state), q->guc->id,
drivers/gpu/drm/xe/xe_guc_submit.c
2893
atomic_read(&q->guc->state), q->guc->id);
drivers/gpu/drm/xe/xe_guc_submit.c
2919
atomic_read(&q->guc->state));
drivers/gpu/drm/xe/xe_guc_submit.c
3194
snapshot->schedule_state = atomic_read(&q->guc->state);
drivers/gpu/drm/xe/xe_guc_submit.c
79
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_REGISTERED;
drivers/gpu/drm/xe/xe_guc_submit.c
94
return atomic_read(&q->guc->state) & EXEC_QUEUE_STATE_ENABLED;
drivers/gpu/drm/xe/xe_irq.c
420
if (!atomic_read(&xe->irq.enabled))
drivers/gpu/drm/xe/xe_irq.c
485
if (!atomic_read(&xe->irq.enabled))
drivers/gpu/drm/xe/xe_irq.c
732
if (!atomic_read(&xe->irq.enabled))
drivers/gpu/drm/xe/xe_irq.c
911
if (!atomic_read(&xe->irq.enabled))
drivers/gpu/drm/xe/xe_trace.h
124
__entry->guc_state = atomic_read(&q->guc->state);
drivers/gpu/drm/xe/xe_trace.h
255
atomic_read(&job->q->guc->state);
drivers/gpu/drm/xe/xe_trace.h
91
__entry->guc_state = atomic_read(&q->guc->state);
drivers/gpu/host1x/syncpt.c
275
current_val = (u32)atomic_read(&sp->min_val);
drivers/gpu/host1x/syncpt.c
394
return (u32)atomic_read(&sp->max_val);
drivers/gpu/host1x/syncpt.c
409
return (u32)atomic_read(&sp->min_val);
drivers/gpu/host1x/syncpt.h
97
min = atomic_read(&sp->min_val);
drivers/gpu/host1x/syncpt.h
98
max = atomic_read(&sp->max_val);
drivers/greybus/greybus_trace.h
112
__entry->waiters = atomic_read(&operation->waiters);
drivers/greybus/operation.c
90
if (atomic_read(&operation->waiters))
drivers/hid/hid-cp2112.c
367
atomic_read(avail), msecs_to_jiffies(RESPONSE_TIMEOUT));
drivers/hsi/clients/ssi_protocol.c
290
if (atomic_read(&ssi->tx_usecnt))
drivers/hsi/clients/ssi_protocol.c
344
dev_dbg(&master->device, "start TX %d\n", atomic_read(&ssi->tx_usecnt));
drivers/hsi/clients/ssi_protocol.c
361
WARN_ON_ONCE(atomic_read(&ssi->tx_usecnt) == 0);
drivers/hsi/clients/ssi_protocol.c
372
dev_dbg(&master->device, "stop TX %d\n", atomic_read(&ssi->tx_usecnt));
drivers/hsi/clients/ssi_protocol.c
466
if (atomic_read(&ssi->tx_usecnt) == 0)
drivers/hsi/clients/ssi_protocol.c
861
if (atomic_read(&ssi->tx_usecnt)) {
drivers/hv/channel_mgmt.c
1190
while (atomic_read(&vmbus_connection.offer_in_progress) != 0) {
drivers/hv/hv_balloon.c
1165
if (status.hdr.trans_id != atomic_read(&trans_id))
drivers/hv/vmbus_drv.c
2747
if (atomic_read(&vmbus_connection.nr_chan_close_on_suspend) > 0)
drivers/hwmon/pwm-fan.c
91
pulses = atomic_read(&tach->pulses);
drivers/hwmon/sht15.c
539
if (!atomic_read(&data->interrupt_handled))
drivers/hwmon/sht15.c
831
|| atomic_read(&data->interrupt_handled))
drivers/hwtracing/coresight/coresight-syscfg.c
1070
if (!atomic_read(&cscfg_mgr->sys_active_cnt))
drivers/hwtracing/coresight/coresight-syscfg.c
347
if (atomic_read(&cscfg_mgr->sys_active_cnt)) {
drivers/hwtracing/coresight/coresight-syscfg.c
674
if (atomic_read(&cscfg_mgr->sys_active_cnt)) {
drivers/hwtracing/coresight/coresight-trace-id.c
290
PERF_SESSION(atomic_read(&id_map->perf_cs_etm_session_active));
drivers/hwtracing/coresight/coresight-trace-id.c
298
PERF_SESSION(atomic_read(&id_map->perf_cs_etm_session_active));
drivers/hwtracing/coresight/coresight-trace-id.c
51
return atomic_read(per_cpu_ptr(id_map->cpu_map, cpu));
drivers/hwtracing/intel_th/msu.c
1321
if (atomic_read(&msc->user_count) != -1)
drivers/i3c/master.c
3185
if (atomic_read(&dev->ibi->pending_ibis))
drivers/iio/accel/hid-sensor-accel-3d.c
243
if (atomic_read(&accel_state->common_attributes.data_ready)) {
drivers/iio/adc/stm32-dfsdm-core.c
175
atomic_read(&priv->n_active_ch));
drivers/iio/adc/stm32-dfsdm-core.c
217
atomic_read(&priv->n_active_ch));
drivers/iio/adc/sun4i-gpadc-iio.c
351
if (atomic_read(&info->ignore_temp_data_irq))
drivers/iio/adc/sun4i-gpadc-iio.c
365
if (atomic_read(&info->ignore_fifo_data_irq))
drivers/iio/buffer/industrialio-buffer-dma.c
309
return queue->fileio.enabled || !atomic_read(&queue->num_dmabufs);
drivers/iio/common/hid-sensors/hid-sensor-trigger.c
202
if (atomic_read(&attrb->user_requested_state))
drivers/iio/common/hid-sensors/hid-sensor-trigger.c
224
if (atomic_read(&attrb->runtime_pm_enable))
drivers/iio/common/ssp_sensors/ssp_dev.c
610
if (atomic_read(&data->enable_refcount) > 0)
drivers/iio/common/ssp_sensors/ssp_dev.c
635
if (atomic_read(&data->enable_refcount) > 0)
drivers/iio/gyro/hid-sensor-gyro-3d.c
197
if (atomic_read(&gyro_state->common_attributes.data_ready)) {
drivers/iio/humidity/hid-sensor-humidity.c
135
if (atomic_read(&humid_st->common_attributes.data_ready))
drivers/iio/industrialio-trigger.c
199
if (!atomic_read(&trig->use_count)) {
drivers/iio/industrialio-trigger.c
230
if (!atomic_read(&trig->use_count)) {
drivers/iio/light/hid-sensor-als.c
261
if (atomic_read(&als_state->common_attributes.data_ready)) {
drivers/iio/light/hid-sensor-prox.c
186
if (atomic_read(&prox_state->common_attributes.data_ready)) {
drivers/iio/light/lm3533-als.c
110
*zone = atomic_read(&als->zone);
drivers/iio/magnetometer/hid-sensor-magn-3d.c
290
if (atomic_read(&magn_state->magn_flux_attributes.data_ready)) {
drivers/iio/orientation/hid-sensor-incl-3d.c
199
if (atomic_read(&incl_state->common_attributes.data_ready)) {
drivers/iio/orientation/hid-sensor-rotation.c
158
if (atomic_read(&rot_state->common_attributes.data_ready)) {
drivers/iio/position/hid-sensor-custom-intel-hinge.c
198
if (atomic_read(&st->common_attributes.data_ready)) {
drivers/iio/pressure/hid-sensor-press.c
175
if (atomic_read(&press_state->common_attributes.data_ready)) {
drivers/iio/temperature/hid-sensor-temperature.c
133
if (atomic_read(&temp_st->common_attributes.data_ready))
drivers/infiniband/core/cq.c
322
if (WARN_ON_ONCE(atomic_read(&cq->usecnt)))
drivers/infiniband/core/iwpm_msg.c
169
msg_seq = atomic_read(&echo_nlmsg_seq);
drivers/infiniband/core/iwpm_msg.c
254
msg_seq = atomic_read(&echo_nlmsg_seq);
drivers/infiniband/core/iwpm_msg.c
334
msg_seq = atomic_read(&echo_nlmsg_seq);
drivers/infiniband/core/iwpm_msg.c
86
msg_seq = atomic_read(&echo_nlmsg_seq);
drivers/infiniband/core/nldev.c
642
atomic_read(&cq->usecnt), RDMA_NLDEV_ATTR_PAD))
drivers/infiniband/core/nldev.c
736
atomic_read(&pd->usecnt), RDMA_NLDEV_ATTR_PAD))
drivers/infiniband/core/rdma_core.c
95
WARN_ON(atomic_read(&uobj->usecnt) <= 0);
drivers/infiniband/core/rdma_core.c
98
WARN_ON(atomic_read(&uobj->usecnt) != -1);
drivers/infiniband/core/security.c
210
if (atomic_read(&pp->sec->error_list_count))
drivers/infiniband/core/security.c
476
sec->error_comps_pending = atomic_read(&sec->error_list_count);
drivers/infiniband/core/user_mad.c
194
atomic_read(&file->recv_list_size) > MAX_UMAD_RECV_LIST_SIZE)
drivers/infiniband/core/uverbs_std_types.c
111
if (atomic_read(&uxrcd->refcnt))
drivers/infiniband/core/uverbs_std_types.c
127
if (atomic_read(&pd->usecnt))
drivers/infiniband/core/uverbs_std_types.c
87
if (atomic_read(&rwq_ind_tbl->usecnt))
drivers/infiniband/core/uverbs_std_types_counters.c
100
if (!atomic_read(&counters->usecnt))
drivers/infiniband/core/uverbs_std_types_counters.c
45
if (atomic_read(&counters->usecnt))
drivers/infiniband/core/uverbs_std_types_dm.c
43
if (atomic_read(&dm->usecnt))
drivers/infiniband/core/uverbs_std_types_dmah.c
18
if (atomic_read(&dmah->usecnt))
drivers/infiniband/core/uverbs_std_types_flow_action.c
43
if (atomic_read(&action->usecnt))
drivers/infiniband/core/verbs.c
1138
if (atomic_read(&srq->usecnt))
drivers/infiniband/core/verbs.c
2129
if (atomic_read(&real_qp->usecnt) == 0)
drivers/infiniband/core/verbs.c
2153
if (atomic_read(&qp->usecnt))
drivers/infiniband/core/verbs.c
2245
if (atomic_read(&cq->usecnt))
drivers/infiniband/core/verbs.c
2574
if (atomic_read(&xrcd->usecnt))
drivers/infiniband/core/verbs.c
2636
if (atomic_read(&wq->usecnt))
drivers/infiniband/hw/bnxt_re/debugfs.c
352
seq_printf(m, "CQ Resize Count\t\t: %d\n", atomic_read(&res_s->resize_count));
drivers/infiniband/hw/bnxt_re/qplib_fp.c
167
if (atomic_read(&cq->arm_state) && nq->cqn_handler) {
drivers/infiniband/hw/bnxt_re/qplib_rcfw.c
469
atomic_read(&rcfw->timeout_send));
drivers/infiniband/hw/bnxt_re/qplib_rcfw.c
509
else if (atomic_read(&rcfw->rcfw_intr_enabled))
drivers/infiniband/hw/cxgb4/device.c
132
idx = atomic_read(&dev->rdev.wr_log_idx) &
drivers/infiniband/hw/erdma/erdma_cm.c
690
if (atomic_read(&new_s->sk->sk_rmem_alloc)) {
drivers/infiniband/hw/hfi1/driver.c
1305
if (atomic_read(&ppd->led_override_timer_active)) {
drivers/infiniband/hw/hfi1/file_ops.c
281
if (atomic_read(&pq->n_reqs) == pq->n_max_reqs) {
drivers/infiniband/hw/hfi1/iowait.h
176
wait_event(wait->wait_dma, !atomic_read(&wait->sdma_busy));
drivers/infiniband/hw/hfi1/iowait.h
187
return atomic_read(&wait->sdma_busy);
drivers/infiniband/hw/hfi1/iowait.h
230
!atomic_read(&wait->pio_busy),
drivers/infiniband/hw/hfi1/iowait.h
242
return atomic_read(&wait->pio_busy);
drivers/infiniband/hw/hfi1/ipoib_tx.c
849
atomic_read(&txq->tx_ring.stops),
drivers/infiniband/hw/hfi1/ipoib_tx.c
850
atomic_read(&txq->tx_ring.no_desc),
drivers/infiniband/hw/hfi1/ipoib_tx.c
851
atomic_read(&txq->tx_ring.ring_full));
drivers/infiniband/hw/hfi1/mad.c
4017
is_beaconing_active = !!atomic_read(&ppd->led_override_timer_active);
drivers/infiniband/hw/hfi1/mad.c
836
is_beaconing_active = !!atomic_read(&ppd->led_override_timer_active);
drivers/infiniband/hw/hfi1/pin_system.c
127
if (!hfi1_can_pin_pages(pq->dd, current->mm, atomic_read(&pq->n_locked),
drivers/infiniband/hw/hfi1/pin_system.c
130
atomic_read(&pq->n_locked), npages);
drivers/infiniband/hw/hfi1/qp.c
619
atomic_read(&qp->refcount),
drivers/infiniband/hw/hfi1/rc.c
627
if (atomic_read(&priv->n_tid_requests) >=
drivers/infiniband/hw/hfi1/sdma.c
3310
atomic_read(&dd->sdma_unfreeze_count) <=
drivers/infiniband/hw/hfi1/sdma.c
3313
if (ret || atomic_read(&dd->sdma_unfreeze_count) < 0)
drivers/infiniband/hw/hfi1/sdma.c
3329
atomic_read(&dd->sdma_unfreeze_count) <= 0);
drivers/infiniband/hw/hfi1/tid_rdma.c
5028
if (((atomic_read(&priv->n_tid_requests) < HFI1_TID_RDMA_WRITE_CNT) &&
drivers/infiniband/hw/hfi1/tid_rdma.c
5029
atomic_read(&priv->n_requests) &&
drivers/infiniband/hw/hfi1/trace.c
509
val = atomic_read(count);
drivers/infiniband/hw/hfi1/trace_tid.h
1500
__entry->n_requests = atomic_read(&priv->n_requests);
drivers/infiniband/hw/hfi1/trace_tid.h
1501
__entry->n_tid_requests = atomic_read(&priv->n_tid_requests);
drivers/infiniband/hw/hfi1/trace_tx.h
923
__entry->stops = atomic_read(&txq->tx_ring.stops);
drivers/infiniband/hw/hfi1/trace_tx.h
924
__entry->no_desc = atomic_read(&txq->tx_ring.no_desc);
drivers/infiniband/hw/hfi1/user_sdma.c
229
!atomic_read(&pq->n_reqs));
drivers/infiniband/hw/ionic/ionic_admin.c
107
if (atomic_read(&aq->admin_state) == IONIC_ADMIN_KILLED) {
drivers/infiniband/hw/ionic/ionic_admin.c
111
wr->status = atomic_read(&aq->admin_state);
drivers/infiniband/hw/ionic/ionic_admin.c
118
wr->status = atomic_read(&aq->admin_state);
drivers/infiniband/hw/ionic/ionic_admin.c
165
wr->status = atomic_read(&aq->admin_state);
drivers/infiniband/hw/ionic/ionic_admin.c
192
if (atomic_read(&aq->admin_state) != IONIC_ADMIN_ACTIVE)
drivers/infiniband/hw/ionic/ionic_admin.c
42
if (atomic_read(&aq->admin_state) < IONIC_ADMIN_KILLED)
drivers/infiniband/hw/ionic/ionic_admin.c
499
if (atomic_read(&aq->admin_state) < IONIC_ADMIN_KILLED)
drivers/infiniband/hw/ionic/ionic_admin.c
63
if (atomic_read(&dev->admin_state) == IONIC_ADMIN_KILLED)
drivers/infiniband/hw/ionic/ionic_admin.c
683
if (atomic_read(&aq->admin_state) != IONIC_ADMIN_KILLED) {
drivers/infiniband/hw/ionic/ionic_admin.c
72
if (atomic_read(&aq->admin_state) == IONIC_ADMIN_KILLED)
drivers/infiniband/hw/ionic/ionic_admin.c
743
if (atomic_read(&aq->admin_state) == IONIC_ADMIN_ACTIVE)
drivers/infiniband/hw/irdma/cm.c
2698
if (atomic_read(&cm_node->listener->pend_accepts_cnt) >
drivers/infiniband/hw/irdma/hw.c
391
if (atomic_read(&iwqp->close_timer_started))
drivers/infiniband/hw/irdma/icrdma_if.c
14
!atomic_read(&iwdev->vsi.qp_suspend_reqs),
drivers/infiniband/hw/irdma/verbs.c
1670
wait_event(iwqp->mod_qp_waitq, !atomic_read(&iwqp->hw_mod_qp_pend));
drivers/infiniband/hw/mlx4/mcg.c
1024
atomic_read(&group->refcount),
drivers/infiniband/hw/mlx4/mcg.c
1114
if (atomic_read(&group->refcount))
drivers/infiniband/hw/mlx4/mcg.c
1116
atomic_read(&group->refcount), group);
drivers/infiniband/hw/mlx4/mcg.c
1244
if (atomic_read(&group->refcount)) {
drivers/infiniband/hw/mlx5/counters.c
1134
if (!counters || atomic_read(&counters->usecnt) != 1)
drivers/infiniband/hw/mlx5/fs.c
2580
if (atomic_read(&obj->usecnt))
drivers/infiniband/hw/mlx5/fs.c
2824
if (atomic_read(&obj->usecnt))
drivers/infiniband/hw/mlx5/main.c
2867
return sysfs_emit(buf, "%d\n", atomic_read(&dev->mdev->priv.reg_pages));
drivers/infiniband/hw/mthca/mthca_av.c
170
} else if (!atomic_read(&pd->sqp_count) &&
drivers/infiniband/hw/qedr/verbs.c
3829
used = hw_srq->wr_prod_cnt - (u32)atomic_read(&hw_srq->wr_cons_cnt);
drivers/infiniband/hw/qedr/verbs.c
3857
atomic_read(&hw_srq->wr_cons_cnt),
drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h
103
const __u32 tail = atomic_read(&r->prod_tail);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h
104
const __u32 head = atomic_read(&r->cons_head);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h
71
const unsigned int idx = atomic_read(var);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h
80
__u32 idx = atomic_read(var) + 1; /* Increment. */
drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h
89
const __u32 tail = atomic_read(&r->prod_tail);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_ring.h
90
const __u32 head = atomic_read(&r->cons_head);
drivers/infiniband/sw/rdmavt/mcast.c
371
wait_event(mcast->wait, atomic_read(&mcast->refcount) <= 1);
drivers/infiniband/sw/rdmavt/mcast.c
376
wait_event(mcast->wait, !atomic_read(&mcast->refcount));
drivers/infiniband/sw/rdmavt/mr.c
765
if (unlikely(atomic_read(&mr->lkey_invalid) ||
drivers/infiniband/sw/rdmavt/mr.c
873
if (unlikely(atomic_read(&mr->lkey_invalid) ||
drivers/infiniband/sw/rdmavt/qp.c
1680
wait_event(qp->wait, !atomic_read(&qp->refcount));
drivers/infiniband/sw/rdmavt/qp.c
1918
reserved_used = atomic_read(&qp->s_reserved_used);
drivers/infiniband/sw/rdmavt/qp.c
1933
reserved_used = atomic_read(&qp->s_reserved_used);
drivers/infiniband/sw/rdmavt/qp.c
2008
atomic_read(&qp->local_ops_pending)) {
drivers/infiniband/sw/rdmavt/qp.c
260
return atomic_read(&wss->total_count) >= wss->threshold;
drivers/infiniband/sw/rxe/rxe_mcast.c
430
if (atomic_read(&mcg->qp_num) <= 0)
drivers/infiniband/sw/rxe/rxe_mcast.c
466
if (atomic_read(&mcg->qp_num) == 0)
drivers/infiniband/sw/rxe/rxe_mr.c
745
if (atomic_read(&mr->num_mw) > 0) {
drivers/infiniband/sw/rxe/rxe_qp.c
844
if (atomic_read(&qp->mcg_num)) {
drivers/infiniband/sw/rxe/rxe_req.c
223
atomic_read(&qp->req.rd_atomic) != qp->attr.max_rd_atomic;
drivers/infiniband/sw/rxe/rxe_req.c
725
if (unlikely(atomic_read(&qp->skb_out) >
drivers/infiniband/sw/rxe/rxe_verbs.c
1223
if (atomic_read(&cq->num_wq)) {
drivers/infiniband/sw/rxe/rxe_verbs.c
1414
if (atomic_read(&mr->num_mw) > 0) {
drivers/infiniband/sw/siw/siw_cm.c
1025
if (atomic_read(&new_s->sk->sk_rmem_alloc)) {
drivers/infiniband/sw/siw/siw_main.c
189
usage = atomic_read(&per_cpu(siw_use_cnt, cpu));
drivers/infiniband/sw/siw/siw_verbs.c
110
atomic_read(&sdev->num_ctx));
drivers/infiniband/sw/siw/siw_verbs.c
117
atomic_read(&sdev->num_ctx));
drivers/infiniband/sw/siw/siw_verbs.c
248
siw_dbg_pd(pd, "now %d PD's(s)\n", atomic_read(&sdev->num_pd));
drivers/infiniband/ulp/ipoib/ipoib_main.c
1690
wait_flushed = atomic_read(&priv->ntbl.entries);
drivers/infiniband/ulp/rtrs/rtrs-clt-stats.c
102
atomic_read(&stats->inflight), sum.failover_cnt);
drivers/infiniband/ulp/rtrs/rtrs-clt-stats.c
49
atomic_read(&s->cpu_migr.from));
drivers/infiniband/ulp/rtrs/rtrs-clt.c
2225
!atomic_read(&clt_path->connected_cnt),
drivers/infiniband/ulp/rtrs/rtrs-clt.c
835
inflight = atomic_read(&clt_path->stats->inflight);
drivers/infiniband/ulp/rtrs/rtrs-srv-trace.h
62
__entry->wr_cnt = atomic_read(&con->c.wr_cnt);
drivers/infiniband/ulp/srpt/ib_srpt.c
2841
atomic_read(&ch->sq_wr_avail));
drivers/infiniband/ulp/srpt/ib_srpt.c
3026
while (atomic_read(&sport->refcount) > 0 &&
drivers/infiniband/ulp/srpt/ib_srpt.c
3030
atomic_read(&sport->refcount));
drivers/input/mouse/cyapa_gen5.c
2611
if (atomic_read(&pip->cmd_issued)) {
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2271
if (!atomic_read(&smmu_domain->nr_ats_masters))
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
519
if (atomic_read(&cmdq->lock) == 1)
drivers/iommu/dma-iommu.c
243
if (!atomic_read(&cookie->fq_timer_on) &&
drivers/iommu/iommu-debug-pagealloc.c
83
ref = atomic_read(&d->ref);
drivers/iommu/iommufd/selftest.c
270
} else if (atomic_read(&mdev->pasid_1024_fake_error)) {
drivers/iommu/riscv/iommu.c
314
unsigned int head = atomic_read(&queue->head);
drivers/iommu/riscv/iommu.c
315
unsigned int tail = atomic_read(&queue->tail);
drivers/iommu/riscv/iommu.c
353
const unsigned int cons = atomic_read(&queue->head);
drivers/iommu/riscv/iommu.c
370
unsigned int cons = atomic_read(&queue->head);
drivers/iommu/riscv/iommu.c
400
head = atomic_read(&queue->head);
drivers/iommu/riscv/iommu.c
404
if (readx_poll_timeout(atomic_read, &queue->head,
drivers/iommu/riscv/iommu.c
422
if (readx_poll_timeout(atomic_read, &queue->tail, tail, prod == tail,
drivers/irqchip/irq-gic-v3-its.c
1653
return atomic_read(&per_cpu_ptr(&cpu_lpi_count, cpu)->managed);
drivers/irqchip/irq-gic-v3-its.c
1655
return atomic_read(&per_cpu_ptr(&cpu_lpi_count, cpu)->unmanaged);
drivers/irqchip/irq-gic-v3-its.c
3923
if (!atomic_read(&vpe->vmapp_count)) {
drivers/irqchip/irq-gic-v3-its.c
4737
if (find_4_1_its() && !atomic_read(&vpe->vmapp_count))
drivers/irqchip/irq-renesas-intc-irqpin.c
577
if (atomic_read(&p->wakeup_path))
drivers/irqchip/irq-renesas-irqc.c
234
if (atomic_read(&p->wakeup_path))
drivers/leds/trigger/ledtrig-cpu.c
83
active_cpus = atomic_read(&num_active_cpus);
drivers/leds/trigger/ledtrig-netdev.c
186
unsigned int interval = atomic_read(&trigger_data->interval);
drivers/leds/trigger/ledtrig-netdev.c
449
jiffies_to_msecs(atomic_read(&trigger_data->interval)));
drivers/leds/trigger/ledtrig-netdev.c
674
atomic_read(&trigger_data->interval));
drivers/leds/trigger/ledtrig-netdev.c
684
(atomic_read(&trigger_data->interval)*2));
drivers/macintosh/adb.c
629
if (atomic_read(&state->n_pending) == 0) {
drivers/macintosh/adb.c
703
if (atomic_read(&state->n_pending) == 0
drivers/macintosh/adb.c
739
else if (atomic_read(&state->n_pending) == 0)
drivers/mailbox/bcm-flexrm-mailbox.c
969
(u32)atomic_read(&ring->msg_send_count),
drivers/mailbox/bcm-flexrm-mailbox.c
970
(u32)atomic_read(&ring->msg_cmpl_count));
drivers/mailbox/mtk-gpueb-mailbox.c
133
if (atomic_read(&ch->rx_status))
drivers/md/bcache/alloc.c
107
!atomic_read(&b->pin)) {
drivers/md/bcache/alloc.c
130
!atomic_read(&b->pin) && can_inc_bucket_gen(b));
drivers/md/bcache/alloc.c
347
allocator_wait(ca, !atomic_read(&ca->set->prio_blocked));
drivers/md/bcache/alloc.c
435
BUG_ON(atomic_read(&b->pin) != 1);
drivers/md/bcache/alloc.c
93
r = atomic_read(&c->rescale);
drivers/md/bcache/btree.c
1584
if (atomic_read(&c->search_inflight) == 0) {
drivers/md/bcache/btree.c
1598
if (atomic_read(&c->bucket_wait_cnt) > 0)
drivers/md/bcache/btree.c
1748
if (!atomic_read(&b->pin)) {
drivers/md/bcache/btree.c
1814
if (atomic_read(&b->pin))
drivers/md/bcache/btree.c
1877
if (atomic_read(&c->sectors_to_gc) < 0)
drivers/md/bcache/btree.c
2088
if (atomic_read(&check_state.enough))
drivers/md/bcache/btree.c
2111
wait_event(check_state.wait, atomic_read(&check_state.started) == 0);
drivers/md/bcache/extents.c
200
buf, PTR_BUCKET_NR(b->c, k, i), atomic_read(&g->pin),
drivers/md/bcache/extents.c
532
buf, PTR_BUCKET_NR(b->c, k, ptr), atomic_read(&g->pin),
drivers/md/bcache/io.c
109
errors = atomic_read(&ca->io_errors);
drivers/md/bcache/io.c
150
int congested = atomic_read(&c->congested);
drivers/md/bcache/journal.c
349
BUG_ON(i->pin && atomic_read(i->pin) != 1);
drivers/md/bcache/journal.c
422
ref_nr = atomic_read(fifo_front_p);
drivers/md/bcache/journal.c
459
ref_nr = atomic_read(fifo_front_p);
drivers/md/bcache/journal.c
584
while (!atomic_read(&fifo_front(&c->journal.pin)))
drivers/md/bcache/movinggc.c
218
atomic_read(&b->pin))
drivers/md/bcache/request.c
1187
if (atomic_read(&d->c->idle_counter))
drivers/md/bcache/request.c
1195
if (unlikely(atomic_read(&d->c->at_max_writeback_rate) == 1)) {
drivers/md/bcache/request.c
336
i += atomic_read(&c->congested);
drivers/md/bcache/request.c
623
if (dc && atomic_read(&dc->has_dirty))
drivers/md/bcache/stats.c
177
if (!atomic_read(&acc->closing))
drivers/md/bcache/super.c
1367
if (atomic_read(&dc->running)) {
drivers/md/bcache/super.c
1792
} else if (atomic_read(&dc->has_dirty)) {
drivers/md/bcache/sysfs.c
1046
atomic_read(&ca->io_errors) >> IO_ERROR_SHIFT);
drivers/md/bcache/sysfs.c
205
sysfs_printf(io_errors, "%i", atomic_read(&dc->io_errors));
drivers/md/bcache/sysfs.c
262
sysfs_print(running, atomic_read(&dc->running));
drivers/md/bcache/writeback.c
1024
if (atomic_read(&state.enough))
drivers/md/bcache/writeback.c
1043
wait_event(state.wait, atomic_read(&state.started) == 0);
drivers/md/bcache/writeback.c
175
dev_nr = atomic_read(&c->attached_dev_nr);
drivers/md/bcache/writeback.c
221
if (atomic_read(&c->at_max_writeback_rate) != 1)
drivers/md/bcache/writeback.c
236
!atomic_read(&c->at_max_writeback_rate))
drivers/md/bcache/writeback.c
275
if (atomic_read(&dc->has_dirty) && dc->writeback_percent &&
drivers/md/bcache/writeback.c
411
if (atomic_read(&dc->writeback_sequence_next) != io->sequence) {
drivers/md/bcache/writeback.c
415
if (atomic_read(&dc->writeback_sequence_next) == io->sequence) {
drivers/md/bcache/writeback.c
759
(!atomic_read(&dc->has_dirty) || !dc->writeback_running)) {
drivers/md/bcache/writeback.h
136
if (!atomic_read(&dc->has_dirty) &&
drivers/md/bcache/writeback.h
54
ret += atomic_read(d->stripe_sectors_dirty + i);
drivers/md/bcache/writeback.h
88
if (atomic_read(dc->disk.stripe_sectors_dirty + stripe))
drivers/md/dm-bio-record.h
38
bd->__bi_remaining = atomic_read(&bio->__bi_remaining);
drivers/md/dm-bufio.c
1494
BUG_ON(atomic_read(&b->hold_count));
drivers/md/dm-bufio.c
1758
BUG_ON(!atomic_read(&b->hold_count));
drivers/md/dm-bufio.c
1918
if (b && (atomic_read(&b->hold_count) == 1))
drivers/md/dm-bufio.c
2361
(unsigned long long)b->block, atomic_read(&b->hold_count), b->list_mode);
drivers/md/dm-bufio.c
265
if (atomic_read(&le->referenced)) {
drivers/md/dm-bufio.c
663
if (atomic_read(&b->hold_count))
drivers/md/dm-bufio.c
877
if (atomic_read(&b->hold_count) != 1) {
drivers/md/dm-cache-background-tracker.c
148
return atomic_read(&b->pending_demotes);
drivers/md/dm-cache-background-tracker.c
154
return atomic_read(&b->pending_promotes) +
drivers/md/dm-cache-background-tracker.c
155
atomic_read(&b->pending_writebacks) +
drivers/md/dm-cache-background-tracker.c
156
atomic_read(&b->pending_demotes) >= b->max_work;
drivers/md/dm-cache-target.c
1595
sector_t current_volume = (atomic_read(&cache->nr_io_migrations) + 1) *
drivers/md/dm-cache-target.c
2791
BUG_ON(atomic_read(&cache->nr_io_migrations));
drivers/md/dm-cache-target.c
3185
(unsigned int) atomic_read(&cache->stats.read_hit),
drivers/md/dm-cache-target.c
3186
(unsigned int) atomic_read(&cache->stats.read_miss),
drivers/md/dm-cache-target.c
3187
(unsigned int) atomic_read(&cache->stats.write_hit),
drivers/md/dm-cache-target.c
3188
(unsigned int) atomic_read(&cache->stats.write_miss),
drivers/md/dm-cache-target.c
3189
(unsigned int) atomic_read(&cache->stats.demotion),
drivers/md/dm-cache-target.c
3190
(unsigned int) atomic_read(&cache->stats.promotion),
drivers/md/dm-cache-target.c
3191
(unsigned long) atomic_read(&cache->nr_dirty));
drivers/md/dm-cache-target.c
966
stats.read_hits = atomic_read(&cache->stats.read_hit);
drivers/md/dm-cache-target.c
967
stats.read_misses = atomic_read(&cache->stats.read_miss);
drivers/md/dm-cache-target.c
968
stats.write_hits = atomic_read(&cache->stats.write_hit);
drivers/md/dm-cache-target.c
969
stats.write_misses = atomic_read(&cache->stats.write_miss);
drivers/md/dm-clone-target.c
1094
!atomic_read(&clone->ios_in_flight) &&
drivers/md/dm-clone-target.c
1097
current_volume = atomic_read(&clone->hydrations_in_flight);
drivers/md/dm-clone-target.c
1468
atomic_read(&clone->hydrations_in_flight));
drivers/md/dm-clone-target.c
1987
wait_event(clone->hydration_stopped, !atomic_read(&clone->hydrations_in_flight));
drivers/md/dm-clone-target.c
789
!atomic_read(&clone->ios_in_flight))
drivers/md/dm-era-target.c
1241
if (!atomic_read(&era->suspended))
drivers/md/dm-integrity.c
885
BUG_ON(!atomic_read(&comp->in_flight));
drivers/md/dm-ioctl.c
1401
priv->global_event_nr = atomic_read(&dm_global_event_nr);
drivers/md/dm-ioctl.c
2143
priv->global_event_nr = atomic_read(&dm_global_event_nr);
drivers/md/dm-ioctl.c
2161
if ((int)(atomic_read(&dm_global_event_nr) - priv->global_event_nr) > 0)
drivers/md/dm-kcopyd.c
980
wait_event(kc->destroyq, !atomic_read(&kc->nr_jobs));
drivers/md/dm-log-userspace-base.c
334
if (atomic_read(&lc->sched_flush))
drivers/md/dm-log-userspace-base.c
371
if (lc->integrated_flush && atomic_read(&lc->sched_flush))
drivers/md/dm-log-userspace-base.c
605
if (mark_list_is_empty && !atomic_read(&lc->sched_flush)) {
drivers/md/dm-log-writes.c
626
wait_event(lc->wait, !atomic_read(&lc->io_blocks) &&
drivers/md/dm-log-writes.c
627
!atomic_read(&lc->pending_blocks));
drivers/md/dm-mpath.c
1237
unsigned int nr_valid_paths = atomic_read(&m->nr_valid_paths);
drivers/md/dm-mpath.c
1287
if (!atomic_read(&m->pg_init_in_progress))
drivers/md/dm-mpath.c
1298
if (!atomic_read(&m->pg_init_in_progress))
drivers/md/dm-mpath.c
1302
if (atomic_read(&m->pg_init_in_progress) &&
drivers/md/dm-mpath.c
1356
pgpath->path.dev->name, atomic_read(&m->nr_valid_paths));
drivers/md/dm-mpath.c
1531
if (atomic_read(&m->pg_init_count) <= m->pg_init_retries &&
drivers/md/dm-mpath.c
1676
if (!atomic_read(&m->nr_valid_paths) &&
drivers/md/dm-mpath.c
1711
if (!atomic_read(&m->nr_valid_paths)) {
drivers/md/dm-mpath.c
1822
atomic_read(&m->pg_init_count));
drivers/md/dm-mpath.c
2128
if (r == 0 && !atomic_read(&m->nr_valid_paths))
drivers/md/dm-mpath.c
2240
if (atomic_read(&m->pg_init_in_progress))
drivers/md/dm-mpath.c
2244
if (!atomic_read(&m->nr_valid_paths)) {
drivers/md/dm-mpath.c
323
if (atomic_read(&m->pg_init_in_progress) || test_bit(MPATHF_PG_INIT_DISABLED, &m->flags))
drivers/md/dm-mpath.c
344
return atomic_read(&m->pg_init_in_progress);
drivers/md/dm-mpath.c
409
if (!atomic_read(&m->nr_valid_paths)) {
drivers/md/dm-mpath.c
806
atomic_read(&m->nr_valid_paths) == 0 &&
drivers/md/dm-pcache/backing_dev.c
75
atomic_read(&backing_dev->inflight_reqs) == 0);
drivers/md/dm-pcache/cache_gc.c
113
if (pcache_is_stopping(pcache) || atomic_read(&cache->gc_errors))
drivers/md/dm-pcache/cache_writeback.c
226
if (atomic_read(&cache->writeback_ctx.pending))
drivers/md/dm-pcache/dm_pcache.c
355
atomic_read(&pcache->inflight_reqs) == 0);
drivers/md/dm-pcache/dm_pcache.h
39
return (atomic_read(&pcache->state) == PCACHE_STATE_STOPPING);
drivers/md/dm-ps-io-affinity.c
166
DMEMIT("%d ", atomic_read(&s->map_misses));
drivers/md/dm-ps-queue-length.c
101
DMEMIT("%d ", atomic_read(&pi->qlen));
drivers/md/dm-ps-queue-length.c
204
(atomic_read(&pi->qlen) < atomic_read(&best->qlen)))
drivers/md/dm-ps-queue-length.c
207
if (!atomic_read(&best->qlen))
drivers/md/dm-ps-service-time.c
220
sz1 = atomic_read(&pi1->in_flight_size);
drivers/md/dm-ps-service-time.c
221
sz2 = atomic_read(&pi2->in_flight_size);
drivers/md/dm-ps-service-time.c
96
DMEMIT("%d %u ", atomic_read(&pi->in_flight_size),
drivers/md/dm-raid1.c
1395
if (!atomic_read(&(m->error_count)))
drivers/md/dm-raid1.c
179
return &ms->mirror[atomic_read(&ms->default_mirror)];
drivers/md/dm-raid1.c
195
if (!atomic_read(&m->error_count))
drivers/md/dm-raid1.c
427
if (likely(!atomic_read(&m->error_count)))
drivers/md/dm-raid1.c
441
return !atomic_read(&default_mirror->error_count);
drivers/md/dm-raid1.c
487
if (atomic_read(&ms->suspend)) {
drivers/md/dm-raid1.c
581
else if (m && atomic_read(&m->error_count))
drivers/md/dm-region-hash.c
249
BUG_ON(atomic_read(&reg->pending));
drivers/md/dm-region-hash.c
615
if (atomic_read(&reg->pending))
drivers/md/dm-region-hash.c
683
return atomic_read(&rh->recovery_in_flight);
drivers/md/dm-snap.c
1485
while (atomic_read(&s->pending_exceptions_count))
drivers/md/dm-stats.c
190
return atomic_read(&shared->in_flight[READ]) +
drivers/md/dm-stats.c
191
atomic_read(&shared->in_flight[WRITE]);
drivers/md/dm-stats.c
233
atomic_read(&shared->in_flight[READ]),
drivers/md/dm-stats.c
234
atomic_read(&shared->in_flight[WRITE]));
drivers/md/dm-stats.c
549
in_flight_read = (unsigned int)atomic_read(&shared->in_flight[READ]);
drivers/md/dm-stats.c
550
in_flight_write = (unsigned int)atomic_read(&shared->in_flight[WRITE]);
drivers/md/dm-stripe.c
376
DMEMIT("%c", atomic_read(&(sc->stripe[i].error_count)) ? 'D' : 'A');
drivers/md/dm-stripe.c
397
atomic_read(&(sc->stripe[i].error_count)) ? 'D' : 'A');
drivers/md/dm-stripe.c
431
if (atomic_read(&(sc->stripe[i].error_count)) <
drivers/md/dm-vdo/data-vio.c
308
u32 packed = atomic_read(&data_vio->compression.status);
drivers/md/dm-vdo/data-vio.c
906
BUG_ON(atomic_read(&pool->processing));
drivers/md/dm-vdo/dedupe.c
2246
atomic_read(&context->state));
drivers/md/dm-vdo/dedupe.c
2263
if ((atomic_read(&zone->timer_state) == DEDUPE_QUERY_TIMER_IDLE) ||
drivers/md/dm-vdo/funnel-workqueue.c
146
if ((atomic_read(&queue->idle) != 1) || (atomic_cmpxchg(&queue->idle, 1, 0) != 1))
drivers/md/dm-vdo/funnel-workqueue.c
491
thread_status = atomic_read(&queue->idle) ? "idle" : "running";
drivers/md/dm-vdo/indexer/funnel-requestqueue.c
136
bool dormant = atomic_read(&queue->dormant);
drivers/md/dm-vdo/indexer/funnel-requestqueue.c
253
if (atomic_read(&queue->dormant) || unbatched)
drivers/md/dm-vdo/recovery-journal.c
118
u32 decrements = atomic_read(get_decrement_counter(journal, lock_number));
drivers/md/dm-vdo/recovery-journal.c
1496
locked = (atomic_read(zone_count) != 0);
drivers/md/dm-vdo/recovery-journal.c
994
VDO_ASSERT_LOG_ONLY((*journal_value == atomic_read(decrement_counter)),
drivers/md/dm-vdo/vdo.c
855
enum vdo_state state = atomic_read(&vdo->state);
drivers/md/dm-writecache.c
483
BUG_ON(atomic_read(&endio->count) <= 0);
drivers/md/dm-writecache.c
491
!atomic_read(&wc->bio_in_progress[direction]));
drivers/md/dm-zoned-metadata.c
1810
} else if (atomic_read(&zmd->nr_reserved_seq_zones) < zmd->nr_reserved_seq) {
drivers/md/dm-zoned-metadata.c
275
return atomic_read(&zmd->dev[idx].unmap_nr_rnd);
drivers/md/dm-zoned-metadata.c
285
return atomic_read(&zmd->unmap_nr_cache);
drivers/md/dm-zoned-metadata.c
295
return atomic_read(&zmd->dev[idx].unmap_nr_seq);
drivers/md/dm-zoned-metadata.c
2963
zmd->nr_cache, atomic_read(&zmd->unmap_nr_cache));
drivers/md/dm-zoned-metadata.c
407
if (zmd->max_nr_mblks && atomic_read(&zmd->nr_mblks) > zmd->max_nr_mblks) {
drivers/md/dm-zoned-metadata.c
594
atomic_read(&zmd->nr_mblks) > zmd->min_nr_mblks &&
drivers/md/dm-zoned-metadata.c
615
return atomic_read(&zmd->nr_mblks);
drivers/md/dm-zoned.h
298
return atomic_read(&zone->refcount);
drivers/md/dm.c
2101
if (atomic_read(&io->io_count) > 1)
drivers/md/dm.c
2105
return atomic_read(&io->io_count) == 1;
drivers/md/dm.c
2741
while (atomic_read(&md->holders))
drivers/md/dm.c
2743
else if (atomic_read(&md->holders))
drivers/md/dm.c
2745
dm_device_name(md), atomic_read(&md->holders));
drivers/md/dm.c
3308
return atomic_read(&md->event_nr);
drivers/md/dm.c
3314
(event_nr != atomic_read(&md->event_nr)));
drivers/md/dm.c
359
return atomic_read(&md->open_count);
drivers/md/md-bitmap.c
1897
atomic_read(&bitmap->mddev->recovery_active) == 0);
drivers/md/md-bitmap.c
2022
atomic_read(&bitmap->pending_writes) == 0);
drivers/md/md-bitmap.c
2046
bw = atomic_read(&bitmap->behind_writes);
drivers/md/md-bitmap.c
2061
atomic_read(&bitmap->behind_writes),
drivers/md/md-bitmap.c
2070
if (bitmap && atomic_read(&bitmap->behind_writes) > 0) {
drivers/md/md-bitmap.c
2075
atomic_read(&bitmap->behind_writes) == 0);
drivers/md/md-bitmap.c
2359
stats->behind_writes = atomic_read(&bitmap->behind_writes);
drivers/md/md-bitmap.c
518
atomic_read(&bitmap->pending_writes) == 0);
drivers/md/md-bitmap.c
606
atomic_read(&bitmap->pending_writes)==0);
drivers/md/md-bitmap.c
665
atomic_read(&bitmap->pending_writes)==0);
drivers/md/md-llbitmap.c
1463
wait_event(mddev->recovery_wait, !atomic_read(&mddev->recovery_active));
drivers/md/md-llbitmap.c
1541
stats->behind_writes = atomic_read(&llbitmap->behind_writes);
drivers/md/md-llbitmap.c
1585
atomic_read(&llbitmap->behind_writes) == 0);
drivers/md/md.c
1172
wait_event(mddev->sb_wait, atomic_read(&mddev->pending_writes)==0);
drivers/md/md.c
2217
sb->cnt_corrected_read = cpu_to_le32(atomic_read(&rdev->corrected_errors));
drivers/md/md.c
3251
return sprintf(page, "%d\n", atomic_read(&rdev->corrected_errors));
drivers/md/md.c
4780
atomic_read(&mddev->max_corr_read_errors));
drivers/md/md.c
5169
int sync_seq = atomic_read(&mddev->sync_seq);
drivers/md/md.c
5191
sync_seq != atomic_read(&mddev->sync_seq)));
drivers/md/md.c
5501
resync = mddev->curr_mark_cnt - atomic_read(&mddev->recovery_active);
drivers/md/md.c
563
if (mddev->pers && atomic_read(&mddev->openers) > opener_num) {
drivers/md/md.c
7793
} else if (atomic_read(&inode->i_writecount) != 1) {
drivers/md/md.c
8792
res = atomic_read(&mddev->recovery_active);
drivers/md/md.c
8890
recovery_active = atomic_read(&mddev->recovery_active);
drivers/md/md.c
8911
seq->poll_event = atomic_read(&md_event_count);
drivers/md/md.c
9082
seq->poll_event = atomic_read(&md_event_count);
drivers/md/md.c
9099
if (seq->poll_event != atomic_read(&md_event_count))
drivers/md/md.c
9562
return atomic_read(&mddev->recovery_active) <
drivers/md/md.c
9793
atomic_read(&mddev->recovery_active) == 0);
drivers/md/md.c
9866
mark_cnt[next] = io_sectors - atomic_read(&mddev->recovery_active);
drivers/md/md.c
9883
recovery_done = io_sectors - atomic_read(&mddev->recovery_active);
drivers/md/md.c
9899
!atomic_read(&mddev->recovery_active));
drivers/md/md.c
9910
wait_event(mddev->recovery_wait, !atomic_read(&mddev->recovery_active));
drivers/md/md.c
9953
if (atomic_read(&rdev->nr_pending))
drivers/md/raid1-10.c
178
unsigned int read_errors = atomic_read(&rdev->read_errors);
drivers/md/raid1-10.c
206
int max_read_errors = atomic_read(&mddev->max_corr_read_errors);
drivers/md/raid1.c
1000
!atomic_read(&conf->nr_waiting[idx]),
drivers/md/raid1.c
1024
!atomic_read(&conf->nr_pending[idx]) &&
drivers/md/raid1.c
1025
atomic_read(&conf->barrier[idx]) < RESYNC_DEPTH) ||
drivers/md/raid1.c
1046
BUG_ON(atomic_read(&conf->barrier[idx]) <= 0);
drivers/md/raid1.c
1086
!atomic_read(&conf->barrier[idx]))
drivers/md/raid1.c
1112
!atomic_read(&conf->barrier[idx]),
drivers/md/raid1.c
1190
ret = atomic_read(&conf->nr_sync_pending);
drivers/md/raid1.c
1192
ret += atomic_read(&conf->nr_pending[idx]) -
drivers/md/raid1.c
1193
atomic_read(&conf->nr_queued[idx]);
drivers/md/raid1.c
1923
atomic_read(&rdev->nr_pending))
drivers/md/raid1.c
2024
if (atomic_read(&repl->nr_pending)) {
drivers/md/raid1.c
2852
if (atomic_read(&conf->nr_waiting[idx]))
drivers/md/raid1.c
555
if (atomic_read(&r1_bio->behind_remaining) >= (atomic_read(&r1_bio->remaining)-1) &&
drivers/md/raid1.c
816
pending = atomic_read(&rdev->nr_pending);
drivers/md/raid10.c
1058
wait_event_barrier_cmd(conf, atomic_read(&conf->nr_pending) ==
drivers/md/raid10.c
2188
atomic_read(&rdev->nr_pending)) {
drivers/md/raid10.c
811
pending = atomic_read(&rdev->nr_pending);
drivers/md/raid10.c
938
wait_event_barrier(conf, !atomic_read(&conf->nr_pending) &&
drivers/md/raid10.c
969
if (atomic_read(&conf->nr_pending) && bio_list &&
drivers/md/raid10.c
981
WARN_ON_ONCE(atomic_read(&conf->nr_pending) == 0);
drivers/md/raid5-cache.c
1432
flushing_partial = atomic_read(&conf->r5c_flushing_partial_stripes);
drivers/md/raid5-cache.c
1433
flushing_full = atomic_read(&conf->r5c_flushing_full_stripes);
drivers/md/raid5-cache.c
1434
total_cached = atomic_read(&conf->r5c_cached_partial_stripes) +
drivers/md/raid5-cache.c
1435
atomic_read(&conf->r5c_cached_full_stripes) -
drivers/md/raid5-cache.c
1439
atomic_read(&conf->empty_inactive_list_nr) > 0)
drivers/md/raid5-cache.c
1446
atomic_read(&conf->r5c_cached_full_stripes) - flushing_full >
drivers/md/raid5-cache.c
1478
atomic_read(&sh->count) == 0) {
drivers/md/raid5-cache.c
2454
atomic_read(&conf->active_stripes) == 0);
drivers/md/raid5-cache.c
2872
BUG_ON(atomic_read(&conf->r5c_cached_partial_stripes) == 0);
drivers/md/raid5-cache.c
2878
BUG_ON(atomic_read(&conf->r5c_cached_full_stripes) == 0);
drivers/md/raid5-cache.c
331
total_cached = atomic_read(&conf->r5c_cached_partial_stripes) +
drivers/md/raid5-cache.c
332
atomic_read(&conf->r5c_cached_full_stripes);
drivers/md/raid5-cache.c
343
atomic_read(&conf->empty_inactive_list_nr) > 0)
drivers/md/raid5-cache.c
362
if (atomic_read(&conf->r5c_cached_full_stripes) >=
drivers/md/raid5-cache.c
404
((conf->max_degraded + 1) * atomic_read(&log->stripe_in_journal_count) +
drivers/md/raid5.c
233
BUG_ON(atomic_read(&conf->active_stripes)==0);
drivers/md/raid5.c
2692
BUG_ON(atomic_read(&sh->count));
drivers/md/raid5.c
2723
(unsigned long long)sh->sector, i, atomic_read(&sh->count),
drivers/md/raid5.c
2768
if (atomic_read(&rdev->read_errors))
drivers/md/raid5.c
2798
} else if (atomic_read(&rdev->read_errors)
drivers/md/raid5.c
2803
atomic_read(&rdev->read_errors),
drivers/md/raid5.c
2863
(unsigned long long)sh->sector, i, atomic_read(&sh->count),
drivers/md/raid5.c
361
if (atomic_read(&conf->active_stripes) == 0)
drivers/md/raid5.c
4955
atomic_read(&sh->count), sh->pd_idx, sh->qd_idx,
drivers/md/raid5.c
5293
if (atomic_read(&conf->preread_active_stripes) <
drivers/md/raid5.c
5304
if (atomic_read(&conf->preread_active_stripes) < IO_THRESHOLD) {
drivers/md/raid5.c
5559
atomic_read(&conf->pending_full_writes), conf->bypass_count);
drivers/md/raid5.c
5579
atomic_read(&conf->pending_full_writes) == 0)) {
drivers/md/raid5.c
585
BUG_ON(atomic_read(&sh->count) != 0);
drivers/md/raid5.c
6366
atomic_read(&conf->reshape_stripes)==0
drivers/md/raid5.c
6368
if (atomic_read(&conf->reshape_stripes) != 0)
drivers/md/raid5.c
6475
atomic_read(&conf->reshape_stripes) == 0
drivers/md/raid5.c
6477
if (atomic_read(&conf->reshape_stripes) != 0)
drivers/md/raid5.c
655
if (!atomic_read(&sh->count)) {
drivers/md/raid5.c
7163
return sprintf(page, "%d\n", atomic_read(&conf->active_stripes));
drivers/md/raid5.c
803
return (atomic_read(&conf->active_stripes) <
drivers/md/raid5.c
8225
if (atomic_read(&conf->active_stripes) ||
drivers/md/raid5.c
8226
atomic_read(&conf->r5c_cached_full_stripes) ||
drivers/md/raid5.c
8227
atomic_read(&conf->r5c_cached_partial_stripes)) {
drivers/md/raid5.c
8248
atomic_read(&rdev->nr_pending)) {
drivers/md/raid5.c
8676
atomic_read(&conf->active_stripes) == 0 &&
drivers/md/raid5.c
8677
atomic_read(&conf->active_aligned_reads) == 0,
drivers/media/cec/core/cec-pin.c
1071
atomic_read(&pin->work_irq_change) ||
drivers/media/cec/core/cec-pin.c
1072
atomic_read(&pin->work_pin_num_events));
drivers/media/cec/core/cec-pin.c
1104
while (atomic_read(&pin->work_pin_num_events)) {
drivers/media/cec/core/cec-pin.c
118
if (atomic_read(&pin->work_pin_num_events) < CEC_NUM_PIN_EVENTS) {
drivers/media/common/videobuf2/videobuf2-core.c
1784
atomic_read(&q->owned_by_drv_count));
drivers/media/common/videobuf2/videobuf2-core.c
1797
if (WARN_ON(atomic_read(&q->owned_by_drv_count))) {
drivers/media/common/videobuf2/videobuf2-core.c
1814
WARN_ON(atomic_read(&q->owned_by_drv_count));
drivers/media/common/videobuf2/videobuf2-core.c
2105
wait_event(q->done_wq, !atomic_read(&q->owned_by_drv_count));
drivers/media/common/videobuf2/videobuf2-core.c
2210
if (WARN_ON(atomic_read(&q->owned_by_drv_count))) {
drivers/media/common/videobuf2/videobuf2-core.c
2224
WARN_ON(atomic_read(&q->owned_by_drv_count));
drivers/media/dvb-core/dvb_ca_en50221.c
237
return (atomic_read(&sl->camchange_count) != 0);
drivers/media/dvb-frontends/cxd2880/cxd2880_integ.c
68
if (atomic_read(&tnr_dmd->cancel) != 0)
drivers/media/i2c/cx25840/cx25840-ir.c
641
invert = (bool) atomic_read(&ir_state->rx_invert);
drivers/media/i2c/cx25840/cx25840-ir.c
642
divider = (u16) atomic_read(&ir_state->rxclk_divider);
drivers/media/mc/mc-device.c
691
atomic_read(&dev->num_requests));
drivers/media/mc/mc-device.c
693
atomic_read(&dev->num_request_objects));
drivers/media/pci/cx18/cx18-controls.c
24
if (atomic_read(&cx->ana_capturing) > 0)
drivers/media/pci/cx18/cx18-driver.c
1258
if (atomic_read(&cx->tot_capturing) > 0)
drivers/media/pci/cx18/cx18-fileops.c
232
if (!atomic_read(&s->q_full.depth))
drivers/media/pci/cx18/cx18-fileops.c
412
if (atomic_read(&cx->ana_capturing) == 0 && s->id == -1) {
drivers/media/pci/cx18/cx18-fileops.c
622
if (atomic_read(&s->q_full.depth))
drivers/media/pci/cx18/cx18-fileops.c
701
if (atomic_read(&cx->ana_capturing) > 0) {
drivers/media/pci/cx18/cx18-fileops.c
751
if (atomic_read(&cx->ana_capturing) > 0) {
drivers/media/pci/cx18/cx18-fileops.c
797
if (atomic_read(&cx->ana_capturing)) {
drivers/media/pci/cx18/cx18-fileops.c
810
if (atomic_read(&cx->ana_capturing)) {
drivers/media/pci/cx18/cx18-ioctl.c
143
if (atomic_read(&cx->ana_capturing) > 0)
drivers/media/pci/cx18/cx18-ioctl.c
341
if (!cx18_raw_vbi(cx) && atomic_read(&cx->ana_capturing) > 0)
drivers/media/pci/cx18/cx18-ioctl.c
374
if (cx18_raw_vbi(cx) && atomic_read(&cx->ana_capturing) > 0)
drivers/media/pci/cx18/cx18-ioctl.c
611
atomic_read(&cx->ana_capturing) > 0) {
drivers/media/pci/cx18/cx18-ioctl.c
872
if (!atomic_read(&cx->ana_capturing))
drivers/media/pci/cx18/cx18-ioctl.c
888
if (!atomic_read(&cx->ana_capturing))
drivers/media/pci/cx18/cx18-ioctl.c
974
atomic_read(&s->q_full.depth) * s->bufs_per_mdl * 100
drivers/media/pci/cx18/cx18-mailbox.c
512
if (atomic_read(&cx->in_work_order[i].pending) == 0) {
drivers/media/pci/cx18/cx18-queue.c
151
if (mdl->skipped >= atomic_read(&s->q_busy.depth)-1) {
drivers/media/pci/cx18/cx18-queue.c
57
atomic_read(&q->depth) >= CX18_MAX_FW_MDLS_PER_STREAM)
drivers/media/pci/cx18/cx18-streams.c
1010
if (atomic_read(&cx->tot_capturing) > 0)
drivers/media/pci/cx18/cx18-streams.c
667
if ((atomic_read(&s->q_free.depth) + atomic_read(&s->q_busy.depth)) >=
drivers/media/pci/cx18/cx18-streams.c
672
if (atomic_read(&s->q_full.depth) < 2)
drivers/media/pci/cx18/cx18-streams.c
714
if (atomic_read(&s->q_free.depth) == 0 ||
drivers/media/pci/cx18/cx18-streams.c
715
atomic_read(&s->q_busy.depth) >= CX18_MAX_FW_MDLS_PER_STREAM)
drivers/media/pci/cx18/cx18-streams.c
724
} while (atomic_read(&s->q_busy.depth) < CX18_MAX_FW_MDLS_PER_STREAM
drivers/media/pci/cx18/cx18-streams.c
861
if (atomic_read(&cx->ana_capturing) == 0)
drivers/media/pci/cx18/cx18-streams.c
913
if (atomic_read(&cx->tot_capturing) == 0) {
drivers/media/pci/cx18/cx18-streams.c
942
if (atomic_read(&cx->tot_capturing) == 0) {
drivers/media/pci/cx18/cx18-streams.c
983
if (atomic_read(&cx->tot_capturing) == 0)
drivers/media/pci/cx23885/cx23885-alsa.c
480
count = atomic_read(&chip->count);
drivers/media/pci/cx23885/cx23885-input.c
109
params.shutdown = atomic_read(&dev->ir_input_stopping);
drivers/media/pci/cx23885/cx23885-input.c
120
params.shutdown = atomic_read(&dev->ir_input_stopping);
drivers/media/pci/cx23885/cx23888-ir.c
633
bool invert = (bool) atomic_read(&state->rx_invert);
drivers/media/pci/cx23885/cx23888-ir.c
634
u16 divider = (u16) atomic_read(&state->rxclk_divider);
drivers/media/pci/cx25821/cx25821-alsa.c
621
count = atomic_read(&chip->count);
drivers/media/pci/cx88/cx88-alsa.c
565
count = atomic_read(&chip->count);
drivers/media/pci/intel/ipu3/ipu3-cio2.c
588
b->vbb.sequence = atomic_read(&q->frame_sequence);
drivers/media/pci/intel/ipu3/ipu3-cio2.c
611
.u.frame_sync.frame_sequence = atomic_read(&q->frame_sequence),
drivers/media/pci/intel/ipu6/ipu6-isys-csi2.c
590
u32 frame_sequence = atomic_read(&stream->sequence);
drivers/media/pci/intel/ipu6/ipu6-isys-queue.c
276
if (atomic_read(&stream->sequence) >= IPU6_ISYS_FRAME_NUM_THRESHOLD) {
drivers/media/pci/intel/ipu6/ipu6-isys-queue.c
673
return atomic_read(&stream->sequence) - 1;
drivers/media/pci/intel/ipu6/ipu6-isys-queue.c
732
if (atomic_read(&ib->str2mmio_flag)) {
drivers/media/pci/intel/ipu6/ipu6-isys.c
1300
atomic_read(&stream->sequence) - 1;
drivers/media/pci/ivtv/ivtv-controls.c
106
if (atomic_read(&itv->decoding)) {
drivers/media/pci/ivtv/ivtv-driver.c
1350
if (atomic_read(&itv->capturing) > 0)
drivers/media/pci/ivtv/ivtv-driver.c
1359
if (atomic_read(&itv->decoding) > 0) {
drivers/media/pci/ivtv/ivtv-fileops.c
1006
if (atomic_read(&itv->capturing) > 0) {
drivers/media/pci/ivtv/ivtv-fileops.c
1058
if (atomic_read(&itv->capturing))
drivers/media/pci/ivtv/ivtv-fileops.c
1065
if (atomic_read(&itv->capturing)) {
drivers/media/pci/ivtv/ivtv-fileops.c
362
if (atomic_read(&itv->capturing) == 0 && s->id == NULL) {
drivers/media/pci/ivtv/ivtv-fileops.c
527
if (atomic_read(&itv->decoding) == 0) {
drivers/media/pci/ivtv/ivtv-fileops.c
904
if (atomic_read(&itv->capturing) > 0) {
drivers/media/pci/ivtv/ivtv-firmware.c
334
if (!res && !atomic_read(&itv->capturing) &&
drivers/media/pci/ivtv/ivtv-firmware.c
335
(!atomic_read(&itv->decoding) ||
drivers/media/pci/ivtv/ivtv-firmware.c
336
(atomic_read(&itv->decoding) < 2 && test_bit(IVTV_F_I_DEC_YUV,
drivers/media/pci/ivtv/ivtv-firmware.c
365
if (res && !atomic_read(&itv->capturing) &&
drivers/media/pci/ivtv/ivtv-firmware.c
366
!atomic_read(&itv->decoding)) {
drivers/media/pci/ivtv/ivtv-ioctl.c
1181
atomic_read(&itv->capturing) > 0 ||
drivers/media/pci/ivtv/ivtv-ioctl.c
1182
atomic_read(&itv->decoding) > 0) {
drivers/media/pci/ivtv/ivtv-ioctl.c
1275
if (!atomic_read(&itv->capturing))
drivers/media/pci/ivtv/ivtv-ioctl.c
1310
if (!atomic_read(&itv->capturing))
drivers/media/pci/ivtv/ivtv-ioctl.c
1323
if (!atomic_read(&itv->capturing))
drivers/media/pci/ivtv/ivtv-ioctl.c
158
if (atomic_read(&itv->decoding) > 0) {
drivers/media/pci/ivtv/ivtv-ioctl.c
267
if (atomic_read(&itv->decoding) == 0)
drivers/media/pci/ivtv/ivtv-ioctl.c
278
if (!atomic_read(&itv->decoding))
drivers/media/pci/ivtv/ivtv-ioctl.c
282
if (atomic_read(&itv->decoding) > 0) {
drivers/media/pci/ivtv/ivtv-ioctl.c
292
if (!atomic_read(&itv->decoding))
drivers/media/pci/ivtv/ivtv-ioctl.c
584
if (atomic_read(&itv->capturing) > 0)
drivers/media/pci/ivtv/ivtv-ioctl.c
602
if (!ivtv_raw_vbi(itv) && atomic_read(&itv->capturing) > 0)
drivers/media/pci/ivtv/ivtv-ioctl.c
621
if (ivtv_raw_vbi(itv) && atomic_read(&itv->capturing) > 0)
drivers/media/pci/ivtv/ivtv-ioctl.c
990
if (atomic_read(&itv->capturing) > 0) {
drivers/media/pci/ivtv/ivtv-irq.c
798
if (atomic_read(&itv->yuv_info.next_dma_frame) >= 0)
drivers/media/pci/ivtv/ivtv-irq.c
833
int last_dma_frame = atomic_read(&yi->next_dma_frame);
drivers/media/pci/ivtv/ivtv-irq.c
844
if (next_dma_frame >= 0 && next_dma_frame != atomic_read(&yi->next_fill_frame)) {
drivers/media/pci/ivtv/ivtv-irq.c
898
(u8)(atomic_read(&yi->next_dma_frame) -
drivers/media/pci/ivtv/ivtv-streams.c
1022
if (atomic_read(&itv->capturing) == 0)
drivers/media/pci/ivtv/ivtv-streams.c
533
if (atomic_read(&itv->capturing) == 0) {
drivers/media/pci/ivtv/ivtv-streams.c
601
if (atomic_read(&itv->capturing) == 0) {
drivers/media/pci/ivtv/ivtv-streams.c
794
if (atomic_read(&itv->capturing) == 0)
drivers/media/pci/ivtv/ivtv-streams.c
873
if (atomic_read(&itv->capturing) > 0) {
drivers/media/pci/ivtv/ivtv-streams.c
999
if (atomic_read(&itv->capturing) == 0) {
drivers/media/pci/ivtv/ivtv-yuv.c
956
if (atomic_read(&yi->next_dma_frame) == -1)
drivers/media/pci/ivtv/ivtv-yuv.c
959
draw = atomic_read(&yi->next_fill_frame);
drivers/media/pci/ivtv/ivtv-yuv.c
960
display = atomic_read(&yi->next_dma_frame);
drivers/media/platform/m2m-deinterlace.c
160
!atomic_read(&ctx->dev->busy)) {
drivers/media/platform/mediatek/jpeg/mtk_jpeg_core.c
1601
atomic_read(&jpeg->hw_rdy) > 0);
drivers/media/platform/mediatek/jpeg/mtk_jpeg_core.c
1696
atomic_read(&jpeg->hw_rdy) > 0,
drivers/media/platform/mediatek/mdp3/mtk-mdp3-cmdq.c
681
if (atomic_read(&mdp->suspended)) {
drivers/media/platform/mediatek/mdp3/mtk-mdp3-m2m.c
39
return ((atomic_read(&ctx->curr_param.state) & mask) == mask);
drivers/media/platform/mediatek/vcodec/decoder/vdec_msg_queue.c
181
if (atomic_read(&msg_queue->lat_list_cnt) == NUM_BUFFER_COUNT) {
drivers/media/platform/mediatek/vcodec/decoder/vdec_msg_queue.c
183
atomic_read(&msg_queue->lat_list_cnt),
drivers/media/platform/mediatek/vcodec/decoder/vdec_msg_queue.c
184
atomic_read(&msg_queue->core_list_cnt),
drivers/media/platform/mediatek/vcodec/decoder/vdec_msg_queue.c
195
atomic_read(&msg_queue->lat_list_cnt),
drivers/media/platform/mediatek/vcodec/decoder/vdec_msg_queue.c
196
atomic_read(&msg_queue->core_list_cnt));
drivers/media/platform/mediatek/vcodec/decoder/vdec_msg_queue.c
274
atomic_read(&msg_queue->core_list_cnt)) {
drivers/media/platform/nuvoton/npcm-video.c
968
atomic_read(&video->ece.clients));
drivers/media/platform/nuvoton/npcm-video.c
994
atomic_read(&video->ece.clients));
drivers/media/platform/qcom/camss/camss.c
5099
if (atomic_read(&camss->ref_count) == 0)
drivers/media/platform/qcom/venus/hfi.c
103
!atomic_read(&core->insts_count));
drivers/media/platform/samsung/exynos4-is/fimc-lite-reg.c
66
if (atomic_read(&dev->out_path) == FIMC_IO_DMA) {
drivers/media/platform/samsung/exynos4-is/fimc-lite.c
1069
if ((atomic_read(&fimc->out_path) == FIMC_IO_ISP &&
drivers/media/platform/samsung/exynos4-is/fimc-lite.c
1071
(atomic_read(&fimc->out_path) == FIMC_IO_DMA &&
drivers/media/platform/samsung/exynos4-is/fimc-lite.c
1199
if (atomic_read(&fimc->out_path) != FIMC_IO_ISP)
drivers/media/platform/samsung/exynos4-is/fimc-lite.c
1566
fimc_lite_hw_init(fimc, atomic_read(&fimc->out_path) == FIMC_IO_ISP);
drivers/media/platform/samsung/exynos4-is/fimc-lite.c
271
if (atomic_read(&fimc->out_path) != FIMC_IO_DMA)
drivers/media/platform/samsung/exynos4-is/fimc-lite.c
464
if (atomic_read(&fimc->out_path) != FIMC_IO_DMA) {
drivers/media/platform/samsung/exynos4-is/fimc-lite.c
479
atomic_read(&fimc->out_path) != FIMC_IO_DMA)
drivers/media/platform/samsung/exynos4-is/fimc-lite.c
515
atomic_read(&fimc->out_path) == FIMC_IO_DMA) {
drivers/media/platform/samsung/s5p-mfc/s5p_mfc.c
150
if (atomic_read(&dev->watchdog_cnt) >= MFC_WATCHDOG_CNT) {
drivers/media/platform/ti/omap3isp/isp.c
1225
if (!wait_event_timeout(*wait, !atomic_read(stopping),
drivers/media/platform/ti/omap3isp/ispccdc.c
1477
event.u.frame_sync.frame_sequence = atomic_read(&pipe->frame_number);
drivers/media/platform/ti/omap3isp/isphist.c
275
if (atomic_read(&hist->buf_err) || hist->state != ISPSTAT_ENABLED) {
drivers/media/platform/ti/omap3isp/ispstat.c
917
stat->frame_number = atomic_read(&pipe->frame_number);
drivers/media/platform/ti/omap3isp/ispvideo.c
553
buf->vb.sequence = atomic_read(&pipe->frame_number);
drivers/media/radio/radio-shark.c
165
brightness = atomic_read(&shark->brightness[i]);
drivers/media/radio/radio-shark2.c
152
brightness = atomic_read(&shark->brightness[i]);
drivers/media/radio/radio-si476x.c
1076
atomic_read(&radio->core->is_alive))
drivers/media/radio/radio-si476x.c
1099
!atomic_read(&radio->core->is_alive)));
drivers/media/radio/radio-si476x.c
1103
if (!atomic_read(&radio->core->is_alive))
drivers/media/radio/radio-si476x.c
1130
if (atomic_read(&radio->core->is_alive))
drivers/media/radio/radio-si476x.c
1133
if (!atomic_read(&radio->core->is_alive))
drivers/media/usb/cx231xx/cx231xx-audio.c
108
if (atomic_read(&dev->stream_started) == 0)
drivers/media/usb/cx231xx/cx231xx-audio.c
199
if (atomic_read(&dev->stream_started) == 0)
drivers/media/usb/cx231xx/cx231xx-audio.c
471
if (atomic_read(&dev->stream_started) > 0) {
drivers/media/usb/cx231xx/cx231xx-audio.c
493
if (atomic_read(&dev->stream_started)) {
drivers/media/usb/cx231xx/cx231xx-core.c
65
if (atomic_read(&dev->devlist_count) > 0) {
drivers/media/usb/em28xx/em28xx-audio.c
305
if (atomic_read(&dev->adev.stream_started) > 0) {
drivers/media/usb/em28xx/em28xx-audio.c
336
if (atomic_read(&adev->stream_started)) {
drivers/media/usb/em28xx/em28xx-audio.c
98
if (atomic_read(&dev->adev.stream_started) == 0)
drivers/media/usb/gspca/cpia1.c
1272
framerate = atomic_read(&sd->fps);
drivers/media/usb/gspca/cpia1.c
1395
cam_exposure = atomic_read(&sd->cam_exposure);
drivers/media/usb/gspca/pac207.c
343
int avg_lum = atomic_read(&sd->avg_lum);
drivers/media/usb/gspca/pac7302.c
727
int avg_lum = atomic_read(&sd->avg_lum);
drivers/media/usb/gspca/pac7311.c
492
int avg_lum = atomic_read(&sd->avg_lum);
drivers/media/usb/gspca/sn9c20x.c
2162
avg_lum = atomic_read(&sd->avg_lum);
drivers/media/usb/gspca/sonixb.c
880
avg_lum = atomic_read(&sd->avg_lum);
drivers/media/usb/gspca/sonixj.c
2584
delta = atomic_read(&sd->avg_lum);
drivers/media/usb/s2255/s2255drv.c
1412
state = atomic_read(&dev->fw_data->fw_state);
drivers/media/usb/s2255/s2255drv.c
1421
((atomic_read(&dev->fw_data->fw_state)
drivers/media/usb/s2255/s2255drv.c
1423
(atomic_read(&dev->fw_data->fw_state)
drivers/media/usb/s2255/s2255drv.c
1427
state = atomic_read(&dev->fw_data->fw_state);
drivers/media/usb/s2255/s2255drv.c
1435
((atomic_read(&dev->fw_data->fw_state)
drivers/media/usb/s2255/s2255drv.c
1437
(atomic_read(&dev->fw_data->fw_state)
drivers/media/usb/s2255/s2255drv.c
1441
state = atomic_read(&dev->fw_data->fw_state);
drivers/media/usb/usbtv/usbtv-audio.c
122
if (!atomic_read(&chip->snd_stream))
drivers/media/usb/usbtv/usbtv-audio.c
258
if (atomic_read(&usbtv->snd_stream) && usbtv->snd_bulk_urb)
drivers/media/usb/usbtv/usbtv-audio.c
264
if (atomic_read(&usbtv->snd_stream) && usbtv->snd_bulk_urb)
drivers/media/usb/usbtv/usbtv-audio.c
275
if (atomic_read(&chip->snd_stream))
drivers/media/usb/usbtv/usbtv-audio.c
80
if (atomic_read(&chip->snd_stream)) {
drivers/media/v4l2-core/v4l2-dev.c
290
if (atomic_read(&global->prios[V4L2_PRIORITY_RECORD]) > 0)
drivers/media/v4l2-core/v4l2-dev.c
292
if (atomic_read(&global->prios[V4L2_PRIORITY_INTERACTIVE]) > 0)
drivers/media/v4l2-core/v4l2-dev.c
294
if (atomic_read(&global->prios[V4L2_PRIORITY_BACKGROUND]) > 0)
drivers/memory/tegra/tegra210-emc-core.c
659
if (atomic_read(&emc->refresh_poll) > 0) {
drivers/memory/tegra/tegra210-emc-core.c
694
*state = atomic_read(&emc->refresh_poll);
drivers/memory/tegra/tegra210-emc-core.c
704
if (state == atomic_read(&emc->refresh_poll))
drivers/message/fusion/mptlan.c
1107
atomic_read(&priv->buckets_out));
drivers/message/fusion/mptlan.c
1112
remaining, atomic_read(&priv->buckets_out));
drivers/message/fusion/mptlan.c
1115
((atomic_read(&priv->buckets_out) - remaining) >
drivers/message/fusion/mptlan.c
1151
curr = atomic_read(&priv->buckets_out);
drivers/message/fusion/mptlan.c
1278
__func__, buckets, atomic_read(&priv->buckets_out)));
drivers/message/fusion/mptlan.c
500
priv->total_posted,atomic_read(&priv->buckets_out)));
drivers/message/fusion/mptlan.c
507
while (atomic_read(&priv->buckets_out) && time_before(jiffies, timeout))
drivers/message/fusion/mptlan.c
828
atomic_read(&priv->buckets_out)));
drivers/message/fusion/mptlan.c
830
if (atomic_read(&priv->buckets_out) < priv->bucketthresh)
drivers/message/fusion/mptlan.c
835
atomic_read(&priv->buckets_out), priv->total_received));
drivers/message/fusion/mptlan.c
950
/**/ atomic_read(&priv->buckets_out), priv->total_received));
drivers/mfd/ab8500-core.c
596
if (atomic_read(&ab8500->transfer_ongoing))
drivers/mfd/db8500-prcmu.c
2268
return (atomic_read(&ac_wake_req_state) != 0);
drivers/mfd/si476x-cmd.c
299
atomic_read(&core->cts),
drivers/mfd/si476x-cmd.c
314
atomic_read(&core->cts),
drivers/mfd/si476x-cmd.c
385
atomic_read(&core->stc));
drivers/mfd/si476x-i2c.c
521
if (atomic_read(&core->is_alive))
drivers/mfd/twl6030-irq.c
128
chained_wakeups = atomic_read(&pdata->wakeirqs);
drivers/misc/bcm-vk/bcm_vk_msg.c
111
return (!!atomic_read(&vk->msgq_inited));
drivers/misc/bcm-vk/bcm_vk_msg.c
1263
cnt = atomic_read(&ctx->pend_cnt);
drivers/misc/bcm-vk/bcm_vk_msg.c
1301
dma_cnt = atomic_read(&ctx->dma_cnt);
drivers/misc/cb710/core.c
275
BUG_ON(atomic_read(&chip->slot_refs_count) != 0);
drivers/misc/cb710/core.c
288
BUG_ON(atomic_read(&chip->slot_refs_count) != 0);
drivers/misc/ibmasm/command.c
47
dbg("command count: %d\n", atomic_read(&command_count));
drivers/misc/ibmasm/command.c
58
dbg("command count: %d\n", atomic_read(&command_count));
drivers/misc/keba/cp500.c
671
notified = atomic_read(&cp500->nvmem_notified);
drivers/misc/keba/cp500.c
709
notified = atomic_read(&cp500->nvmem_notified);
drivers/misc/lis3lv02d/lis3lv02d.c
503
if (atomic_read(&lis3->wake_thread))
drivers/misc/lis3lv02d/lis3lv02d.c
654
if (atomic_read(&lis3->count))
drivers/misc/mei/platform-vsc.c
168
return atomic_read(&hw->write_lock_cnt) == 0;
drivers/misc/mei/vsc-tp.c
133
atomic_read(&tp->assert_cnt),
drivers/misc/mei/vsc-tp.c
384
if (!atomic_read(&tp->assert_cnt))
drivers/misc/ntsync.c
1011
signaled = atomic_read(&q->signaled);
drivers/misc/ntsync.c
1083
if (atomic_read(&q->signaled) == -1) {
drivers/misc/ntsync.c
1127
signaled = atomic_read(&q->signaled);
drivers/misc/ntsync.c
205
all = atomic_read(&obj->all_hint);
drivers/misc/ntsync.c
847
if (atomic_read(&q->signaled) != -1) {
drivers/misc/ntsync.c
986
if (atomic_read(&q->signaled) != -1)
drivers/misc/phantom.c
263
pr_debug("phantom_poll: %d\n", atomic_read(&dev->counter));
drivers/misc/phantom.c
268
else if (atomic_read(&dev->counter))
drivers/misc/phantom.c
271
pr_debug("phantom_poll end: %x/%d\n", mask, atomic_read(&dev->counter));
drivers/misc/sgi-gru/grufault.c
410
if (atomic_read(&gts->ts_gms->ms_range_active))
drivers/misc/sgi-gru/grufault.c
623
atomic_read(&gms->ms_range_active) == 0);
drivers/misc/sgi-gru/grutlbpurge.c
216
range->start, range->end, atomic_read(&gms->ms_range_active));
drivers/misc/sgi-xp/xpc_channel.c
123
if (atomic_read(&ch->n_to_notify) > 0) {
drivers/misc/sgi-xp/xpc_channel.c
136
DBUG_ON(atomic_read(&ch->n_to_notify) != 0);
drivers/misc/sgi-xp/xpc_channel.c
493
DBUG_ON(atomic_read(&ch->kthreads_assigned) != 0);
drivers/misc/sgi-xp/xpc_channel.c
494
DBUG_ON(atomic_read(&ch->kthreads_idle) != 0);
drivers/misc/sgi-xp/xpc_channel.c
495
DBUG_ON(atomic_read(&ch->kthreads_active) != 0);
drivers/misc/sgi-xp/xpc_channel.c
784
if (atomic_read(&ch->kthreads_idle) > 0) {
drivers/misc/sgi-xp/xpc_channel.c
794
if (atomic_read(&ch->n_on_msg_allocate_wq) > 0)
drivers/misc/sgi-xp/xpc_channel.c
94
if (atomic_read(&ch->kthreads_assigned) > 0 ||
drivers/misc/sgi-xp/xpc_channel.c
95
atomic_read(&ch->references) > 0) {
drivers/misc/sgi-xp/xpc_main.c
333
atomic_read(&part->nchannels_active) > 0 ||
drivers/misc/sgi-xp/xpc_main.c
353
(atomic_read(&part->channel_mgr_requests) > 0 ||
drivers/misc/sgi-xp/xpc_main.c
356
atomic_read(&part->nchannels_active) == 0 &&
drivers/misc/sgi-xp/xpc_main.c
483
DBUG_ON(atomic_read(&part->nchannels_engaged) != 0);
drivers/misc/sgi-xp/xpc_main.c
484
DBUG_ON(atomic_read(&part->nchannels_active) != 0);
drivers/misc/sgi-xp/xpc_main.c
494
wait_event(part->teardown_wq, (atomic_read(&part->references) == 0));
drivers/misc/sgi-xp/xpc_main.c
601
int idle = atomic_read(&ch->kthreads_idle);
drivers/misc/sgi-xp/xpc_main.c
602
int assigned = atomic_read(&ch->kthreads_assigned);
drivers/misc/sgi-xp/xpc_main.c
818
if (atomic_read(&ch->kthreads_assigned) <
drivers/misc/sgi-xp/xpc_uv.c
1275
if (atomic_read(&ch->n_on_msg_allocate_wq) > 0)
drivers/misc/sgi-xp/xpc_uv.c
1381
if (atomic_read(&ch->kthreads_idle) > 0)
drivers/misc/sgi-xp/xpc_uv.c
1545
if (atomic_read(&ch->n_to_notify) == 0)
drivers/misc/vmw_vmci/vmci_doorbell.c
345
atomic_read(&dbell->active) == 1) {
drivers/misc/vmw_vmci/vmci_guest.c
85
return atomic_read(&vmci_num_guest_devices) != 0;
drivers/misc/vmw_vmci/vmci_host.c
108
atomic_read(&vmci_host_active_users) > 0);
drivers/misc/vmw_vmci/vmci_host.c
113
return atomic_read(&vmci_host_active_users);
drivers/mmc/core/core.c
805
stop = abort ? atomic_read(abort) : 0;
drivers/mmc/core/sdio.c
1166
if (atomic_read(&card->sdio_funcs_probed) > 1) {
drivers/mtd/maps/vmu-flash.c
147
if (atomic_read(&mdev->busy) == 1) {
drivers/mtd/maps/vmu-flash.c
149
atomic_read(&mdev->busy) == 0, HZ);
drivers/mtd/maps/vmu-flash.c
150
if (atomic_read(&mdev->busy) == 1) {
drivers/mtd/maps/vmu-flash.c
173
(atomic_read(&mdev->busy) == 0 ||
drivers/mtd/maps/vmu-flash.c
174
atomic_read(&mdev->busy) == 2), HZ * 3);
drivers/mtd/maps/vmu-flash.c
180
if (error || atomic_read(&mdev->busy) == 2) {
drivers/mtd/maps/vmu-flash.c
181
if (atomic_read(&mdev->busy) == 2)
drivers/mtd/maps/vmu-flash.c
252
if (atomic_read(&mdev->busy) == 1) {
drivers/mtd/maps/vmu-flash.c
254
atomic_read(&mdev->busy) == 0, HZ);
drivers/mtd/maps/vmu-flash.c
255
if (atomic_read(&mdev->busy) == 1) {
drivers/mtd/maps/vmu-flash.c
268
atomic_read(&mdev->busy) == 0, HZ/10);
drivers/mtd/maps/vmu-flash.c
274
if (atomic_read(&mdev->busy) == 2) {
drivers/mtd/maps/vmu-flash.c
653
if (atomic_read(&mdev->busy) == 1) {
drivers/mtd/maps/vmu-flash.c
655
atomic_read(&mdev->busy) == 0, HZ);
drivers/mtd/maps/vmu-flash.c
656
if (atomic_read(&mdev->busy) == 1) {
drivers/net/bonding/bond_3ad.c
2494
val = atomic_read(&BOND_AD_INFO(bond).agg_select_timer);
drivers/net/bonding/bond_3ad.c
280
return atomic_read(&BOND_AD_INFO(bond).agg_select_timer) ? 1 : 0;
drivers/net/bonding/bond_alb.c
1570
if (atomic_read(&bond_info->tx_rebalance_counter) >= BOND_TLB_REBALANCE_TICKS) {
drivers/net/bonding/bond_main.c
6652
WARN_ON(atomic_read(&netpoll_block_tx));
drivers/net/can/usb/ems_usb.c
856
if (atomic_read(&dev->active_tx_urbs) >= MAX_TX_URBS ||
drivers/net/can/usb/esd_usb.c
915
if (atomic_read(&priv->active_tx_jobs) >= ESD_USB_MAX_TX_URBS)
drivers/net/can/usb/etas_es58x/es58x_core.c
1768
if (atomic_read(&es58x_dev->tx_urbs_idle_cnt))
drivers/net/can/usb/etas_es58x/es58x_core.c
1771
atomic_read(&es58x_dev->tx_urbs_idle_cnt));
drivers/net/can/usb/gs_usb.c
930
if (atomic_read(&dev->active_tx_urbs) >= GS_MAX_TX_URBS)
drivers/net/can/usb/mcba_usb.c
198
if (!atomic_read(&priv->free_ctx_cnt))
drivers/net/can/usb/peak_usb/pcan_usb_core.c
424
if (atomic_read(&dev->active_tx_urbs) >= PCAN_USB_MAX_TX_URBS)
drivers/net/can/usb/usb_8dev.c
683
} else if (atomic_read(&priv->active_tx_urbs) >= MAX_TX_URBS)
drivers/net/ethernet/alteon/acenic.c
1569
cur_size = atomic_read(&ap->cur_rx_bufs);
drivers/net/ethernet/alteon/acenic.c
1579
cur_size = atomic_read(&ap->cur_mini_bufs);
drivers/net/ethernet/alteon/acenic.c
1590
cur_size = atomic_read(&ap->cur_jumbo_bufs);
drivers/net/ethernet/alteon/acenic.c
2165
cur_size = atomic_read(&ap->cur_rx_bufs);
drivers/net/ethernet/alteon/acenic.c
2179
cur_size = atomic_read(&ap->cur_mini_bufs);
drivers/net/ethernet/alteon/acenic.c
2196
cur_size = atomic_read(&ap->cur_jumbo_bufs);
drivers/net/ethernet/amazon/ena/ena_com.c
1428
while (atomic_read(&admin_queue->outstanding_cmds) != 0) {
drivers/net/ethernet/amazon/ena/ena_com.c
227
cnt = (u16)atomic_read(&admin_queue->outstanding_cmds);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
546
timestamp += atomic_read(&aq_ptp->offset_egress);
drivers/net/ethernet/aquantia/atlantic/aq_ptp.c
565
timestamp -= atomic_read(&aq_ptp->offset_ingress);
drivers/net/ethernet/aquantia/atlantic/aq_utils.h
19
flags_old = atomic_read(flags);
drivers/net/ethernet/aquantia/atlantic/aq_utils.h
29
flags_old = atomic_read(flags);
drivers/net/ethernet/aquantia/atlantic/aq_utils.h
36
return atomic_read(flags) & mask;
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_utils.c
641
pmbox->stats.dpc = atomic_read(&self->dpc);
drivers/net/ethernet/atheros/atl1c/atl1c_main.c
1597
u16 next_to_clean = atomic_read(&tpd_ring->next_to_clean);
drivers/net/ethernet/atheros/atl1c/atl1c_main.c
1985
next_to_clean = atomic_read(&tpd_ring->next_to_clean);
drivers/net/ethernet/atheros/atl1e/atl1e_main.c
1235
u16 next_to_clean = atomic_read(&tx_ring->next_to_clean);
drivers/net/ethernet/atheros/atl1e/atl1e_main.c
1565
next_to_clean = atomic_read(&tx_ring->next_to_clean);
drivers/net/ethernet/atheros/atlx/atl1.c
1497
value = ((atomic_read(&adapter->tpd_ring.next_to_use)
drivers/net/ethernet/atheros/atlx/atl1.c
1499
((atomic_read(&adapter->rrd_ring.next_to_clean)
drivers/net/ethernet/atheros/atlx/atl1.c
1501
((atomic_read(&adapter->rfd_ring.next_to_use)
drivers/net/ethernet/atheros/atlx/atl1.c
1743
tpd_next_to_use = atomic_read(&adapter->tpd_ring.next_to_use);
drivers/net/ethernet/atheros/atlx/atl1.c
1744
rfd_next_to_use = atomic_read(&adapter->rfd_ring.next_to_use);
drivers/net/ethernet/atheros/atlx/atl1.c
1745
rrd_next_to_clean = atomic_read(&adapter->rrd_ring.next_to_clean);
drivers/net/ethernet/atheros/atlx/atl1.c
1842
next_next = rfd_next_to_use = atomic_read(&rfd_ring->next_to_use);
drivers/net/ethernet/atheros/atlx/atl1.c
1920
rrd_next_to_clean = atomic_read(&rrd_ring->next_to_clean);
drivers/net/ethernet/atheros/atlx/atl1.c
2040
tpd_next_to_use = atomic_read(&adapter->tpd_ring.next_to_use);
drivers/net/ethernet/atheros/atlx/atl1.c
2042
atomic_read(&adapter->rfd_ring.next_to_use);
drivers/net/ethernet/atheros/atlx/atl1.c
2044
atomic_read(&adapter->rrd_ring.next_to_clean);
drivers/net/ethernet/atheros/atlx/atl1.c
2066
sw_tpd_next_to_clean = atomic_read(&tpd_ring->next_to_clean);
drivers/net/ethernet/atheros/atlx/atl1.c
2098
u16 next_to_clean = atomic_read(&tpd_ring->next_to_clean);
drivers/net/ethernet/atheros/atlx/atl1.c
2099
u16 next_to_use = atomic_read(&tpd_ring->next_to_use);
drivers/net/ethernet/atheros/atlx/atl1.c
2211
next_to_use = atomic_read(&tpd_ring->next_to_use);
drivers/net/ethernet/atheros/atlx/atl1.c
2333
u16 next_to_use = (u16) atomic_read(&tpd_ring->next_to_use);
drivers/net/ethernet/atheros/atlx/atl1.c
2428
(u16) atomic_read(&tpd_ring->next_to_use));
drivers/net/ethernet/atheros/atlx/atl2.c
472
txs_write_ptr = (u32) atomic_read(&adapter->txs_write_ptr);
drivers/net/ethernet/atheros/atlx/atl2.c
484
txd_read_ptr = (u32) atomic_read(&adapter->txd_read_ptr);
drivers/net/ethernet/atheros/atlx/atl2.c
802
u32 txs_write_ptr = (u32) atomic_read(&adapter->txs_write_ptr);
drivers/net/ethernet/atheros/atlx/atl2.c
812
u32 txd_read_ptr = (u32)atomic_read(&adapter->txd_read_ptr);
drivers/net/ethernet/broadcom/bnx2.c
3320
if (unlikely(atomic_read(&bp->intr_sem) != 0))
drivers/net/ethernet/broadcom/bnx2.c
3337
if (unlikely(atomic_read(&bp->intr_sem) != 0))
drivers/net/ethernet/broadcom/bnx2.c
3373
if (unlikely(atomic_read(&bp->intr_sem) != 0))
drivers/net/ethernet/broadcom/bnx2.c
6171
if (atomic_read(&bp->intr_sem) != 0)
drivers/net/ethernet/broadcom/bnx2.c
6539
atomic_read(&bp->intr_sem), val1);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
3207
if (atomic_read(&bp->pdev->enable_cnt) != 1)
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
13082
if (atomic_read(&pdev->enable_cnt) == 1) {
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
13251
if (atomic_read(&pdev->enable_cnt) == 1)
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
14016
if (atomic_read(&pdev->enable_cnt) == 1)
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
14107
if (atomic_read(&pdev->enable_cnt) == 1)
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
14468
if (!atomic_read(&bp->cq_spq_left))
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
14473
if (!atomic_read(&bp->eq_spq_left))
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
1899
DP(BNX2X_MSG_SP, "bp->cq_spq_left %x\n", atomic_read(&bp->cq_spq_left));
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
3899
if (!atomic_read(&bp->eq_spq_left)) {
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
3905
} else if (!atomic_read(&bp->cq_spq_left)) {
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
3953
atomic_read(&bp->cq_spq_left), atomic_read(&bp->eq_spq_left));
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
5493
hw_cons, sw_cons, atomic_read(&bp->eq_spq_left));
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
5693
if (atomic_read(&bp->interrupt_occurred)) {
drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c
4168
c = atomic_read(v);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c
4196
c = atomic_read(v);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_sp.c
4240
cur_credit = atomic_read(&o->credit);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
14237
if (atomic_read(&bp->intr_sem) != 0)
drivers/net/ethernet/broadcom/bnxt/bnxt_tc.c
1828
num_flows = atomic_read(&tc_info->flow_table.nelems);
drivers/net/ethernet/broadcom/cnic.c
3603
if (atomic_read(&csk1->ref_count))
drivers/net/ethernet/broadcom/cnic.c
3658
while (atomic_read(&csk->ref_count) != 1)
drivers/net/ethernet/broadcom/cnic.c
4232
if (atomic_read(&cp->iscsi_conn) != 0)
drivers/net/ethernet/broadcom/cnic.c
4234
atomic_read(&cp->iscsi_conn));
drivers/net/ethernet/broadcom/cnic.c
5442
while ((atomic_read(&dev->ref_count) != 0) && i < 10) {
drivers/net/ethernet/broadcom/cnic.c
5446
if (atomic_read(&dev->ref_count) != 0)
drivers/net/ethernet/broadcom/cnic.c
549
while ((atomic_read(&ulp_ops->ref_count) != 0) && (i < 20)) {
drivers/net/ethernet/broadcom/cnic.c
554
if (atomic_read(&ulp_ops->ref_count) != 0)
drivers/net/ethernet/brocade/bna/bnad_debugfs.c
536
if (atomic_read(&bna_debugfs_port_count) == 0) {
drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c
1223
if (atomic_read(oct->adapter_refcount) > 1)
drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c
1294
} while ((atomic_read(&ctx.status) == 0) && (count++ < timeout));
drivers/net/ethernet/cavium/liquidio/cn23xx_pf_device.c
1296
ret = atomic_read(&ctx.status);
drivers/net/ethernet/cavium/liquidio/cn23xx_vf_device.c
441
} while ((!atomic_read(&status)) && (count++ < 100000));
drivers/net/ethernet/cavium/liquidio/cn23xx_vf_device.c
443
ret = atomic_read(&status);
drivers/net/ethernet/cavium/liquidio/lio_core.c
1002
if (!(atomic_read(&oct->status) == OCT_DEV_IN_RESET))
drivers/net/ethernet/cavium/liquidio/lio_core.c
1282
atomic_read(&oct->droq[idx]->pkts_pending);
drivers/net/ethernet/cavium/liquidio/lio_core.c
770
if (atomic_read(&iq->instr_pending))
drivers/net/ethernet/cavium/liquidio/lio_main.c
1004
atomic_read(&iq->instr_pending);
drivers/net/ethernet/cavium/liquidio/lio_main.c
1116
if (atomic_read(oct->adapter_fw_state) == FW_IS_PRELOADED)
drivers/net/ethernet/cavium/liquidio/lio_main.c
1222
if (atomic_read(&lio->ifstate) & LIO_IFSTATE_RUNNING)
drivers/net/ethernet/cavium/liquidio/lio_main.c
1241
if (atomic_read(&lio->ifstate) & LIO_IFSTATE_REGISTERED)
drivers/net/ethernet/cavium/liquidio/lio_main.c
2315
if (!(atomic_read(&lio->ifstate) & LIO_IFSTATE_RUNNING) ||
drivers/net/ethernet/cavium/liquidio/lio_main.c
256
if (atomic_read(&iq->instr_pending)) {
drivers/net/ethernet/cavium/liquidio/lio_main.c
261
atomic_read(&iq->instr_pending);
drivers/net/ethernet/cavium/liquidio/lio_main.c
3451
} else if (atomic_read(octeon_dev->adapter_fw_state) ==
drivers/net/ethernet/cavium/liquidio/lio_main.c
3949
if (atomic_read(&oct->status) == OCT_DEV_RUNNING)
drivers/net/ethernet/cavium/liquidio/lio_main.c
3957
if (atomic_read(&oct->status) != OCT_DEV_CORE_OK) {
drivers/net/ethernet/cavium/liquidio/lio_main.c
877
if (atomic_read(oct_dev->adapter_refcount) == 1) {
drivers/net/ethernet/cavium/liquidio/lio_main.c
952
switch (atomic_read(&oct->status)) {
drivers/net/ethernet/cavium/liquidio/lio_main.c
999
if (atomic_read(&iq->instr_pending)) {
drivers/net/ethernet/cavium/liquidio/lio_vf_main.c
127
if (atomic_read(&iq->instr_pending)) {
drivers/net/ethernet/cavium/liquidio/lio_vf_main.c
132
atomic_read(&iq->instr_pending);
drivers/net/ethernet/cavium/liquidio/lio_vf_main.c
1425
if (!(atomic_read(&lio->ifstate) & LIO_IFSTATE_RUNNING) ||
drivers/net/ethernet/cavium/liquidio/lio_vf_main.c
449
switch (atomic_read(&oct->status)) {
drivers/net/ethernet/cavium/liquidio/lio_vf_main.c
489
if (atomic_read(&iq->instr_pending)) {
drivers/net/ethernet/cavium/liquidio/lio_vf_main.c
494
atomic_read(&iq->instr_pending);
drivers/net/ethernet/cavium/liquidio/lio_vf_main.c
675
if (atomic_read(&lio->ifstate) & LIO_IFSTATE_RUNNING)
drivers/net/ethernet/cavium/liquidio/lio_vf_main.c
693
if (atomic_read(&lio->ifstate) & LIO_IFSTATE_REGISTERED)
drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c
132
atomic_set(&vf_rep->ifstate, (atomic_read(&vf_rep->ifstate) |
drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c
165
atomic_set(&vf_rep->ifstate, (atomic_read(&vf_rep->ifstate) &
drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c
316
if (!(atomic_read(&vf_rep->ifstate) & LIO_IFSTATE_RUNNING) ||
drivers/net/ethernet/cavium/liquidio/lio_vf_rep.c
381
if (!(atomic_read(&vf_rep->ifstate) & LIO_IFSTATE_RUNNING) ||
drivers/net/ethernet/cavium/liquidio/octeon_device.c
1214
if (atomic_read(&oct->status) >= OCT_DEV_RUNNING) {
drivers/net/ethernet/cavium/liquidio/octeon_device.c
1216
atomic_read(&oct->status));
drivers/net/ethernet/cavium/liquidio/octeon_device.c
1444
pkts_pend = (u32)atomic_read(&droq->pkts_pending);
drivers/net/ethernet/cavium/liquidio/octeon_device.c
631
s32 istate = (s32)atomic_read(state_ptr);
drivers/net/ethernet/cavium/liquidio/octeon_device.c
804
refcount = atomic_read(oct->adapter_refcount);
drivers/net/ethernet/cavium/liquidio/octeon_device.c
822
refcount = atomic_read(oct->adapter_refcount);
drivers/net/ethernet/cavium/liquidio/octeon_droq.c
730
if (!atomic_read(&droq->pkts_pending))
drivers/net/ethernet/cavium/liquidio/octeon_droq.c
746
pkt_count = atomic_read(&droq->pkts_pending);
drivers/net/ethernet/cavium/liquidio/octeon_droq.c
768
if (atomic_read(&droq->pkts_pending))
drivers/net/ethernet/cavium/liquidio/octeon_droq.c
795
(u32)(atomic_read(&droq->pkts_pending)));
drivers/net/ethernet/cavium/liquidio/octeon_network.h
506
return atomic_read(&lio->ifstate) & state_flag;
drivers/net/ethernet/cavium/liquidio/octeon_network.h
516
atomic_set(&lio->ifstate, (atomic_read(&lio->ifstate) | state_flag));
drivers/net/ethernet/cavium/liquidio/octeon_network.h
526
atomic_set(&lio->ifstate, (atomic_read(&lio->ifstate) & ~(state_flag)));
drivers/net/ethernet/cavium/liquidio/octeon_network.h
540
pcount = atomic_read(
drivers/net/ethernet/cavium/liquidio/octeon_nic.h
114
return ((u32)atomic_read(&oct->instr_queue[q_no]->instr_pending)
drivers/net/ethernet/cavium/liquidio/request_manager.c
247
atomic_read(&oct->instr_queue[i]->instr_pending);
drivers/net/ethernet/cavium/liquidio/request_manager.c
267
if (atomic_read(&oct->status) == OCT_DEV_RUNNING) {
drivers/net/ethernet/cavium/liquidio/request_manager.c
310
if (atomic_read(&iq->instr_pending) >= (s32)(iq->max_count - 1)) {
drivers/net/ethernet/cavium/liquidio/request_manager.c
316
if (atomic_read(&iq->instr_pending) >= (s32)(iq->max_count - 2))
drivers/net/ethernet/cavium/liquidio/request_manager.c
431
if (atomic_read(&oct->response_list
drivers/net/ethernet/cavium/liquidio/request_manager.c
506
if (!atomic_read(&iq->instr_pending))
drivers/net/ethernet/cavium/liquidio/request_manager.c
778
if (!atomic_read(&done_sc_list->pending_req_count))
drivers/net/ethernet/cavium/liquidio/response_manager.c
235
if (atomic_read(&oct->response_list
drivers/net/ethernet/cavium/thunder/nicvf_main.c
1297
if (atomic_read(&sq->free_cnt) > MIN_SQ_DESC_PER_PKT_XMIT) {
drivers/net/ethernet/cavium/thunder/nicvf_main.c
934
(atomic_read(&sq->free_cnt) >= MIN_SQ_DESC_PER_PKT_XMIT)) {
drivers/net/ethernet/cavium/thunder/nicvf_queues.c
1558
if (subdesc_cnt > atomic_read(&sq->free_cnt))
drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c
641
t->atids_in_use + atomic_read(&t->tids_in_use) + MC5_MIN_TIDS <=
drivers/net/ethernet/chelsio/cxgb3/l2t.c
186
if (!atomic_read(&d->nfree))
drivers/net/ethernet/chelsio/cxgb3/l2t.c
191
if (atomic_read(&e->refcnt) == 0)
drivers/net/ethernet/chelsio/cxgb3/l2t.c
194
for (e = &d->l2tab[1]; atomic_read(&e->refcnt); ++e) ;
drivers/net/ethernet/chelsio/cxgb3/l2t.c
230
if (atomic_read(&e->refcnt) == 0) { /* hasn't been recycled */
drivers/net/ethernet/chelsio/cxgb3/l2t.c
301
if (atomic_read(&e->refcnt) == 1)
drivers/net/ethernet/chelsio/cxgb3/l2t.c
384
if (atomic_read(&e->refcnt)) {
drivers/net/ethernet/chelsio/cxgb4/clip_tbl.c
269
seq_printf(seq, "Free clip entries : %d\n", atomic_read(&ctbl->nfree));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3323
atomic_read(&t->conns_in_use));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3335
atomic_read(&t->tids_in_use),
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3336
atomic_read(&t->hash_tids_in_use));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3344
atomic_read(&t->tids_in_use),
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3345
atomic_read(&t->hash_tids_in_use));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3351
atomic_read(&t->hash_tids_in_use));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3355
atomic_read(&t->conns_in_use));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3360
atomic_read(&t->tids_in_use));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3387
atomic_read(&t->eotids_in_use));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3538
atomic_read(&adap->chcr_stats.cipher_rqst));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3540
atomic_read(&adap->chcr_stats.digest_rqst));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3542
atomic_read(&adap->chcr_stats.aead_rqst));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3544
atomic_read(&adap->chcr_stats.complete));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3546
atomic_read(&adap->chcr_stats.error));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3548
atomic_read(&adap->chcr_stats.fallback));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3550
atomic_read(&adap->chcr_stats.tls_pdu_tx));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3552
atomic_read(&adap->chcr_stats.tls_pdu_rx));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3554
atomic_read(&adap->chcr_stats.tls_key));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3558
atomic_read(&adap->ch_ipsec_stats.ipsec_cnt));
drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c
1161
if ((atomic_read(&adap->tids.hash_tids_in_use) +
drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c
1162
atomic_read(&adap->tids.tids_in_use)) >=
drivers/net/ethernet/chelsio/cxgb4/cxgb4_uld.c
668
return (atomic_read(&t->conns_in_use) || t->stids_in_use);
drivers/net/ethernet/chelsio/cxgb4/l2t.c
268
if (!atomic_read(&d->nfree))
drivers/net/ethernet/chelsio/cxgb4/l2t.c
273
if (atomic_read(&e->refcnt) == 0)
drivers/net/ethernet/chelsio/cxgb4/l2t.c
276
for (e = d->l2tab; atomic_read(&e->refcnt); ++e)
drivers/net/ethernet/chelsio/cxgb4/l2t.c
305
if (atomic_read(&e->refcnt) == 0) {
drivers/net/ethernet/chelsio/cxgb4/l2t.c
355
if (atomic_read(&e->refcnt) == 0) { /* hasn't been recycled */
drivers/net/ethernet/chelsio/cxgb4/l2t.c
373
if (atomic_read(&e->refcnt) == 0) { /* hasn't been recycled */
drivers/net/ethernet/chelsio/cxgb4/l2t.c
444
if (atomic_read(&e->refcnt) == 1)
drivers/net/ethernet/chelsio/cxgb4/l2t.c
523
if (atomic_read(&e->refcnt))
drivers/net/ethernet/chelsio/cxgb4/l2t.c
588
if (!atomic_read(&e->refcnt)) {
drivers/net/ethernet/chelsio/cxgb4/l2t.c
710
l2e_state(e), atomic_read(&e->refcnt),
drivers/net/ethernet/chelsio/cxgb4/sched.c
615
if (!atomic_read(&e->refcnt) && e->state != SCHED_STATE_UNUSED) {
drivers/net/ethernet/emulex/benet/be_cmds.c
590
if (atomic_read(&mcc_obj->q.used) == 0)
drivers/net/ethernet/emulex/benet/be_cmds.c
850
if (atomic_read(&mccq->used) >= mccq->len)
drivers/net/ethernet/emulex/benet/be_main.c
1435
atomic_read(&txo->q.used), txo->q.id);
drivers/net/ethernet/emulex/benet/be_main.c
1450
atomic_read(&txo->cq.used));
drivers/net/ethernet/emulex/benet/be_main.c
2666
} else if (atomic_read(&rxq->used) == 0) {
drivers/net/ethernet/emulex/benet/be_main.c
2821
while (atomic_read(&rxq->used) > 0) {
drivers/net/ethernet/emulex/benet/be_main.c
2826
BUG_ON(atomic_read(&rxq->used));
drivers/net/ethernet/emulex/benet/be_main.c
2915
if (atomic_read(&txq->used)) {
drivers/net/ethernet/emulex/benet/be_main.c
2917
i, atomic_read(&txq->used));
drivers/net/ethernet/emulex/benet/be_main.c
2920
index_adv(&end_idx, atomic_read(&txq->used) - 1,
drivers/net/ethernet/emulex/benet/be_main.c
2927
BUG_ON(atomic_read(&txq->used));
drivers/net/ethernet/emulex/benet/be_main.c
3258
if (atomic_read(&rxo->q.used) < RX_FRAGS_REFILL_WM &&
drivers/net/ethernet/emulex/benet/be_main.c
3585
if (atomic_read(&q->used) == 0)
drivers/net/ethernet/emulex/benet/be_main.c
817
return atomic_read(&txo->q.used) + BE_MAX_TX_FRAG_COUNT >= txo->q.len;
drivers/net/ethernet/emulex/benet/be_main.c
822
return atomic_read(&txo->q.used) < txo->q.len / 2;
drivers/net/ethernet/emulex/benet/be_main.c
827
return atomic_read(&txo->q.used) > txo->pend_wrb_cnt;
drivers/net/ethernet/google/gve/gve_tx.c
57
WARN(atomic_read(&fifo->available) != fifo->size,
drivers/net/ethernet/google/gve/gve_tx.c
71
return (atomic_read(&fifo->available) <= bytes) ? false : true;
drivers/net/ethernet/google/gve/gve_tx_dqo.c
1019
u32 tail = atomic_read(&tx->dqo_tx.xsk_reorder_queue_tail);
drivers/net/ethernet/hisilicon/hns3/hns3_debugfs.c
779
atomic_read(&ring->page_pool->pages_state_release_cnt),
drivers/net/ethernet/hisilicon/hns3/hns3vf/hclgevf_mbx.c
207
if (atomic_read(&hdev->arq.count) >=
drivers/net/ethernet/huawei/hinic/hinic_debugfs.c
31
return atomic_read(&wq->prod_idx) & wq->mask;
drivers/net/ethernet/huawei/hinic/hinic_debugfs.c
33
return atomic_read(&wq->cons_idx) & wq->mask;
drivers/net/ethernet/huawei/hinic/hinic_debugfs.c
63
return atomic_read(&wq->cons_idx) & wq->mask;
drivers/net/ethernet/huawei/hinic/hinic_debugfs.c
65
return atomic_read(&wq->prod_idx) & wq->mask;
drivers/net/ethernet/huawei/hinic/hinic_hw_cmdq.c
722
HINIC_CMDQ_CTXT_BLOCK_INFO_SET(atomic_read(&wq->cons_idx), CI);
drivers/net/ethernet/huawei/hinic/hinic_hw_mbox.c
470
if (atomic_read(&recv_mbox->msg_cnt) > HINIC_MAX_MSG_CNT_TO_PROCESS) {
drivers/net/ethernet/huawei/hinic/hinic_hw_mbox.c
473
src_func_idx, atomic_read(&recv_mbox->msg_cnt));
drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c
101
ci_start = atomic_read(&wq->cons_idx);
drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c
102
pi_start = atomic_read(&wq->prod_idx);
drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c
163
ci_start = atomic_read(&wq->cons_idx);
drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c
164
pi_start = atomic_read(&wq->prod_idx);
drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c
459
return atomic_read(&wq->delta) - 1;
drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c
472
return atomic_read(&wq->delta) - 1;
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c
746
*prod_idx = MASKED_WQE_IDX(wq, atomic_read(&wq->prod_idx));
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c
829
if ((atomic_read(&wq->delta) + num_wqebbs) > wq->q_depth)
drivers/net/ethernet/huawei/hinic/hinic_hw_wq.c
832
curr_cons_idx = atomic_read(&wq->cons_idx);
drivers/net/ethernet/huawei/hinic/hinic_main.c
831
sw_pi = atomic_read(&sq->wq->prod_idx) & sq->wq->mask;
drivers/net/ethernet/huawei/hinic/hinic_main.c
833
sw_ci = atomic_read(&sq->wq->cons_idx) & sq->wq->mask;
drivers/net/ethernet/ibm/ehea/ehea_ethtool.c
258
data[i++] = atomic_read(&port->port_res[k].swqe_avail);
drivers/net/ethernet/ibm/ehea/ehea_main.c
2081
if (unlikely(atomic_read(&pr->swqe_avail) <= 1)) {
drivers/net/ethernet/ibm/ehea/ehea_main.c
2499
atomic_read(&pr->swqe_avail) >= swqe_max,
drivers/net/ethernet/ibm/ehea/ehea_main.c
3355
if (atomic_read(&ehea_memory_hooks_registered) == 0)
drivers/net/ethernet/ibm/ehea/ehea_main.c
862
(atomic_read(&pr->swqe_avail) >= pr->swqe_refill_th))) {
drivers/net/ethernet/ibm/ehea/ehea_main.c
865
(atomic_read(&pr->swqe_avail) >= pr->swqe_refill_th))
drivers/net/ethernet/ibm/ibmveth.c
215
u32 remaining = pool->size - atomic_read(&pool->available);
drivers/net/ethernet/ibm/ibmveth.c
405
(atomic_read(&pool->available) < pool->threshold))
drivers/net/ethernet/ibm/ibmvnic.c
3610
(atomic_read(&adapter->rx_pool[scrq_num].available) <
drivers/net/ethernet/ibm/ibmvnic.c
5493
atomic_read(&adapter->running_cap_crqs));
drivers/net/ethernet/ibm/ibmvnic.c
5557
if (atomic_read(&adapter->running_cap_crqs) == 0)
drivers/net/ethernet/ibm/ibmvnic.c
5703
atomic_read(&adapter->running_cap_crqs));
drivers/net/ethernet/ibm/ibmvnic.c
5867
if (atomic_read(&adapter->running_cap_crqs) == 0)
drivers/net/ethernet/ibm/ibmvnic.c
773
int count = pool->size - atomic_read(&pool->available);
drivers/net/ethernet/intel/e1000/e1000_main.c
2364
if (atomic_read(&adapter->tx_fifo_stall)) {
drivers/net/ethernet/intel/e1000/e1000_main.c
3049
if (atomic_read(&adapter->tx_fifo_stall))
drivers/net/ethernet/intel/ice/ice_arfs.c
41
return atomic_read(&arfs_fltr_cntrs->active_udpv4_cnt) > 0;
drivers/net/ethernet/intel/ice/ice_arfs.c
43
return atomic_read(&arfs_fltr_cntrs->active_udpv6_cnt) > 0;
drivers/net/ethernet/intel/ice/ice_arfs.c
45
return atomic_read(&arfs_fltr_cntrs->active_tcpv4_cnt) > 0;
drivers/net/ethernet/intel/ice/ice_arfs.c
47
return atomic_read(&arfs_fltr_cntrs->active_tcpv6_cnt) > 0;
drivers/net/ethernet/jme.c
1056
if (unlikely(atomic_read(&jme->link_changing) != 1))
drivers/net/ethernet/jme.c
1062
i = atomic_read(&rxring->next_to_clean);
drivers/net/ethernet/jme.c
1194
(atomic_read(&jme->link_changing) != 1)
drivers/net/ethernet/jme.c
1270
while (atomic_read(&jme->link_changing) != 1)
drivers/net/ethernet/jme.c
1358
while (atomic_read(&jme->rx_empty) > 0) {
drivers/net/ethernet/jme.c
1379
if (unlikely(atomic_read(&jme->link_changing) != 1))
drivers/net/ethernet/jme.c
1389
while (atomic_read(&jme->rx_empty) > 0) {
drivers/net/ethernet/jme.c
1404
atomic_read(&txring->nr_free) >= (jme->tx_wake_threshold))) {
drivers/net/ethernet/jme.c
1424
if (unlikely(atomic_read(&jme->link_changing) != 1))
drivers/net/ethernet/jme.c
1430
max = jme->tx_ring_size - atomic_read(&txring->nr_free);
drivers/net/ethernet/jme.c
1433
for (i = atomic_read(&txring->next_to_clean) ; cnt < max ; ) {
drivers/net/ethernet/jme.c
1940
if (unlikely(atomic_read(&txring->nr_free) < nr_alloc))
drivers/net/ethernet/jme.c
2164
int idx = atomic_read(&txring->next_to_clean);
drivers/net/ethernet/jme.c
2169
if (unlikely(atomic_read(&txring->nr_free) < (MAX_SKB_FRAGS+2))) {
drivers/net/ethernet/jme.c
2173
if (atomic_read(&txring->nr_free)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
1419
*(data++) = atomic_read(&((atomic_t *)&vf->hw.drv_stats)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_ethtool.c
200
*(data++) = atomic_read(&((atomic_t *)&pfvf->hw.drv_stats)
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1781
if (unlikely(atomic_read(&ring->free_count) <= tx_num)) {
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1807
if (unlikely(atomic_read(&ring->free_count) <= ring->thresh))
drivers/net/ethernet/mediatek/mtk_eth_soc.c
1992
if (unlikely(atomic_read(&ring->free_count) <= 1 + nr_frags))
drivers/net/ethernet/mediatek/mtk_eth_soc.c
2554
(atomic_read(&ring->free_count) > ring->thresh))
drivers/net/ethernet/mellanox/mlx4/fw.c
2707
int num_tasks = atomic_read(&priv->opreq_count);
drivers/net/ethernet/mellanox/mlx4/main.c
2073
if (atomic_read(&pf_loading)) {
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
1331
if (res->com.state == RES_QP_BUSY || atomic_read(&res->ref_count) ||
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
1334
res->com.state, atomic_read(&res->ref_count));
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
1346
atomic_read(&res->ref_count)) {
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
1350
atomic_read(&res->ref_count));
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
1681
else if (atomic_read(&r->ref_count))
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
1721
else if (atomic_read(&r->ref_count))
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
4812
if (cq->com.owner == slave && !atomic_read(&cq->ref_count)) {
drivers/net/ethernet/mellanox/mlx5/core/en_accel/psp.c
1059
stats->tx_error = atomic_read(&priv->psp->tx_drop);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/psp.c
1159
WARN_ON(atomic_read(&psp->tx_key_cnt));
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
4022
s->tx_dropped += atomic_read(&priv->psp->tx_drop);
drivers/net/ethernet/mellanox/mlx5/core/en_tc.c
5514
return atomic_read(&tc_ht->nelems);
drivers/net/ethernet/mellanox/mlx5/core/esw/ipsec_fs.c
167
if (atomic_read(&rep->rep_data[REP_ETH].state) != REP_LOADED)
drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads.c
3073
if (atomic_read(&rep->rep_data[rep_type].state) == REP_LOADED &&
drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads.c
3106
if (atomic_read(&rep->rep_data[rep_type].state) == REP_LOADED &&
drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads.c
3483
if (atomic_read(&rep->rep_data[REP_ETH].state) != REP_LOADED)
drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads.c
3491
if (atomic_read(&rep->rep_data[REP_ETH].state) == REP_LOADED)
drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads.c
3593
if (work_gen != atomic_read(&esw->esw_funcs.generation))
drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads.c
3655
host_work->work_gen = atomic_read(&esw_funcs->generation);
drivers/net/ethernet/mellanox/mlx5/core/eswitch_offloads.c
4421
if (atomic_read(&rep->rep_data[rep_type].state) == REP_LOADED &&
drivers/net/ethernet/mellanox/mlx5/core/fs_core.c
2118
version += (u64)atomic_read(&iter->g->node.version);
drivers/net/ethernet/mellanox/mlx5/core/fs_core.c
2278
if (atomic_read(&ft->node.version) != ft_version) {
drivers/net/ethernet/mellanox/mlx5/core/fs_core.c
2361
version = atomic_read(&ft->node.version);
drivers/net/ethernet/mellanox/mlx5/core/fs_core.c
2392
version != atomic_read(&ft->node.version))
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/bwc.c
389
u32 rx_rules = atomic_read(&bwc_matcher->rx_size.num_of_rules);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/bwc.c
390
u32 tx_rules = atomic_read(&bwc_matcher->tx_size.num_of_rules);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/bwc.c
574
if (atomic_read(&rx_size->num_of_rules) ||
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/bwc.c
575
atomic_read(&tx_size->num_of_rules))
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/bwc.c
606
if (atomic_read(&bwc_matcher->rx_size.num_of_rules) ||
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/bwc.c
607
atomic_read(&bwc_matcher->tx_size.num_of_rules))
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/bwc.c
863
need_rx_rehash = atomic_read(&bwc_matcher->rx_size.rehash_required);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/bwc.c
864
need_tx_rehash = atomic_read(&bwc_matcher->tx_size.rehash_required);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/bwc.c
998
if (!atomic_read(&bwc_matcher->rx_size.rehash_required) &&
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/bwc.c
999
!atomic_read(&bwc_matcher->tx_size.rehash_required))
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_dbg.c
1079
if (atomic_read(&dmn->dump_info.state) != MLX5DR_DEBUG_DUMP_STATE_FREE) {
drivers/net/ethernet/mellanox/mlxsw/core.c
120
return atomic_read(&mlxsw_core->active_ports_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
107
WARN_ON(atomic_read(&sub_pool->active_entries_count));
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
117
return atomic_read(&pool->active_entries_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
173
WARN_ON(atomic_read(&pool->active_entries_count));
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
49
return atomic_read(&sub_pool->active_entries_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_policer.c
112
WARN_ON(atomic_read(&family->policers_count) != 0);
drivers/net/ethernet/mellanox/mlxsw/spectrum_policer.c
76
return atomic_read(&family->policers_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_port_range.c
157
return atomic_read(&pr_core->prr_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c
11102
WARN_ON_ONCE(atomic_read(&mlxsw_sp->router->rifs_count));
drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c
2578
if (!atomic_read(&mlxsw_sp->router->neighs_update.neigh_count))
drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c
8976
return atomic_read(&mlxsw_sp->router->rif_mac_profiles_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c
8983
return atomic_read(&mlxsw_sp->router->rifs_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c
1063
if (atomic_read(&mlxsw_sp->span->active_entries_count) == 0)
drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c
77
return atomic_read(&mlxsw_sp->span->active_entries_count);
drivers/net/ethernet/microsoft/mana/mana_en.c
3247
while (atomic_read(&txq->pending_sends) > 0 &&
drivers/net/ethernet/microsoft/mana/mana_en.c
3252
if (atomic_read(&txq->pending_sends)) {
drivers/net/ethernet/microsoft/mana/mana_en.c
3256
err, atomic_read(&txq->pending_sends),
drivers/net/ethernet/natsemi/ns83820.c
1594
dev->tx_done_idx, dev->tx_free_idx, atomic_read(&dev->nr_tx_skbs)
drivers/net/ethernet/natsemi/ns83820.c
1603
atomic_read(&dev->nr_tx_skbs));
drivers/net/ethernet/netronome/nfp/flower/main.c
309
atomic_read(replies) >= tot_repl,
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
927
*data++ = atomic_read(&nn->ktls_no_space);
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
928
*data++ = atomic_read(&nn->ktls_rx_resync_req);
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
929
*data++ = atomic_read(&nn->ktls_rx_resync_ign);
drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c
930
*data++ = atomic_read(&nn->ktls_rx_resync_sent);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c
403
if (!atomic_read(&bar->refcnt))
drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c
803
if (WARN_ON(!atomic_read(&priv->refcnt)))
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cppcore.c
405
if (atomic_read(&area->refcount))
drivers/net/ethernet/qlogic/qed/qed_main.c
265
if (atomic_read(&pdev->enable_cnt) == 1)
drivers/net/ethernet/qlogic/qed/qed_main.c
301
if (atomic_read(&pdev->enable_cnt) == 1) {
drivers/net/ethernet/qlogic/qla3xxx.c
2466
if (unlikely(atomic_read(&qdev->tx_count) < 2))
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_hw.c
833
if (atomic_read(&cmd->rsp_status) ==
drivers/net/ethernet/sfc/ef10.c
1930
if (rc != -ENOENT || atomic_read(&efx->active_queues))
drivers/net/ethernet/sfc/efx.c
923
stats[GENERIC_STAT_rx_noskb_drops] = atomic_read(&efx->n_rx_noskb_drops);
drivers/net/ethernet/sfc/ethtool_common.c
50
return atomic_read((atomic_t *) field);
drivers/net/ethernet/sfc/falcon/efx.c
2699
stats[GENERIC_STAT_rx_noskb_drops] = atomic_read(&efx->n_rx_noskb_drops);
drivers/net/ethernet/sfc/falcon/ethtool.c
49
return atomic_read((atomic_t *) field);
drivers/net/ethernet/sfc/falcon/farch.c
1146
WARN_ON(atomic_read(&efx->active_queues) == 0);
drivers/net/ethernet/sfc/falcon/farch.c
434
WARN_ON(atomic_read(&tx_queue->flush_outstanding));
drivers/net/ethernet/sfc/falcon/farch.c
609
return (atomic_read(&efx->active_queues) == 0 ||
drivers/net/ethernet/sfc/falcon/farch.c
610
(atomic_read(&efx->rxq_flush_outstanding) < EF4_RX_FLUSH_COUNT
drivers/net/ethernet/sfc/falcon/farch.c
611
&& atomic_read(&efx->rxq_flush_pending) > 0));
drivers/net/ethernet/sfc/falcon/farch.c
676
while (timeout && atomic_read(&efx->active_queues) > 0) {
drivers/net/ethernet/sfc/falcon/farch.c
683
if (atomic_read(&efx->rxq_flush_outstanding) >=
drivers/net/ethernet/sfc/falcon/farch.c
701
if (atomic_read(&efx->active_queues) &&
drivers/net/ethernet/sfc/falcon/farch.c
704
"(rx %d+%d)\n", atomic_read(&efx->active_queues),
drivers/net/ethernet/sfc/falcon/farch.c
705
atomic_read(&efx->rxq_flush_outstanding),
drivers/net/ethernet/sfc/falcon/farch.c
706
atomic_read(&efx->rxq_flush_pending));
drivers/net/ethernet/sfc/falcon/selftest.c
368
if (atomic_read(&state->rx_bad) == 0) {
drivers/net/ethernet/sfc/falcon/selftest.c
475
return atomic_read(&state->rx_good) == state->packet_count;
drivers/net/ethernet/sfc/falcon/selftest.c
501
rx_good = atomic_read(&state->rx_good);
drivers/net/ethernet/sfc/falcon/selftest.c
502
rx_bad = atomic_read(&state->rx_bad);
drivers/net/ethernet/sfc/mcdi.c
1287
WARN_ON(atomic_read(&efx->active_queues) < 0);
drivers/net/ethernet/sfc/mcdi_functions.c
380
atomic_read(&efx->active_queues) == 0,
drivers/net/ethernet/sfc/mcdi_functions.c
382
pending = atomic_read(&efx->active_queues);
drivers/net/ethernet/sfc/mcdi_port_common.c
1176
if (rc && (rc != -ENOENT || atomic_read(&efx->active_queues)))
drivers/net/ethernet/sfc/selftest.c
365
if (atomic_read(&state->rx_bad) == 0) {
drivers/net/ethernet/sfc/selftest.c
472
return atomic_read(&state->rx_good) == state->packet_count;
drivers/net/ethernet/sfc/selftest.c
498
rx_good = atomic_read(&state->rx_good);
drivers/net/ethernet/sfc/selftest.c
499
rx_bad = atomic_read(&state->rx_bad);
drivers/net/ethernet/sfc/siena/efx.c
827
stats[GENERIC_STAT_rx_noskb_drops] = atomic_read(&efx->n_rx_noskb_drops);
drivers/net/ethernet/sfc/siena/ethtool_common.c
50
return atomic_read((atomic_t *) field);
drivers/net/ethernet/sfc/siena/farch.c
1139
WARN_ON(atomic_read(&efx->active_queues) == 0);
drivers/net/ethernet/sfc/siena/farch.c
428
WARN_ON(atomic_read(&tx_queue->flush_outstanding));
drivers/net/ethernet/sfc/siena/farch.c
597
return (atomic_read(&efx->active_queues) == 0 ||
drivers/net/ethernet/sfc/siena/farch.c
598
(atomic_read(&efx->rxq_flush_outstanding) < EFX_RX_FLUSH_COUNT
drivers/net/ethernet/sfc/siena/farch.c
599
&& atomic_read(&efx->rxq_flush_pending) > 0));
drivers/net/ethernet/sfc/siena/farch.c
664
while (timeout && atomic_read(&efx->active_queues) > 0) {
drivers/net/ethernet/sfc/siena/farch.c
681
if (atomic_read(&efx->rxq_flush_outstanding) >=
drivers/net/ethernet/sfc/siena/farch.c
700
if (atomic_read(&efx->active_queues) &&
drivers/net/ethernet/sfc/siena/farch.c
703
"(rx %d+%d)\n", atomic_read(&efx->active_queues),
drivers/net/ethernet/sfc/siena/farch.c
704
atomic_read(&efx->rxq_flush_outstanding),
drivers/net/ethernet/sfc/siena/farch.c
705
atomic_read(&efx->rxq_flush_pending));
drivers/net/ethernet/sfc/siena/mcdi.c
1310
WARN_ON(atomic_read(&efx->active_queues) < 0);
drivers/net/ethernet/sfc/siena/mcdi_port_common.c
1183
if (rc && (rc != -ENOENT || atomic_read(&efx->active_queues)))
drivers/net/ethernet/sfc/siena/selftest.c
366
if (atomic_read(&state->rx_bad) == 0) {
drivers/net/ethernet/sfc/siena/selftest.c
473
return atomic_read(&state->rx_good) == state->packet_count;
drivers/net/ethernet/sfc/siena/selftest.c
499
rx_good = atomic_read(&state->rx_good);
drivers/net/ethernet/sfc/siena/selftest.c
500
rx_bad = atomic_read(&state->rx_bad);
drivers/net/ethernet/sfc/siena/siena_sriov.c
665
atomic_read(&vf->rxq_retry_count);
drivers/net/ethernet/silan/sc92031.c
865
intr_mask = atomic_read(&priv->intr_mask);
drivers/net/ethernet/silan/sc92031.c
898
intr_mask = atomic_read(&priv->intr_mask);
drivers/net/ethernet/sun/cassini.c
3786
while (atomic_read(&cp->reset_task_pending_mtu) ||
drivers/net/ethernet/sun/cassini.c
3787
atomic_read(&cp->reset_task_pending_spare) ||
drivers/net/ethernet/sun/cassini.c
3788
atomic_read(&cp->reset_task_pending_all))
drivers/net/ethernet/sun/cassini.c
3792
while (atomic_read(&cp->reset_task_pending))
drivers/net/ethernet/sun/cassini.c
3949
int pending = atomic_read(&cp->reset_task_pending);
drivers/net/ethernet/sun/cassini.c
3951
int pending_all = atomic_read(&cp->reset_task_pending_all);
drivers/net/ethernet/sun/cassini.c
3952
int pending_spare = atomic_read(&cp->reset_task_pending_spare);
drivers/net/ethernet/sun/cassini.c
3953
int pending_mtu = atomic_read(&cp->reset_task_pending_mtu);
drivers/net/ethernet/sun/cassini.c
4050
if (atomic_read(&cp->reset_task_pending_all) ||
drivers/net/ethernet/sun/cassini.c
4051
atomic_read(&cp->reset_task_pending_spare) ||
drivers/net/ethernet/sun/cassini.c
4052
atomic_read(&cp->reset_task_pending_mtu))
drivers/net/ethernet/sun/cassini.c
4055
if (atomic_read(&cp->reset_task_pending))
drivers/net/ethernet/toshiba/ps3_gelic_net.c
1873
atomic_read(&card->tx_timeout_task_counter) == 0);
drivers/net/hyperv/netvsc.c
1180
atomic_read(&nvchan->queue_sends) < 1 &&
drivers/net/hyperv/rndis_filter.c
1145
if (atomic_read(&nvchan->queue_sends) > 0)
drivers/net/hyperv/rndis_filter.c
1330
atomic_read(&nvdev->open_chn) == nvdev->num_chn);
drivers/net/ipa/gsi_trans.c
304
int avail = atomic_read(&trans_info->tre_avail);
drivers/net/ipa/gsi_trans.c
331
return atomic_read(&trans_info->tre_avail) == tre_max;
drivers/net/ipa/gsi_trans.c
594
if (ring_db || !atomic_read(&channel->trans_info.tre_avail)) {
drivers/net/plip/plip.c
389
if (!(atomic_read (&nl->kill_timer))) {
drivers/net/ppp/ppp_generic.c
3613
if (atomic_read(&ppp_unit_count) || atomic_read(&channel_count))
drivers/net/rionet.c
419
state = atomic_read(&rdev->state);
drivers/net/thunderbolt/main.c
1108
u16 frame_id = atomic_read(&net->frame_id);
drivers/net/usb/catc.c
267
if (atomic_read(&catc->recq_sz)) {
drivers/net/usb/cdc_mbim.c
49
dev_dbg(&dev->intf->dev, "%s() pmcount=%d, on=%d\n", __func__, atomic_read(&info->pmcount), on);
drivers/net/usb/cdc_ncm.c
1485
if (!(hrtimer_active(&ctx->tx_timer) || atomic_read(&ctx->stop)))
drivers/net/usb/cdc_ncm.c
1496
if (!atomic_read(&ctx->stop))
drivers/net/usb/qmi_wwan.c
660
atomic_read(&info->pmcount), on);
drivers/net/usb/r8152.c
2093
WARN_ON(atomic_read(&tp->rx_count));
drivers/net/usb/r8152.c
2484
return atomic_read(&tp->rx_count) > RTL8152_MAX_RX;
drivers/net/usb/r8152.c
2516
if (!agg_free && atomic_read(&tp->rx_count) < tp->rx_pending)
drivers/net/wan/framer/pef2256/pef2256.c
661
status->link_is_on = !!atomic_read(&pef2256->carrier);
drivers/net/wireguard/receive.c
112
under_load = atomic_read(&wg->handshake_queue_len) >=
drivers/net/wireguard/receive.c
554
if (atomic_read(&wg->handshake_queue_len) > MAX_QUEUED_INCOMING_HANDSHAKES / 2) {
drivers/net/wireguard/selftest/ratelimiter.c
105
if (atomic_read(&total_entries))
drivers/net/wireless/ath/ar5523/ar5523.c
137
atomic_read(&ar->tx_nr_pending));
drivers/net/wireless/ath/ar5523/ar5523.c
740
if (atomic_read(&ar->tx_nr_total) < AR5523_TX_DATA_RESTART_COUNT) {
drivers/net/wireless/ath/ar5523/ar5523.c
786
atomic_read(&ar->tx_nr_total),
drivers/net/wireless/ath/ar5523/ar5523.c
787
atomic_read(&ar->tx_nr_pending));
drivers/net/wireless/ath/ar5523/ar5523.c
877
atomic_read(&ar->tx_nr_pending));
drivers/net/wireless/ath/ar5523/ar5523.c
921
atomic_read(&ar->tx_nr_total),
drivers/net/wireless/ath/ar5523/ar5523.c
922
atomic_read(&ar->tx_nr_pending));
drivers/net/wireless/ath/ar5523/ar5523.c
937
!atomic_read(&ar->tx_nr_pending), AR5523_FLUSH_TIMEOUT))
drivers/net/wireless/ath/ar5523/ar5523.c
939
atomic_read(&ar->tx_nr_total),
drivers/net/wireless/ath/ar5523/ar5523.c
940
atomic_read(&ar->tx_nr_pending));
drivers/net/wireless/ath/ath10k/core.c
2502
if (atomic_read(&ar->fail_cont_count) >= ATH10K_RECOVERY_MAX_FAIL_COUNT) {
drivers/net/wireless/ath/ath10k/core.c
2504
atomic_read(&ar->fail_cont_count));
drivers/net/wireless/ath/ath10k/core.c
2511
if (atomic_read(&ar->pending_recovery)) {
drivers/net/wireless/ath/ath10k/htt_rx.c
4393
while (atomic_read(&htt->num_mpdus_ready)) {
drivers/net/wireless/ath/ath11k/core.c
2545
fail_cont_count = atomic_read(&ab->fail_cont_count);
drivers/net/wireless/ath/ath11k/debugfs.c
716
atomic_read(&soc_stats->tx_err.misc_fail));
drivers/net/wireless/ath/ath11k/mac.c
6489
atomic_read(&ar->num_pending_mgmt_tx) > ATH11K_PRB_RSP_DROP_THRESHOLD) {
drivers/net/wireless/ath/ath11k/mac.c
8621
(atomic_read(&ar->dp.num_tx_pending) == 0),
drivers/net/wireless/ath/ath11k/mac.c
8625
atomic_read(&ar->dp.num_tx_pending));
drivers/net/wireless/ath/ath11k/mac.c
8630
(atomic_read(&ar->num_pending_mgmt_tx) == 0),
drivers/net/wireless/ath/ath11k/mac.c
8634
atomic_read(&ar->num_pending_mgmt_tx));
drivers/net/wireless/ath/ath12k/core.c
1663
fail_cont_count = atomic_read(&ab->fail_cont_count);
drivers/net/wireless/ath/ath12k/debugfs.c
1118
atomic_read(&device_stats->tx_err.misc_fail));
drivers/net/wireless/ath/ath12k/debugfs.c
1166
atomic_read(&ar->dp.num_tx_pending));
drivers/net/wireless/ath/ath12k/mac.c
12338
(atomic_read(&ar->dp.num_tx_pending) == 0),
drivers/net/wireless/ath/ath12k/mac.c
12343
atomic_read(&ar->dp.num_tx_pending));
drivers/net/wireless/ath/ath12k/mac.c
12348
(atomic_read(&ar->num_pending_mgmt_tx) == 0),
drivers/net/wireless/ath/ath12k/mac.c
12353
atomic_read(&ar->num_pending_mgmt_tx));
drivers/net/wireless/ath/ath12k/mac.c
9345
atomic_read(&ar->num_pending_mgmt_tx) > ATH12K_PRB_RSP_DROP_THRESHOLD) {
drivers/net/wireless/ath/ath6kl/sdio.c
611
return !atomic_read(&ar_sdio->irq_handling);
drivers/net/wireless/ath/ath6kl/sdio.c
621
if (atomic_read(&ar_sdio->irq_handling)) {
drivers/net/wireless/ath/ath9k/htc_drv_init.c
352
if (atomic_read(&priv->wmi->mwrite_cnt))
drivers/net/wireless/ath/ath9k/htc_drv_init.c
506
if (atomic_read(&priv->wmi->m_rmw_cnt))
drivers/net/wireless/ath/ath9k/htc_drv_init.c
83
if (atomic_read(&priv->htc->tgt_ready) > 0) {
drivers/net/wireless/ath/ath9k/mac.c
881
if (atomic_read(&ah->intr_ref_cnt) != 0) {
drivers/net/wireless/ath/ath9k/mac.c
883
atomic_read(&ah->intr_ref_cnt));
drivers/net/wireless/ath/ath9k/mac.c
900
atomic_read(&ah->intr_ref_cnt));
drivers/net/wireless/ath/carl9170/debug.c
222
ar->fw.mem_blocks, atomic_read(&ar->mem_allocs));
drivers/net/wireless/ath/carl9170/debug.c
225
atomic_read(&ar->mem_free_blocks),
drivers/net/wireless/ath/carl9170/debug.c
226
(atomic_read(&ar->mem_free_blocks) * ar->fw.mem_block_size) / 1024,
drivers/net/wireless/ath/carl9170/debug.c
673
atomic_read(&ar->pending_restarts));
drivers/net/wireless/ath/carl9170/debug.c
769
atomic_read(&ar->tx_anch_urbs));
drivers/net/wireless/ath/carl9170/debug.c
771
atomic_read(&ar->rx_anch_urbs));
drivers/net/wireless/ath/carl9170/debug.c
773
atomic_read(&ar->rx_work_urbs));
drivers/net/wireless/ath/carl9170/debug.c
775
atomic_read(&ar->rx_pool_urbs));
drivers/net/wireless/ath/carl9170/debug.c
778
atomic_read(&ar->tx_total_queued));
drivers/net/wireless/ath/carl9170/debug.c
780
atomic_read(&ar->tx_ampdu_scheduler));
drivers/net/wireless/ath/carl9170/debug.c
783
atomic_read(&ar->tx_total_pending));
drivers/net/wireless/ath/carl9170/debug.c
789
atomic_read(&ar->tx_ampdu_upload));
drivers/net/wireless/ath/carl9170/main.c
1696
if (atomic_read(&sta_info->pending_frames))
drivers/net/wireless/ath/carl9170/main.c
1710
return !!atomic_read(&ar->tx_total_queued);
drivers/net/wireless/ath/carl9170/main.c
245
if (atomic_read(&ar->tx_total_queued))
drivers/net/wireless/ath/carl9170/tx.c
1141
if (atomic_read(&ar->tx_ampdu_upload))
drivers/net/wireless/ath/carl9170/tx.c
286
if (atomic_read(&ar->tx_total_queued))
drivers/net/wireless/ath/carl9170/tx.c
290
if (!atomic_read(&ar->tx_ampdu_upload))
drivers/net/wireless/ath/carl9170/tx.c
651
if (!atomic_read(&ar->tx_total_queued))
drivers/net/wireless/ath/carl9170/tx.c
73
atomic_read(&ar->mem_free_blocks));
drivers/net/wireless/ath/carl9170/usb.c
321
while ((atomic_read(&ar->rx_anch_urbs) < AR9170_NUM_RX_URBS) &&
drivers/net/wireless/ath/carl9170/usb.c
439
if (atomic_read(&ar->rx_anch_urbs) == 0) {
drivers/net/wireless/ath/wil6210/interrupt.c
139
bool unmask_rx_htrsh = atomic_read(&wil->connected_vifs) > 0;
drivers/net/wireless/broadcom/b43/b43.h
820
#define b43_status(wldev) atomic_read(&(wldev)->__init_status)
drivers/net/wireless/broadcom/b43/leds.c
62
turn_on = atomic_read(&led->state) != LED_OFF;
drivers/net/wireless/broadcom/b43legacy/b43legacy.h
663
#define b43legacy_status(wldev) atomic_read(&(wldev)->__init_status)
drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c
810
WARN_ON(atomic_read(&sdiodev->freezer->freezing));
drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c
826
atomic_read(expect) == sdiodev->freezer->frozen_count);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c
845
atomic_read(&sdiodev->freezer->freezing);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/core.c
1499
return atomic_read(&ifp->pend_8021x_cnt);
drivers/net/wireless/broadcom/brcm80211/brcmfmac/msgbuf.c
1392
((qlen) && (atomic_read(&commonring->outstanding_tx) <
drivers/net/wireless/broadcom/brcm80211/brcmfmac/msgbuf.c
1419
while (retry && atomic_read(&commonring_del->outstanding_tx)) {
drivers/net/wireless/broadcom/brcm80211/brcmfmac/msgbuf.c
792
if ((force) || (atomic_read(&commonring->outstanding_tx) <
drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c
2548
if (!sdiodev->irq_en && !atomic_read(&bus->ipend)) {
drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c
2628
if (atomic_read(&bus->ipend) > 0) {
drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c
2716
if ((bus->clkstate == CLK_AVAIL) && !atomic_read(&bus->fcstate) &&
drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c
2737
} else if (atomic_read(&bus->intstatus) ||
drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c
2738
atomic_read(&bus->ipend) > 0 ||
drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c
2739
(!atomic_read(&bus->fcstate) &&
drivers/net/wireless/broadcom/brcm80211/brcmsmac/mac80211_if.c
1449
callbacks = atomic_read(&wl->callbacks) - ret_val;
drivers/net/wireless/broadcom/brcm80211/brcmsmac/mac80211_if.c
1457
SPINWAIT((atomic_read(&wl->callbacks) > callbacks), 100 * 1000);
drivers/net/wireless/broadcom/brcm80211/brcmsmac/mac80211_if.c
314
while (atomic_read(&wl->callbacks) > 0)
drivers/net/wireless/intel/ipw2x00/libipw_crypto.c
87
if (atomic_read(&entry->refcnt) != 0 && !force)
drivers/net/wireless/intel/iwlegacy/debug.c
851
atomic_read(&il->queue_stop_count[cnt]));
drivers/net/wireless/intel/iwlwifi/dvm/mac80211.c
1544
if (atomic_read(&sta_priv->pending_frames) > 0)
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/rx.c
1529
atomic_read(&trans_pcie->rba.req_pending) *
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/rx.c
519
int pending = atomic_read(&rba->req_pending);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/rx.c
580
pending = atomic_read(&rba->req_pending);
drivers/net/wireless/marvell/mwifiex/cmdevt.c
1122
if (!adapter->cmd_sent && !atomic_read(&adapter->tx_hw_pending) &&
drivers/net/wireless/marvell/mwifiex/cmdevt.c
1129
atomic_read(&adapter->tx_hw_pending) ? "T" : "",
drivers/net/wireless/marvell/mwifiex/cmdevt.c
133
atomic_read(&adapter->cmd_pending));
drivers/net/wireless/marvell/mwifiex/cmdevt.c
727
command, atomic_read(&adapter->cmd_pending));
drivers/net/wireless/marvell/mwifiex/debugfs.c
265
atomic_read(&phist_data->num_samples));
drivers/net/wireless/marvell/mwifiex/debugfs.c
279
value = atomic_read(&phist_data->rx_rate[i]);
drivers/net/wireless/marvell/mwifiex/debugfs.c
287
value = atomic_read(&phist_data->rx_rate[i]);
drivers/net/wireless/marvell/mwifiex/debugfs.c
295
value = atomic_read(&phist_data->snr[i]);
drivers/net/wireless/marvell/mwifiex/debugfs.c
300
value = atomic_read(&phist_data->noise_flr[i]);
drivers/net/wireless/marvell/mwifiex/debugfs.c
306
value = atomic_read(&phist_data->sig_str[i]);
drivers/net/wireless/marvell/mwifiex/main.c
1059
if (atomic_read(&card->port[i].tx_data_urb_pending)) {
drivers/net/wireless/marvell/mwifiex/main.c
1122
atomic_read(&cardp->tx_cmd_urb_pending));
drivers/net/wireless/marvell/mwifiex/main.c
1124
atomic_read(&cardp->port[0].tx_data_urb_pending));
drivers/net/wireless/marvell/mwifiex/main.c
1126
atomic_read(&cardp->port[1].tx_data_urb_pending));
drivers/net/wireless/marvell/mwifiex/main.c
1128
atomic_read(&cardp->rx_cmd_urb_pending));
drivers/net/wireless/marvell/mwifiex/main.c
1130
atomic_read(&cardp->rx_data_urb_pending));
drivers/net/wireless/marvell/mwifiex/main.c
1134
atomic_read(&adapter->tx_pending));
drivers/net/wireless/marvell/mwifiex/main.c
1136
atomic_read(&adapter->rx_pending));
drivers/net/wireless/marvell/mwifiex/main.c
1153
atomic_read(&priv->wmm_tx_pending[0]));
drivers/net/wireless/marvell/mwifiex/main.c
1155
atomic_read(&priv->wmm_tx_pending[1]));
drivers/net/wireless/marvell/mwifiex/main.c
1157
atomic_read(&priv->wmm_tx_pending[2]));
drivers/net/wireless/marvell/mwifiex/main.c
1159
atomic_read(&priv->wmm_tx_pending[3]));
drivers/net/wireless/marvell/mwifiex/main.c
1460
if (atomic_read(&adapter->rx_pending) ||
drivers/net/wireless/marvell/mwifiex/main.c
1461
atomic_read(&adapter->tx_pending) ||
drivers/net/wireless/marvell/mwifiex/main.c
1462
atomic_read(&adapter->cmd_pending)) {
drivers/net/wireless/marvell/mwifiex/main.c
1466
atomic_read(&adapter->rx_pending),
drivers/net/wireless/marvell/mwifiex/main.c
1467
atomic_read(&adapter->tx_pending),
drivers/net/wireless/marvell/mwifiex/main.c
1468
atomic_read(&adapter->cmd_pending));
drivers/net/wireless/marvell/mwifiex/main.c
193
(atomic_read(&adapter->rx_pending) < LOW_RX_PENDING)) {
drivers/net/wireless/marvell/mwifiex/main.c
276
if (atomic_read(&adapter->rx_pending) >= HIGH_RX_PENDING &&
drivers/net/wireless/marvell/mwifiex/tdls.c
73
if (atomic_read(&priv->wmm.highest_queued_prio) <
drivers/net/wireless/marvell/mwifiex/uap_txrx.c
315
if ((atomic_read(&adapter->pending_bridged_pkts) >=
drivers/net/wireless/marvell/mwifiex/uap_txrx.c
47
if ((atomic_read(&adapter->pending_bridged_pkts) <=
drivers/net/wireless/marvell/mwifiex/uap_txrx.c
97
if ((atomic_read(&adapter->pending_bridged_pkts) >=
drivers/net/wireless/marvell/mwifiex/usb.c
1080
if (atomic_read(&port->tx_data_urb_pending) >=
drivers/net/wireless/marvell/mwifiex/usb.c
1141
if (atomic_read(&port->tx_data_urb_pending) >=
drivers/net/wireless/marvell/mwifiex/usb.c
1193
if (atomic_read(&port->tx_data_urb_pending)
drivers/net/wireless/marvell/mwifiex/usb.c
1540
(!atomic_read(&card->rx_cmd_urb_pending)))
drivers/net/wireless/marvell/mwifiex/usb.c
235
if (atomic_read(&adapter->rx_pending) <= HIGH_RX_PENDING) {
drivers/net/wireless/marvell/mwifiex/usb.c
350
if (atomic_read(&card->rx_cmd_urb_pending) && card->rx_cmd.urb)
drivers/net/wireless/marvell/mwifiex/usb.c
356
if (atomic_read(&card->rx_data_urb_pending))
drivers/net/wireless/marvell/mwifiex/usb.c
573
if (atomic_read(&card->rx_cmd_urb_pending) && card->rx_cmd.urb)
drivers/net/wireless/marvell/mwifiex/usb.c
576
if (atomic_read(&card->rx_data_urb_pending))
drivers/net/wireless/marvell/mwifiex/usb.c
626
if (!atomic_read(&card->rx_data_urb_pending))
drivers/net/wireless/marvell/mwifiex/usb.c
631
if (!atomic_read(&card->rx_cmd_urb_pending)) {
drivers/net/wireless/marvell/mwifiex/usb.c
839
atomic_read(&port->tx_data_urb_pending) ==
drivers/net/wireless/marvell/mwifiex/util.c
778
if (atomic_read(&phist_data->num_samples) > MWIFIEX_HIST_MAX_SAMPLES)
drivers/net/wireless/marvell/mwifiex/wmm.c
1105
(atomic_read(&priv_tmp->wmm.tx_pkts_queued) == 0))
drivers/net/wireless/marvell/mwifiex/wmm.c
1114
for (i = atomic_read(hqp); i >= LOW_PRIO_TID; --i) {
drivers/net/wireless/marvell/mwifiex/wmm.c
1134
if (atomic_read(&priv_tmp->wmm.tx_pkts_queued) != 0) {
drivers/net/wireless/marvell/mwifiex/wmm.c
1151
if (atomic_read(hqp) > i)
drivers/net/wireless/marvell/mwifiex/wmm.c
1527
if (atomic_read(&adapter->tx_queued) >=
drivers/net/wireless/marvell/mwifiex/wmm.c
484
if (atomic_read(&priv->wmm.tx_pkts_queued))
drivers/net/wireless/marvell/mwifiex/wmm.c
661
tx_pkts_queued = atomic_read(&priv->wmm.tx_pkts_queued);
drivers/net/wireless/marvell/mwifiex/wmm.c
705
tx_pkts_queued = atomic_read(&priv->wmm.tx_pkts_queued);
drivers/net/wireless/marvell/mwifiex/wmm.c
886
if (atomic_read(&priv->wmm.highest_queued_prio) <
drivers/net/wireless/marvell/mwl8k.c
1574
if (atomic_read(&priv->watchdog_event_pending))
drivers/net/wireless/marvell/mwl8k.c
1599
if (atomic_read(&priv->watchdog_event_pending)) {
drivers/net/wireless/mediatek/mt76/mcu.c
82
if (test_bit(MT76_RESET, &dev->phy.state) && atomic_read(&dev->bus_hung))
drivers/net/wireless/mediatek/mt76/mt7921/mac.c
678
if (mt76_is_sdio(&dev->mt76) && atomic_read(&dev->mt76.bus_hung))
drivers/net/wireless/mediatek/mt76/mt7921/sdio_mac.c
131
if (!atomic_read(&mdev->bus_hung))
drivers/net/wireless/mediatek/mt76/mt7921/sdio_mac.c
154
if (atomic_read(&dev->mt76.bus_hung))
drivers/net/wireless/mediatek/mt76/mt7921/sdio_mac.c
40
if (atomic_read(&dev->mt76.bus_hung))
drivers/net/wireless/mediatek/mt76/tx.c
473
if (atomic_read(&wcid->non_aql_packets) >= MT_MAX_NON_AQL_PKT)
drivers/net/wireless/mediatek/mt7601u/debugfs.c
71
atomic_read(&dev->avg_ampdu_len));
drivers/net/wireless/quantenna/qtnfmac/util.h
26
return atomic_read(&list->size);
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
221
seqno = atomic_read(&intf->seqno);
drivers/net/wireless/realtek/rtw89/chan.c
3313
cur = atomic_read(&hal->roc_chanctx_idx);
drivers/net/wireless/realtek/rtw89/chan.c
422
roc_idx = atomic_read(&hal->roc_chanctx_idx);
drivers/net/wireless/realtek/rtw89/core.h
7107
enum rtw89_chanctx_idx roc_idx = atomic_read(&hal->roc_chanctx_idx);
drivers/net/wireless/rsi/rsi_91x_coex.c
63
} while (atomic_read(&coex_cb->coex_tx_thread.thread_done) == 0);
drivers/net/wireless/rsi/rsi_91x_main.c
266
} while (atomic_read(&common->tx_thread.thread_done) == 0);
drivers/net/wireless/rsi/rsi_91x_sdio_ops.c
74
} while (!atomic_read(&sdev->rx_thread.thread_done));
drivers/net/wireless/rsi/rsi_91x_usb_ops.c
40
if (atomic_read(&dev->rx_thread.thread_done))
drivers/net/wireless/rsi/rsi_common.h
40
(atomic_read(&event->event_condition) == 0));
drivers/net/wireless/rsi/rsi_common.h
43
(atomic_read(&event->event_condition) == 0),
drivers/net/wireless/silabs/wfx/queue.c
140
WARN_ON(!atomic_read(&queue->pending_frames));
drivers/net/wireless/silabs/wfx/queue.c
166
WARN_ON(!atomic_read(&queue->pending_frames));
drivers/net/wireless/silabs/wfx/queue.c
228
return atomic_read(&queue->pending_frames) * queue->priority;
drivers/net/wireless/silabs/wfx/queue.c
312
if (atomic_read(&wdev->tx_lock))
drivers/net/wireless/silabs/wfx/queue.c
88
WARN_ON(atomic_read(&wvif->tx_queue[i].pending_frames));
drivers/net/wireless/silabs/wfx/traces.h
465
__entry->hw[j] = atomic_read(&queue->pending_frames);
drivers/net/wireless/st/cw1200/bh.c
134
(CW1200_BH_SUSPENDED == atomic_read(&priv->bh_suspend)),
drivers/net/wireless/st/cw1200/bh.c
149
(CW1200_BH_RESUMED == atomic_read(&priv->bh_suspend)),
drivers/net/wireless/st/cw1200/bh.c
428
!atomic_read(&priv->recent_scan)) {
drivers/net/wireless/st/cw1200/bh.c
442
(atomic_read(&priv->bh_rx) == 0) &&
drivers/net/wireless/st/cw1200/bh.c
443
(atomic_read(&priv->bh_tx) == 0))
drivers/net/wireless/st/cw1200/bh.c
453
0 : atomic_read(&priv->bh_suspend);
drivers/net/wireless/st/cw1200/bh.c
503
!atomic_read(&priv->recent_scan)) {
drivers/net/wireless/st/cw1200/bh.c
522
CW1200_BH_RESUME == atomic_read(&priv->bh_suspend));
drivers/net/wireless/st/cw1200/debug.c
232
atomic_read(&priv->bh_term) ? "terminated" : "alive");
drivers/net/wireless/st/cw1200/debug.c
234
atomic_read(&priv->bh_rx));
drivers/net/wireless/st/cw1200/debug.c
236
atomic_read(&priv->bh_tx));
drivers/net/wireless/st/cw1200/debug.c
260
atomic_read(&priv->tx_lock) ? "locked" : "unlocked");
drivers/net/wireless/st/cw1200/debug.c
261
if (atomic_read(&priv->tx_lock))
drivers/net/wireless/st/cw1200/debug.c
263
atomic_read(&priv->tx_lock));
drivers/net/wireless/st/cw1200/debug.c
284
atomic_read(&priv->scan.in_progress) ? "active" : "idle");
drivers/net/wireless/st/cw1200/pm.c
149
if (atomic_read(&priv->bh_rx)) {
drivers/net/wireless/st/cw1200/pm.c
272
if (atomic_read(&priv->bh_rx)) {
drivers/net/wireless/st/cw1200/sta.c
1245
if (atomic_read(&priv->scan.in_progress)) {
drivers/net/wireless/st/cw1200/sta.c
1389
if (atomic_read(&priv->scan.in_progress)) {
drivers/net/wireless/st/cw1200/sta.c
159
atomic_read(&priv->tx_lock),
drivers/net/wireless/st/cw1200/sta.c
173
if (!priv->vif->p2p && !atomic_read(&priv->tx_lock))
drivers/net/wireless/st/cw1200/wsm.c
1184
BUG_ON(!atomic_read(&priv->tx_lock));
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
1025
if (!atomic_read(&tx->enabled)) {
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
1088
if (!atomic_read(&tx->enabled) || !tx->watchdog_enabled)
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
364
if (atomic_read(&intr->read_regs_enabled)) {
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
390
} else if (atomic_read(&intr->read_regs_enabled)) {
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
420
atomic_read(&intr->read_regs_enabled))
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
463
if (hdr->id != USB_INT_ID_REGS && atomic_read(&intr->read_regs_enabled))
drivers/net/wwan/t7xx/t7xx_hif_dpmaif_rx.c
1124
timeout = readx_poll_timeout_atomic(atomic_read, &rxq->rx_processing, value,
drivers/net/wwan/t7xx/t7xx_hif_dpmaif_tx.c
512
if (atomic_read(&txq->tx_budget) <= t7xx_skb_drb_cnt(skb)) {
drivers/net/wwan/t7xx/t7xx_hif_dpmaif_tx.c
654
while (atomic_read(&txq->tx_processing)) {
drivers/net/wwan/t7xx/t7xx_netdev.c
158
if (atomic_read(&ccmni->usage) > 0)
drivers/net/wwan/t7xx/t7xx_netdev.c
179
if (atomic_read(&ccmni->usage) > 0) {
drivers/net/wwan/t7xx/t7xx_netdev.c
185
if (atomic_read(&ctlb->napi_usr_refcnt))
drivers/net/wwan/t7xx/t7xx_netdev.c
199
if (atomic_read(&ccmni->usage) > 0)
drivers/net/wwan/t7xx/t7xx_netdev.c
209
if (atomic_read(&ctlb->napi_usr_refcnt))
drivers/net/wwan/t7xx/t7xx_netdev.c
217
if (atomic_read(&ccmni->usage) > 0)
drivers/net/wwan/t7xx/t7xx_netdev.c
448
if (netif_running(ccmni->dev) && atomic_read(&ccmni->usage) > 0) {
drivers/net/wwan/t7xx/t7xx_netdev.c
460
if (atomic_read(&ccmni->usage) > 0) {
drivers/net/wwan/t7xx/t7xx_pci.c
348
if (atomic_read(&t7xx_dev->md_pm_state) < MTK_PM_RESUMED)
drivers/net/wwan/t7xx/t7xx_pci.c
383
if (atomic_read(&t7xx_dev->md_pm_state) < MTK_PM_RESUMED)
drivers/net/wwan/t7xx/t7xx_pci.c
415
if (atomic_read(&t7xx_dev->md_pm_state) <= MTK_PM_INIT ||
drivers/net/wwan/t7xx/t7xx_pci.c
577
if (atomic_read(&t7xx_dev->md_pm_state) <= MTK_PM_INIT) {
drivers/net/wwan/t7xx/t7xx_port_wwan.c
193
if (!atomic_read(&port->usage_cnt) || !port->chan_enable) {
drivers/net/wwan/t7xx/t7xx_port_wwan.c
42
if (atomic_read(&port_mtk->usage_cnt))
drivers/net/xen-netback/netback.c
1594
!atomic_read(&queue->inflight_packets);
drivers/net/xen-netfront.c
2558
data[i] = atomic_read((atomic_t *)(np + xennet_stats[i].offset));
drivers/ntb/test/ntb_perf.c
1104
atomic_read(&perf->tsync) <= 0);
drivers/ntb/test/ntb_perf.c
851
return likely(atomic_read(&pthr->perf->tsync) > 0) ? 0 : -EINTR;
drivers/ntb/test/ntb_perf.c
972
(atomic_read(&pthr->dma_sync) == 0 ||
drivers/ntb/test/ntb_perf.c
973
atomic_read(&perf->tsync) < 0));
drivers/ntb/test/ntb_perf.c
975
if (atomic_read(&perf->tsync) < 0)
drivers/ntb/test/ntb_pingpong.c
174
count = atomic_read(&pp->count);
drivers/nvdimm/bus.c
1019
if (atomic_read(&nvdimm->busy))
drivers/nvdimm/bus.c
425
atomic_read(&nvdimm_bus->ioctl_active) == 0);
drivers/nvdimm/dimm_devs.c
314
return sprintf(buf, "%s\n", atomic_read(&nvdimm->busy)
drivers/nvdimm/security.c
559
if (atomic_read(&nvdimm->busy)) {
drivers/nvdimm/security.c
567
if (atomic_read(&nvdimm->busy)) {
drivers/nvme/host/fc.c
473
if (atomic_read(&lport->act_rport_cnt) == 0)
drivers/nvme/host/fc.c
840
if (atomic_read(&rport->act_ctrl_cnt) == 0)
drivers/nvme/host/multipath.c
1084
return sysfs_emit(buf, "%d\n", atomic_read(&ns->ctrl->nr_active));
drivers/nvme/host/multipath.c
408
depth = atomic_read(&ns->ctrl->nr_active);
drivers/nvme/host/tcp.c
1661
int num_queues = atomic_read(&nvme_tcp_cpu_queues[cpu]);
drivers/nvme/target/fc.c
2137
sqtail = atomic_read(&q->sqtail) % q->sqsize;
drivers/nvme/target/fc.c
958
(!atomic_read(&queue->connected) ||
drivers/nvme/target/pr.c
836
data->gen = cpu_to_le32(atomic_read(&pr->generation));
drivers/pci/hotplug/cpci_hotplug_core.c
407
inserted, extracted, atomic_read(&extracting));
drivers/pci/hotplug/cpci_hotplug_core.c
410
else if (!atomic_read(&extracting)) {
drivers/pci/hotplug/cpci_hotplug_core.c
440
} while (atomic_read(&extracting) && !kthread_should_stop());
drivers/pci/hotplug/cpci_hotplug_core.c
472
} while (atomic_read(&extracting) && !kthread_should_stop());
drivers/pci/hotplug/pciehp_ctrl.c
409
!atomic_read(&ctrl->pending_events) &&
drivers/pci/hotplug/pciehp_ctrl.c
443
!atomic_read(&ctrl->pending_events) &&
drivers/pci/hotplug/pciehp_hpc.c
809
atomic_read(&ctrl->pending_events))
drivers/pci/pci-sysfs.c
357
return sysfs_emit(buf, "%u\n", atomic_read(&pdev->enable_cnt));
drivers/pci/pci.c
2200
dev_WARN_ONCE(&dev->dev, atomic_read(&dev->enable_cnt) <= 0,
drivers/pci/pcie/aer.c
1044
if (atomic_read(&dev->enable_cnt) == 0)
drivers/pci/switch/switchtec.c
652
if (stuser->event_cnt != atomic_read(&stdev->event_cnt))
drivers/pci/switch/switchtec.c
921
stuser->event_cnt = atomic_read(&stdev->event_cnt);
drivers/pci/switch/switchtec.c
98
stuser->event_cnt = atomic_read(&stdev->event_cnt);
drivers/pcmcia/ds.c
1322
if (atomic_read(&p_dev->socket->present) != 0)
drivers/perf/arm-cci.c
1332
if (atomic_read(active_events) == 0)
drivers/phy/motorola/phy-cpcap-usb.c
338
if (!atomic_read(&ddata->active))
drivers/phy/ti/phy-twl4030-usb.c
404
if (!twl->runtime_suspended && !atomic_read(&twl->connected)) {
drivers/pinctrl/qcom/tlmm-test.c
182
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 0);
drivers/pinctrl/qcom/tlmm-test.c
239
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
263
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
288
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
313
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
335
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
357
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
383
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
384
KUNIT_ASSERT_EQ(test, atomic_read(&priv->thread_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
409
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
410
KUNIT_ASSERT_EQ(test, atomic_read(&priv->thread_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
434
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
435
KUNIT_ASSERT_EQ(test, atomic_read(&priv->thread_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
459
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
460
KUNIT_ASSERT_EQ(test, atomic_read(&priv->thread_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
484
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
485
KUNIT_ASSERT_EQ(test, atomic_read(&priv->thread_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
509
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
510
KUNIT_ASSERT_EQ(test, atomic_read(&priv->thread_count), 10);
drivers/pinctrl/qcom/tlmm-test.c
532
before_edge = atomic_read(&priv->intr_count);
drivers/pinctrl/qcom/tlmm-test.c
536
after_edge = atomic_read(&priv->intr_count);
drivers/pinctrl/qcom/tlmm-test.c
546
KUNIT_ASSERT_EQ(test, atomic_read(&priv->intr_count), 1);
drivers/pinctrl/renesas/pinctrl-rzg2l.c
3207
if (!atomic_read(&pctrl->wakeup_path))
drivers/pinctrl/renesas/pinctrl-rzg2l.c
3224
if (!atomic_read(&pctrl->wakeup_path)) {
drivers/pinctrl/renesas/pinctrl-rzt2h.c
1015
if (atomic_read(&pctrl->wakeup_path))
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
677
value = atomic_read(&service->poll_flags);
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
683
value = atomic_read(&state->poll_services[index]);
drivers/platform/surface/aggregator/ssh_packet_layer.c
1088
if (kthread_should_stop() || !atomic_read(&ptl->tx.running))
drivers/platform/surface/aggregator/ssh_packet_layer.c
1103
while (!kthread_should_stop() && atomic_read(&ptl->tx.running)) {
drivers/platform/surface/aggregator/ssh_packet_layer.c
1295
if (atomic_read(&ptl->pending.count) < SSH_PTL_MAX_PENDING)
drivers/platform/surface/aggregator/ssh_packet_layer.c
1345
(atomic_read(&ptl->pending.count) < SSH_PTL_MAX_PENDING))
drivers/platform/surface/aggregator/ssh_packet_layer.c
1481
if (atomic_read(&p->ptl->pending.count) < SSH_PTL_MAX_PENDING)
drivers/platform/surface/aggregator/ssh_packet_layer.c
1511
trace_ssam_ptl_timeout_reap(atomic_read(&ptl->pending.count));
drivers/platform/surface/aggregator/ssh_packet_layer.c
888
return !atomic_read(&ptl->pending.count);
drivers/platform/surface/aggregator/ssh_packet_layer.c
899
return atomic_read(&ptl->pending.count) < SSH_PTL_MAX_PENDING;
drivers/platform/surface/aggregator/ssh_request_layer.c
1244
pending = atomic_read(&rtl->pending.count);
drivers/platform/surface/aggregator/ssh_request_layer.c
202
return !atomic_read(&rtl->pending.count);
drivers/platform/surface/aggregator/ssh_request_layer.c
204
return atomic_read(&rtl->pending.count) < SSH_RTL_MAX_PENDING;
drivers/platform/surface/aggregator/ssh_request_layer.c
307
if (atomic_read(&rtl->pending.count) >= SSH_RTL_MAX_PENDING)
drivers/platform/surface/aggregator/ssh_request_layer.c
829
trace_ssam_rtl_timeout_reap(atomic_read(&rtl->pending.count));
drivers/platform/x86/intel/ifs/runtest.c
169
while (atomic_read(t) < all_cpus) {
drivers/pmdomain/core.c
1015
if (atomic_read(&genpd->sd_count) > 0)
drivers/pmdomain/core.c
1418
|| atomic_read(&genpd->sd_count) > 0)
drivers/pmdomain/core.c
272
if (!WARN_ON(atomic_read(&genpd->sd_count) == 0))
drivers/pmdomain/core.c
973
genpd->stay_on || atomic_read(&genpd->sd_count) > 0)
drivers/power/supply/ab8500_charger.c
3300
if (atomic_read(&di->current_stepping_sessions))
drivers/power/supply/cpcap-battery.c
876
if (!atomic_read(&ddata->active))
drivers/power/supply/cpcap-charger.c
741
if (!atomic_read(&ddata->active))
drivers/power/supply/power_supply_core.c
1249
if (atomic_read(&psy->use_cnt) <= 0) {
drivers/power/supply/power_supply_core.c
1307
if (atomic_read(&psy->use_cnt) <= 0)
drivers/power/supply/power_supply_core.c
1381
if (atomic_read(&psy->use_cnt) <= 0 ||
drivers/power/supply/rt9756.c
414
*pval = atomic_read(&data->usb_type);
drivers/ptp/ptp_vmclock.c
407
old_seq = atomic_read(&fst->seq);
drivers/ptp/ptp_vmclock.c
453
if (atomic_read(&fst->seq) != seq)
drivers/rapidio/devices/rio_mport_cdev.c
1876
if (atomic_read(&chdev->active) == 0)
drivers/rapidio/devices/rio_mport_cdev.c
2055
if (atomic_read(&md->active) == 0)
drivers/rapidio/rio_cm.c
2042
if (atomic_read(&rdev->state) != RIO_DEVICE_SHUTDOWN)
drivers/ras/debugfs.c
18
return atomic_read(&trace_count);
drivers/remoteproc/stm32_rproc.c
907
if (atomic_read(&rproc->power) > 0)
drivers/reset/core.c
227
if (WARN_ON(atomic_read(&rstc->deassert_count) != 0))
drivers/reset/core.c
344
if (WARN_ON(atomic_read(&rstc->deassert_count) != 0))
drivers/reset/core.c
413
if (WARN_ON(atomic_read(&rstc->deassert_count) != 0))
drivers/reset/core.c
454
if (WARN_ON(atomic_read(&rstc->triggered_count) != 0))
drivers/reset/core.c
457
if (WARN_ON(atomic_read(&rstc->deassert_count) == 0))
drivers/reset/core.c
542
if (WARN_ON(atomic_read(&rstc->triggered_count) != 0))
drivers/rtc/rtc-renesas-rtca3.c
185
if (atomic_read(&priv->alrm_sstep) > RTCA3_ALRM_SSTEP_IRQ) {
drivers/rtc/rtc-renesas-rtca3.c
189
if (atomic_read(&priv->alrm_sstep) == RTCA3_ALRM_SSTEP_IRQ) {
drivers/rtc/rtc-renesas-rtca3.c
802
if (atomic_read(&priv->alrm_sstep) != RTCA3_ALRM_SSTEP_DONE)
drivers/s390/block/dasd.c
1369
cqr->trkcount = atomic_read(&cqr->block->trkcount);
drivers/s390/block/dasd.c
3555
open_count = atomic_read(&device->block->open_count);
drivers/s390/block/dasd_3990_erp.c
2232
if (atomic_read(&device->path[pos].error_count) >=
drivers/s390/block/dasd_devmap.c
870
wait_event(dasd_delete_wq, atomic_read(&device->ref_count) == 0);
drivers/s390/block/dasd_eckd.c
3121
if (cqr->trkcount != atomic_read(&block->trkcount)) {
drivers/s390/block/dasd_ioctl.c
533
dasd_info->open_count = atomic_read(&block->open_count);
drivers/s390/block/dcssblk.c
354
if (atomic_read(&dev_info->use_count)) {
drivers/s390/block/dcssblk.c
462
if (atomic_read(&dev_info->use_count) == 0) {
drivers/s390/block/dcssblk.c
804
if (atomic_read(&dev_info->use_count) != 0) {
drivers/s390/char/monreader.c
209
if (!atomic_read(&monpriv->read_ready))
drivers/s390/char/monreader.c
309
atomic_read(&monpriv->iucv_connected) ||
drivers/s390/char/monreader.c
310
atomic_read(&monpriv->iucv_severed));
drivers/s390/char/monreader.c
311
if (atomic_read(&monpriv->iucv_severed)) {
drivers/s390/char/monreader.c
376
atomic_read(&monpriv->read_ready) ||
drivers/s390/char/monreader.c
377
atomic_read(&monpriv->iucv_severed));
drivers/s390/char/monreader.c
380
if (unlikely(atomic_read(&monpriv->iucv_severed)))
drivers/s390/char/monreader.c
429
if (unlikely(atomic_read(&monpriv->iucv_severed)))
drivers/s390/char/monreader.c
431
if (atomic_read(&monpriv->read_ready))
drivers/s390/char/raw3270.c
1086
wait_event(raw3270_wait_queue, atomic_read(&view->ref_count) == 0);
drivers/s390/char/vmlogrdr.c
416
if (atomic_read(&priv->receive_ready)) {
drivers/s390/char/vmlogrdr.c
485
atomic_read(&priv->receive_ready));
drivers/s390/cio/crw.c
144
wait_event(crw_handler_wait_q, atomic_read(&crw_nr_req) == 0);
drivers/s390/cio/crw.c
70
atomic_read(&crw_nr_req) > 0);
drivers/s390/cio/css.c
1297
atomic_read(&css_eval_scheduled) == 0);
drivers/s390/cio/device.c
156
atomic_read(&ccw_device_init_count) == 0);
drivers/s390/cio/qdio_debug.c
111
atomic_read(&q->nr_buf_used), q->first_to_check);
drivers/s390/cio/qdio_main.c
450
count = atomic_read(&q->nr_buf_used);
drivers/s390/cio/qdio_main.c
530
if (!atomic_read(&q->nr_buf_used))
drivers/s390/cio/qdio_main.c
552
count = atomic_read(&q->nr_buf_used);
drivers/s390/cio/qdio_thinint.c
89
if (!atomic_read(&q_indicators[TIQDIO_SHARED_IND].count))
drivers/s390/crypto/ap_bus.c
2267
if (ac->maxmsgsize > atomic_read(&ap_max_msg_size)) {
drivers/s390/crypto/ap_bus.c
2271
atomic_read(&ap_max_msg_size));
drivers/s390/crypto/ap_bus.c
600
maxmsgsize = atomic_read(&ap_max_msg_size);
drivers/s390/crypto/zcrypt_api.c
1677
return put_user(atomic_read(&zcrypt_open_count),
drivers/s390/crypto/zcrypt_api.c
614
weight += atomic_read(&zc->load);
drivers/s390/crypto/zcrypt_api.c
615
pref_weight += atomic_read(&pref_zc->load);
drivers/s390/crypto/zcrypt_api.c
629
weight += atomic_read(&zq->load);
drivers/s390/crypto/zcrypt_api.c
630
pref_weight += atomic_read(&pref_zq->load);
drivers/s390/crypto/zcrypt_card.c
121
return sysfs_emit(buf, "%d\n", atomic_read(&zc->load));
drivers/s390/crypto/zcrypt_queue.c
88
return sysfs_emit(buf, "%d\n", atomic_read(&zq->load));
drivers/s390/net/fsm.c
127
int st = atomic_read(&fi->state);
drivers/s390/net/fsm.h
147
int state = atomic_read(&fi->state);
drivers/s390/net/fsm.h
215
return atomic_read(&fi->state);
drivers/s390/net/qeth_core.h
537
return atomic_read(&queue->used_buffers) >= QDIO_MAX_BUFFERS_PER_Q;
drivers/s390/net/qeth_core.h
542
return atomic_read(&queue->used_buffers) == 0;
drivers/s390/net/qeth_core_main.c
1493
if (atomic_read(&card->qdio.state) != QETH_QDIO_UNINITIALIZED)
drivers/s390/net/qeth_core_main.c
3549
if ((atomic_read(&buffer->state) == QETH_QDIO_BUF_EMPTY) &&
drivers/s390/net/qeth_core_main.c
3567
if (atomic_read(&queue->used_buffers)
drivers/s390/net/qeth_core_main.c
3586
if (atomic_read(&queue->used_buffers)
drivers/s390/net/qeth_core_main.c
3641
if ((atomic_read(&queue->used_buffers) >=
drivers/s390/net/qeth_core_main.c
3644
!atomic_read(&queue->set_pci_flags_count)) {
drivers/s390/net/qeth_core_main.c
3651
if (!atomic_read(&queue->set_pci_flags_count)) {
drivers/s390/net/qeth_core_main.c
3684
atomic_read(&queue->used_buffers) >= 32) {
drivers/s390/net/qeth_core_main.c
3721
if ((atomic_read(&queue->used_buffers) <= QETH_LOW_WATERMARK_PACK) ||
drivers/s390/net/qeth_core_main.c
3722
!atomic_read(&queue->set_pci_flags_count)) {
drivers/s390/net/qeth_core_main.c
3730
if (!flush_cnt && !atomic_read(&queue->set_pci_flags_count))
drivers/s390/net/qeth_core_main.c
4158
if (atomic_read(&buffer->state) != QETH_QDIO_BUF_EMPTY)
drivers/s390/net/qeth_core_main.c
4180
if (atomic_read(&buffer->state) != QETH_QDIO_BUF_EMPTY)
drivers/s390/net/qeth_core_main.c
4238
if (atomic_read(&buffer->state) != QETH_QDIO_BUF_EMPTY)
drivers/s390/net/qeth_core_main.c
4258
if (atomic_read(&buffer->state) !=
drivers/s390/net/qeth_core_main.c
5648
!atomic_read(&card->force_alloc_skb));
drivers/s390/net/qeth_core_main.c
5974
!atomic_read(&queue->set_pci_flags_count))
drivers/s390/scsi/zfcp_aux.c
278
if (atomic_read(&adapter->stat_miss) >=
drivers/s390/scsi/zfcp_dbf.c
312
rec->adapter_status = atomic_read(&adapter->status);
drivers/s390/scsi/zfcp_dbf.c
314
rec->port_status = atomic_read(&port->status);
drivers/s390/scsi/zfcp_dbf.c
319
rec->lun_status = atomic_read(&sdev_to_zfcp(sdev)->status);
drivers/s390/scsi/zfcp_dbf.c
422
atomic_read(&sdev_to_zfcp(erp->sdev)->erp_counter);
drivers/s390/scsi/zfcp_dbf.c
424
rec->u.run.rec_count = atomic_read(&erp->port->erp_counter);
drivers/s390/scsi/zfcp_dbf.c
426
rec->u.run.rec_count = atomic_read(&erp->adapter->erp_counter);
drivers/s390/scsi/zfcp_def.h
345
return atomic_read(&adapter->status) & ZFCP_STATUS_ADAPTER_MB_ACT;
drivers/s390/scsi/zfcp_erp.c
1045
int p_status = atomic_read(&port->status);
drivers/s390/scsi/zfcp_erp.c
1086
int p_status = atomic_read(&port->status);
drivers/s390/scsi/zfcp_erp.c
110
if (atomic_read(&adapter->status) & ZFCP_STATUS_COMMON_ERP_INUSE)
drivers/s390/scsi/zfcp_erp.c
1158
if (atomic_read(&zfcp_sdev->status) & ZFCP_STATUS_COMMON_OPEN)
drivers/s390/scsi/zfcp_erp.c
1163
if (atomic_read(&zfcp_sdev->status) & ZFCP_STATUS_COMMON_OPEN)
drivers/s390/scsi/zfcp_erp.c
1170
if (atomic_read(&zfcp_sdev->status) & ZFCP_STATUS_COMMON_OPEN)
drivers/s390/scsi/zfcp_erp.c
1194
if (atomic_read(&zfcp_sdev->erp_counter) > ZFCP_MAX_ERPS) {
drivers/s390/scsi/zfcp_erp.c
1212
if (atomic_read(&zfcp_sdev->status) & ZFCP_STATUS_COMMON_ERP_FAILED) {
drivers/s390/scsi/zfcp_erp.c
1229
if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_NOESC) {
drivers/s390/scsi/zfcp_erp.c
1234
if (atomic_read(&port->erp_counter) > ZFCP_MAX_ERPS) {
drivers/s390/scsi/zfcp_erp.c
1250
if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_FAILED) {
drivers/s390/scsi/zfcp_erp.c
1268
if (atomic_read(&adapter->erp_counter) > ZFCP_MAX_ERPS) {
drivers/s390/scsi/zfcp_erp.c
1284
if (atomic_read(&adapter->status) & ZFCP_STATUS_COMMON_ERP_FAILED) {
drivers/s390/scsi/zfcp_erp.c
130
if (atomic_read(&zsdev->status) & ZFCP_STATUS_COMMON_ERP_FAILED)
drivers/s390/scsi/zfcp_erp.c
1318
int status = atomic_read(target_status);
drivers/s390/scsi/zfcp_erp.c
134
if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_FAILED)
drivers/s390/scsi/zfcp_erp.c
138
if (atomic_read(&port->status) &
drivers/s390/scsi/zfcp_erp.c
1421
port_status = atomic_read(&port->status);
drivers/s390/scsi/zfcp_erp.c
1443
lun_status = atomic_read(&zsdev->status);
drivers/s390/scsi/zfcp_erp.c
147
if (atomic_read(&adapter->status) &
drivers/s390/scsi/zfcp_erp.c
1681
!(atomic_read(&adapter->status) &
drivers/s390/scsi/zfcp_erp.c
172
l_status = atomic_read(&zfcp_sdev->status);
drivers/s390/scsi/zfcp_erp.c
175
p_status = atomic_read(&port->status);
drivers/s390/scsi/zfcp_erp.c
183
p_status = atomic_read(&port->status);
drivers/s390/scsi/zfcp_erp.c
188
p_status = atomic_read(&port->status);
drivers/s390/scsi/zfcp_erp.c
191
a_status = atomic_read(&adapter->status);
drivers/s390/scsi/zfcp_erp.c
201
a_status = atomic_read(&adapter->status);
drivers/s390/scsi/zfcp_erp.c
238
if (!(atomic_read(&zfcp_sdev->status) &
drivers/s390/scsi/zfcp_erp.c
252
if (!(atomic_read(&port->status) & ZFCP_STATUS_COMMON_RUNNING))
drivers/s390/scsi/zfcp_erp.c
263
if (!(atomic_read(&adapter->status) &
drivers/s390/scsi/zfcp_erp.c
529
return (atomic_read(status) ^ mask) & mask;
drivers/s390/scsi/zfcp_erp.c
766
if (!(atomic_read(&adapter->status) &
drivers/s390/scsi/zfcp_erp.c
777
if (!(atomic_read(&adapter->status) & ZFCP_STATUS_ADAPTER_XCONFIG_OK))
drivers/s390/scsi/zfcp_erp.c
87
if (atomic_read(&zfcp_sdev->status) & ZFCP_STATUS_COMMON_ERP_INUSE)
drivers/s390/scsi/zfcp_erp.c
942
if (atomic_read(&adapter->status) & ZFCP_STATUS_COMMON_OPEN) {
drivers/s390/scsi/zfcp_erp.c
95
if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_INUSE)
drivers/s390/scsi/zfcp_erp.c
975
int status = atomic_read(&port->status);
drivers/s390/scsi/zfcp_fc.c
1090
if (!(atomic_read(&adapter->status) & ZFCP_STATUS_COMMON_OPEN))
drivers/s390/scsi/zfcp_fc.c
183
if ((atomic_read(&wka_port->refcount) != 0) ||
drivers/s390/scsi/zfcp_fc.c
528
!(atomic_read(&port->status) & ZFCP_STATUS_COMMON_OPEN)) {
drivers/s390/scsi/zfcp_fc.c
603
if (atomic_read(&port->status) & ZFCP_STATUS_PORT_LINK_TEST)
drivers/s390/scsi/zfcp_fc.c
721
if (!(atomic_read(&port->status) & ZFCP_STATUS_COMMON_NOESC))
drivers/s390/scsi/zfcp_fsf.c
1060
if (unlikely(!(atomic_read(&zfcp_sdev->status) &
drivers/s390/scsi/zfcp_fsf.c
148
if (atomic_read(&adapter->status) & ZFCP_STATUS_ADAPTER_LINK_UNPLUGGED)
drivers/s390/scsi/zfcp_fsf.c
2571
if (unlikely(!(atomic_read(&zfcp_sdev->status) &
drivers/s390/scsi/zfcp_fsf.c
2576
if (atomic_read(&qdio->req_q_free) <= 0) {
drivers/s390/scsi/zfcp_fsf.c
2683
if (unlikely(!(atomic_read(&zfcp_sdev->status) &
drivers/s390/scsi/zfcp_fsf.c
489
BUG_ON(atomic_read(&adapter->status) & ZFCP_STATUS_ADAPTER_QDIOUP);
drivers/s390/scsi/zfcp_fsf.c
890
req->qdio_req.qdio_outb_usage = atomic_read(&qdio->req_q_free);
drivers/s390/scsi/zfcp_qdio.c
168
if (atomic_read(&qdio->req_q_free) < QDIO_MAX_BUFFERS_PER_Q)
drivers/s390/scsi/zfcp_qdio.c
267
if (atomic_read(&qdio->req_q_free) ||
drivers/s390/scsi/zfcp_qdio.c
268
!(atomic_read(&qdio->adapter->status) & ZFCP_STATUS_ADAPTER_QDIOUP))
drivers/s390/scsi/zfcp_qdio.c
290
if (!(atomic_read(&qdio->adapter->status) & ZFCP_STATUS_ADAPTER_QDIOUP))
drivers/s390/scsi/zfcp_qdio.c
341
if (atomic_read(&qdio->req_q_free) <= 2 * ZFCP_QDIO_MAX_SBALS_PER_REQ)
drivers/s390/scsi/zfcp_qdio.c
396
if (!(atomic_read(&adapter->status) & ZFCP_STATUS_ADAPTER_QDIOUP))
drivers/s390/scsi/zfcp_qdio.c
416
count = atomic_read(&qdio->req_q_free);
drivers/s390/scsi/zfcp_qdio.c
454
if (atomic_read(&adapter->status) & ZFCP_STATUS_ADAPTER_QDIOUP)
drivers/s390/scsi/zfcp_qdio.c
590
if (atomic_read(&adapter->status) & ZFCP_STATUS_ADAPTER_SIOSL_ISSUED)
drivers/s390/scsi/zfcp_qdio.c
60
used = QDIO_MAX_BUFFERS_PER_Q - atomic_read(&qdio->req_q_free);
drivers/s390/scsi/zfcp_qdio.c
97
if (atomic_read(&qdio->req_q_free) < QDIO_MAX_BUFFERS_PER_Q)
drivers/s390/scsi/zfcp_qdio.h
121
int count = min(atomic_read(&qdio->req_q_free),
drivers/s390/scsi/zfcp_qdio.h
216
int count = min(atomic_read(&qdio->req_q_free), max_sbals);
drivers/s390/scsi/zfcp_scsi.c
204
if (!(atomic_read(&adapter->status) &
drivers/s390/scsi/zfcp_scsi.c
311
if (!(atomic_read(&adapter->status) &
drivers/s390/scsi/zfcp_scsi.c
673
int status = atomic_read(&adapter->status);
drivers/s390/scsi/zfcp_scsi.c
830
data_div = atomic_read(&adapter->status) &
drivers/s390/scsi/zfcp_scsi.c
85
status = atomic_read(&zfcp_sdev->status);
drivers/s390/scsi/zfcp_scsi.c
87
!(atomic_read(&zfcp_sdev->port->status) &
drivers/s390/scsi/zfcp_sysfs.c
132
status = atomic_read(&sdev_to_zfcp(sdev)->status);
drivers/s390/scsi/zfcp_sysfs.c
177
if (atomic_read(&adapter->status) & ZFCP_STATUS_COMMON_ERP_FAILED)
drivers/s390/scsi/zfcp_sysfs.c
259
return atomic_read(&port->units) == -1;
drivers/s390/scsi/zfcp_sysfs.c
270
if (atomic_read(&port->units) > 0)
drivers/s390/scsi/zfcp_sysfs.c
400
status = atomic_read(&adapter->status);
drivers/s390/scsi/zfcp_sysfs.c
484
unsigned int status = atomic_read(&port->status);
drivers/s390/scsi/zfcp_sysfs.c
60
ZFCP_DEFINE_A_ATTR(status, "0x%08x\n", atomic_read(&adapter->status));
drivers/s390/scsi/zfcp_sysfs.c
633
(atomic_read(&zfcp_sdev->status) &
drivers/s390/scsi/zfcp_sysfs.c
641
unsigned int status = atomic_read(&sdev_to_zfcp(sdev)->status);
drivers/s390/scsi/zfcp_sysfs.c
669
(atomic_read(&zfcp_sdev->status) &
drivers/s390/scsi/zfcp_sysfs.c
673
atomic_read(&zfcp_sdev->status));
drivers/s390/scsi/zfcp_sysfs.c
69
ZFCP_DEFINE_A_ATTR(in_recovery, "%d\n", (atomic_read(&adapter->status) &
drivers/s390/scsi/zfcp_sysfs.c
73
atomic_read(&port->status));
drivers/s390/scsi/zfcp_sysfs.c
75
(atomic_read(&port->status) &
drivers/s390/scsi/zfcp_sysfs.c
789
return sysfs_emit(buf, "%d %llu\n", atomic_read(&qdio->req_q_full),
drivers/s390/scsi/zfcp_sysfs.c
825
status = atomic_read(&adapter->status);
drivers/s390/scsi/zfcp_sysfs.c
869
status = atomic_read(&adapter->status); \
drivers/s390/scsi/zfcp_sysfs.c
96
if (atomic_read(&port->status) & ZFCP_STATUS_COMMON_ERP_FAILED)
drivers/s390/scsi/zfcp_unit.c
223
status = atomic_read(&zfcp_sdev->status);
drivers/scsi/aacraid/commsup.c
389
qid, atomic_read(&q->numpending));
drivers/scsi/arcmsr/arcmsr_attr.c
275
atomic_read(&acb->ccboutstandingcount));
drivers/scsi/arcmsr/arcmsr_hba.c
1516
, atomic_read(&acb->ccboutstandingcount));
drivers/scsi/arcmsr/arcmsr_hba.c
1596
residual = atomic_read(&acb->ccboutstandingcount);
drivers/scsi/arcmsr/arcmsr_hba.c
1732
if (!atomic_read(&acb->ccboutstandingcount))
drivers/scsi/arcmsr/arcmsr_hba.c
1738
if (atomic_read(&acb->ccboutstandingcount)) {
drivers/scsi/arcmsr/arcmsr_hba.c
3626
, atomic_read(&acb->ccboutstandingcount));
drivers/scsi/arcmsr/arcmsr_hba.c
3695
, atomic_read(&acb->ccboutstandingcount));
drivers/scsi/arcmsr/arcmsr_hba.c
3757
, atomic_read(&acb->ccboutstandingcount));
drivers/scsi/arcmsr/arcmsr_hba.c
3832
, atomic_read(&acb->ccboutstandingcount));
drivers/scsi/arcmsr/arcmsr_hba.c
3900
, atomic_read(&acb->ccboutstandingcount));
drivers/scsi/arcmsr/arcmsr_hba.c
4645
if (atomic_read(&acb->ccboutstandingcount) != 0) {
drivers/scsi/arcmsr/arcmsr_hba.c
4754
if (!atomic_read(&acb->ccboutstandingcount)) {
drivers/scsi/bfa/bfad_debugfs.c
499
if (atomic_read(&bfa_debugfs_port_count) == 0) {
drivers/scsi/bnx2fc/bnx2fc_io.c
422
free_sqes = atomic_read(&tgt->free_sqes);
drivers/scsi/bnx2fc/bnx2fc_io.c
481
free_sqes = atomic_read(&tgt->free_sqes);
drivers/scsi/bnx2i/bnx2i_hwi.c
160
num_active_cmds = atomic_read(&ep->num_active_cmds);
drivers/scsi/bnx2i/bnx2i_hwi.c
2048
if (!atomic_read(&bnx2i_conn->ep->num_active_cmds))
drivers/scsi/bnx2i/bnx2i_iscsi.c
1230
if (atomic_read(&bnx2i_conn->ep->num_active_cmds) + 1 >
drivers/scsi/bnx2i/bnx2i_iscsi.c
1496
if (atomic_read(&bnx2i_conn->work_cnt)) {
drivers/scsi/elx/efct/efct_xport.c
73
if (atomic_read(&efct_debugfs_count) == 0) {
drivers/scsi/esas2r/esas2r_int.c
127
if (likely(atomic_read(&a->disable_cnt) == 0))
drivers/scsi/esas2r/esas2r_int.c
162
if (likely(atomic_read(&a->disable_cnt) == 0))
drivers/scsi/esas2r/esas2r_int.c
471
if (atomic_read(&a->disable_cnt) == 0)
drivers/scsi/esas2r/esas2r_int.c
661
if (atomic_read(&a->disable_cnt) == 0)
drivers/scsi/esas2r/esas2r_int.c
83
if (atomic_read(&a->disable_cnt) == 0)
drivers/scsi/esas2r/esas2r_io.c
754
if (atomic_read(&a->disable_cnt) == 0)
drivers/scsi/esas2r/esas2r_io.c
835
if (atomic_read(&a->disable_cnt) == 0)
drivers/scsi/esas2r/esas2r_main.c
1036
if (atomic_read(&a->disable_cnt) == 0)
drivers/scsi/fnic/fnic_scsi.c
260
while (atomic_read(&fnic->in_flight))
drivers/scsi/fnic/fnic_trace.c
510
atomic_read(&tport->in_flight),
drivers/scsi/hisi_sas/hisi_sas_v3_hw.c
4394
seq_printf(s, "%d\n", atomic_read(&phy->down_cnt));
drivers/scsi/hisi_sas/hisi_sas_v3_hw.c
5197
atomic_read(&device->power.usage_count)) {
drivers/scsi/hpsa.c
2464
atomic_read(&dev->commands_outstanding) <= 0)
drivers/scsi/hpsa.c
3176
atomic_read(&dev->commands_outstanding) <= 0 ||
drivers/scsi/hpsa.c
533
atomic_read(&h->commands_outstanding));
drivers/scsi/hpsa.c
6087
if (atomic_read(&dev->commands_outstanding) > 0)
drivers/scsi/hptiop.c
1074
atomic_read(&hba->resetting) == 0, 60 * HZ);
drivers/scsi/hptiop.c
1076
if (atomic_read(&hba->resetting)) {
drivers/scsi/ibmvscsi/ibmvfc.c
858
BUG_ON(atomic_read(&pool->events[i].free) != 1);
drivers/scsi/ibmvscsi/ibmvscsi.c
1751
atomic_read(&hostdata->request_limit) < 2;) {
drivers/scsi/ibmvscsi/ibmvscsi.c
1756
if (atomic_read(&hostdata->request_limit) <= 0)
drivers/scsi/ibmvscsi/ibmvscsi.c
1835
if (atomic_read(&evt_struct->free)) {
drivers/scsi/ibmvscsi/ibmvscsi.c
2312
atomic_read(&hostdata->request_limit) < 2;) {
drivers/scsi/ibmvscsi/ibmvscsi.c
2318
if (atomic_read(&hostdata->request_limit) > 0)
drivers/scsi/ibmvscsi/ibmvscsi.c
492
if (atomic_read(&pool->events[i].free) != 1)
drivers/scsi/iscsi_tcp.c
121
!atomic_read(&sk->sk_rmem_alloc)) {
drivers/scsi/libfc/fc_exch.c
2371
st->fc_no_free_exch += atomic_read(&mp->stats.no_free_exch);
drivers/scsi/libfc/fc_exch.c
2373
atomic_read(&mp->stats.no_free_exch_xid);
drivers/scsi/libfc/fc_exch.c
2374
st->fc_xid_not_found += atomic_read(&mp->stats.xid_not_found);
drivers/scsi/libfc/fc_exch.c
2375
st->fc_xid_busy += atomic_read(&mp->stats.xid_busy);
drivers/scsi/libfc/fc_exch.c
2376
st->fc_seq_not_found += atomic_read(&mp->stats.seq_not_found);
drivers/scsi/libfc/fc_exch.c
2377
st->fc_non_bls_resp += atomic_read(&mp->stats.non_bls_resp);
drivers/scsi/libsas/sas_init.c
653
if (atomic_read(&phy->event_nr) > phy->ha->event_thres) {
drivers/scsi/lpfc/lpfc_attr.c
1396
atomic_read(&vport->fc_map_cnt) +
drivers/scsi/lpfc/lpfc_attr.c
1397
atomic_read(&vport->fc_unmap_cnt));
drivers/scsi/lpfc/lpfc_attr.c
267
atomic_read(&phba->cmf_busy),
drivers/scsi/lpfc/lpfc_attr.c
524
atomic_read(&tgtp->rcv_ls_req_in),
drivers/scsi/lpfc/lpfc_attr.c
525
atomic_read(&tgtp->rcv_ls_req_drop),
drivers/scsi/lpfc/lpfc_attr.c
526
atomic_read(&tgtp->xmt_ls_abort));
drivers/scsi/lpfc/lpfc_attr.c
530
if (atomic_read(&tgtp->rcv_ls_req_in) !=
drivers/scsi/lpfc/lpfc_attr.c
531
atomic_read(&tgtp->rcv_ls_req_out)) {
drivers/scsi/lpfc/lpfc_attr.c
534
atomic_read(&tgtp->rcv_ls_req_in),
drivers/scsi/lpfc/lpfc_attr.c
535
atomic_read(&tgtp->rcv_ls_req_out));
drivers/scsi/lpfc/lpfc_attr.c
542
atomic_read(&tgtp->xmt_ls_rsp),
drivers/scsi/lpfc/lpfc_attr.c
543
atomic_read(&tgtp->xmt_ls_drop),
drivers/scsi/lpfc/lpfc_attr.c
544
atomic_read(&tgtp->xmt_ls_rsp_cmpl));
drivers/scsi/lpfc/lpfc_attr.c
550
atomic_read(&tgtp->xmt_ls_rsp_aborted),
drivers/scsi/lpfc/lpfc_attr.c
551
atomic_read(&tgtp->xmt_ls_rsp_xb_set),
drivers/scsi/lpfc/lpfc_attr.c
552
atomic_read(&tgtp->xmt_ls_rsp_error));
drivers/scsi/lpfc/lpfc_attr.c
559
atomic_read(&tgtp->rcv_fcp_cmd_in),
drivers/scsi/lpfc/lpfc_attr.c
560
atomic_read(&tgtp->rcv_fcp_cmd_defer),
drivers/scsi/lpfc/lpfc_attr.c
561
atomic_read(&tgtp->xmt_fcp_release),
drivers/scsi/lpfc/lpfc_attr.c
562
atomic_read(&tgtp->rcv_fcp_cmd_drop));
drivers/scsi/lpfc/lpfc_attr.c
566
if (atomic_read(&tgtp->rcv_fcp_cmd_in) !=
drivers/scsi/lpfc/lpfc_attr.c
567
atomic_read(&tgtp->rcv_fcp_cmd_out)) {
drivers/scsi/lpfc/lpfc_attr.c
570
atomic_read(&tgtp->rcv_fcp_cmd_in),
drivers/scsi/lpfc/lpfc_attr.c
571
atomic_read(&tgtp->rcv_fcp_cmd_out));
drivers/scsi/lpfc/lpfc_attr.c
579
atomic_read(&tgtp->xmt_fcp_read),
drivers/scsi/lpfc/lpfc_attr.c
580
atomic_read(&tgtp->xmt_fcp_read_rsp),
drivers/scsi/lpfc/lpfc_attr.c
581
atomic_read(&tgtp->xmt_fcp_write),
drivers/scsi/lpfc/lpfc_attr.c
582
atomic_read(&tgtp->xmt_fcp_rsp),
drivers/scsi/lpfc/lpfc_attr.c
583
atomic_read(&tgtp->xmt_fcp_drop));
drivers/scsi/lpfc/lpfc_attr.c
589
atomic_read(&tgtp->xmt_fcp_rsp_cmpl),
drivers/scsi/lpfc/lpfc_attr.c
590
atomic_read(&tgtp->xmt_fcp_rsp_error),
drivers/scsi/lpfc/lpfc_attr.c
591
atomic_read(&tgtp->xmt_fcp_rsp_drop));
drivers/scsi/lpfc/lpfc_attr.c
597
atomic_read(&tgtp->xmt_fcp_rsp_aborted),
drivers/scsi/lpfc/lpfc_attr.c
598
atomic_read(&tgtp->xmt_fcp_rsp_xb_set),
drivers/scsi/lpfc/lpfc_attr.c
599
atomic_read(&tgtp->xmt_fcp_xri_abort_cqe));
drivers/scsi/lpfc/lpfc_attr.c
605
atomic_read(&tgtp->xmt_fcp_abort),
drivers/scsi/lpfc/lpfc_attr.c
606
atomic_read(&tgtp->xmt_fcp_abort_cmpl));
drivers/scsi/lpfc/lpfc_attr.c
612
atomic_read(&tgtp->xmt_abort_sol),
drivers/scsi/lpfc/lpfc_attr.c
613
atomic_read(&tgtp->xmt_abort_unsol),
drivers/scsi/lpfc/lpfc_attr.c
614
atomic_read(&tgtp->xmt_abort_rsp),
drivers/scsi/lpfc/lpfc_attr.c
615
atomic_read(&tgtp->xmt_abort_rsp_error));
drivers/scsi/lpfc/lpfc_attr.c
621
atomic_read(&tgtp->defer_ctx),
drivers/scsi/lpfc/lpfc_attr.c
622
atomic_read(&tgtp->defer_fod),
drivers/scsi/lpfc/lpfc_attr.c
623
atomic_read(&tgtp->defer_wqfull));
drivers/scsi/lpfc/lpfc_attr.c
628
tot = atomic_read(&tgtp->rcv_fcp_cmd_drop);
drivers/scsi/lpfc/lpfc_attr.c
629
tot += atomic_read(&tgtp->xmt_fcp_release);
drivers/scsi/lpfc/lpfc_attr.c
630
tot = atomic_read(&tgtp->rcv_fcp_cmd_in) - tot;
drivers/scsi/lpfc/lpfc_attr.c
763
atomic_read(&lport->fc4NvmeLsRequests),
drivers/scsi/lpfc/lpfc_attr.c
764
atomic_read(&lport->fc4NvmeLsCmpls),
drivers/scsi/lpfc/lpfc_attr.c
765
atomic_read(&lport->xmt_ls_abort));
drivers/scsi/lpfc/lpfc_attr.c
771
atomic_read(&lport->xmt_ls_err),
drivers/scsi/lpfc/lpfc_attr.c
772
atomic_read(&lport->cmpl_ls_xb),
drivers/scsi/lpfc/lpfc_attr.c
773
atomic_read(&lport->cmpl_ls_err));
drivers/scsi/lpfc/lpfc_attr.c
798
atomic_read(&lport->xmt_fcp_abort),
drivers/scsi/lpfc/lpfc_attr.c
799
atomic_read(&lport->xmt_fcp_noxri),
drivers/scsi/lpfc/lpfc_attr.c
800
atomic_read(&lport->xmt_fcp_bad_ndlp),
drivers/scsi/lpfc/lpfc_attr.c
801
atomic_read(&lport->xmt_fcp_qdepth),
drivers/scsi/lpfc/lpfc_attr.c
802
atomic_read(&lport->xmt_fcp_wqerr),
drivers/scsi/lpfc/lpfc_attr.c
803
atomic_read(&lport->xmt_fcp_err));
drivers/scsi/lpfc/lpfc_attr.c
809
atomic_read(&lport->cmpl_fcp_xb),
drivers/scsi/lpfc/lpfc_attr.c
810
atomic_read(&lport->cmpl_fcp_err));
drivers/scsi/lpfc/lpfc_debugfs.c
1056
atomic_read(&tgtp->rcv_ls_req_in),
drivers/scsi/lpfc/lpfc_debugfs.c
1057
atomic_read(&tgtp->rcv_ls_req_drop),
drivers/scsi/lpfc/lpfc_debugfs.c
1058
atomic_read(&tgtp->xmt_ls_abort));
drivers/scsi/lpfc/lpfc_debugfs.c
1059
if (atomic_read(&tgtp->rcv_ls_req_in) !=
drivers/scsi/lpfc/lpfc_debugfs.c
1060
atomic_read(&tgtp->rcv_ls_req_out)) {
drivers/scsi/lpfc/lpfc_debugfs.c
1063
atomic_read(&tgtp->rcv_ls_req_in),
drivers/scsi/lpfc/lpfc_debugfs.c
1064
atomic_read(&tgtp->rcv_ls_req_out));
drivers/scsi/lpfc/lpfc_debugfs.c
1069
atomic_read(&tgtp->xmt_ls_rsp),
drivers/scsi/lpfc/lpfc_debugfs.c
1070
atomic_read(&tgtp->xmt_ls_drop),
drivers/scsi/lpfc/lpfc_debugfs.c
1071
atomic_read(&tgtp->xmt_ls_rsp_cmpl));
drivers/scsi/lpfc/lpfc_debugfs.c
1075
atomic_read(&tgtp->xmt_ls_rsp_aborted),
drivers/scsi/lpfc/lpfc_debugfs.c
1076
atomic_read(&tgtp->xmt_ls_rsp_xb_set),
drivers/scsi/lpfc/lpfc_debugfs.c
1077
atomic_read(&tgtp->xmt_ls_rsp_error));
drivers/scsi/lpfc/lpfc_debugfs.c
1082
atomic_read(&tgtp->rcv_fcp_cmd_in),
drivers/scsi/lpfc/lpfc_debugfs.c
1083
atomic_read(&tgtp->rcv_fcp_cmd_defer),
drivers/scsi/lpfc/lpfc_debugfs.c
1084
atomic_read(&tgtp->xmt_fcp_release),
drivers/scsi/lpfc/lpfc_debugfs.c
1085
atomic_read(&tgtp->rcv_fcp_cmd_drop));
drivers/scsi/lpfc/lpfc_debugfs.c
1087
if (atomic_read(&tgtp->rcv_fcp_cmd_in) !=
drivers/scsi/lpfc/lpfc_debugfs.c
1088
atomic_read(&tgtp->rcv_fcp_cmd_out)) {
drivers/scsi/lpfc/lpfc_debugfs.c
1091
atomic_read(&tgtp->rcv_fcp_cmd_in),
drivers/scsi/lpfc/lpfc_debugfs.c
1092
atomic_read(&tgtp->rcv_fcp_cmd_out));
drivers/scsi/lpfc/lpfc_debugfs.c
1098
atomic_read(&tgtp->xmt_fcp_read),
drivers/scsi/lpfc/lpfc_debugfs.c
1099
atomic_read(&tgtp->xmt_fcp_read_rsp),
drivers/scsi/lpfc/lpfc_debugfs.c
1100
atomic_read(&tgtp->xmt_fcp_write),
drivers/scsi/lpfc/lpfc_debugfs.c
1101
atomic_read(&tgtp->xmt_fcp_rsp));
drivers/scsi/lpfc/lpfc_debugfs.c
1105
atomic_read(&tgtp->xmt_fcp_rsp_cmpl),
drivers/scsi/lpfc/lpfc_debugfs.c
1106
atomic_read(&tgtp->xmt_fcp_rsp_error),
drivers/scsi/lpfc/lpfc_debugfs.c
1107
atomic_read(&tgtp->xmt_fcp_rsp_drop));
drivers/scsi/lpfc/lpfc_debugfs.c
1111
atomic_read(&tgtp->xmt_fcp_rsp_aborted),
drivers/scsi/lpfc/lpfc_debugfs.c
1112
atomic_read(&tgtp->xmt_fcp_rsp_xb_set),
drivers/scsi/lpfc/lpfc_debugfs.c
1113
atomic_read(&tgtp->xmt_fcp_xri_abort_cqe));
drivers/scsi/lpfc/lpfc_debugfs.c
1117
atomic_read(&tgtp->xmt_fcp_abort),
drivers/scsi/lpfc/lpfc_debugfs.c
1118
atomic_read(&tgtp->xmt_fcp_abort_cmpl));
drivers/scsi/lpfc/lpfc_debugfs.c
1122
atomic_read(&tgtp->xmt_abort_sol),
drivers/scsi/lpfc/lpfc_debugfs.c
1123
atomic_read(&tgtp->xmt_abort_unsol),
drivers/scsi/lpfc/lpfc_debugfs.c
1124
atomic_read(&tgtp->xmt_abort_rsp),
drivers/scsi/lpfc/lpfc_debugfs.c
1125
atomic_read(&tgtp->xmt_abort_rsp_error));
drivers/scsi/lpfc/lpfc_debugfs.c
1156
tot = atomic_read(&tgtp->rcv_fcp_cmd_drop);
drivers/scsi/lpfc/lpfc_debugfs.c
1157
tot += atomic_read(&tgtp->xmt_fcp_release);
drivers/scsi/lpfc/lpfc_debugfs.c
1158
tot = atomic_read(&tgtp->rcv_fcp_cmd_in) - tot;
drivers/scsi/lpfc/lpfc_debugfs.c
1183
atomic_read(&lport->fc4NvmeLsRequests),
drivers/scsi/lpfc/lpfc_debugfs.c
1184
atomic_read(&lport->fc4NvmeLsCmpls));
drivers/scsi/lpfc/lpfc_debugfs.c
1217
atomic_read(&lport->xmt_ls_abort),
drivers/scsi/lpfc/lpfc_debugfs.c
1218
atomic_read(&lport->xmt_ls_err),
drivers/scsi/lpfc/lpfc_debugfs.c
1219
atomic_read(&lport->cmpl_ls_xb),
drivers/scsi/lpfc/lpfc_debugfs.c
1220
atomic_read(&lport->cmpl_ls_err));
drivers/scsi/lpfc/lpfc_debugfs.c
1225
atomic_read(&lport->xmt_fcp_noxri),
drivers/scsi/lpfc/lpfc_debugfs.c
1226
atomic_read(&lport->xmt_fcp_bad_ndlp),
drivers/scsi/lpfc/lpfc_debugfs.c
1227
atomic_read(&lport->xmt_fcp_qdepth),
drivers/scsi/lpfc/lpfc_debugfs.c
1228
atomic_read(&lport->xmt_fcp_wqerr),
drivers/scsi/lpfc/lpfc_debugfs.c
1229
atomic_read(&lport->xmt_fcp_err),
drivers/scsi/lpfc/lpfc_debugfs.c
1230
atomic_read(&lport->xmt_fcp_abort));
drivers/scsi/lpfc/lpfc_debugfs.c
1234
atomic_read(&lport->cmpl_fcp_xb),
drivers/scsi/lpfc/lpfc_debugfs.c
1235
atomic_read(&lport->cmpl_fcp_err));
drivers/scsi/lpfc/lpfc_debugfs.c
1605
index = (atomic_read(&phba->nvmeio_trc_cnt) + 1) &
drivers/scsi/lpfc/lpfc_debugfs.c
161
index = (atomic_read(&vport->disc_trc_cnt) + 1) &
drivers/scsi/lpfc/lpfc_debugfs.c
227
index = (atomic_read(&phba->slow_ring_trc_cnt) + 1) &
drivers/scsi/lpfc/lpfc_debugfs.c
905
i = atomic_read(&ndlp->cmd_pending);
drivers/scsi/lpfc/lpfc_els.c
11806
if (atomic_read(&phba->fabric_iocb_count) == 0) {
drivers/scsi/lpfc/lpfc_els.c
11924
BUG_ON(atomic_read(&phba->fabric_iocb_count) == 0);
drivers/scsi/lpfc/lpfc_els.c
11969
BUG_ON(atomic_read(&phba->fabric_iocb_count) > 1);
drivers/scsi/lpfc/lpfc_els.c
11972
ready = atomic_read(&phba->fabric_iocb_count) == 0 &&
drivers/scsi/lpfc/lpfc_els.c
1662
atomic_read(&vport->fc_plogi_cnt),
drivers/scsi/lpfc/lpfc_els.c
2748
if (atomic_read(&vport->fc_npr_cnt))
drivers/scsi/lpfc/lpfc_els.c
2807
if (atomic_read(&vport->fc_npr_cnt))
drivers/scsi/lpfc/lpfc_els.c
2839
atomic_read(&vport->fc_adisc_cnt),
drivers/scsi/lpfc/lpfc_hbadisc.c
4009
if (atomic_read(&vport->fc_npr_cnt))
drivers/scsi/lpfc/lpfc_hbadisc.c
4600
if (!atomic_read(&vport->fc_npr_cnt) && count == -1)
drivers/scsi/lpfc/lpfc_hbadisc.c
4994
atomic_read(&vport->fc_plogi_cnt),
drivers/scsi/lpfc/lpfc_hbadisc.c
4995
atomic_read(&vport->fc_adisc_cnt));
drivers/scsi/lpfc/lpfc_hbadisc.c
5027
atomic_read(&vport->fc_plogi_cnt),
drivers/scsi/lpfc/lpfc_hbadisc.c
5028
atomic_read(&vport->fc_adisc_cnt));
drivers/scsi/lpfc/lpfc_hbadisc.c
5882
atomic_read(&vport->fc_plogi_cnt),
drivers/scsi/lpfc/lpfc_hbadisc.c
5883
atomic_read(&vport->fc_adisc_cnt),
drivers/scsi/lpfc/lpfc_hbadisc.c
5884
atomic_read(&vport->fc_npr_cnt));
drivers/scsi/lpfc/lpfc_hbadisc.c
5913
if (atomic_read(&vport->fc_npr_cnt))
drivers/scsi/lpfc/lpfc_hbadisc.c
6006
if (atomic_read(&vport->fc_plogi_cnt) ||
drivers/scsi/lpfc/lpfc_hbadisc.c
6007
atomic_read(&vport->fc_adisc_cnt)) {
drivers/scsi/lpfc/lpfc_hbadisc.c
726
if (atomic_read(&phba->fast_event_count) > LPFC_MAX_EVT_COUNT)
drivers/scsi/lpfc/lpfc_init.c
15745
start_idx = (unsigned int)atomic_read(&phba->dbg_log_idx) % DBG_LOG_SZ;
drivers/scsi/lpfc/lpfc_init.c
15746
dbg_cnt = (unsigned int)atomic_read(&phba->dbg_log_cnt);
drivers/scsi/lpfc/lpfc_init.c
15788
int dbg_dmping = atomic_read(&phba->dbg_log_dmping);
drivers/scsi/lpfc/lpfc_init.c
4951
if (!atomic_read(&vport->fc_map_cnt) &&
drivers/scsi/lpfc/lpfc_init.c
5716
lvalue = atomic_read(&phba->cgn_latency_evt_cnt);
drivers/scsi/lpfc/lpfc_init.c
5753
dvalue = atomic_read(&phba->cgn_driver_evt_cnt);
drivers/scsi/lpfc/lpfc_init.c
5761
wvalue = atomic_read(&phba->cgn_fabric_warn_cnt);
drivers/scsi/lpfc/lpfc_init.c
5768
avalue = atomic_read(&phba->cgn_fabric_alarm_cnt);
drivers/scsi/lpfc/lpfc_nvme.c
1649
if ((atomic_read(&ndlp->cmd_pending) >= ndlp->cmd_qdepth) &&
drivers/scsi/lpfc/lpfc_nvme.c
1655
atomic_read(&ndlp->cmd_pending),
drivers/scsi/lpfc/lpfc_nvmet.c
1325
hstate = atomic_read(&lpfc_nvmet->state);
drivers/scsi/lpfc/lpfc_nvmet.c
2284
atomic_read(&tgtp->rcv_fcp_cmd_in),
drivers/scsi/lpfc/lpfc_nvmet.c
2285
atomic_read(&tgtp->rcv_fcp_cmd_out),
drivers/scsi/lpfc/lpfc_nvmet.c
2286
atomic_read(&tgtp->xmt_fcp_release));
drivers/scsi/lpfc/lpfc_nvmet.c
2519
atomic_read(&tgtp->rcv_fcp_cmd_in),
drivers/scsi/lpfc/lpfc_nvmet.c
2520
atomic_read(&tgtp->rcv_fcp_cmd_out),
drivers/scsi/lpfc/lpfc_nvmet.c
2521
atomic_read(&tgtp->xmt_fcp_release));
drivers/scsi/lpfc/lpfc_nvmet.c
483
atomic_read(&tgtp->rcv_fcp_cmd_in),
drivers/scsi/lpfc/lpfc_nvmet.c
484
atomic_read(&tgtp->rcv_fcp_cmd_out),
drivers/scsi/lpfc/lpfc_nvmet.c
485
atomic_read(&tgtp->xmt_fcp_release));
drivers/scsi/lpfc/lpfc_scsi.c
173
num_rsrc_err = atomic_read(&phba->num_rsrc_err);
drivers/scsi/lpfc/lpfc_scsi.c
3721
if (atomic_read(&phba->cmf_stop_io))
drivers/scsi/lpfc/lpfc_scsi.c
3806
if (size > atomic_read(&phba->rx_max_read_cnt))
drivers/scsi/lpfc/lpfc_scsi.c
4278
atomic_read(&ndlp->cmd_pending) &&
drivers/scsi/lpfc/lpfc_scsi.c
4279
(atomic_read(&ndlp->cmd_pending) >
drivers/scsi/lpfc/lpfc_scsi.c
4284
atomic_read(&ndlp->cmd_pending);
drivers/scsi/lpfc/lpfc_scsi.c
4558
atomic_read(&pnode->cmd_pending) &&
drivers/scsi/lpfc/lpfc_scsi.c
4559
(atomic_read(&pnode->cmd_pending) >
drivers/scsi/lpfc/lpfc_scsi.c
4564
atomic_read(&pnode->cmd_pending);
drivers/scsi/lpfc/lpfc_scsi.c
5294
if (atomic_read(&ndlp->cmd_pending) >= ndlp->cmd_qdepth) {
drivers/scsi/lpfc/lpfc_scsi.c
5303
atomic_read(&ndlp->cmd_pending),
drivers/scsi/lpfc/lpfc_sli.c
14735
atomic_read(&tgtp->rcv_fcp_cmd_in),
drivers/scsi/lpfc/lpfc_sli.c
14736
atomic_read(&tgtp->rcv_fcp_cmd_out),
drivers/scsi/lpfc/lpfc_sli.c
14737
atomic_read(&tgtp->xmt_fcp_release));
drivers/scsi/lpfc/lpfc_sli.c
15284
atomic_read(&tgtp->rcv_fcp_cmd_in),
drivers/scsi/lpfc/lpfc_sli.c
15285
atomic_read(&tgtp->rcv_fcp_cmd_out),
drivers/scsi/lpfc/lpfc_sli.c
15286
atomic_read(&tgtp->xmt_fcp_release));
drivers/scsi/megaraid.c
1313
if(atomic_read(&adapter->quiescent) == 0) {
drivers/scsi/megaraid.c
1393
if(atomic_read(&adapter->quiescent) == 0) {
drivers/scsi/megaraid.c
2090
atomic_read(&adapter->quiescent));
drivers/scsi/megaraid.c
2115
seq_printf(m, "pend_cmds = %d\n", atomic_read(&adapter->pend_cmds));
drivers/scsi/megaraid.c
3780
while (atomic_read(&adapter->pend_cmds) > 0 ||
drivers/scsi/megaraid.c
406
if (atomic_read(&adapter->quiescent) == 0)
drivers/scsi/megaraid.c
4079
if (atomic_read(&adapter->quiescent) == 0)
drivers/scsi/megaraid.c
4499
if (atomic_read(&adapter->pend_cmds) > 0)
drivers/scsi/megaraid/megaraid_mbox.c
3517
if (atomic_read(&adapter->being_detached)) {
drivers/scsi/megaraid/megaraid_sas_base.c
1081
if (atomic_read(&instance->adprecovery) != MEGASAS_HBA_OPERATIONAL)
drivers/scsi/megaraid/megaraid_sas_base.c
1128
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR) {
drivers/scsi/megaraid/megaraid_sas_base.c
1157
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR) {
drivers/scsi/megaraid/megaraid_sas_base.c
1223
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR) {
drivers/scsi/megaraid/megaraid_sas_base.c
1684
dev_err(&instance->pdev->dev, "[%d]: Total OS Pending cmds : %d\n",instance->host->host_no,atomic_read(&instance->fw_outstanding));
drivers/scsi/megaraid/megaraid_sas_base.c
1805
if (atomic_read(&instance->adprecovery) == MEGASAS_ADPRESET_SM_INFAULT) {
drivers/scsi/megaraid/megaraid_sas_base.c
1818
(atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR)) {
drivers/scsi/megaraid/megaraid_sas_base.c
1833
if (atomic_read(&instance->adprecovery) != MEGASAS_HBA_OPERATIONAL)
drivers/scsi/megaraid/megaraid_sas_base.c
2222
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR) {
drivers/scsi/megaraid/megaraid_sas_base.c
2263
&& atomic_read(&instance->fw_outstanding) <
drivers/scsi/megaraid/megaraid_sas_base.c
2291
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR)
drivers/scsi/megaraid/megaraid_sas_base.c
2759
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR) {
drivers/scsi/megaraid/megaraid_sas_base.c
2765
if (atomic_read(&instance->adprecovery) != MEGASAS_HBA_OPERATIONAL) {
drivers/scsi/megaraid/megaraid_sas_base.c
2776
if (atomic_read(&instance->adprecovery) == MEGASAS_HBA_OPERATIONAL)
drivers/scsi/megaraid/megaraid_sas_base.c
2780
if (atomic_read(&instance->adprecovery) != MEGASAS_HBA_OPERATIONAL) {
drivers/scsi/megaraid/megaraid_sas_base.c
2820
outstanding = atomic_read(&instance->fw_outstanding);
drivers/scsi/megaraid/megaraid_sas_base.c
2839
outstanding = atomic_read(&instance->fw_outstanding);
drivers/scsi/megaraid/megaraid_sas_base.c
2848
if ((fw_state == MFI_STATE_FAULT) || atomic_read(&instance->fw_outstanding)) {
drivers/scsi/megaraid/megaraid_sas_base.c
2851
__func__, __LINE__, fw_state, atomic_read(&instance->fw_outstanding));
drivers/scsi/megaraid/megaraid_sas_base.c
2856
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR) {
drivers/scsi/megaraid/megaraid_sas_base.c
2867
outstanding = atomic_read(&instance->fw_outstanding);
drivers/scsi/megaraid/megaraid_sas_base.c
2888
atomic_read(&instance->fw_outstanding));
drivers/scsi/megaraid/megaraid_sas_base.c
2913
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR) {
drivers/scsi/megaraid/megaraid_sas_base.c
3077
atomic_read(&instance->fw_outstanding));
drivers/scsi/megaraid/megaraid_sas_base.c
3403
return snprintf(buf, PAGE_SIZE, "%d\n", atomic_read(&instance->ldio_outstanding));
drivers/scsi/megaraid/megaraid_sas_base.c
3413
return snprintf(buf, PAGE_SIZE, "%d\n", atomic_read(&instance->fw_outstanding));
drivers/scsi/megaraid/megaraid_sas_base.c
3953
if (atomic_read(&instance->adprecovery) != MEGASAS_ADPRESET_SM_INFAULT) {
drivers/scsi/megaraid/megaraid_sas_base.c
3955
atomic_read(&instance->adprecovery));
drivers/scsi/megaraid/megaraid_sas_base.c
3959
if (atomic_read(&instance->adprecovery) == MEGASAS_ADPRESET_SM_INFAULT) {
drivers/scsi/megaraid/megaraid_sas_base.c
4071
fw_state, atomic_read(&instance->adprecovery));
drivers/scsi/megaraid/megaraid_sas_base.c
4098
if (atomic_read(&instance->fw_reset_no_pci_access))
drivers/scsi/megaraid/megaraid_sas_base.c
600
if ((atomic_read(&instance->adprecovery) != MEGASAS_HBA_OPERATIONAL) &&
drivers/scsi/megaraid/megaraid_sas_base.c
740
if (atomic_read(&instance->adprecovery) != MEGASAS_HBA_OPERATIONAL)
drivers/scsi/megaraid/megaraid_sas_base.c
7650
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR)
drivers/scsi/megaraid/megaraid_sas_base.c
7693
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR)
drivers/scsi/megaraid/megaraid_sas_base.c
7939
adp_state = atomic_read(&instance->adprecovery);
drivers/scsi/megaraid/megaraid_sas_base.c
8208
if ((atomic_read(&local_instance->adprecovery) ==
drivers/scsi/megaraid/megaraid_sas_base.c
8546
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR) {
drivers/scsi/megaraid/megaraid_sas_base.c
8596
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR) {
drivers/scsi/megaraid/megaraid_sas_base.c
881
if (atomic_read(&instance->adprecovery) != MEGASAS_HBA_OPERATIONAL)
drivers/scsi/megaraid/megaraid_sas_fp.c
1385
pend0 = atomic_read(&lbInfo->scsi_pending_cmds[pd0]);
drivers/scsi/megaraid/megaraid_sas_fp.c
1386
pend1 = atomic_read(&lbInfo->scsi_pending_cmds[pd1]);
drivers/scsi/megaraid/megaraid_sas_fusion.c
252
return atomic_read(&mr_device_priv_data->sdev_priv_busy);
drivers/scsi/megaraid/megaraid_sas_fusion.c
3566
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR)
drivers/scsi/megaraid/megaraid_sas_fusion.c
3840
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR)
drivers/scsi/megaraid/megaraid_sas_fusion.c
4245
outstanding = atomic_read(&instance->fw_outstanding);
drivers/scsi/megaraid/megaraid_sas_fusion.c
4263
if (atomic_read(&instance->fw_outstanding)) {
drivers/scsi/megaraid/megaraid_sas_fusion.c
4743
if (atomic_read(&instance->adprecovery) != MEGASAS_HBA_OPERATIONAL) {
drivers/scsi/megaraid/megaraid_sas_fusion.c
4824
if (atomic_read(&instance->adprecovery) != MEGASAS_HBA_OPERATIONAL) {
drivers/scsi/megaraid/megaraid_sas_fusion.c
4900
(atomic_read(&peer_instance->adprecovery) ==
drivers/scsi/megaraid/megaraid_sas_fusion.c
4928
if (atomic_read(&instance->adprecovery) == MEGASAS_HW_CRITICAL_ERROR) {
drivers/scsi/megaraid/megaraid_sas_fusion.c
4955
} while ((atomic_read(&instance->adprecovery) != MEGASAS_HBA_OPERATIONAL) &&
drivers/scsi/megaraid/megaraid_sas_fusion.c
4958
if (atomic_read(&instance->adprecovery) == MEGASAS_HBA_OPERATIONAL) {
drivers/scsi/mpi3mr/mpi3mr_app.c
3166
return sysfs_emit(buf, "%u\n", atomic_read(&mrioc->reply_qfull_count));
drivers/scsi/mpi3mr/mpi3mr_fw.c
2595
(atomic_read(&op_reply_q->pend_ios) >
drivers/scsi/mpi3mr/mpi3mr_fw.c
2892
if (atomic_read(&mrioc->admin_pend_isr)) {
drivers/scsi/mpi3mr/mpi3mr_fw.c
5467
if (atomic_read(&mrioc->admin_reply_q_in_use) == 1)
drivers/scsi/mpi3mr/mpi3mr_fw.c
5474
if (atomic_read(&mrioc->op_reply_qinfo[i].in_use) == 1) {
drivers/scsi/mpi3mr/mpi3mr_fw.c
704
!atomic_read(&intr_info->op_reply_q->pend_ios))
drivers/scsi/mpi3mr/mpi3mr_fw.c
749
} while (atomic_read(&intr_info->op_reply_q->pend_ios) &&
drivers/scsi/mpi3mr/mpi3mr_os.c
3498
ioc_pend_data_len = atomic_read(&mrioc->pend_large_data_sz);
drivers/scsi/mpi3mr/mpi3mr_os.c
3504
tg_pend_data_len = atomic_read(&tg->pend_large_data_sz);
drivers/scsi/mpi3mr/mpi3mr_os.c
4234
pend_ios += atomic_read(&mrioc->op_reply_qinfo[i].pend_ios);
drivers/scsi/mpi3mr/mpi3mr_os.c
5182
if (atomic_read(&stgt_priv_data->block_io)) {
drivers/scsi/mpi3mr/mpi3mr_os.c
655
while (atomic_read(&mrioc->op_reply_qinfo[i].in_use))
drivers/scsi/mpt3sas/mpt3sas_base.c
1612
while (atomic_read(&ioc->io_uring_poll_queues[qid].busy)) {
drivers/scsi/mpt3sas/mpt3sas_base.c
1883
if (atomic_read(&ioc->io_uring_poll_queues[qid].pause) ||
drivers/scsi/mpt3sas/mpt3sas_base.c
2159
atomic_read(&ioc->chain_lookup[smid - 1].chain_offset);
drivers/scsi/mpt3sas/mpt3sas_scsih.c
3570
if (r == SUCCESS && atomic_read(&starget->target_busy))
drivers/scsi/mvumi.c
1339
if (atomic_read(&cmd->sync_cmd)) {
drivers/scsi/mvumi.c
1613
if (!atomic_read(&mhba->pnp_count))
drivers/scsi/mvumi.c
416
if (atomic_read(&mhba->fw_outstanding) >= mhba->max_io) {
drivers/scsi/mvumi.c
420
return mhba->max_io - atomic_read(&mhba->fw_outstanding);
drivers/scsi/mvumi.c
427
if (atomic_read(&mhba->fw_outstanding) >= (mhba->max_io - 1))
drivers/scsi/mvumi.c
717
if (atomic_read(&cmd->sync_cmd)) {
drivers/scsi/mvumi.c
720
atomic_read(&cmd->sync_cmd));
drivers/scsi/mvumi.c
734
if (atomic_read(&cmd->sync_cmd)) {
drivers/scsi/pm8001/pm8001_sas.c
618
pm8001_dev ? atomic_read(&pm8001_dev->running_req) : -1);
drivers/scsi/pm8001/pm8001_sas.c
771
if (atomic_read(&pm8001_dev->running_req)) {
drivers/scsi/pm8001/pm8001_sas.c
774
while (atomic_read(&pm8001_dev->running_req))
drivers/scsi/pm8001/pm80xx_hwi.c
4681
ccb->device ? atomic_read(&ccb->device->running_req) : 0);
drivers/scsi/pmcraid.c
1758
atomic_read(&pinstance->ccn.ignore) == 1) {
drivers/scsi/pmcraid.c
1798
atomic_read(&pinstance->ccn.ignore) == 1) {
drivers/scsi/pmcraid.c
3115
if (atomic_read(&pinstance->outstanding_cmds) <=
drivers/scsi/pmcraid.c
3848
if (!atomic_read(&pinstance->expose_resources))
drivers/scsi/pmcraid.c
5177
if (atomic_read(&pmcraid_adapter_count) >= PMCRAID_MAX_ADAPTERS) {
drivers/scsi/pmcraid.c
5180
atomic_read(&pmcraid_adapter_count));
drivers/scsi/pmcraid.c
5196
atomic_read(&pmcraid_adapter_count));
drivers/scsi/qedf/qedf_debugfs.c
335
seq_printf(s, "Link State: %s\n", atomic_read(&qedf->link_state) ?
drivers/scsi/qedf/qedf_debugfs.c
352
atomic_read(&qedf->cmd_mgr->free_list_cnt));
drivers/scsi/qedf/qedf_debugfs.c
367
atomic_read(&fcport->free_sqes),
drivers/scsi/qedf/qedf_debugfs.c
368
atomic_read(&fcport->num_active_ios));
drivers/scsi/qedf/qedf_fip.c
103
if (atomic_read(&qedf->link_state) == QEDF_LINK_DOWN) {
drivers/scsi/qedf/qedf_fip.c
61
if (atomic_read(&qedf->link_state) != QEDF_LINK_UP) {
drivers/scsi/qedf/qedf_io.c
1615
while (atomic_read(&fcport->ios_to_queue)) {
drivers/scsi/qedf/qedf_io.c
1618
atomic_read(&fcport->ios_to_queue));
drivers/scsi/qedf/qedf_io.c
1622
atomic_read(&fcport->ios_to_queue));
drivers/scsi/qedf/qedf_io.c
1633
atomic_read(&fcport->num_active_ios), fcport,
drivers/scsi/qedf/qedf_io.c
1684
if (atomic_read(&io_req->state) ==
drivers/scsi/qedf/qedf_io.c
1797
flush_cnt, atomic_read(&fcport->num_active_ios));
drivers/scsi/qedf/qedf_io.c
1801
while (atomic_read(&fcport->num_active_ios)) {
drivers/scsi/qedf/qedf_io.c
1805
atomic_read(&fcport->num_active_ios),
drivers/scsi/qedf/qedf_io.c
1811
atomic_read(&fcport->num_active_ios));
drivers/scsi/qedf/qedf_io.c
1884
if (atomic_read(&qedf->link_down_tmo_valid) > 0) {
drivers/scsi/qedf/qedf_io.c
1891
if (!atomic_read(&fcport->free_sqes)) {
drivers/scsi/qedf/qedf_io.c
2190
if (!atomic_read(&fcport->free_sqes)) {
drivers/scsi/qedf/qedf_io.c
285
atomic_read(&cmgr->free_list_cnt));
drivers/scsi/qedf/qedf_io.c
305
free_sqes = atomic_read(&fcport->free_sqes);
drivers/scsi/qedf/qedf_io.c
315
if ((atomic_read(&fcport->num_active_ios) >=
drivers/scsi/qedf/qedf_io.c
319
atomic_read(&fcport->num_active_ios));
drivers/scsi/qedf/qedf_io.c
324
if (atomic_read(&cmd_mgr->free_list_cnt) <= GBL_RSVD_TASKS) {
drivers/scsi/qedf/qedf_io.c
327
atomic_read(&cmd_mgr->free_list_cnt));
drivers/scsi/qedf/qedf_io.c
450
if (atomic_read(&fcport->num_active_ios) < 0) {
drivers/scsi/qedf/qedf_io.c
990
atomic_read(&qedf->link_state) != QEDF_LINK_UP) {
drivers/scsi/qedf/qedf_main.c
1140
if (atomic_read(&qedf->link_state) != QEDF_LINK_UP) {
drivers/scsi/qedf/qedf_main.c
139
if (atomic_read(&qedf->link_state) == QEDF_LINK_DOWN) {
drivers/scsi/qedf/qedf_main.c
1492
if (atomic_read(&qedf->num_offloads) >= QEDF_MAX_SESSIONS) {
drivers/scsi/qedf/qedf_main.c
153
if (atomic_read(&qedf->link_state) == QEDF_LINK_UP)
drivers/scsi/qedf/qedf_main.c
175
atomic_read(&qedf->link_state));
drivers/scsi/qedf/qedf_main.c
177
if (atomic_read(&qedf->link_state) == QEDF_LINK_UP) {
drivers/scsi/qedf/qedf_main.c
182
if (atomic_read(&qedf->link_state) != QEDF_LINK_UP) {
drivers/scsi/qedf/qedf_main.c
1849
if (atomic_read(&base_qedf->link_state) != QEDF_LINK_UP) {
drivers/scsi/qedf/qedf_main.c
205
} else if (atomic_read(&qedf->link_state) == QEDF_LINK_DOWN) {
drivers/scsi/qedf/qedf_main.c
2670
if (atomic_read(&qedf->link_state) == QEDF_LINK_DOWN) {
drivers/scsi/qedf/qedf_main.c
3869
if (!atomic_read(&pdev->enable_cnt))
drivers/scsi/qedf/qedf_main.c
567
if (atomic_read(&qedf->link_state) == QEDF_LINK_UP)
drivers/scsi/qedf/qedf_main.c
593
if (atomic_read(&qedf->link_state) == QEDF_LINK_UP) {
drivers/scsi/qedf/qedf_main.c
607
if (atomic_read(&qedf->dcbx) == QEDF_DCBX_DONE ||
drivers/scsi/qedf/qedf_main.c
611
if (atomic_read(&qedf->link_down_tmo_valid) > 0)
drivers/scsi/qedf/qedf_main.c
653
if (atomic_read(&qedf->dcbx) == QEDF_DCBX_DONE) {
drivers/scsi/qedf/qedf_main.c
679
if (atomic_read(&qedf->link_state) == QEDF_LINK_UP &&
drivers/scsi/qedf/qedf_main.c
681
if (atomic_read(&qedf->link_down_tmo_valid) > 0)
drivers/scsi/qedf/qedf_main.c
889
if (atomic_read(&qedf->num_offloads))
drivers/scsi/qedf/qedf_main.c
892
atomic_read(&qedf->num_offloads));
drivers/scsi/qedf/qedf_main.c
940
WARN_ON(atomic_read(&qedf->num_offloads));
drivers/scsi/qedf/qedf_main.c
974
if (atomic_read(&qedf->link_state) == QEDF_LINK_DOWN ||
drivers/scsi/qedi/qedi_fw.c
1212
atomic_read(&qedi_conn->cmd_cleanup_cmpl)) ||
drivers/scsi/qedi/qedi_fw.c
1219
atomic_read(&qedi_conn->cmd_cleanup_cmpl),
drivers/scsi/qedi/qedi_fw.c
1228
atomic_read(&qedi_conn->cmd_cleanup_cmpl),
drivers/scsi/qedi/qedi_fw.c
1238
atomic_read(&qedi_conn->cmd_cleanup_cmpl)) ||
drivers/scsi/qedi/qedi_iscsi.c
929
if (atomic_read(&qedi->link_state) != QEDI_LINK_UP) {
drivers/scsi/qedi/qedi_sysfs.c
25
if (atomic_read(&qedi->link_state) == QEDI_LINK_UP)
drivers/scsi/qla2xxx/qla_attr.c
1166
if (atomic_read(&vha->loop_state) == LOOP_DOWN ||
drivers/scsi/qla2xxx/qla_attr.c
1167
atomic_read(&vha->loop_state) == LOOP_DEAD ||
drivers/scsi/qla2xxx/qla_attr.c
1170
else if (atomic_read(&vha->loop_state) != LOOP_READY ||
drivers/scsi/qla2xxx/qla_attr.c
2861
} else if (atomic_read(&base_vha->loop_state) == LOOP_READY &&
drivers/scsi/qla2xxx/qla_attr.c
3021
switch (atomic_read(&base_vha->loop_state)) {
drivers/scsi/qla2xxx/qla_attr.c
3082
if (atomic_read(&base_vha->loop_state) == LOOP_DOWN ||
drivers/scsi/qla2xxx/qla_attr.c
3083
atomic_read(&base_vha->loop_state) == LOOP_DEAD) {
drivers/scsi/qla2xxx/qla_bsg.c
1363
if (atomic_read(&fcport->state) != FCS_ONLINE) {
drivers/scsi/qla2xxx/qla_bsg.c
342
if (atomic_read(&fcport->state) != FCS_ONLINE) {
drivers/scsi/qla2xxx/qla_bsg.c
826
if (atomic_read(&vha->loop_state) == LOOP_READY &&
drivers/scsi/qla2xxx/qla_def.h
5246
atomic_read(&ha->loop_state) == LOOP_DOWN)
drivers/scsi/qla2xxx/qla_def.h
5592
__func__, _fp->port_name, ##_args, atomic_read(&_fp->state), \
drivers/scsi/qla2xxx/qla_def.h
5597
(!_fcport || IS_SESSION_DELETED(_fcport) || atomic_read(&_fcport->state) != FCS_ONLINE || \
drivers/scsi/qla2xxx/qla_dfs.c
273
iocbs_used = atomic_read(&ha->fwres.iocb_used);
drivers/scsi/qla2xxx/qla_dfs.c
274
exch_used = atomic_read(&ha->fwres.exch_used);
drivers/scsi/qla2xxx/qla_dfs.c
800
if (atomic_read(&qla2x00_dfs_root_count) == 0 &&
drivers/scsi/qla2xxx/qla_edif.c
1040
(atomic_read(&fcport->state) ==
drivers/scsi/qla2xxx/qla_edif.c
603
if (atomic_read(&vha->loop_state) == LOOP_DOWN)
drivers/scsi/qla2xxx/qla_edif.c
703
if (atomic_read(&vha->loop_state) == LOOP_DOWN)
drivers/scsi/qla2xxx/qla_edif.c
805
if (atomic_read(&fcport->state) == FCS_ONLINE) {
drivers/scsi/qla2xxx/qla_gs.c
3085
rscn_gen = atomic_read(&vha->rscn_gen);
drivers/scsi/qla2xxx/qla_gs.c
3266
atomic_read(&fcport->state) == FCS_ONLINE) ||
drivers/scsi/qla2xxx/qla_gs.c
3877
ls = atomic_read(&vha->loop_state);
drivers/scsi/qla2xxx/qla_init.c
1871
if (atomic_read(&fcport->state) == FCS_ONLINE) {
drivers/scsi/qla2xxx/qla_init.c
1889
atomic_read(&fcport->state) == FCS_ONLINE)
drivers/scsi/qla2xxx/qla_init.c
1902
atomic_read(&fcport->state) == FCS_ONLINE)
drivers/scsi/qla2xxx/qla_init.c
1916
atomic_read(&fcport->state) == FCS_ONLINE)
drivers/scsi/qla2xxx/qla_init.c
1929
vha->scan.rscn_gen_start = atomic_read(&vha->rscn_gen);
drivers/scsi/qla2xxx/qla_init.c
4995
if (atomic_read(&vha->loop_down_timer) &&
drivers/scsi/qla2xxx/qla_init.c
5067
if (LOOP_TRANSITION(vha) || atomic_read(&ha->loop_down_timer) ||
drivers/scsi/qla2xxx/qla_init.c
5549
old_state = atomic_read(&fcport->state);
drivers/scsi/qla2xxx/qla_init.c
5759
if (atomic_read(&vha->loop_down_timer) ||
drivers/scsi/qla2xxx/qla_init.c
6050
atomic_read(&fcport->state) == FCS_ONLINE) {
drivers/scsi/qla2xxx/qla_init.c
6092
if (atomic_read(&fcport->state) != FCS_ONLINE)
drivers/scsi/qla2xxx/qla_init.c
6140
if (atomic_read(&fcport->state) == FCS_ONLINE)
drivers/scsi/qla2xxx/qla_init.c
6439
vha->scan.rscn_gen_end = atomic_read(&vha->rscn_gen);
drivers/scsi/qla2xxx/qla_init.c
6548
(atomic_read(&vha->loop_down_timer) ||
drivers/scsi/qla2xxx/qla_init.c
6655
(atomic_read(&fcport->state) == FCS_ONLINE ||
drivers/scsi/qla2xxx/qla_init.c
6746
atomic_read(&fcport->state) == FCS_ONLINE) {
drivers/scsi/qla2xxx/qla_init.c
7017
} while (!atomic_read(&vha->loop_down_timer) &&
drivers/scsi/qla2xxx/qla_init.c
7376
if (atomic_read(&vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_init.c
7392
if (!atomic_read(&vha->loop_down_timer))
drivers/scsi/qla2xxx/qla_init.c
7461
while (atomic_read(&ha->num_pend_mbx_stage2) ||
drivers/scsi/qla2xxx/qla_init.c
7462
atomic_read(&ha->num_pend_mbx_stage1)) {
drivers/scsi/qla2xxx/qla_init.c
7471
if (atomic_read(&vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_init.c
7487
if (!atomic_read(&vha->loop_down_timer))
drivers/scsi/qla2xxx/qla_init.c
7625
if (!atomic_read(&vha->loop_down_timer)) {
drivers/scsi/qla2xxx/qla_init.c
9621
if (!atomic_read(&vha->loop_down_timer)) {
drivers/scsi/qla2xxx/qla_inline.h
121
old_val = atomic_read(&fcport->shadow_disc_state);
drivers/scsi/qla2xxx/qla_inline.h
434
if ((iores->iocb_cnt + atomic_read(&ha->fwres.iocb_used)) >=
drivers/scsi/qla2xxx/qla_inline.h
441
if ((iores->exch_cnt + atomic_read(&ha->fwres.exch_used)) >=
drivers/scsi/qla2xxx/qla_inline.h
469
c = atomic_read(v);
drivers/scsi/qla2xxx/qla_isr.c
1444
if (atomic_read(&vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_isr.c
1502
if (atomic_read(&vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_isr.c
1543
if (atomic_read(&vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_isr.c
1583
if (atomic_read(&vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_isr.c
1585
if (!atomic_read(&vha->loop_down_timer))
drivers/scsi/qla2xxx/qla_isr.c
1613
if (atomic_read(&vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_isr.c
1615
if (!atomic_read(&vha->loop_down_timer))
drivers/scsi/qla2xxx/qla_isr.c
1686
if (atomic_read(&fcport->state) != FCS_ONLINE) {
drivers/scsi/qla2xxx/qla_isr.c
1689
atomic_read(&fcport->state));
drivers/scsi/qla2xxx/qla_isr.c
1708
if (atomic_read(&vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_isr.c
1734
if (atomic_read(&vha->loop_state) != LOOP_DOWN &&
drivers/scsi/qla2xxx/qla_isr.c
1736
atomic_read(&vha->loop_state) != LOOP_DEAD) {
drivers/scsi/qla2xxx/qla_isr.c
1898
if (atomic_read(&vha->loop_state) == LOOP_DOWN)
drivers/scsi/qla2xxx/qla_isr.c
2716
if (atomic_read(&fcport->state) == FCS_ONLINE) {
drivers/scsi/qla2xxx/qla_isr.c
2854
if (atomic_read(&fcport->state) == FCS_ONLINE) {
drivers/scsi/qla2xxx/qla_isr.c
3632
if (atomic_read(&fcport->state) == FCS_ONLINE) {
drivers/scsi/qla2xxx/qla_mbx.c
4267
if (atomic_read(&fcport->state) == FCS_ONLINE)
drivers/scsi/qla2xxx/qla_mid.c
213
if (atomic_read(&base_vha->loop_state) == LOOP_DOWN ||
drivers/scsi/qla2xxx/qla_mid.c
214
atomic_read(&base_vha->loop_state) == LOOP_DEAD ||
drivers/scsi/qla2xxx/qla_mid.c
344
if (atomic_read(&vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_mid.c
348
if (!atomic_read(&vha->loop_down_timer))
drivers/scsi/qla2xxx/qla_mid.c
381
if (atomic_read(&vha->loop_state) == LOOP_READY) {
drivers/scsi/qla2xxx/qla_mid.c
389
atomic_read(&vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_mid.c
81
if (atomic_read(&vha->vref_count) == 0) {
drivers/scsi/qla2xxx/qla_mr.c
1108
if ((atomic_read(&vha->loop_down_timer) ||
drivers/scsi/qla2xxx/qla_mr.c
1153
atomic_read(&fcport->state) == FCS_ONLINE)
drivers/scsi/qla2xxx/qla_mr.c
1164
atomic_read(&fcport->state),
drivers/scsi/qla2xxx/qla_mr.c
1177
if (atomic_read(&fcport->state) != FCS_ONLINE) {
drivers/scsi/qla2xxx/qla_mr.c
1250
if (atomic_read(&fcport->state) == FCS_DEVICE_LOST) {
drivers/scsi/qla2xxx/qla_mr.c
1347
if (atomic_read(&vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_mr.c
1352
if (!atomic_read(&vha->loop_down_timer))
drivers/scsi/qla2xxx/qla_mr.c
1360
if (atomic_read(&fcport->state) == FCS_ONLINE)
drivers/scsi/qla2xxx/qla_mr.c
2484
atomic_read(&fcport->state));
drivers/scsi/qla2xxx/qla_mr.c
2486
if (atomic_read(&fcport->state) == FCS_ONLINE)
drivers/scsi/qla2xxx/qla_os.c
1016
if (atomic_read(&fcport->state) != FCS_ONLINE || fcport->deleted) {
drivers/scsi/qla2xxx/qla_os.c
1017
if (atomic_read(&fcport->state) == FCS_DEVICE_DEAD ||
drivers/scsi/qla2xxx/qla_os.c
1018
atomic_read(&base_vha->loop_state) == LOOP_DEAD) {
drivers/scsi/qla2xxx/qla_os.c
1021
atomic_read(&fcport->state),
drivers/scsi/qla2xxx/qla_os.c
1022
atomic_read(&base_vha->loop_state));
drivers/scsi/qla2xxx/qla_os.c
2844
return atomic_read(&vha->loop_state) == LOOP_READY;
drivers/scsi/qla2xxx/qla_os.c
3718
if (!atomic_read(&pdev->enable_cnt))
drivers/scsi/qla2xxx/qla_os.c
3882
if (!atomic_read(&pdev->enable_cnt)) {
drivers/scsi/qla2xxx/qla_os.c
4079
if (atomic_read(&fcport->state) == FCS_ONLINE &&
drivers/scsi/qla2xxx/qla_os.c
4089
if (atomic_read(&fcport->state) != FCS_DEVICE_DEAD)
drivers/scsi/qla2xxx/qla_os.c
5621
if (atomic_read(&fcport->state) != FCS_ONLINE &&
drivers/scsi/qla2xxx/qla_os.c
6809
if (!atomic_read(&pdev->enable_cnt)) {
drivers/scsi/qla2xxx/qla_os.c
7070
if (atomic_read(&base_vha->loop_state) == LOOP_READY) {
drivers/scsi/qla2xxx/qla_os.c
7129
atomic_read(&base_vha->loop_state) != LOOP_DOWN) {
drivers/scsi/qla2xxx/qla_os.c
7168
atomic_read(&base_vha->loop_state) == LOOP_READY) {
drivers/scsi/qla2xxx/qla_os.c
7263
!atomic_read(&vha->loop_down_timer) &&
drivers/scsi/qla2xxx/qla_os.c
7273
} while (!atomic_read(&vha->loop_down_timer) &&
drivers/scsi/qla2xxx/qla_os.c
7441
if (atomic_read(&vha->loop_down_timer) > 0 &&
drivers/scsi/qla2xxx/qla_os.c
7446
if (atomic_read(&vha->loop_down_timer) ==
drivers/scsi/qla2xxx/qla_os.c
7510
atomic_read(&vha->loop_down_timer));
drivers/scsi/qla2xxx/qla_os.c
7542
index = atomic_read(&ha->nvme_active_aen_cnt);
drivers/scsi/qla2xxx/qla_os.c
7547
ha->nvme_last_rptd_aen = atomic_read(&ha->nvme_active_aen_cnt);
drivers/scsi/qla2xxx/qla_os.c
7556
atomic_read(&ha->zio_threshold) != ha->last_zio_threshold &&
drivers/scsi/qla2xxx/qla_os.c
7561
ha->last_zio_threshold = atomic_read(&ha->zio_threshold);
drivers/scsi/qla2xxx/qla_os.c
7814
if (!atomic_read(&pdev->enable_cnt)) {
drivers/scsi/qla2xxx/qla_os.c
927
if (atomic_read(&fcport->state) != FCS_ONLINE || fcport->deleted) {
drivers/scsi/qla2xxx/qla_os.c
928
if (atomic_read(&fcport->state) == FCS_DEVICE_DEAD ||
drivers/scsi/qla2xxx/qla_os.c
929
atomic_read(&base_vha->loop_state) == LOOP_DEAD) {
drivers/scsi/qla2xxx/qla_os.c
932
atomic_read(&fcport->state),
drivers/scsi/qla2xxx/qla_os.c
933
atomic_read(&base_vha->loop_state));
drivers/scsi/qla2xxx/qla_target.c
7295
atomic_read(&vha->vha_tgt.qla_tgt->tgt_global_resets_count);
drivers/scsi/qla2xxx/qla_target.c
7324
atomic_read(&vha->vha_tgt.qla_tgt->tgt_global_resets_count)) {
drivers/scsi/qla2xxx/qla_target.c
7329
atomic_read(&vha->vha_tgt.
drivers/scsi/qla2xxx/tcm_qla2xxx.c
1078
if (atomic_read(&tpg->lport_tpg_enabled))
drivers/scsi/qla2xxx/tcm_qla2xxx.c
1084
if (!atomic_read(&tpg->lport_tpg_enabled))
drivers/scsi/qla2xxx/tcm_qla2xxx.c
1684
!atomic_read(&base_lport->tpg_1->lport_tpg_enabled)) {
drivers/scsi/qla2xxx/tcm_qla2xxx.c
935
if (atomic_read(&tpg->lport_tpg_enabled))
drivers/scsi/qla2xxx/tcm_qla2xxx.c
941
if (!atomic_read(&tpg->lport_tpg_enabled))
drivers/scsi/qla4xxx/ql4_os.c
4513
if (atomic_read(&ddb_entry->retry_relogin_timer) !=
drivers/scsi/qla4xxx/ql4_os.c
4515
if (atomic_read(&ddb_entry->retry_relogin_timer) ==
drivers/scsi/qla4xxx/ql4_os.c
4530
if (atomic_read(&ddb_entry->relogin_timer) &&
drivers/scsi/qla4xxx/ql4_os.c
4543
atomic_read(&ddb_entry->relogin_retry_count),
drivers/scsi/qla4xxx/ql4_os.c
8923
if (atomic_read(&other_pdev->enable_cnt)) {
drivers/scsi/qla4xxx/ql4_os.c
9694
if (atomic_read(&other_pdev->enable_cnt)) {
drivers/scsi/scsi.c
172
if (atomic_read(&shost->host_blocked))
drivers/scsi/scsi.c
174
if (atomic_read(&starget->target_blocked))
drivers/scsi/scsi.c
176
if (atomic_read(&sdev->device_blocked))
drivers/scsi/scsi_debug.c
2234
int stopped_state = atomic_read(&devip->stopped);
drivers/scsi/scsi_debug.c
2292
stopped_state = atomic_read(&devip->stopped);
drivers/scsi/scsi_debug.c
4614
atomic_read(&sdeb_inject_pending))) {
drivers/scsi/scsi_debug.c
4685
atomic_read(&sdeb_inject_pending))) {
drivers/scsi/scsi_debug.c
4932
atomic_read(&sdeb_inject_pending))) {
drivers/scsi/scsi_debug.c
5057
atomic_read(&sdeb_inject_pending))) {
drivers/scsi/scsi_debug.c
5218
atomic_read(&sdeb_inject_pending))) {
drivers/scsi/scsi_debug.c
7154
count = atomic_read(&sdebug_cmnd_count);
drivers/scsi/scsi_debug.c
7172
return (atomic_read(&sdebug_cmnd_count) % abs(sdebug_every_nth)) == 0;
drivers/scsi/scsi_debug.c
7238
if (atomic_read(&sdeb_inject_pending)) {
drivers/scsi/scsi_debug.c
7301
atomic_read(&sdeb_inject_pending))) {
drivers/scsi/scsi_debug.c
7590
atomic_read(&sdebug_cmnd_count),
drivers/scsi/scsi_debug.c
7591
atomic_read(&sdebug_completions),
drivers/scsi/scsi_debug.c
7592
"miss_cpus", atomic_read(&sdebug_miss_cpus),
drivers/scsi/scsi_debug.c
7593
atomic_read(&sdebug_a_tsf),
drivers/scsi/scsi_debug.c
7594
atomic_read(&sdeb_mq_poll_count));
drivers/scsi/scsi_debug.c
9004
if (0 == (atomic_read(&sdebug_cmnd_count) % abs(sdebug_every_nth))) {
drivers/scsi/scsi_debug.c
9024
stopped_state = atomic_read(&devip->stopped);
drivers/scsi/scsi_debug.c
9389
if (unlikely(inject_now && !atomic_read(&sdeb_inject_pending)))
drivers/scsi/scsi_debug.c
9459
atomic_read(&devip->stopped))) {
drivers/scsi/scsi_lib.c
1380
if (!atomic_read(&sdev->device_blocked))
drivers/scsi/scsi_lib.c
1424
if (atomic_read(&starget->target_blocked) > 0) {
drivers/scsi/scsi_lib.c
1463
if (atomic_read(&shost->host_blocked) > 0) {
drivers/scsi/scsi_lib.c
3256
if (WARN_ON_ONCE(atomic_read(&sdev->disk_events_disable_depth) <= 0))
drivers/scsi/scsi_lib.c
462
if (atomic_read(&sdev->device_blocked) > 0)
drivers/scsi/scsi_lib.c
470
if (atomic_read(&starget->target_busy) >= starget->can_queue)
drivers/scsi/scsi_lib.c
472
if (atomic_read(&starget->target_blocked) > 0)
drivers/scsi/scsi_lib.c
480
if (atomic_read(&shost->host_blocked) > 0)
drivers/scsi/scsi_lib.c
624
int old = atomic_read(&sdev->restarts);
drivers/scsi/scsi_sysfs.c
670
return snprintf(buf, 20, "%d\n", atomic_read(&sdev->device_blocked));
drivers/scsi/scsi_sysfs.c
950
unsigned long long count = atomic_read(&sdev->field); \
drivers/scsi/sg.c
1098
if (atomic_read(&sdp->detaching))
drivers/scsi/sg.c
1102
if (atomic_read(&sdp->detaching))
drivers/scsi/sg.c
1130
if (atomic_read(&sdp->detaching))
drivers/scsi/sg.c
1190
if (atomic_read(&sdp->detaching))
drivers/scsi/sg.c
1333
if (unlikely(atomic_read(&sdp->detaching)))
drivers/scsi/sg.c
2147
if (unlikely(atomic_read(&sfp->parentdp->detaching)))
drivers/scsi/sg.c
2176
if (atomic_read(&sdp->detaching)) {
drivers/scsi/sg.c
2301
else if (atomic_read(&sdp->detaching)) {
drivers/scsi/sg.c
2495
(atomic_read(&sdp->detaching)))
drivers/scsi/sg.c
2521
if (sdp && scsidp && (!atomic_read(&sdp->detaching)))
drivers/scsi/sg.c
255
(atomic_read(&sdp->detaching) ||
drivers/scsi/sg.c
261
if (atomic_read(&sdp->detaching))
drivers/scsi/sg.c
2615
if (atomic_read(&sdp->detaching))
drivers/scsi/sg.c
268
(atomic_read(&sdp->detaching) ||
drivers/scsi/sg.c
274
if (atomic_read(&sdp->detaching))
drivers/scsi/sg.c
473
(!busy && atomic_read(&sdp->detaching))));
drivers/scsi/sg.c
617
if (atomic_read(&sdp->detaching))
drivers/scsi/sg.c
810
if (atomic_read(&sdp->detaching)) {
drivers/scsi/sg.c
929
if (atomic_read(&sdp->detaching))
drivers/scsi/sg.c
978
if (atomic_read(&sdp->detaching))
drivers/scsi/smartpqi/smartpqi_init.c
1931
atomic_read(&device->scsi_cmds_outstanding[lun]));
drivers/scsi/smartpqi/smartpqi_init.c
3468
if (atomic_read(&io_request->refcount) == 0) {
drivers/scsi/smartpqi/smartpqi_init.c
3841
num_interrupts = atomic_read(&ctrl_info->num_interrupts);
drivers/scsi/smartpqi/smartpqi_init.c
3867
atomic_read(&ctrl_info->num_interrupts);
drivers/scsi/smartpqi/smartpqi_init.c
409
while (atomic_read(&ctrl_info->num_busy_threads) >
drivers/scsi/smartpqi/smartpqi_init.c
410
atomic_read(&ctrl_info->num_blocked_threads)) {
drivers/scsi/smartpqi/smartpqi_init.c
6277
while ((cmds_outstanding = atomic_read(&device->scsi_cmds_outstanding[lun])) > 0) {
drivers/scsi/smartpqi/smartpqi_init.c
6337
cmds_outstanding = atomic_read(&device->scsi_cmds_outstanding[lun]);
drivers/scsi/smartpqi/smartpqi_init.c
9155
if (atomic_read(&io_request->refcount) == 0)
drivers/scsi/smartpqi/smartpqi_init.c
9386
if (atomic_read(&io_request->refcount) == 0)
drivers/scsi/snic/snic_main.c
328
return atomic_read(&snic->state);
drivers/scsi/snic/snic_scsi.c
2323
while (atomic_read(&snic->ios_inflight))
drivers/scsi/sr.c
571
if (atomic_read(&cd->device->disk_events_disable_depth))
drivers/scsi/st.c
497
atomic64_add(atomic_read(&STp->stats->last_write_size)
drivers/scsi/st.c
503
atomic64_add(atomic_read(&STp->stats->last_write_size),
drivers/scsi/st.c
511
atomic64_add(atomic_read(&STp->stats->last_read_size)
drivers/scsi/st.c
517
atomic64_add(atomic_read(&STp->stats->last_read_size),
drivers/scsi/storvsc_drv.c
578
atomic_read(&dev->num_outstanding_req) == 0);
drivers/scsi/storvsc_drv.c
598
(atomic_read(&stor_device->num_outstanding_req) == 0))
drivers/sh/maple/maple.c
423
if (mdev->interval > 0 && atomic_read(&mdev->busy) == 0 &&
drivers/sh/maple/maple.c
436
if (atomic_read(&mdev->busy) == 0) {
drivers/soc/fsl/qbman/qman_test_stash.c
123
while (!atomic_read(&bstrap.started))
drivers/soc/ti/knav_dma.c
345
if (atomic_read(&chan->ref_count))
drivers/soc/ti/knav_dma.c
355
if (atomic_read(&dma->ref_count)) {
drivers/soc/ti/knav_dma.c
469
if (atomic_read(&chan->ref_count) >= 1) {
drivers/soc/ti/knav_qmss_acc.c
110
if (atomic_read(&acc->retrigger_count)) {
drivers/soc/ti/knav_qmss_acc.c
64
if (!enabled || atomic_read(&kq->desc_count) <= 0)
drivers/soc/ti/knav_qmss_queue.c
425
atomic_read(&inst->desc_count);
drivers/soc/ti/knav_qmss_queue.c
555
while (atomic_read(&qh->notifier_enabled) > 0)
drivers/soc/ti/knav_qmss_queue.c
95
if (atomic_read(&qh->notifier_enabled) <= 0)
drivers/spi/spi-pxa2xx.c
1116
if (atomic_read(&drv_data->dma_running))
drivers/spi/spi-rockchip.c
294
if (atomic_read(&rs->state) & TXDMA)
drivers/spi/spi-rockchip.c
297
if (atomic_read(&rs->state) & RXDMA)
drivers/spi/spi-rockchip.c
630
if (atomic_read(&rs->state) & RXDMA) {
drivers/spi/spi-rockchip.c
661
if (atomic_read(&rs->state) & RXDMA)
drivers/spi/spi-rockchip.c
663
if (atomic_read(&rs->state) & TXDMA)
drivers/spi/spi-uniphier.c
589
if (atomic_read(&priv->dma_busy) & SSI_DMA_TX_BUSY) {
drivers/spi/spi-uniphier.c
594
if (atomic_read(&priv->dma_busy) & SSI_DMA_RX_BUSY) {
drivers/staging/greybus/loopback.c
420
!atomic_read(&gb->outstanding_operations));
drivers/staging/greybus/loopback.c
829
(atomic_read(&gb->outstanding_operations) <
drivers/staging/media/atomisp/pci/atomisp_cmd.c
343
event.u.frame_sync.frame_sequence = atomic_read(&asd->sof_count);
drivers/staging/media/atomisp/pci/atomisp_cmd.c
472
if (atomic_read(&isp->asd.sequence) == atomic_read(&isp->asd.sequence_temp))
drivers/staging/media/atomisp/pci/atomisp_cmd.c
473
atomic_set(&isp->asd.sequence_temp, atomic_read(&isp->asd.sof_count));
drivers/staging/media/atomisp/pci/atomisp_cmd.c
480
atomic_set(&isp->asd.sequence, atomic_read(&isp->asd.sequence_temp));
drivers/staging/media/atomisp/pci/atomisp_cmd.c
585
frame->vb.sequence = atomic_read(&pipe->asd->sequence);
drivers/staging/media/ipu3/ipu3-v4l2.c
100
if (atomic_read(&imgu_sd->running_mode) == IPU3_RUNNING_MODE_VIDEO)
drivers/staging/media/ipu3/ipu3-v4l2.c
715
if (atomic_read(&imgu_sd->running_mode) == IPU3_RUNNING_MODE_VIDEO)
drivers/staging/media/ipu3/ipu3.c
580
if (!atomic_read(&imgu->qbuf_barrier))
drivers/staging/media/ipu7/ipu7-isys-csi2.c
480
u32 frame_sequence = atomic_read(&stream->sequence);
drivers/staging/media/ipu7/ipu7-isys-queue.c
653
return atomic_read(&stream->sequence) - 1;
drivers/staging/media/ipu7/ipu7-isys-queue.c
668
return atomic_read(&stream->sequence) - 1;
drivers/staging/media/ipu7/ipu7-isys-queue.c
714
if (atomic_read(&ib->str2mmio_flag)) {
drivers/staging/media/ipu7/ipu7-isys.c
998
atomic_read(&stream->sequence) - 1U;
drivers/staging/media/meson/vdec/esparser.c
319
atomic_read(&sess->esparser_queued_bufs) >= num_dst_bufs)
drivers/staging/media/meson/vdec/vdec_helpers.c
299
atomic_read(&sess->esparser_queued_bufs) <= 1) {
drivers/staging/media/meson/vdec/vdec_helpers.c
312
atomic_read(&sess->esparser_queued_bufs));
drivers/staging/octeon/ethernet-rx.c
439
if (!atomic_read(&oct_rx_ready))
drivers/staging/octeon/ethernet.c
121
if (!atomic_read(&cvm_oct_poll_queue_stopping))
drivers/staging/octeon/ethernet.c
137
if (!atomic_read(&cvm_oct_poll_queue_stopping))
drivers/staging/rtl8723bs/core/rtw_cmd.c
307
!atomic_read(&pcmdpriv->cmdthd_running)) /* com_thread not running */
drivers/staging/rtl8723bs/core/rtw_cmd.c
365
atomic_read(&adapter->cmdpriv.cmdthd_running) &&
drivers/staging/rtl8723bs/core/rtw_mlme.c
1670
return (atomic_read(&mlmepriv->set_scan_deny) != 0) ? true : false;
drivers/staging/rtl8723bs/include/drv_types.h
394
int df = atomic_read(&adapter_to_dvobj(padapter)->disable_func);
drivers/staging/rtl8723bs/include/drv_types.h
399
#define RTW_IS_FUNC_DISABLED(padapter, func_bit) (atomic_read(&adapter_to_dvobj(padapter)->disable_func) & (func_bit))
drivers/staging/rtl8723bs/os_dep/os_intfs.c
990
while (atomic_read(&pcmdpriv->cmdthd_running)) {
drivers/staging/vc04_services/bcm2835-audio/bcm2835-pcm.c
300
atomic_read(&alsa_stream->pos));
drivers/staging/vc04_services/bcm2835-audio/bcm2835-pcm.c
71
pos = atomic_read(&alsa_stream->pos);
drivers/target/iscsi/iscsi_target.c
2741
hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn));
drivers/target/iscsi/iscsi_target.c
2799
hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn));
drivers/target/iscsi/iscsi_target.c
2954
hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn));
drivers/target/iscsi/iscsi_target.c
2997
hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn));
drivers/target/iscsi/iscsi_target.c
3078
hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn));
drivers/target/iscsi/iscsi_target.c
3218
hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn));
drivers/target/iscsi/iscsi_target.c
3298
hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn));
drivers/target/iscsi/iscsi_target.c
3506
hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn));
drivers/target/iscsi/iscsi_target.c
3547
hdr->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn));
drivers/target/iscsi/iscsi_target.c
3754
if (atomic_read(&conn->check_immediate_queue))
drivers/target/iscsi/iscsi_target.c
3831
if (atomic_read(&conn->check_immediate_queue))
drivers/target/iscsi/iscsi_target.c
4281
if (atomic_read(&conn->connection_recovery)) {
drivers/target/iscsi/iscsi_target.c
4295
if (atomic_read(&conn->conn_logout_remove)) {
drivers/target/iscsi/iscsi_target.c
4316
if (atomic_read(&conn->connection_recovery))
drivers/target/iscsi/iscsi_target.c
4327
if (atomic_read(&conn->sleep_on_conn_wait_comp)) {
drivers/target/iscsi/iscsi_target.c
4340
if (atomic_read(&conn->connection_wait_rcfr)) {
drivers/target/iscsi/iscsi_target.c
4368
" %s\n", atomic_read(&sess->nconn),
drivers/target/iscsi/iscsi_target.c
4375
!atomic_read(&sess->session_logout))
drivers/target/iscsi/iscsi_target.c
4384
if (atomic_read(&sess->nconn)) {
drivers/target/iscsi/iscsi_target.c
4385
if (!atomic_read(&sess->session_reinstatement) &&
drivers/target/iscsi/iscsi_target.c
4386
!atomic_read(&sess->session_fall_back_to_erl0)) {
drivers/target/iscsi/iscsi_target.c
4390
if (!atomic_read(&sess->session_stop_active)) {
drivers/target/iscsi/iscsi_target.c
4413
if (!atomic_read(&sess->session_reinstatement) &&
drivers/target/iscsi/iscsi_target.c
4414
atomic_read(&sess->session_fall_back_to_erl0)) {
drivers/target/iscsi/iscsi_target.c
4420
} else if (atomic_read(&sess->session_logout)) {
drivers/target/iscsi/iscsi_target.c
4424
if (atomic_read(&sess->session_close)) {
drivers/target/iscsi/iscsi_target.c
4437
if (!atomic_read(&sess->session_continuation))
drivers/target/iscsi/iscsi_target.c
4440
if (atomic_read(&sess->session_close)) {
drivers/target/iscsi/iscsi_target.c
4461
if (atomic_read(&sess->nconn)) {
drivers/target/iscsi/iscsi_target.c
4463
" to %s\n", atomic_read(&sess->nconn),
drivers/target/iscsi/iscsi_target.c
4672
u16 conn_count = atomic_read(&sess->nconn);
drivers/target/iscsi/iscsi_target.c
4706
if (session_sleep && atomic_read(&sess->nconn)) {
drivers/target/iscsi/iscsi_target.c
4732
if (atomic_read(&sess->session_fall_back_to_erl0) ||
drivers/target/iscsi/iscsi_target.c
4733
atomic_read(&sess->session_logout) ||
drivers/target/iscsi/iscsi_target.c
4734
atomic_read(&sess->session_close) ||
drivers/target/iscsi/iscsi_target_configfs.c
1493
if (atomic_read(&sess->session_fall_back_to_erl0) ||
drivers/target/iscsi/iscsi_target_configfs.c
1494
atomic_read(&sess->session_logout) ||
drivers/target/iscsi/iscsi_target_configfs.c
1495
atomic_read(&sess->session_close) ||
drivers/target/iscsi/iscsi_target_configfs.c
579
max_cmd_sn = (u32) atomic_read(&sess->max_cmd_sn);
drivers/target/iscsi/iscsi_target_erl0.c
756
if (atomic_read(&sess->session_reinstatement)) {
drivers/target/iscsi/iscsi_target_erl0.c
826
if (atomic_read(&conn->connection_exit)) {
drivers/target/iscsi/iscsi_target_erl0.c
831
if (atomic_read(&conn->transport_failed)) {
drivers/target/iscsi/iscsi_target_erl0.c
850
if (atomic_read(&conn->connection_exit)) {
drivers/target/iscsi/iscsi_target_erl0.c
855
if (atomic_read(&conn->transport_failed)) {
drivers/target/iscsi/iscsi_target_erl0.c
860
if (atomic_read(&conn->connection_reinstatement)) {
drivers/target/iscsi/iscsi_target_erl0.c
897
!atomic_read(&sess->session_reinstatement) &&
drivers/target/iscsi/iscsi_target_erl0.c
898
!atomic_read(&sess->session_fall_back_to_erl0))
drivers/target/iscsi/iscsi_target_erl0.c
913
if (atomic_read(&conn->connection_exit)) {
drivers/target/iscsi/iscsi_target_login.c
117
if (atomic_read(&sess_p->session_fall_back_to_erl0) ||
drivers/target/iscsi/iscsi_target_login.c
118
atomic_read(&sess_p->session_logout) ||
drivers/target/iscsi/iscsi_target_login.c
119
atomic_read(&sess_p->session_close) ||
drivers/target/iscsi/iscsi_target_login.c
460
if (atomic_read(&sess_p->session_fall_back_to_erl0) ||
drivers/target/iscsi/iscsi_target_login.c
461
atomic_read(&sess_p->session_logout) ||
drivers/target/iscsi/iscsi_target_login.c
462
atomic_read(&sess_p->session_close) ||
drivers/target/iscsi/iscsi_target_login.c
575
if ((atomic_read(&sess->nconn) + 1) > sess->sess_ops->MaxConnections) {
drivers/target/iscsi/iscsi_target_login.c
691
" from node: %s\n", atomic_read(&sess->nconn),
drivers/target/iscsi/iscsi_target_login.c
739
" %s\n", atomic_read(&sess->nconn),
drivers/target/iscsi/iscsi_target_nego.c
332
login_rsp->max_cmdsn = cpu_to_be32((u32) atomic_read(&conn->sess->max_cmd_sn));
drivers/target/iscsi/iscsi_target_tmr.c
45
iscsi_sna_lte(be32_to_cpu(hdr->refcmdsn), (u32) atomic_read(&conn->sess->max_cmd_sn))) ?
drivers/target/iscsi/iscsi_target_util.c
252
max_cmdsn = atomic_read(&sess->max_cmd_sn);
drivers/target/iscsi/iscsi_target_util.c
510
if (!atomic_read(&cmd->immed_queue_count)) {
drivers/target/iscsi/iscsi_target_util.c
525
if (atomic_read(&cmd->immed_queue_count)) {
drivers/target/iscsi/iscsi_target_util.c
528
atomic_read(&cmd->immed_queue_count));
drivers/target/iscsi/iscsi_target_util.c
586
if (!atomic_read(&cmd->response_queue_count)) {
drivers/target/iscsi/iscsi_target_util.c
602
if (atomic_read(&cmd->response_queue_count)) {
drivers/target/iscsi/iscsi_target_util.c
605
atomic_read(&cmd->response_queue_count));
drivers/target/loopback/tcm_loop.c
768
if (atomic_read(&tpg->tl_tpg_port_count)) {
drivers/target/loopback/tcm_loop.c
770
atomic_read(&tpg->tl_tpg_port_count));
drivers/target/target_core_alua.c
1186
atomic_read(&lun->lun_tg_pt_secondary_offline),
drivers/target/target_core_alua.c
1479
while (atomic_read(&lu_gp->lu_gp_ref_cnt))
drivers/target/target_core_alua.c
1527
while (atomic_read(&lu_gp_mem->lu_gp_mem_ref_cnt))
drivers/target/target_core_alua.c
1734
while (atomic_read(&tg_pt_gp->tg_pt_gp_ref_cnt))
drivers/target/target_core_alua.c
1886
atomic_read(&lun->lun_tg_pt_secondary_offline) ?
drivers/target/target_core_alua.c
2152
atomic_read(&lun->lun_tg_pt_secondary_offline));
drivers/target/target_core_alua.c
663
if (atomic_read(&lun->lun_tg_pt_secondary_offline)) {
drivers/target/target_core_configfs.c
509
BUG_ON(atomic_read(&t->tf_access_cnt));
drivers/target/target_core_iblock.c
325
else if (atomic_read(&ibr->ib_bio_err_cnt))
drivers/target/target_core_pr.c
1305
while (atomic_read(&pr_reg->pr_res_holders) != 0) {
drivers/target/target_core_tpg.c
285
while (atomic_read(&nacl->acl_pr_ref_count) != 0)
drivers/target/target_core_tpg.c
332
if (sess->cmd_cnt && atomic_read(&sess->cmd_cnt->stopped))
drivers/target/target_core_tpg.c
577
while (atomic_read(&se_tpg->tpg_pr_ref_count) != 0)
drivers/target/target_core_transport.c
2537
if (atomic_read(&cmd->se_dev->dev_qf_count) != 0)
drivers/target/target_core_transport.c
263
if (!atomic_read(&cmd_cnt->stopped))
drivers/target/target_core_transport.c
3218
WARN_ON_ONCE(!atomic_read(&cmd_cnt->stopped));
drivers/target/target_core_user.c
1504
if (atomic_read(&global_page_count) > tcmu_global_max_pages &&
drivers/target/target_core_user.c
244
if (atomic_read(&global_page_count) > tcmu_global_max_pages)
drivers/target/target_core_user.c
3193
if (atomic_read(&global_page_count) <= tcmu_global_max_pages)
drivers/target/target_core_user.c
3259
if (atomic_read(&global_page_count) > tcmu_global_max_pages)
drivers/thermal/intel/therm_throt.c
580
if (!atomic_read(&therm_throt_en))
drivers/thermal/intel/therm_throt.c
703
return atomic_read(&therm_throt_en);
drivers/tty/mips_ejtag_fdc.c
494
atomic_read(&priv->xmit_total) ||
drivers/tty/serial/8250/8250_omap.c
1809
if (atomic_read(&priv->active))
drivers/tty/serial/atmel_serial.c
1273
if (!atomic_read(&atmel_port->tasklet_shutdown)) {
drivers/tty/serial/atmel_serial.c
290
if (!atomic_read(&atmel_port->tasklet_shutdown))
drivers/tty/serial/atmel_serial.c
578
if (!atomic_read(&atmel_port->tasklet_shutdown))
drivers/tty/serial/serial_core.c
3179
wait_event(state->remove_wait, !atomic_read(&state->refcount));
drivers/tty/tty_buffer.c
178
if (atomic_read(&port->buf.mem_used) > port->buf.mem_limit)
drivers/tty/tty_buffer.c
475
if (atomic_read(&buf->priority))
drivers/tty/tty_buffer.c
96
int space = port->buf.mem_limit - atomic_read(&port->buf.mem_used);
drivers/ufs/core/ufs-sysfs.c
567
return sysfs_emit(buf, "%u\n", atomic_read(&hba->dev_lvl_exception_count));
drivers/uio/uio.c
262
return sprintf(buf, "%u\n", (unsigned int)atomic_read(&idev->event));
drivers/uio/uio.c
504
listener->event_count = atomic_read(&idev->event);
drivers/uio/uio.c
575
if (listener->event_count != atomic_read(&idev->event))
drivers/uio/uio.c
605
event_count = atomic_read(&idev->event);
drivers/usb/atm/usbatm.c
747
atomic_read(&atm_dev->stats.aal5.tx),
drivers/usb/atm/usbatm.c
748
atomic_read(&atm_dev->stats.aal5.tx_err),
drivers/usb/atm/usbatm.c
749
atomic_read(&atm_dev->stats.aal5.rx),
drivers/usb/atm/usbatm.c
750
atomic_read(&atm_dev->stats.aal5.rx_err),
drivers/usb/atm/usbatm.c
751
atomic_read(&atm_dev->stats.aal5.rx_drop));
drivers/usb/class/usbtmc.c
2253
if (atomic_read(&file_data->srq_asserted))
drivers/usb/class/usbtmc.c
522
atomic_read(&data->iin_data_valid) != 0,
drivers/usb/class/usbtmc.c
633
atomic_read(&file_data->srq_asserted) != 0 ||
drivers/usb/class/usbtmc.c
634
atomic_read(&file_data->closing),
drivers/usb/class/usbtmc.c
640
if (atomic_read(&file_data->closing) || data->zombie)
drivers/usb/core/driver.c
1737
__func__, atomic_read(&udev->dev.power.usage_count),
drivers/usb/core/driver.c
1767
__func__, atomic_read(&udev->dev.power.usage_count),
drivers/usb/core/driver.c
1795
__func__, atomic_read(&intf->dev.power.usage_count),
drivers/usb/core/driver.c
1822
__func__, atomic_read(&intf->dev.power.usage_count));
drivers/usb/core/driver.c
1869
__func__, atomic_read(&intf->dev.power.usage_count),
drivers/usb/core/driver.c
1900
__func__, atomic_read(&intf->dev.power.usage_count),
drivers/usb/core/driver.c
1950
if (atomic_read(&intf->dev.power.usage_count) > 0)
drivers/usb/core/hcd.c
1133
if (unlikely(atomic_read(&urb->reject))) {
drivers/usb/core/hcd.c
1561
if (atomic_read(&urb->reject))
drivers/usb/core/hcd.c
1610
if (atomic_read(&urb->use_count) > 0) {
drivers/usb/core/hcd.c
1669
if (unlikely(atomic_read(&urb->reject)))
drivers/usb/core/sysfs.c
299
return sysfs_emit(buf, "%d\n", atomic_read(&udev->urbnum));
drivers/usb/core/urb.c
144
return atomic_read(&anchor->suspend_wakeups) == 0 &&
drivers/usb/core/urb.c
717
wait_event(usb_kill_urb_queue, atomic_read(&urb->use_count) == 0);
drivers/usb/core/urb.c
767
wait_event(usb_kill_urb_queue, atomic_read(&urb->use_count) == 0);
drivers/usb/gadget/function/f_ecm.c
377
if (atomic_read(&ecm->notify_count))
drivers/usb/gadget/function/f_ecm.c
921
if (atomic_read(&ecm->notify_count)) {
drivers/usb/gadget/function/f_ncm.c
1740
if (atomic_read(&ncm->notify_count)) {
drivers/usb/gadget/function/f_ncm.c
546
if (atomic_read(&ncm->notify_count))
drivers/usb/gadget/function/f_tcm.c
1917
if (atomic_read(&tpg->tpg_port_count)) {
drivers/usb/gadget/function/f_tcm.c
1920
pr_err(MSG, atomic_read(&tpg->tpg_port_count));
drivers/usb/gadget/function/f_tcm.c
442
luns = atomic_read(&fu->tpg->tpg_port_count);
drivers/usb/gadget/function/u_ether.c
1075
if (atomic_read(&dev->tx_qlen)) {
drivers/usb/gadget/function/uvc_video.c
275
trace_uvcg_video_queue(req, atomic_read(&video->queued));
drivers/usb/gadget/function/uvc_video.c
414
trace_uvcg_video_complete(req, atomic_read(&video->queued));
drivers/usb/gadget/function/uvc_video.c
448
(atomic_read(&video->queued) > UVCG_REQ_MAX_ZERO_COUNT)) {
drivers/usb/gadget/udc/lpc32xx_udc.c
2456
if (atomic_read(&udc->enabled_ep_cnt))
drivers/usb/gadget/udc/lpc32xx_udc.c
2458
(atomic_read(&udc->enabled_ep_cnt) == 0));
drivers/usb/gadget/udc/lpc32xx_udc.c
2929
if (atomic_read(&udc->enabled_ep_cnt))
drivers/usb/gadget/udc/lpc32xx_udc.c
2931
(atomic_read(&udc->enabled_ep_cnt) == 0));
drivers/usb/host/isp116x-hcd.c
487
if (atomic_read(&isp116x->atl_finishing))
drivers/usb/misc/iowarrior.c
175
intr_idx = atomic_read(&dev->intr_idx);
drivers/usb/misc/iowarrior.c
178
read_idx = atomic_read(&dev->read_idx);
drivers/usb/misc/iowarrior.c
265
read_idx = atomic_read(&dev->read_idx);
drivers/usb/misc/iowarrior.c
266
intr_idx = atomic_read(&dev->intr_idx);
drivers/usb/misc/iowarrior.c
350
} while (atomic_read(&dev->overflow_flag));
drivers/usb/misc/iowarrior.c
415
if (atomic_read(&dev->write_busy) == MAX_WRITES_IN_FLIGHT) {
drivers/usb/misc/iowarrior.c
422
(!dev->present || (atomic_read (&dev-> write_busy) < MAX_WRITES_IN_FLIGHT)));
drivers/usb/misc/iowarrior.c
470
retval, atomic_read(&dev->write_busy));
drivers/usb/misc/iowarrior.c
720
if (atomic_read(&dev->write_busy) < MAX_WRITES_IN_FLIGHT)
drivers/usb/serial/io_edgeport.c
2147
__func__, urb, atomic_read(&CmdUrbs));
drivers/usb/serial/io_edgeport.c
781
atomic_read(&CmdUrbs));
drivers/usb/serial/mos7720.c
416
status = atomic_read(&mos_parport->shadowDSR) & 0xf8;
drivers/usb/storage/realtek_cr.c
769
atomic_read(&us->pusb_intf->dev.power.usage_count));
drivers/usb/storage/realtek_cr.c
771
if (atomic_read(&us->pusb_intf->dev.power.usage_count) > 0) {
drivers/usb/storage/realtek_cr.c
778
atomic_read(&us->pusb_intf->dev.power.usage_count));
drivers/usb/storage/realtek_cr.c
811
atomic_read(&us->pusb_intf->dev.power.usage_count));
drivers/usb/storage/realtek_cr.c
813
if (atomic_read(&us->pusb_intf->dev.power.usage_count) <= 0) {
drivers/usb/usbip/vhci_rx.c
71
atomic_read(&vhci_hcd->seqnum));
drivers/vdpa/mlx5/core/mr.c
673
if (atomic_read(&mres->shutdown)) {
drivers/vdpa/octeon_ep/octep_vdpa_main.c
476
status = atomic_read(&mgmt_dev->status);
drivers/vdpa/octeon_ep/octep_vdpa_main.c
613
if (atomic_read(&mgmt_dev->status) >= OCTEP_VDPA_DEV_STATUS_READY) {
drivers/vdpa/octeon_ep/octep_vdpa_main.c
621
if (atomic_read(&mgmt_dev->status) != OCTEP_VDPA_DEV_STATUS_INIT) {
drivers/vfio/mdev/mdev_core.c
150
if (!atomic_read(&parent->available_instances)) {
drivers/vfio/mdev/mdev_sysfs.c
114
atomic_read(&mtype->parent->available_instances));
drivers/vhost/net.c
267
wait_event(ubufs->wait, !atomic_read(&ubufs->refcount));
drivers/vhost/vsock.c
403
val = atomic_read(&vsock->queued_replies);
drivers/video/fbdev/arcfb.c
194
int count = atomic_read(&par->ref_count);
drivers/video/fbdev/omap2/omapfb/omapfb-ioctl.c
226
if (atomic_read(&rg->map_count)) {
drivers/video/fbdev/omap2/omapfb/omapfb-main.c
1309
WARN_ON(atomic_read(&rg->map_count));
drivers/video/fbdev/omap2/omapfb/omapfb-main.c
664
WARN_ON(!atomic_read(&ofbi->region->lock_count));
drivers/video/fbdev/omap2/omapfb/omapfb-main.c
847
WARN_ON(!atomic_read(&ofbi->region->lock_count));
drivers/video/fbdev/omap2/omapfb/omapfb-main.c
941
WARN_ON(!atomic_read(&ofbi->region->lock_count));
drivers/video/fbdev/omap2/omapfb/omapfb-sysfs.c
450
if (atomic_read(&rg->map_count)) {
drivers/video/fbdev/ps3fb.c
514
if (atomic_read(&ps3fb.ext_flip)) {
drivers/video/fbdev/ps3fb.c
929
!atomic_read(&ps3fb.ext_flip)) {
drivers/video/fbdev/smscufx.c
866
if (!atomic_read(&dev->usb_active))
drivers/video/fbdev/smscufx.c
911
if (!atomic_read(&dev->usb_active))
drivers/video/fbdev/smscufx.c
936
if (!atomic_read(&dev->usb_active))
drivers/video/fbdev/udlfb.c
1390
atomic_read(&dlfb->bytes_rendered));
drivers/video/fbdev/udlfb.c
1398
atomic_read(&dlfb->bytes_identical));
drivers/video/fbdev/udlfb.c
1406
atomic_read(&dlfb->bytes_sent));
drivers/video/fbdev/udlfb.c
1414
atomic_read(&dlfb->cpu_kcycles_used));
drivers/video/fbdev/udlfb.c
290
if (!atomic_read(&dlfb->usb_active))
drivers/video/fbdev/udlfb.c
626
if (!atomic_read(&dlfb->usb_active)) {
drivers/video/fbdev/udlfb.c
742
if (!atomic_read(&dlfb->usb_active))
drivers/video/fbdev/udlfb.c
819
if (!atomic_read(&dlfb->usb_active))
drivers/video/fbdev/uvesafb.c
1165
int cnt = atomic_read(&par->ref_count);
drivers/video/fbdev/uvesafb.c
1186
int cnt = atomic_read(&par->ref_count);
drivers/virt/coco/guest/report.c
481
if (atomic_read(&provider.count)) {
drivers/virt/coco/guest/report.c
497
if (atomic_read(&provider.count))
drivers/virt/nitro_enclaves/ne_pci_dev.c
91
atomic_read(&ne_pci_dev->cmd_reply_avail) != 0,
drivers/virtio/virtio_mem.c
1087
if (!atomic_read(&vm->wq_active) &&
drivers/virtio/virtio_mem.c
1243
if (atomic_read(&vm->config_changed))
drivers/virtio/virtio_mem.c
1430
if (atomic_read(&vm->config_changed))
drivers/virtio/virtio_mem.c
1468
if (atomic_read(&vm->config_changed))
drivers/virtio/virtio_mem.c
2440
if (atomic_read(&vm->config_changed)) {
drivers/w1/masters/amd_axi_w1.c
83
atomic_read(&amd_axi_w1_local->flag) != 0,
drivers/w1/slaves/w1_therm.c
1051
refcnt = atomic_read(THERM_REFCNT(sl->family_data));
drivers/w1/w1_family.c
73
while (atomic_read(&fent->refcnt)) {
drivers/w1/w1_family.c
75
fent->fid, atomic_read(&fent->refcnt));
drivers/w1/w1_int.c
193
while (atomic_read(&dev->refcnt)) {
drivers/w1/w1_int.c
195
dev->name, atomic_read(&dev->refcnt));
drivers/watchdog/pcwd_usb.c
249
if (atomic_read(&usb_pcwd->cmd_received))
drivers/xen/events/events_base.c
1829
unsigned int curch = atomic_read(&channels_on_cpu[cpu]);
drivers/xen/gntdev.c
474
if (atomic_read(&map->live_grants) == 0)
drivers/xen/pvcalls-back.c
238
while (atomic_read(&map->io) > 0) {
drivers/xen/pvcalls-back.c
239
if (atomic_read(&map->release) > 0) {
drivers/xen/pvcalls-back.c
244
if (atomic_read(&map->read) > 0 &&
drivers/xen/pvcalls-back.c
247
if (atomic_read(&map->write) > 0 &&
drivers/xen/pvcalls-back.c
251
if (atomic_read(&map->eoi) > 0 && !atomic_read(&map->write)) {
drivers/xen/pvcalls-front.c
1073
while (atomic_read(&map->refcount) > 1)
drivers/xen/pvcalls-front.c
1081
while (atomic_read(&map->refcount) > 1)
drivers/xen/pvcalls-front.c
1126
while (atomic_read(&pvcalls_refcount) > 0)
drivers/xen/xen-scsiback.c
607
atomic_read(&info->nr_unreplied_reqs) == 0);
drivers/xen/xenbus/xenbus_probe.c
221
return sprintf(buf, "%d\n", atomic_read(&dev->name)); \
fs/affs/file.c
28
inode->i_ino, atomic_read(&AFFS_I(inode)->i_opencnt));
fs/affs/file.c
37
inode->i_ino, atomic_read(&AFFS_I(inode)->i_opencnt));
fs/afs/callback.c
43
if (vnode->cb_v_check != atomic_read(&volume->cb_v_break)) {
fs/afs/callback.c
65
trace_afs_cb_v_break(se->volume->vid, atomic_read(&se->volume->cb_v_break),
fs/afs/callback.c
84
vnode->cb_v_check = atomic_read(&vnode->volume->cb_v_break);
fs/afs/callback.c
92
atomic_read(&vnode->cb_nr_mmap))
fs/afs/cell.c
575
trace_afs_cell(cell->debug_id, r, atomic_read(&cell->active), afs_cell_trace_free);
fs/afs/cell.c
606
trace_afs_cell(cell->debug_id, r + 1, atomic_read(&cell->active), reason);
fs/afs/cell.c
621
a = atomic_read(&cell->active);
fs/afs/cell.c
625
a = atomic_read(&cell->active);
fs/afs/cell.c
687
a = atomic_read(&cell->active);
fs/afs/cell.c
772
if (atomic_read(&cell->active))
fs/afs/cell.c
853
if (atomic_read(&cell->active)) {
fs/afs/cell.c
922
!atomic_read(&net->cells_outstanding));
fs/afs/cmservice.c
177
atomic_read(&call->server->active),
fs/afs/fs_operation.c
251
op->cb_v_break = atomic_read(&op->volume->cb_v_break);
fs/afs/fs_operation.c
41
op->cb_v_break = atomic_read(&volume->cb_v_break);
fs/afs/fs_probe.c
327
if (atomic_read(&estate->nr_probing))
fs/afs/fs_probe.c
348
if (atomic_read(&estate->nr_probing))
fs/afs/fs_probe.c
502
if (atomic_read(&estate->nr_probing) == 0)
fs/afs/fs_probe.c
512
atomic_read(&estate->nr_probing) == 0 ||
fs/afs/inode.c
311
if (vnode->cb_ro_snapshot == atomic_read(&vnode->volume->cb_ro_snapshot) &&
fs/afs/inode.c
645
vnode->cb_v_check = atomic_read(&as->volume->cb_v_break);
fs/afs/internal.h
1044
atomic_read(&vnode->volume->cb_ro_snapshot) +
fs/afs/internal.h
1045
atomic_read(&vnode->volume->cb_scrub));
fs/afs/internal.h
1403
atomic_read(&call->net->nr_outstanding_calls),
fs/afs/internal.h
1413
atomic_read(&call->net->nr_outstanding_calls),
fs/afs/internal.h
1556
int a = atomic_read(&server->active);
fs/afs/proc.c
374
atomic_read(&vlserver->probe_outstanding));
fs/afs/proc.c
451
atomic_read(&server->active),
fs/afs/proc.c
463
estate->probe_seq, atomic_read(&estate->nr_probing),
fs/afs/proc.c
51
atomic_read(&cell->active),
fs/afs/proc.c
665
atomic_read(&net->n_lookup),
fs/afs/proc.c
666
atomic_read(&net->n_reval),
fs/afs/proc.c
667
atomic_read(&net->n_inval),
fs/afs/proc.c
668
atomic_read(&net->n_relpg));
fs/afs/proc.c
671
atomic_read(&net->n_read_dir));
fs/afs/proc.c
674
atomic_read(&net->n_dir_cr),
fs/afs/proc.c
675
atomic_read(&net->n_dir_rm));
fs/afs/proc.c
678
atomic_read(&net->n_fetches),
fs/afs/proc.c
681
atomic_read(&net->n_stores),
fs/afs/rotate.c
595
vnode->cb_v_check = atomic_read(&vnode->volume->cb_v_break);
fs/afs/rxrpc.c
138
_debug("outstanding %u", atomic_read(&net->nr_outstanding_calls));
fs/afs/rxrpc.c
140
!atomic_read(&net->nr_outstanding_calls));
fs/afs/rxrpc.c
204
o = atomic_read(&net->nr_outstanding_calls);
fs/afs/rxrpc.c
225
o = atomic_read(&net->nr_outstanding_calls);
fs/afs/rxrpc.c
251
o = atomic_read(&net->nr_outstanding_calls);
fs/afs/rxrpc.c
353
atomic_read(&call->net->nr_outstanding_calls));
fs/afs/rxrpc.c
702
atomic_read(&call->net->nr_outstanding_calls),
fs/afs/server.c
303
a = atomic_read(&server->active);
fs/afs/server.c
342
a = atomic_read(&server->active);
fs/afs/server.c
397
atomic_read(&server->active), afs_server_trace_free);
fs/afs/server.c
426
if (atomic_read(&server->active))
fs/afs/server.c
459
_debug("expire %pU %u", &server->uuid, atomic_read(&server->active));
fs/afs/server.c
535
!atomic_read(&net->servers_outstanding));
fs/afs/server.c
553
atomic_read(&server->active),
fs/afs/super.c
138
if (atomic_read(&afs_count_active_inodes) != 0) {
fs/afs/super.c
140
atomic_read(&afs_count_active_inodes));
fs/afs/validation.c
130
if (atomic_read(&volume->cb_v_check) != atomic_read(&volume->cb_v_break))
fs/afs/validation.c
138
else if (vnode->cb_ro_snapshot != atomic_read(&volume->cb_ro_snapshot))
fs/afs/validation.c
140
else if (vnode->cb_scrub != atomic_read(&volume->cb_scrub))
fs/afs/validation.c
243
snap = atomic_read(&volume->cb_ro_snapshot);
fs/afs/validation.c
334
unsigned int cb_v_break = atomic_read(&volume->cb_v_break);
fs/afs/validation.c
335
unsigned int cb_v_check = atomic_read(&volume->cb_v_check);
fs/afs/validation.c
421
atomic_read(&volume->cb_v_check) != atomic_read(&volume->cb_v_break)) {
fs/afs/validation.c
428
cb_ro_snapshot = atomic_read(&volume->cb_ro_snapshot);
fs/afs/validation.c
429
cb_scrub = atomic_read(&volume->cb_scrub);
fs/afs/validation.c
437
atomic_read(&volume->cb_v_check) != atomic_read(&volume->cb_v_break) ||
fs/afs/validation.c
458
cb_ro_snapshot = atomic_read(&volume->cb_ro_snapshot);
fs/afs/validation.c
459
cb_scrub = atomic_read(&volume->cb_scrub);
fs/aio.c
1013
int avail = atomic_read(&ctx->reqs_available);
fs/aio.c
1356
if (unlikely(atomic_read(&ctx->dead)))
fs/aio.c
425
if (!atomic_read(&ctx->dead)) {
fs/btrfs/async-thread.c
192
pending = atomic_read(&wq->pending);
fs/btrfs/async-thread.c
71
return atomic_read(&wq->pending) > wq->thresh * 2;
fs/btrfs/bio.c
449
if (atomic_read(&bioc->error) > bioc->max_errors)
fs/btrfs/block-group.c
1341
remove_map = (atomic_read(&block_group->frozen) == 0);
fs/btrfs/block-group.c
375
wait_var_event(&bg->nocow_writers, !atomic_read(&bg->nocow_writers));
fs/btrfs/block-group.c
3806
atomic_read(&cur_trans->num_writers) == 1);
fs/btrfs/block-group.c
412
wait_var_event(&bg->reservations, !atomic_read(&bg->reservations));
fs/btrfs/block-group.c
467
progress = atomic_read(&caching_ctl->progress);
fs/btrfs/block-group.c
470
(progress != atomic_read(&caching_ctl->progress) &&
fs/btrfs/compression.c
849
if (atomic_read(total_ws) > cpus) {
fs/btrfs/compression.c
854
if (atomic_read(total_ws) > cpus && !*free_ws)
fs/btrfs/compression.c
885
if (atomic_read(total_ws) == 0) {
fs/btrfs/delayed-inode.c
1337
if (atomic_read(&delayed_root->items) <
fs/btrfs/delayed-inode.c
1413
int val = atomic_read(&delayed_root->items_seq);
fs/btrfs/delayed-inode.c
1418
if (atomic_read(&delayed_root->items) < BTRFS_DELAYED_BACKGROUND)
fs/btrfs/delayed-inode.c
1428
if ((atomic_read(&delayed_root->items) < BTRFS_DELAYED_BACKGROUND) ||
fs/btrfs/delayed-inode.c
1432
if (atomic_read(&delayed_root->items) >= BTRFS_DELAYED_WRITEBACK) {
fs/btrfs/delayed-inode.c
1436
seq = atomic_read(&delayed_root->items_seq);
fs/btrfs/discard.c
632
discardable_extents = atomic_read(&discard_ctl->discardable_extents);
fs/btrfs/disk-io.c
3837
return atomic_read(&device->sb_write_errors) < i ? 0 : -1;
fs/btrfs/disk-io.c
3882
errors += atomic_read(&device->sb_write_errors);
fs/btrfs/disk-io.c
4269
(atomic_read(&fs_info->defrag_running) == 0));
fs/btrfs/disk-io.c
4870
atomic_read(&t->num_writers) == 0);
fs/btrfs/disk-io.c
522
BUG_ON(!atomic_read(&eb->refs));
fs/btrfs/disk-io.c
547
ASSERT(atomic_read(&eb->refs));
fs/btrfs/extent_io.c
1949
atomic_read(&eb->writeback_inhibitors) == 0) &&
fs/btrfs/extent_io.c
2894
if (atomic_read(&bfs->eb_refs))
fs/btrfs/inode.c
10513
atomic_read(&fs_info->scrubs_running) ?
fs/btrfs/inode.c
1831
atomic_read(&root->snapshot_force_cow))
fs/btrfs/inode.c
3672
atomic_read(&fs_info->nr_delayed_iputs) == 0);
fs/btrfs/inode.c
4790
if (atomic_read(&dest->nr_swapfiles)) {
fs/btrfs/ioctl.c
3299
if (atomic_read(&fs_info->balance_pause_req))
fs/btrfs/ioctl.c
3301
if (atomic_read(&fs_info->balance_cancel_req))
fs/btrfs/ioctl.c
730
if (atomic_read(&root->nr_swapfiles)) {
fs/btrfs/locking.c
326
if (atomic_read(&lock->readers))
fs/btrfs/locking.c
333
if (atomic_read(&lock->readers)) {
fs/btrfs/locking.c
346
wait_event(lock->pending_writers, !atomic_read(&lock->readers));
fs/btrfs/locking.c
372
wait_event(lock->pending_readers, atomic_read(&lock->writers) == 0);
fs/btrfs/raid56.c
1746
wait_event(rbio->io_wait, atomic_read(&rbio->stripes_pending) == 0);
fs/btrfs/raid56.c
2524
wait_event(rbio->io_wait, atomic_read(&rbio->stripes_pending) == 0);
fs/btrfs/raid56.c
2985
wait_event(rbio->io_wait, atomic_read(&rbio->stripes_pending) == 0);
fs/btrfs/relocation.c
2758
return atomic_read(&fs_info->balance_cancel_req) ||
fs/btrfs/relocation.c
2759
atomic_read(&fs_info->reloc_cancel_req) ||
fs/btrfs/relocation.c
3780
if (atomic_read(&fs_info->reloc_cancel_req) > 0) {
fs/btrfs/relocation.c
3798
if (atomic_read(&fs_info->reloc_cancel_req) > 0)
fs/btrfs/scrub.c
2074
if (atomic_read(&fs_info->scrub_cancel_req) ||
fs/btrfs/scrub.c
2075
atomic_read(&sctx->cancel_req))
fs/btrfs/scrub.c
2173
if (atomic_read(&fs_info->scrub_pause_req))
fs/btrfs/scrub.c
2318
if (atomic_read(&fs_info->scrub_pause_req))
fs/btrfs/scrub.c
3253
while (atomic_read(&fs_info->scrubs_paused) !=
fs/btrfs/scrub.c
3254
atomic_read(&fs_info->scrubs_running)) {
fs/btrfs/scrub.c
3257
atomic_read(&fs_info->scrubs_paused) ==
fs/btrfs/scrub.c
3258
atomic_read(&fs_info->scrubs_running));
fs/btrfs/scrub.c
3273
if (!atomic_read(&fs_info->scrubs_running)) {
fs/btrfs/scrub.c
3279
while (atomic_read(&fs_info->scrubs_running)) {
fs/btrfs/scrub.c
3282
atomic_read(&fs_info->scrubs_running) == 0);
fs/btrfs/scrub.c
393
wait_event(stripe->io_wait, atomic_read(&stripe->pending_io) == 0);
fs/btrfs/scrub.c
400
while (atomic_read(&fs_info->scrub_pause_req)) {
fs/btrfs/scrub.c
403
atomic_read(&fs_info->scrub_pause_req) == 0);
fs/btrfs/scrub.c
962
ASSERT(atomic_read(&stripe->pending_io) == 0,
fs/btrfs/scrub.c
963
"atomic_read(&stripe->pending_io)=%d", atomic_read(&stripe->pending_io));
fs/btrfs/space-info.c
796
async_pages = atomic_read(&fs_info->async_delalloc_pages);
fs/btrfs/space-info.c
811
atomic_read(&fs_info->async_delalloc_pages) <=
fs/btrfs/subpage.c
173
ASSERT(atomic_read(&bfs->eb_refs));
fs/btrfs/subpage.c
244
if (atomic_read(&bfs->nr_locked) == 0) {
fs/btrfs/subpage.c
253
ASSERT(atomic_read(&bfs->nr_locked) >= cleared,
fs/btrfs/subpage.c
255
atomic_read(&bfs->nr_locked), cleared);
fs/btrfs/subpage.c
296
if (atomic_read(&bfs->nr_locked) == 0) {
fs/btrfs/subpage.c
323
if (atomic_read(&bfs->nr_locked) == 0) {
fs/btrfs/subpage.c
334
ASSERT(atomic_read(&bfs->nr_locked) >= cleared,
fs/btrfs/subpage.c
336
atomic_read(&bfs->nr_locked), cleared);
fs/btrfs/super.c
1265
(atomic_read(&fs_info->defrag_running) == 0));
fs/btrfs/sysfs.c
489
atomic_read(&fs_info->discard_ctl.discardable_extents));
fs/btrfs/transaction.c
1106
WARN_ON(atomic_read(&cur_trans->num_writers) < 1);
fs/btrfs/transaction.c
1519
ASSERT(atomic_read(&root->log_writers) == 0,
fs/btrfs/transaction.c
1521
atomic_read(&root->log_writers));
fs/btrfs/transaction.c
1522
ASSERT(atomic_read(&root->log_commit[0]) == 0,
fs/btrfs/transaction.c
1524
atomic_read(&root->log_commit[0]));
fs/btrfs/transaction.c
1525
ASSERT(atomic_read(&root->log_commit[1]) == 0,
fs/btrfs/transaction.c
1527
atomic_read(&root->log_commit[1]));
fs/btrfs/transaction.c
2089
atomic_read(&cur_trans->num_writers) == 1);
fs/btrfs/transaction.c
2406
atomic_read(&cur_trans->pending_ordered) == 0);
fs/btrfs/transaction.c
2427
atomic_read(&cur_trans->num_writers) == 1);
fs/btrfs/transaction.c
245
return atomic_read(&trans->num_extwriters);
fs/btrfs/tree-log.c
3210
atomic_read(&root->log_commit[index])))
fs/btrfs/tree-log.c
3227
if (!atomic_read(&root->log_writers))
fs/btrfs/tree-log.c
326
if (zoned && atomic_read(&root->log_commit[index])) {
fs/btrfs/tree-log.c
3343
if (atomic_read(&root->log_commit[index1])) {
fs/btrfs/tree-log.c
3353
if (atomic_read(&root->log_commit[(index1 + 1) % 2]))
fs/btrfs/tree-log.c
3357
int batch = atomic_read(&root->log_batch);
fs/btrfs/tree-log.c
3366
if (batch == atomic_read(&root->log_batch))
fs/btrfs/tree-log.c
3479
if (atomic_read(&log_root_tree->log_commit[index2])) {
fs/btrfs/tree-log.c
3494
if (atomic_read(&log_root_tree->log_commit[(index2 + 1) % 2])) {
fs/btrfs/tree-log.c
389
if (zoned && atomic_read(&root->log_commit[index])) {
fs/btrfs/volumes.c
4370
if ((!counting && atomic_read(&fs_info->balance_pause_req)) ||
fs/btrfs/volumes.c
4371
atomic_read(&fs_info->balance_cancel_req)) {
fs/btrfs/volumes.c
4775
atomic_read(&fs_info->balance_pause_req) ||
fs/btrfs/volumes.c
4899
if (ret == -ECANCELED && atomic_read(&fs_info->balance_pause_req)) {
fs/btrfs/volumes.c
8274
atomic_read(&device->dev_stats_ccnt) != 0) {
fs/btrfs/volumes.c
8286
stats_cnt = atomic_read(&device->dev_stats_ccnt);
fs/btrfs/volumes.h
833
return atomic_read(dev->dev_stat_values + index);
fs/btrfs/zoned.c
2430
if (atomic_read(&zinfo->active_zones_left) <= reserved) {
fs/btrfs/zoned.c
2664
ret = (atomic_read(&zinfo->active_zones_left) >= (1 + reserved));
fs/btrfs/zoned.c
2667
ret = (atomic_read(&zinfo->active_zones_left) >= (2 + reserved));
fs/buffer.c
1130
if (atomic_read(&bh->b_count)) {
fs/buffer.c
2759
WARN_ON(atomic_read(&bh->b_count) < 1);
fs/buffer.c
2792
return atomic_read(&bh->b_count) |
fs/cachefiles/cache.c
330
if (atomic_read(&volume->vcookie->n_accesses) == 0)
fs/ceph/caps.c
1170
cap->cap_gen == atomic_read(&session->s_cap_gen))) {
fs/ceph/caps.c
3502
bool was_stale = cap->cap_gen < atomic_read(&session->s_cap_gen);
fs/ceph/caps.c
3565
cap->cap_gen = atomic_read(&session->s_cap_gen);
fs/ceph/caps.c
5081
if (atomic_read(&ci->i_filelock_ref) > 0) {
fs/ceph/caps.c
671
gen = atomic_read(&session->s_cap_gen);
fs/ceph/caps.c
795
gen = atomic_read(&cap->session->s_cap_gen);
fs/ceph/dir.c
1320
if (atomic_read(&ci->i_shared_gen) != di->lease_shared_gen ||
fs/ceph/dir.c
1837
gen = atomic_read(&session->s_cap_gen);
fs/ceph/dir.c
1910
if (atomic_read(&ci->i_shared_gen) == di->lease_shared_gen &&
fs/ceph/dir.c
1938
shared_gen = atomic_read(&ci->i_shared_gen);
fs/ceph/dir.c
1953
ceph_vinop(dir), (unsigned)atomic_read(&ci->i_shared_gen),
fs/ceph/dir.c
2139
di->lease_shared_gen == atomic_read(&dir_ci->i_shared_gen))
fs/ceph/dir.c
373
int shared_gen = atomic_read(&ci->i_shared_gen);
fs/ceph/dir.c
844
di->lease_shared_gen = atomic_read(&ci->i_shared_gen);
fs/ceph/file.c
532
di->lease_shared_gen = atomic_read(&ci->i_shared_gen);
fs/ceph/file.c
534
} else if (atomic_read(&ci->i_shared_gen) !=
fs/ceph/inode.c
1402
di->lease_shared_gen = atomic_read(&ceph_inode(dir)->i_shared_gen);
fs/ceph/inode.c
1408
if (di->lease_gen == atomic_read(&session->s_cap_gen) &&
fs/ceph/inode.c
1419
di->lease_gen = atomic_read(&session->s_cap_gen);
fs/ceph/inode.c
2075
atomic_read(&ci->i_shared_gen)) {
fs/ceph/mds_client.c
1992
if (cap && cap->cap_gen < atomic_read(&cap->session->s_cap_gen))
fs/ceph/mds_client.c
2199
if (atomic_read(&ci->i_filelock_ref) > 0)
fs/ceph/mds_client.c
4678
cap->cap_gen = atomic_read(&cap->session->s_cap_gen);
fs/ceph/mds_client.c
5312
di->lease_gen == atomic_read(&session->s_cap_gen) &&
fs/ceph/mds_client.c
5974
return atomic_read(&mdsc->num_sessions) <= skipped;
fs/ceph/snap.c
1337
if (WARN_ON_ONCE(atomic_read(&sm->ref))) {
fs/ceph/super.c
1568
wait = !!atomic_read(&mdsc->stopping_blockers);
fs/ceph/super.c
1571
if (wait && atomic_read(&mdsc->stopping_blockers)) {
fs/coda/cache.c
36
cii->c_cached_epoch = atomic_read(&permission_epoch);
fs/coda/cache.c
50
cii->c_cached_epoch = atomic_read(&permission_epoch) - 1;
fs/coda/cache.c
70
cii->c_cached_epoch == atomic_read(&permission_epoch);
fs/configfs/configfs_internal.h
147
WARN_ON(!atomic_read(&sd->s_count));
fs/configfs/configfs_internal.h
155
WARN_ON(!atomic_read(&sd->s_count));
fs/debugfs/file.c
902
*val = atomic_read((atomic_t *)data);
fs/dlm/lockspace.c
298
wait_event(ls->ls_count_wait, atomic_read(&ls->ls_count) == 0);
fs/dlm/lockspace.c
301
if (atomic_read(&ls->ls_count) != 0) {
fs/dlm/lowcomms.c
1524
wait_event(processqueue_wq, !atomic_read(&processqueue_count));
fs/dlm/midcomms.c
1156
WARN_ON_ONCE(atomic_read(&node->send_queue_cnt));
fs/dlm/midcomms.c
266
return atomic_read(&node->send_queue_cnt);
fs/dlm/midcomms.c
408
oval = atomic_read(&node->ulp_delivered);
fs/dlm/midcomms.c
419
dlm_send_ack(node->nodeid, atomic_read(&node->seq_next));
fs/dlm/midcomms.c
526
oval = atomic_read(&node->seq_next);
fs/dlm/requestqueue.c
113
if (!atomic_read(&ls->ls_count))
fs/dlm/user.c
897
return atomic_read(&dlm_monitor_opened) ? 1 : 0;
fs/ecryptfs/miscdev.c
493
BUG_ON(atomic_read(&ecryptfs_num_miscdev_opens) != 0);
fs/erofs/data.c
270
orig = atomic_read((atomic_t *)&folio->private);
fs/ext2/xattr.c
225
atomic_read(&(bh->b_count)), le32_to_cpu(HDR(bh)->h_refcount));
fs/ext2/xattr.c
311
atomic_read(&(bh->b_count)), le32_to_cpu(HDR(bh)->h_refcount));
fs/ext2/xattr.c
450
atomic_read(&(bh->b_count)),
fs/ext2/xattr.c
848
ea_bdebug(bh, "b_count=%d", atomic_read(&(bh->b_count)));
fs/ext2/xattr.c
967
atomic_read(&(bh->b_count)));
fs/ext4/balloc.c
706
if (atomic_read(&sbi->s_mb_free_pending) == 0) {
fs/ext4/ext4.h
3574
return (atomic_read(&sbi->s_lock_busy) > EXT4_CONTENTION_THRESHOLD);
fs/ext4/fast_commit.c
1206
int subtid = atomic_read(&sbi->s_fc_subtid);
fs/ext4/fast_commit.c
1223
if (atomic_read(&sbi->s_fc_subtid) <= subtid &&
fs/ext4/file.c
175
(atomic_read(&inode->i_writecount) == 1) &&
fs/ext4/ialloc.c
383
stats->free_inodes = atomic_read(&fg->free_inodes);
fs/ext4/ialloc.c
385
stats->used_dirs = atomic_read(&fg->used_dirs);
fs/ext4/mballoc.c
3220
seq_printf(seq, "\treqs: %u\n", atomic_read(&sbi->s_bal_reqs));
fs/ext4/mballoc.c
3221
seq_printf(seq, "\tsuccess: %u\n", atomic_read(&sbi->s_bal_success));
fs/ext4/mballoc.c
3224
atomic_read(&sbi->s_bal_groups_scanned));
fs/ext4/mballoc.c
3235
atomic_read(&sbi->s_bal_cX_ex_scanned[CR_POWER2_ALIGNED]));
fs/ext4/mballoc.c
3247
atomic_read(&sbi->s_bal_cX_ex_scanned[CR_GOAL_LEN_FAST]));
fs/ext4/mballoc.c
3260
atomic_read(&sbi->s_bal_cX_ex_scanned[CR_BEST_AVAIL_LEN]));
fs/ext4/mballoc.c
3272
atomic_read(&sbi->s_bal_cX_ex_scanned[CR_GOAL_LEN_SLOW]));
fs/ext4/mballoc.c
3284
atomic_read(&sbi->s_bal_cX_ex_scanned[CR_ANY_FREE]));
fs/ext4/mballoc.c
3290
atomic_read(&sbi->s_bal_ex_scanned));
fs/ext4/mballoc.c
3291
seq_printf(seq, "\t\tgoal_hits: %u\n", atomic_read(&sbi->s_bal_goals));
fs/ext4/mballoc.c
3293
atomic_read(&sbi->s_bal_stream_goals));
fs/ext4/mballoc.c
3295
atomic_read(&sbi->s_bal_len_goals));
fs/ext4/mballoc.c
3296
seq_printf(seq, "\t\t2^n_hits: %u\n", atomic_read(&sbi->s_bal_2orders));
fs/ext4/mballoc.c
3297
seq_printf(seq, "\t\tbreaks: %u\n", atomic_read(&sbi->s_bal_breaks));
fs/ext4/mballoc.c
3298
seq_printf(seq, "\t\tlost: %u\n", atomic_read(&sbi->s_mb_lost_chunks));
fs/ext4/mballoc.c
3300
atomic_read(&sbi->s_mb_buddies_generated),
fs/ext4/mballoc.c
3305
atomic_read(&sbi->s_mb_preallocated));
fs/ext4/mballoc.c
3306
seq_printf(seq, "\tdiscarded: %u\n", atomic_read(&sbi->s_mb_discarded));
fs/ext4/mballoc.c
3664
!atomic_read(&sbi->s_retry_alloc_pending)) {
fs/ext4/mballoc.c
3933
atomic_read(&sbi->s_bal_allocated),
fs/ext4/mballoc.c
3934
atomic_read(&sbi->s_bal_reqs),
fs/ext4/mballoc.c
3935
atomic_read(&sbi->s_bal_success));
fs/ext4/mballoc.c
3939
atomic_read(&sbi->s_bal_ex_scanned),
fs/ext4/mballoc.c
3940
atomic_read(&sbi->s_bal_groups_scanned),
fs/ext4/mballoc.c
3941
atomic_read(&sbi->s_bal_goals),
fs/ext4/mballoc.c
3942
atomic_read(&sbi->s_bal_2orders),
fs/ext4/mballoc.c
3943
atomic_read(&sbi->s_bal_breaks),
fs/ext4/mballoc.c
3944
atomic_read(&sbi->s_mb_lost_chunks));
fs/ext4/mballoc.c
3947
atomic_read(&sbi->s_mb_buddies_generated),
fs/ext4/mballoc.c
3951
atomic_read(&sbi->s_mb_preallocated),
fs/ext4/mballoc.c
3952
atomic_read(&sbi->s_mb_discarded));
fs/ext4/mballoc.c
5148
BUG_ON(atomic_read(&pa->pa_count));
fs/ext4/mballoc.c
5544
if (atomic_read(&pa->pa_count)) {
fs/ext4/mballoc.c
5634
atomic_read(&ei->i_prealloc_active));
fs/ext4/mballoc.c
5646
if (atomic_read(&pa->pa_count)) {
fs/ext4/mballoc.c
5957
if (atomic_read(&pa->pa_count)) {
fs/ext4/orphan.c
655
if (atomic_read(&oi->of_binfo[i].ob_free_entries) !=
fs/ext4/sysfs.c
449
return sysfs_emit(buf, "%d\n", atomic_read((atomic_t *) ptr));
fs/ext4/xattr.c
1877
atomic_read(&(bs->bh->b_count)),
fs/ext4/xattr.c
596
atomic_read(&(bh->b_count)), le32_to_cpu(BHDR(bh)->h_refcount));
fs/ext4/xattr.c
765
atomic_read(&(bh->b_count)), le32_to_cpu(BHDR(bh)->h_refcount));
fs/f2fs/checkpoint.c
2189
while (atomic_read(&cprc->queued_ckpt))
fs/f2fs/compress.c
1058
while (atomic_read(&cic->pending_pages) !=
fs/f2fs/data.c
3382
if (atomic_read(&sbi->wb_sync_req[DATA]) &&
fs/f2fs/data.c
3583
else if (atomic_read(&sbi->wb_sync_req[DATA])) {
fs/f2fs/debug.c
147
si->ext_tree[i] = atomic_read(&eti->total_ext_tree);
fs/f2fs/debug.c
148
si->zombie_tree[i] = atomic_read(&eti->total_zombie_tree);
fs/f2fs/debug.c
149
si->ext_node[i] = atomic_read(&eti->total_ext_node);
fs/f2fs/debug.c
170
si->aw_cnt = atomic_read(&sbi->atomic_files);
fs/f2fs/debug.c
171
si->max_aw_cnt = atomic_read(&sbi->max_aw_cnt);
fs/f2fs/debug.c
181
atomic_read(&SM_I(sbi)->fcc_info->issued_flush);
fs/f2fs/debug.c
183
atomic_read(&SM_I(sbi)->fcc_info->queued_flush);
fs/f2fs/debug.c
189
atomic_read(&SM_I(sbi)->dcc_info->issued_discard);
fs/f2fs/debug.c
191
atomic_read(&SM_I(sbi)->dcc_info->queued_discard);
fs/f2fs/debug.c
193
atomic_read(&SM_I(sbi)->dcc_info->discard_cmd_cnt);
fs/f2fs/debug.c
196
si->nr_issued_ckpt = atomic_read(&sbi->cprc_info.issued_ckpt);
fs/f2fs/debug.c
197
si->nr_total_ckpt = atomic_read(&sbi->cprc_info.total_ckpt);
fs/f2fs/debug.c
198
si->nr_queued_ckpt = atomic_read(&sbi->cprc_info.queued_ckpt);
fs/f2fs/debug.c
210
si->inline_xattr = atomic_read(&sbi->inline_xattr);
fs/f2fs/debug.c
211
si->inline_inode = atomic_read(&sbi->inline_inode);
fs/f2fs/debug.c
212
si->inline_dir = atomic_read(&sbi->inline_dir);
fs/f2fs/debug.c
213
si->compr_inode = atomic_read(&sbi->compr_inode);
fs/f2fs/debug.c
214
si->swapfile_inode = atomic_read(&sbi->swapfile_inode);
fs/f2fs/debug.c
232
si->compress_page_hit = atomic_read(&sbi->compress_page_hit);
fs/f2fs/debug.c
261
si->meta_count[i] = atomic_read(&sbi->meta_count[i]);
fs/f2fs/debug.c
286
si->cp_call_count[i] = atomic_read(&sbi->cp_call_count[i]);
fs/f2fs/debug.c
293
si->inplace_count = atomic_read(&sbi->inplace_count);
fs/f2fs/debug.c
365
atomic_read(&SM_I(sbi)->dcc_info->discard_cmd_cnt);
fs/f2fs/debug.c
382
si->ext_mem[i] = atomic_read(&eti->total_ext_tree) *
fs/f2fs/debug.c
384
si->ext_mem[i] += atomic_read(&eti->total_ext_node) *
fs/f2fs/extent_cache.c
1181
atomic_read(&et->node_cnt)) {
fs/f2fs/extent_cache.c
1194
f2fs_bug_on(sbi, atomic_read(&et->node_cnt));
fs/f2fs/extent_cache.c
435
if (atomic_read(&et->node_cnt) || !ei.len)
fs/f2fs/extent_cache.c
642
if (!et || !atomic_read(&et->node_cnt))
fs/f2fs/extent_cache.c
645
while (atomic_read(&et->node_cnt)) {
fs/f2fs/extent_cache.c
651
f2fs_bug_on(sbi, atomic_read(&et->node_cnt));
fs/f2fs/extent_cache.c
736
atomic_read(&et->node_cnt) <
fs/f2fs/extent_cache.c
977
if (!atomic_read(&eti->total_zombie_tree))
fs/f2fs/extent_cache.c
985
if (atomic_read(&et->node_cnt)) {
fs/f2fs/extent_cache.c
992
if (atomic_read(&et->node_cnt))
fs/f2fs/f2fs.h
2093
if (atomic_read(&ffi->inject_ops) >= ffi->inject_rate) {
fs/f2fs/f2fs.h
2795
return atomic_read(&sbi->nr_pages[count_type]);
fs/f2fs/f2fs.h
2800
return atomic_read(&F2FS_I(inode)->dirty_pages);
fs/f2fs/f2fs.h
3136
atomic_read(&SM_I(sbi)->dcc_info->queued_discard))
fs/f2fs/f2fs.h
3140
atomic_read(&SM_I(sbi)->fcc_info->queued_flush))
fs/f2fs/f2fs.h
4394
int cur = atomic_read(&F2FS_I_SB(inode)->atomic_files); \
fs/f2fs/f2fs.h
4395
int max = atomic_read(&F2FS_I_SB(inode)->max_aw_cnt); \
fs/f2fs/f2fs.h
4780
if (f2fs_is_mmap_file(inode) || atomic_read(&fi->writeback) ||
fs/f2fs/f2fs.h
4960
if (!add && !atomic_read(&fi->i_compr_blocks))
fs/f2fs/file.c
2074
atomic_read(&inode->i_writecount) != 1)
fs/f2fs/file.c
2142
atomic_read(&fi->writeback) ||
fs/f2fs/file.c
3776
*blocks = atomic_read(&F2FS_I(inode)->i_compr_blocks);
fs/f2fs/file.c
3872
writecount = atomic_read(&inode->i_writecount);
fs/f2fs/file.c
3889
if (!atomic_read(&fi->i_compr_blocks)) {
fs/f2fs/file.c
3951
atomic_read(&fi->i_compr_blocks)) {
fs/f2fs/file.c
3958
atomic_read(&fi->i_compr_blocks));
fs/f2fs/file.c
4073
if (atomic_read(&fi->i_compr_blocks))
fs/f2fs/file.c
4134
atomic_read(&fi->i_compr_blocks)) {
fs/f2fs/file.c
4141
atomic_read(&fi->i_compr_blocks));
fs/f2fs/file.c
4512
if (!atomic_read(&fi->i_compr_blocks))
fs/f2fs/file.c
658
bool released = !atomic_read(&F2FS_I(dn->inode)->i_compr_blocks);
fs/f2fs/inode.c
557
stat_add_compr_blocks(inode, atomic_read(&fi->i_compr_blocks));
fs/f2fs/inode.c
748
atomic_read(&fi->i_compr_blocks));
fs/f2fs/inode.c
973
atomic_read(&fi->i_compr_blocks));
fs/f2fs/node.c
100
atomic_read(&eti->total_ext_node) *
fs/f2fs/node.c
104
mem_size = (atomic_read(&dcc->discard_cmd_cnt) *
fs/f2fs/node.c
2108
if (atomic_read(&sbi->wb_sync_req[NODE]) &&
fs/f2fs/node.c
2261
else if (atomic_read(&sbi->wb_sync_req[NODE])) {
fs/f2fs/node.c
98
mem_size = (atomic_read(&eti->total_ext_tree) *
fs/f2fs/segment.c
1216
if (atomic_read(&dcc->discard_cmd_cnt))
fs/f2fs/segment.c
1889
if (!atomic_read(&dcc->discard_cmd_cnt))
fs/f2fs/segment.c
1900
f2fs_bug_on(sbi, atomic_read(&dcc->discard_cmd_cnt));
fs/f2fs/segment.c
1932
if (atomic_read(&dcc->queued_discard))
fs/f2fs/segment.c
1940
!atomic_read(&dcc->discard_cmd_cnt)) {
fs/f2fs/segment.c
1958
if (!atomic_read(&dcc->discard_cmd_cnt))
fs/f2fs/shrinker.c
36
return atomic_read(&eti->total_zombie_tree) +
fs/f2fs/shrinker.c
37
atomic_read(&eti->total_ext_node);
fs/f2fs/sysfs.c
151
return sysfs_emit(buf, "%llu\n", (unsigned long long)atomic_read(
fs/f2fs/sysfs.c
160
return sysfs_emit(buf, "%llu\n", (unsigned long long)atomic_read(
fs/f2fs/sysfs.c
169
return sysfs_emit(buf, "%llu\n", (unsigned long long)atomic_read(
fs/f2fs/sysfs.c
446
atomic_read(&sbi->cp_call_count[TOTAL_CALL]) -
fs/f2fs/sysfs.c
447
atomic_read(&sbi->cp_call_count[BACKGROUND]));
fs/f2fs/sysfs.c
450
atomic_read(&sbi->cp_call_count[BACKGROUND]));
fs/file.c
264
if (atomic_read(&files->count) > 1)
fs/file.c
831
if ((flags & CLOSE_RANGE_UNSHARE) && atomic_read(&cur_fds->count) > 1) {
fs/fs-writeback.c
1221
if (atomic_read(&isw_nr_in_flight)) {
fs/fs-writeback.c
210
return !atomic_read(&done->cnt);
fs/fs-writeback.c
654
if (atomic_read(&isw_nr_in_flight) > WB_FRN_MAX_IN_FLIGHT)
fs/fuse/control.c
60
value = atomic_read(&fc->num_waiting);
fs/fuse/cuse.c
584
return sprintf(buf, "%d\n", atomic_read(&cc->fc.num_waiting));
fs/fuse/dev.c
2529
wait_event(fc->blocked_waitq, atomic_read(&fc->num_waiting) == 0);
fs/fuse/dev.c
83
if (!atomic_read(&fc->num_waiting))
fs/fuse/dev_uring.c
455
if (atomic_read(&ring->queue_refs) > 0) {
fs/fuse/dev_uring.c
483
if (atomic_read(&ring->queue_refs) > 0) {
fs/fuse/dev_uring_i.h
155
if (atomic_read(&ring->queue_refs) > 0) {
fs/fuse/dev_uring_i.h
167
atomic_read(&ring->queue_refs) == 0);
fs/fuse/dir.c
394
if (entry->d_time < atomic_read(&fc->epoch))
fs/fuse/dir.c
618
epoch = atomic_read(&fc->epoch);
fs/fuse/dir.c
830
epoch = atomic_read(&fm->fc->epoch);
fs/fuse/dir.c
985
epoch = atomic_read(&fm->fc->epoch);
fs/fuse/inode.c
1041
WARN_ON(atomic_read(&bucket->count) != 1);
fs/fuse/inode.c
704
count = atomic_read(&bucket->count);
fs/fuse/inode.c
727
wait_event(bucket->waitq, atomic_read(&bucket->count) == 0);
fs/fuse/readdir.c
194
epoch = atomic_read(&fc->epoch);
fs/gfs2/aops.c
672
if (atomic_read(&bh->b_count))
fs/gfs2/bmap.c
1534
if (isize_blks > atomic_read(&sdp->sd_log_thresh2))
fs/gfs2/bmap.c
1536
atomic_read(&sdp->sd_log_thresh2);
fs/gfs2/bmap.c
1552
RES_QUOTA >= atomic_read(&sdp->sd_log_thresh2)) {
fs/gfs2/file.c
375
if (hint > atomic_read(&ip->i_sizehint))
fs/gfs2/glock.c
1990
return vfs_pressure_ratio(atomic_read(&lru_count));
fs/gfs2/glock.c
2156
!atomic_read(&sdp->sd_glock_disposal),
fs/gfs2/glock.c
2158
if (!atomic_read(&sdp->sd_glock_disposal))
fs/gfs2/glock.c
2162
atomic_read(&sdp->sd_glock_disposal),
fs/gfs2/glock.c
2346
atomic_read(&gl->gl_ail_count),
fs/gfs2/glock.c
2347
atomic_read(&gl->gl_revokes),
fs/gfs2/glops.c
114
log_in_flight = atomic_read(&sdp->sd_log_in_flight);
fs/gfs2/glops.c
143
unsigned int revokes = atomic_read(&gl->gl_ail_count);
fs/gfs2/glops.c
235
gfs2_assert_withdraw(sdp, !atomic_read(&gl->gl_ail_count));
fs/gfs2/glops.c
362
gfs2_assert_withdraw(glock_sbd(gl), !atomic_read(&gl->gl_ail_count));
fs/gfs2/glops.c
81
GLOCK_BUG_ON(gl, !fsync && atomic_read(&gl->gl_ail_count));
fs/gfs2/glops.c
94
revokes = atomic_read(&gl->gl_ail_count);
fs/gfs2/inode.c
468
if (atomic_read(&gl->gl_revokes) == 0) {
fs/gfs2/log.c
1138
reserved_revokes += atomic_read(&sdp->sd_log_revokes_available);
fs/gfs2/log.c
1226
return atomic_read(&sdp->sd_log_pinned) +
fs/gfs2/log.c
1227
atomic_read(&sdp->sd_log_blks_needed) >=
fs/gfs2/log.c
1228
atomic_read(&sdp->sd_log_thresh1);
fs/gfs2/log.c
1234
atomic_read(&sdp->sd_log_blks_free) +
fs/gfs2/log.c
1235
atomic_read(&sdp->sd_log_blks_needed) >=
fs/gfs2/log.c
1236
atomic_read(&sdp->sd_log_thresh2);
fs/gfs2/log.c
430
return atomic_read(&sdp->sd_log_blks_free) == sdp->sd_jdesc->jd_blocks;
fs/gfs2/log.c
437
available = atomic_read(&sdp->sd_log_revokes_available);
fs/gfs2/log.c
470
gfs2_assert_withdraw(sdp, atomic_read(&sdp->sd_log_blks_free) <=
fs/gfs2/log.c
472
if (atomic_read(&sdp->sd_log_blks_needed))
fs/gfs2/log.c
491
free_blocks = atomic_read(&sdp->sd_log_blks_free);
fs/gfs2/log.c
532
(free_blocks = atomic_read(&sdp->sd_log_blks_free),
fs/gfs2/log.c
683
if (atomic_read(&sdp->sd_log_in_flight)) {
fs/gfs2/log.c
687
if (atomic_read(&sdp->sd_log_in_flight))
fs/gfs2/log.c
689
} while(atomic_read(&sdp->sd_log_in_flight));
fs/gfs2/log.c
801
unsigned int max_revokes = atomic_read(&sdp->sd_log_revokes_available);
fs/gfs2/ops_fstype.c
776
trace_gfs2_log_blocks(sdp, atomic_read(&sdp->sd_log_blks_free));
fs/gfs2/quota.c
1541
(count = atomic_read(&sdp->sd_quota_count)) == 0,
fs/gfs2/rgrp.c
1585
extlen = max_t(u32, atomic_read(&ip->i_sizehint), ap->target);
fs/gfs2/rgrp.c
690
if (atomic_read(&inode->i_writecount) <= 1)
fs/gfs2/sys.c
120
atomic_read(&sdp->sd_log_in_flight),
fs/gfs2/sys.c
121
atomic_read(&sdp->sd_log_blks_needed),
fs/gfs2/sys.c
122
atomic_read(&sdp->sd_log_blks_free),
fs/gfs2/sys.c
126
atomic_read(&sdp->sd_log_revokes_available),
fs/gfs2/sys.c
127
atomic_read(&sdp->sd_log_pinned),
fs/gfs2/sys.c
128
atomic_read(&sdp->sd_log_thresh1),
fs/gfs2/sys.c
129
atomic_read(&sdp->sd_log_thresh2));
fs/gfs2/trace_gfs2.h
399
__entry->blks_free = atomic_read(&sdp->sd_log_blks_free);
fs/hfs/bnode.c
405
node->tree->cnid, node->this, atomic_read(&node->refcnt));
fs/hfs/bnode.c
551
atomic_read(&node->refcnt));
fs/hfs/bnode.c
564
atomic_read(&node->refcnt));
fs/hfs/bnode.c
565
BUG_ON(!atomic_read(&node->refcnt));
fs/hfs/btree.c
185
if (atomic_read(&node->refcnt))
fs/hfs/btree.c
188
atomic_read(&node->refcnt));
fs/hfs/inode.c
113
if (atomic_read(&node->refcnt)) {
fs/hfs/inode.c
98
else if (atomic_read(&node->refcnt))
fs/hfsplus/bnode.c
518
node->tree->cnid, node->this, atomic_read(&node->refcnt));
fs/hfsplus/bnode.c
665
atomic_read(&node->refcnt));
fs/hfsplus/bnode.c
678
atomic_read(&node->refcnt));
fs/hfsplus/bnode.c
679
BUG_ON(!atomic_read(&node->refcnt));
fs/hfsplus/btree.c
420
if (atomic_read(&node->refcnt))
fs/hfsplus/btree.c
424
atomic_read(&node->refcnt));
fs/hfsplus/dir.c
394
atomic_read(&HFSPLUS_I(inode)->opencnt)) {
fs/hfsplus/dir.c
417
if (!atomic_read(&HFSPLUS_I(inode)->opencnt)) {
fs/hfsplus/inode.c
111
if (atomic_read(&node->refcnt)) {
fs/hfsplus/inode.c
95
else if (atomic_read(&node->refcnt))
fs/hfsplus/xattr.c
217
switch (atomic_read(&sbi->attr_tree_state)) {
fs/inode.c
1923
VFS_BUG_ON_INODE(atomic_read(&inode->i_count) != 0, inode);
fs/inode.c
1942
VFS_BUG_ON_INODE(atomic_read(&inode->i_count) != 0, inode);
fs/inode.c
1986
VFS_BUG_ON_INODE(atomic_read(&inode->i_count) < 1, inode);
fs/inode.c
2020
VFS_BUG_ON_INODE(atomic_read(&inode->i_count) < 2, inode);
fs/inode.c
2710
return atomic_read(&inode->i_dio_count) == 0;
fs/inode.c
3043
count = atomic_read(&inode->i_count);
fs/iomap/buffered-io.c
1743
WARN_ON_ONCE(atomic_read(&ifs->write_bytes_pending) != 0);
fs/iomap/buffered-io.c
1763
WARN_ON_ONCE(ifs && atomic_read(&ifs->write_bytes_pending) <= 0);
fs/iomap/buffered-io.c
262
WARN_ON_ONCE(atomic_read(&ifs->write_bytes_pending));
fs/jbd2/checkpoint.c
722
J_ASSERT(atomic_read(&transaction->t_updates) == 0);
fs/jbd2/commit.c
1097
atomic_read(&commit_transaction->t_handle_count);
fs/jbd2/commit.c
458
J_ASSERT (atomic_read(&commit_transaction->t_outstanding_credits) <=
fs/jbd2/commit.c
524
atomic_sub(atomic_read(&journal->j_reserved_credits),
fs/jbd2/commit.c
572
atomic_read(&commit_transaction->t_outstanding_credits));
fs/jbd2/commit.c
69
if (atomic_read(&bh->b_count) != 1)
fs/jbd2/commit.c
822
J_ASSERT_BH(bh, atomic_read(&bh->b_count) == 0);
fs/jbd2/commit.c
892
atomic_read(&commit_transaction->t_outstanding_credits) < 0);
fs/jbd2/journal.c
2918
(atomic_read(&bh->b_count) > 0) ||
fs/jbd2/journal.c
3167
int n = atomic_read(&nr_journal_heads);
fs/jbd2/transaction.c
245
if (atomic_read(&journal->j_reserved_credits) + total >
fs/jbd2/transaction.c
250
atomic_read(&journal->j_reserved_credits) + total <=
fs/jbd2/transaction.c
297
atomic_read(&journal->j_reserved_credits) + rsv_blocks
fs/jbd2/transaction.c
439
atomic_read(&transaction->t_outstanding_credits),
fs/jbd2/transaction.c
704
J_ASSERT(atomic_read(&transaction->t_updates) > 0);
fs/jbd2/transaction.c
838
if (!atomic_read(&transaction->t_updates)) {
fs/jbd2/transaction.c
867
if (atomic_read(&journal->j_reserved_credits)) {
fs/jbd2/transaction.c
870
atomic_read(&journal->j_reserved_credits) == 0);
fs/jbd2/transaction.c
91
atomic_read(&journal->j_reserved_credits));
fs/jffs2/xattr.c
1331
if (atomic_read(&xd->refcnt) || xd->node != (void *)xd)
fs/jffs2/xattr.c
867
if (!atomic_read(&xd->refcnt)) {
fs/jfs/jfs_dmap.c
636
if ((atomic_read(&bmp->db_active[agpref]) == 0) &&
fs/jfs/jfs_dmap.c
647
if (atomic_read(&bmp->db_active[agpref]))
fs/jfs/jfs_dmap.c
782
if (atomic_read(&bmp->db_active[agno]))
fs/jfs/jfs_dmap.c
816
writers = atomic_read(&bmp->db_active[agno]);
fs/jfs/jfs_imap.c
1366
if (atomic_read(&JFS_SBI(pip->i_sb)->bmap->db_active[agno])) {
fs/jfs/jfs_imap.c
228
dinom_le->in_numinos = cpu_to_le32(atomic_read(&imp->im_numinos));
fs/jfs/jfs_imap.c
229
dinom_le->in_numfree = cpu_to_le32(atomic_read(&imp->im_numfree));
fs/jfs/jfs_imap.c
2859
imap->im_nextiag, atomic_read(&imap->im_numinos),
fs/jfs/jfs_imap.c
2860
atomic_read(&imap->im_numfree));
fs/jfs/jfs_imap.c
2965
if (xnuminos != atomic_read(&imap->im_numinos) ||
fs/jfs/jfs_imap.c
2966
xnumfree != atomic_read(&imap->im_numfree)) {
fs/jfs/super.c
140
maxinodes = min((s64) atomic_read(&imap->im_numinos) +
fs/jfs/super.c
144
buf->f_ffree = maxinodes - (atomic_read(&imap->im_numinos) -
fs/jfs/super.c
145
atomic_read(&imap->im_numfree));
fs/kernfs/dir.c
1434
WARN_ON_ONCE(atomic_read(&kn->active) != KN_DEACTIVATED_BIAS);
fs/kernfs/dir.c
1699
atomic_read(&kn->active) == KN_DEACTIVATED_BIAS)
fs/kernfs/dir.c
35
return atomic_read(&kn->active) >= 0;
fs/kernfs/dir.c
521
if (atomic_read(&kn->active) == KN_DEACTIVATED_BIAS &&
fs/kernfs/dir.c
529
if (atomic_read(&kn->active) != KN_DEACTIVATED_BIAS)
fs/kernfs/dir.c
534
atomic_read(&kn->active) == KN_DEACTIVATED_BIAS);
fs/kernfs/dir.c
554
WARN_ON(!atomic_read(&kn->count));
fs/kernfs/dir.c
594
WARN_ONCE(atomic_read(&kn->active) != KN_DEACTIVATED_BIAS,
fs/kernfs/dir.c
597
rcu_dereference(kn->name), atomic_read(&kn->active));
fs/kernfs/file.c
221
of->event = atomic_read(&of_on(of)->event);
fs/kernfs/file.c
265
of->event = atomic_read(&of_on(of)->event);
fs/kernfs/file.c
861
if (of->event != atomic_read(&on->event))
fs/kernfs/kernfs-internal.h
135
!atomic_read(&kn->count));
fs/locks.c
630
if (atomic_read(&inode->i_writecount) != self_wcount ||
fs/locks.c
631
atomic_read(&inode->i_readcount) != self_rcount)
fs/mbcache.c
148
wait_var_event(&entry->e_refcnt, atomic_read(&entry->e_refcnt) <= 2);
fs/mbcache.c
418
WARN_ON(atomic_read(&entry->e_refcnt) != 1);
fs/netfs/fscache_cache.c
360
cache->name, atomic_read(&cache->object_count));
fs/netfs/fscache_cache.c
370
atomic_read(&cache->n_accesses) == 0);
fs/netfs/fscache_cache.c
397
atomic_read(&cache->n_volumes),
fs/netfs/fscache_cache.c
398
atomic_read(&cache->object_count),
fs/netfs/fscache_cache.c
399
atomic_read(&cache->n_accesses),
fs/netfs/fscache_cookie.c
105
n_accesses = atomic_read(&cookie->n_accesses);
fs/netfs/fscache_cookie.c
1132
atomic_read(&cookie->n_active),
fs/netfs/fscache_cookie.c
1133
atomic_read(&cookie->n_accesses),
fs/netfs/fscache_cookie.c
43
atomic_read(&cookie->n_active),
fs/netfs/fscache_cookie.c
44
atomic_read(&cookie->n_accesses),
fs/netfs/fscache_cookie.c
585
n_active, atomic_read(&cookie->n_accesses),
fs/netfs/fscache_cookie.c
670
unsigned int a = atomic_read(&cookie->n_accesses);
fs/netfs/fscache_cookie.c
685
a = atomic_read(&cookie->n_accesses);
fs/netfs/fscache_cookie.c
717
if (atomic_read(&cookie->n_accesses) == 0 &&
fs/netfs/fscache_cookie.c
744
if (atomic_read(&cookie->n_accesses) != 0)
fs/netfs/fscache_cookie.c
759
if (atomic_read(&cookie->n_accesses) != 0)
fs/netfs/fscache_cookie.c
852
n_accesses = atomic_read(&cookie->n_accesses);
fs/netfs/fscache_cookie.c
867
atomic_read(&cookie->n_active) > 0) {
fs/netfs/fscache_cookie.c
979
cookie->debug_id, atomic_read(&cookie->n_active), retire);
fs/netfs/fscache_cookie.c
989
ASSERTCMP(atomic_read(&cookie->n_active), ==, 0);
fs/netfs/fscache_cookie.c
990
ASSERTCMP(atomic_read(&cookie->volume->n_cookies), >, 0);
fs/netfs/fscache_io.c
124
atomic_read(&cookie->n_accesses),
fs/netfs/fscache_stats.c
100
atomic_read(&fscache_n_write),
fs/netfs/fscache_stats.c
101
atomic_read(&fscache_n_dio_misfit));
fs/netfs/fscache_stats.c
61
atomic_read(&fscache_n_cookies),
fs/netfs/fscache_stats.c
62
atomic_read(&fscache_n_volumes),
fs/netfs/fscache_stats.c
63
atomic_read(&fscache_n_volumes_collision),
fs/netfs/fscache_stats.c
64
atomic_read(&fscache_n_volumes_nomem)
fs/netfs/fscache_stats.c
68
atomic_read(&fscache_n_acquires),
fs/netfs/fscache_stats.c
69
atomic_read(&fscache_n_acquires_ok),
fs/netfs/fscache_stats.c
70
atomic_read(&fscache_n_acquires_oom));
fs/netfs/fscache_stats.c
73
atomic_read(&fscache_n_cookies_lru),
fs/netfs/fscache_stats.c
74
atomic_read(&fscache_n_cookies_lru_expired),
fs/netfs/fscache_stats.c
75
atomic_read(&fscache_n_cookies_lru_removed),
fs/netfs/fscache_stats.c
76
atomic_read(&fscache_n_cookies_lru_dropped),
fs/netfs/fscache_stats.c
81
atomic_read(&fscache_n_invalidates));
fs/netfs/fscache_stats.c
84
atomic_read(&fscache_n_updates),
fs/netfs/fscache_stats.c
85
atomic_read(&fscache_n_resizes),
fs/netfs/fscache_stats.c
86
atomic_read(&fscache_n_resizes_null));
fs/netfs/fscache_stats.c
89
atomic_read(&fscache_n_relinquishes),
fs/netfs/fscache_stats.c
90
atomic_read(&fscache_n_relinquishes_retire),
fs/netfs/fscache_stats.c
91
atomic_read(&fscache_n_relinquishes_dropped));
fs/netfs/fscache_stats.c
94
atomic_read(&fscache_n_no_write_space),
fs/netfs/fscache_stats.c
95
atomic_read(&fscache_n_no_create_space),
fs/netfs/fscache_stats.c
96
atomic_read(&fscache_n_culled));
fs/netfs/fscache_stats.c
99
atomic_read(&fscache_n_read),
fs/netfs/fscache_volume.c
476
atomic_read(&volume->n_accesses) == 0);
fs/netfs/fscache_volume.c
500
atomic_read(&volume->n_cookies),
fs/netfs/fscache_volume.c
501
atomic_read(&volume->n_accesses),
fs/netfs/stats.c
100
atomic_read(&netfs_n_wb_lock_wait));
fs/netfs/stats.c
55
atomic_read(&netfs_n_rh_dio_read),
fs/netfs/stats.c
56
atomic_read(&netfs_n_rh_readahead),
fs/netfs/stats.c
57
atomic_read(&netfs_n_rh_read_folio),
fs/netfs/stats.c
58
atomic_read(&netfs_n_rh_read_single),
fs/netfs/stats.c
59
atomic_read(&netfs_n_rh_write_begin),
fs/netfs/stats.c
60
atomic_read(&netfs_n_rh_write_zskip));
fs/netfs/stats.c
62
atomic_read(&netfs_n_wh_buffered_write),
fs/netfs/stats.c
63
atomic_read(&netfs_n_wh_writethrough),
fs/netfs/stats.c
64
atomic_read(&netfs_n_wh_dio_write),
fs/netfs/stats.c
65
atomic_read(&netfs_n_wh_writepages),
fs/netfs/stats.c
66
atomic_read(&netfs_n_wh_copy_to_cache));
fs/netfs/stats.c
68
atomic_read(&netfs_n_rh_zero),
fs/netfs/stats.c
69
atomic_read(&netfs_n_rh_short_read),
fs/netfs/stats.c
70
atomic_read(&netfs_n_rh_write_zskip));
fs/netfs/stats.c
72
atomic_read(&netfs_n_rh_download),
fs/netfs/stats.c
73
atomic_read(&netfs_n_rh_download_done),
fs/netfs/stats.c
74
atomic_read(&netfs_n_rh_download_failed),
fs/netfs/stats.c
75
atomic_read(&netfs_n_rh_download_instead));
fs/netfs/stats.c
77
atomic_read(&netfs_n_rh_read),
fs/netfs/stats.c
78
atomic_read(&netfs_n_rh_read_done),
fs/netfs/stats.c
79
atomic_read(&netfs_n_rh_read_failed));
fs/netfs/stats.c
81
atomic_read(&netfs_n_wh_upload),
fs/netfs/stats.c
82
atomic_read(&netfs_n_wh_upload_done),
fs/netfs/stats.c
83
atomic_read(&netfs_n_wh_upload_failed));
fs/netfs/stats.c
85
atomic_read(&netfs_n_wh_write),
fs/netfs/stats.c
86
atomic_read(&netfs_n_wh_write_done),
fs/netfs/stats.c
87
atomic_read(&netfs_n_wh_write_failed));
fs/netfs/stats.c
89
atomic_read(&netfs_n_rh_retry_read_req),
fs/netfs/stats.c
90
atomic_read(&netfs_n_rh_retry_read_subreq),
fs/netfs/stats.c
91
atomic_read(&netfs_n_wh_retry_write_req),
fs/netfs/stats.c
92
atomic_read(&netfs_n_wh_retry_write_subreq));
fs/netfs/stats.c
94
atomic_read(&netfs_n_rh_rreq),
fs/netfs/stats.c
95
atomic_read(&netfs_n_rh_sreq),
fs/netfs/stats.c
96
atomic_read(&netfs_n_folioq),
fs/netfs/stats.c
97
atomic_read(&netfs_n_wh_wstream_conflict));
fs/netfs/stats.c
99
atomic_read(&netfs_n_wb_lock_skip),
fs/nfs/file.c
533
if (atomic_read(&nfsi->commit_info.rpcs_out)) {
fs/nfs/localio.c
547
BUG_ON(atomic_read(&iocb->n_iters) <= 0);
fs/nfs/localio.c
676
n_iters = atomic_read(&iocb->n_iters);
fs/nfs/localio.c
866
n_iters = atomic_read(&iocb->n_iters);
fs/nfs/nfs4state.c
1154
if (atomic_read(&clnt->cl_swapper)) {
fs/nfs/nfs4state.c
2696
if (!atomic_read(&cl->cl_swapper))
fs/nfs/pagelist.c
158
!atomic_read(&l_ctx->io_count));
fs/nfs/pagelist.c
176
if (atomic_read(&l_ctx->io_count) > 0) {
fs/nfs/pagelist.c
181
if (atomic_read(&l_ctx->io_count) == 0) {
fs/nfs/pnfs.c
1090
if (atomic_read(&lo->plh_outstanding) == 1)
fs/nfs/pnfs.c
1309
if (atomic_read(&lo->plh_outstanding) != 0 && lo->plh_return_seq == 0)
fs/nfs/pnfs.c
2201
atomic_read(&lo->plh_outstanding) != 0) {
fs/nfs/pnfs.c
588
if (atomic_read(&lo->plh_outstanding) == 0)
fs/nfs/pnfs_dev.c
336
if (d->nfs_client == clp && atomic_read(&d->ref)) {
fs/nfs/pnfs_dev.c
88
if (atomic_read(&d->ref))
fs/nfs/write.c
1600
!atomic_read(&cinfo->rpcs_out));
fs/nfs/write.c
1950
if (!atomic_read(&nfsi->commit_info.rpcs_out))
fs/nfsd/filecache.c
1401
count = atomic_read(&ht->nelems);
fs/nfsd/nfs4callback.c
1118
!atomic_read(&clp->cl_cb_inflight));
fs/nfsd/nfs4proc.c
2688
if (atomic_read(&ls->ls_stid.sc_file->fi_lo_recalls))
fs/nfsd/nfs4state.c
157
if (atomic_read(&ses->se_ref) > ref_held_by_me)
fs/nfsd/nfs4state.c
2200
unsigned long cnt = atomic_read(&nfsd_total_target_slots);
fs/nfsd/nfs4state.c
2350
if (atomic_read(&nn->nfs4_client_count) >= nn->nfs4_max_clients &&
fs/nfsd/nfs4state.c
2351
atomic_read(&nn->nfsd_courtesy_clients) > 0)
fs/nfsd/nfs4state.c
2434
WARN_ON_ONCE(atomic_read(&ses->se_ref));
fs/nfsd/nfs4state.c
2488
int users = atomic_read(&clp->cl_rpc_users);
fs/nfsd/nfs4state.c
2830
atomic_read(&clp->cl_admin_revoked));
fs/nfsd/nfs4state.c
3149
wait_event(expiry_wq, atomic_read(&clp->cl_rpc_users) == 0);
fs/nfsd/nfs4state.c
4378
if (atomic_read(&clp->cl_admin_revoked))
fs/nfsd/nfs4state.c
4882
count = atomic_read(&nn->nfsd_courtesy_clients);
fs/nfsd/nfs4state.c
5943
writes = atomic_read(&ino->i_writecount);
fs/nfsd/nfs4state.c
6669
atomic_read(&nn->nr_reclaim_complete) ==
fs/nfsd/nfs4state.c
6788
if (atomic_read(&clp->cl_delegs_in_recall))
fs/nfsd/nfs4state.c
6815
maxreap = (atomic_read(&nn->nfs4_client_count) >= nn->nfs4_max_clients) ?
fs/nfsd/nfs4state.c
6825
if (!atomic_read(&clp->cl_rpc_users)) {
fs/nfsd/nfs4state.c
6902
if (atomic_read(&clp->cl_admin_revoked) == 0)
fs/nfsd/nfs4state.c
7065
if (atomic_read(&clp->cl_delegs_in_recall))
fs/nfsd/nfs4state.c
790
atomic_read(&fp->fi_access[O_RDONLY]))
fs/nfsd/nfs4state.c
794
atomic_read(&fp->fi_access[O_WRONLY]))
fs/nfsd/nfs4state.c
809
if (atomic_read(&fp->fi_access[1 - oflag]) == 0)
fs/nfsd/nfscache.c
271
if (atomic_read(&nn->num_drc_entries) <= nn->max_drc_entries &&
fs/nfsd/nfscache.c
298
return atomic_read(&nn->num_drc_entries);
fs/nfsd/nfscache.c
438
nn->longest_chain_cachesize = atomic_read(&nn->num_drc_entries);
fs/nfsd/nfscache.c
443
atomic_read(&nn->num_drc_entries));
fs/nfsd/nfscache.c
651
atomic_read(&nn->num_drc_entries));
fs/nfsd/stats.c
45
seq_printf(seq, "th %u 0", atomic_read(&nfsd_th_cnt));
fs/nfsd/vfs.c
1262
if (atomic_read(&inode->i_writecount) > 1
fs/nilfs2/page.c
180
i++, bh, atomic_read(&bh->b_count),
fs/nilfs2/page.c
419
if (atomic_read(&bh->b_count) | buffer_locked(bh)) {
fs/nilfs2/segbuf.c
463
if (unlikely(atomic_read(&segbuf->sb_err) > 0)) {
fs/nilfs2/segment.c
1454
if (atomic_read(&segbuf->sb_err)) {
fs/nilfs2/segment.c
1473
if (atomic_read(&segbuf->sb_err) &&
fs/nilfs2/segment.c
2266
if (atomic_read(&wait_req.done)) {
fs/nilfs2/segment.c
2288
if (!atomic_read(&wrq->done) &&
fs/nilfs2/segment.c
2293
if (atomic_read(&wrq->done)) {
fs/nilfs2/segment.c
275
if (atomic_read(&nilfs->ns_ndirtyblks) > sci->sc_watermark)
fs/nilfs2/sysfs.c
621
ndirtyblks = atomic_read(&nilfs->ns_ndirtyblks);
fs/nilfs2/the_nilfs.c
838
nincsegs = atomic_read(&nilfs->ns_ndirtyblks) /
fs/notify/group.c
68
wait_event(group->notification_waitq, !atomic_read(&group->user_waits));
fs/ntfs3/file.c
1396
atomic_read(&inode->i_writecount) != 1 ||
fs/ntfs3/ntfs_fs.h
1072
return atomic_read(&sbi->used.da);
fs/ocfs2/alloc.c
6109
atomic_read(&osb->osb_tl_disable) == 0) {
fs/ocfs2/cluster/heartbeat.c
1168
if (atomic_read(®->hr_steady_iterations) != 0) {
fs/ocfs2/cluster/heartbeat.c
1175
if (atomic_read(®->hr_steady_iterations) != 0) {
fs/ocfs2/cluster/heartbeat.c
1871
atomic_read(®->hr_steady_iterations) == 0 ||
fs/ocfs2/cluster/heartbeat.c
2092
((atomic_read(®->hr_steady_iterations) == 0) ?
fs/ocfs2/cluster/heartbeat.c
2101
if (atomic_read(®->hr_steady_iterations) != 0) {
fs/ocfs2/cluster/heartbeat.c
325
if (atomic_read(®->hr_steady_iterations) != 0)
fs/ocfs2/cluster/heartbeat.c
854
if (atomic_read(®->hr_steady_iterations) != 0)
fs/ocfs2/cluster/tcp.c
1533
if (atomic_read(&nn->nn_timeout)) {
fs/ocfs2/cluster/tcp.c
1587
timeout = atomic_read(&nn->nn_timeout);
fs/ocfs2/cluster/tcp.c
1724
BUG_ON(atomic_read(&o2net_connected_peers) < 0);
fs/ocfs2/cluster/tcp.c
484
return atomic_read(&o2net_connected_peers);
fs/ocfs2/dlm/dlmdebug.c
101
res->inflight_locks, atomic_read(&res->asts_reserved));
fs/ocfs2/dlm/dlmdebug.c
454
atomic_read(&res->asts_reserved),
fs/ocfs2/dlm/dlmdebug.c
668
atomic_read(&dlm->res_cur_count),
fs/ocfs2/dlm/dlmdebug.c
669
atomic_read(&dlm->res_tot_count));
fs/ocfs2/dlm/dlmdebug.c
672
tot_mles += atomic_read(&dlm->mle_tot_count[i]);
fs/ocfs2/dlm/dlmdebug.c
675
cur_mles += atomic_read(&dlm->mle_cur_count[i]);
fs/ocfs2/dlm/dlmdebug.c
684
atomic_read(&dlm->mle_cur_count[DLM_MLE_BLOCK]),
fs/ocfs2/dlm/dlmdebug.c
685
atomic_read(&dlm->mle_tot_count[DLM_MLE_BLOCK]));
fs/ocfs2/dlm/dlmdebug.c
690
atomic_read(&dlm->mle_cur_count[DLM_MLE_MASTER]),
fs/ocfs2/dlm/dlmdebug.c
691
atomic_read(&dlm->mle_tot_count[DLM_MLE_MASTER]));
fs/ocfs2/dlm/dlmdebug.c
696
atomic_read(&dlm->mle_cur_count[DLM_MLE_MIGRATION]),
fs/ocfs2/dlm/dlmdebug.c
697
atomic_read(&dlm->mle_tot_count[DLM_MLE_MIGRATION]));
fs/ocfs2/dlm/dlmmaster.c
1105
(atomic_read(&mle->woken) == 1),
fs/ocfs2/dlm/dlmmaster.c
2671
(atomic_read(&mle->woken) == 1),
fs/ocfs2/dlm/dlmmaster.c
2675
if (atomic_read(&mle->woken) == 1 ||
fs/ocfs2/journal.c
1070
num_running_trans = atomic_read(&(journal->j_num_trans));
fs/ocfs2/journal.c
1089
BUG_ON(atomic_read(&(journal->j_num_trans)) != 0);
fs/ocfs2/journal.c
2032
if (atomic_read(&os->os_state) == ORPHAN_SCAN_INACTIVE)
fs/ocfs2/journal.c
2036
atomic_read(&os->os_state));
fs/ocfs2/journal.c
2046
if (atomic_read(&os->os_state) == ORPHAN_SCAN_INACTIVE)
fs/ocfs2/journal.c
2068
atomic_read(&os->os_state));
fs/ocfs2/journal.c
2084
if (atomic_read(&os->os_state) == ORPHAN_SCAN_ACTIVE)
fs/ocfs2/journal.c
2095
if (atomic_read(&os->os_state) == ORPHAN_SCAN_ACTIVE) {
fs/ocfs2/journal.c
2377
(!quota && atomic_read(&osb->vol_state) == VOLUME_MOUNTED) ||
fs/ocfs2/journal.c
2378
atomic_read(&osb->vol_state) == VOLUME_MOUNTED_QUOTAS ||
fs/ocfs2/journal.c
2379
atomic_read(&osb->vol_state) == VOLUME_DISABLED);
fs/ocfs2/journal.c
2384
if (atomic_read(&osb->vol_state) == VOLUME_DISABLED) {
fs/ocfs2/journal.c
2404
atomic_read(&journal->j_num_trans) == 0)) {
fs/ocfs2/journal.c
2407
atomic_read(&journal->j_num_trans)
fs/ocfs2/journal.c
2426
if (kthread_should_stop() && atomic_read(&journal->j_num_trans)){
fs/ocfs2/journal.c
2430
atomic_read(&journal->j_num_trans));
fs/ocfs2/journal.c
320
flushed = atomic_read(&journal->j_num_trans);
fs/ocfs2/journal.c
338
flushed = atomic_read(&journal->j_num_trans);
fs/ocfs2/stack_user.c
1017
wait_event(lc->oc_wait, (atomic_read(&lc->oc_this_node) > 0));
fs/ocfs2/stack_user.c
1051
rc = atomic_read(&lc->oc_this_node);
fs/ocfs2/stack_user.c
214
if ((c->oc_type == NO_CONTROLD) || atomic_read(&ocfs2_control_opened))
fs/ocfs2/suballoc.c
1049
atomic_read(&osb->s_num_meta_stolen) < OCFS2_MAX_TO_STEAL)
fs/ocfs2/suballoc.c
1138
atomic_read(&osb->s_num_inodes_stolen) < OCFS2_MAX_TO_STEAL)
fs/ocfs2/super.c
231
atomic_read(&osb->vol_state), osb->osb_flags);
fs/ocfs2/super.c
291
atomic_read(&osb->journal->j_num_trans));
fs/ocfs2/super.c
297
atomic_read(&osb->alloc_stats.bitmap_data),
fs/ocfs2/super.c
298
atomic_read(&osb->alloc_stats.local_data),
fs/ocfs2/super.c
299
atomic_read(&osb->alloc_stats.bg_allocs),
fs/ocfs2/super.c
300
atomic_read(&osb->alloc_stats.moves),
fs/ocfs2/super.c
301
atomic_read(&osb->alloc_stats.bg_extends));
fs/ocfs2/super.c
315
atomic_read(&osb->s_num_inodes_stolen),
fs/ocfs2/super.c
317
atomic_read(&osb->s_num_meta_stolen));
fs/ocfs2/super.c
324
if (atomic_read(&os->os_state) == ORPHAN_SCAN_INACTIVE)
fs/proc/array.c
339
atomic_read(&p->seccomp.filter_count));
fs/proc/generic.c
225
if (atomic_read(&PDE(d_inode(dentry))->in_use) < 0)
fs/proc/generic.c
232
return atomic_read(&PDE(d_inode(dentry))->in_use) < 0;
fs/proc/internal.h
186
int mapcount = atomic_read(&page->_mapcount) + 1;
fs/proc/internal.h
223
return atomic_read(&folio->_mapcount) + 1;
fs/proc/proc_sysctl.c
667
if (event != atomic_read(&table->poll->event)) {
fs/proc/task_nommu.c
40
if (atomic_read(&mm->mm_count) > 1 ||
fs/proc/task_nommu.c
50
if (atomic_read(&mm->mm_count) > 1)
fs/proc/task_nommu.c
60
if (current->files && atomic_read(¤t->files->count) > 1)
fs/pstore/ram_core.c
43
return atomic_read(&prz->buffer->size);
fs/pstore/ram_core.c
48
return atomic_read(&prz->buffer->start);
fs/pstore/ram_core.c
61
old = atomic_read(&prz->buffer->start);
fs/pstore/ram_core.c
83
old = atomic_read(&prz->buffer->size);
fs/pstore/zone.c
1020
(char *)buf->data, atomic_read(&buf->datalen));
fs/pstore/zone.c
1046
len = atomic_read(&buf->datalen);
fs/pstore/zone.c
162
return atomic_read(&zone->buffer->datalen);
fs/pstore/zone.c
167
return atomic_read(&zone->buffer->start);
fs/pstore/zone.c
172
return atomic_read(&pstore_zone_cxt.on_panic);
fs/pstore/zone.c
218
if (!is_on_panic() && !atomic_read(&pstore_zone_cxt.recovered))
fs/pstore/zone.c
269
if (unlikely(!atomic_read(&pstore_zone_cxt.recovered)))
fs/pstore/zone.c
347
if (atomic_read(&zone->dirty)) {
fs/pstore/zone.c
411
if (zone->buffer_size < atomic_read(&buf->datalen)) {
fs/pstore/zone.c
442
if (!atomic_read(&buf->datalen)) {
fs/pstore/zone.c
446
atomic_read(&buf->datalen));
fs/pstore/zone.c
454
zone->buffer_size, atomic_read(&buf->datalen));
fs/pstore/zone.c
513
if (zone->buffer_size < atomic_read(&tmpbuf.datalen) ||
fs/pstore/zone.c
514
zone->buffer_size < atomic_read(&tmpbuf.start)) {
fs/pstore/zone.c
521
if (!atomic_read(&tmpbuf.datalen)) {
fs/pstore/zone.c
524
atomic_read(&tmpbuf.datalen));
fs/pstore/zone.c
530
atomic_read(&tmpbuf.datalen));
fs/pstore/zone.c
532
len = atomic_read(&tmpbuf.datalen) + sizeof(*oldbuf);
fs/pstore/zone.c
539
len = atomic_read(&oldbuf->datalen);
fs/pstore/zone.c
540
start = atomic_read(&oldbuf->start);
fs/pstore/zone.c
605
if (atomic_read(&cxt->recovered))
fs/pstore/zone.c
645
if (zone && zone->oldbuf && atomic_read(&zone->oldbuf->datalen))
fs/pstore/zone.c
834
if (atomic_read(&zone->buffer->datalen) >= zone->buffer_size)
fs/quota/dquot.c
1037
!atomic_read(&inode->i_writecount) ||
fs/quota/dquot.c
588
if (atomic_read(&dquot->dq_count)) {
fs/quota/dquot.c
600
atomic_read(&dquot->dq_count) == 1);
fs/quota/dquot.c
829
WARN_ON_ONCE(atomic_read(&dquot->dq_count));
fs/quota/dquot.c
861
if (!atomic_read(&dquot->dq_count)) {
fs/quota/dquot.c
871
if (atomic_read(&dquot->dq_count) > 1) {
fs/quota/dquot.c
876
atomic_read(&dquot->dq_count) == 1)
fs/quota/dquot.c
966
if (!atomic_read(&dquot->dq_count))
fs/resctrl/rdtgroup.c
3035
if (atomic_read(&sentry->waitcount) != 0)
fs/resctrl/rdtgroup.c
3079
if (atomic_read(&rdtgrp->waitcount) != 0)
fs/smb/client/cifs_debug.c
147
atomic_read(&server->in_send),
fs/smb/client/cifs_debug.c
148
atomic_read(&server->num_waiters));
fs/smb/client/cifs_debug.c
520
atomic_read(&sc->send_io.credits.count),
fs/smb/client/cifs_debug.c
521
atomic_read(&sc->recv_io.credits.count),
fs/smb/client/cifs_debug.c
524
atomic_read(&sc->send_io.pending.count));
fs/smb/client/cifs_debug.c
531
atomic_read(&sc->mr_io.ready.count),
fs/smb/client/cifs_debug.c
532
atomic_read(&sc->mr_io.used.count));
fs/smb/client/cifs_debug.c
563
atomic_read(&server->in_send),
fs/smb/client/cifs_debug.c
564
atomic_read(&server->num_waiters));
fs/smb/client/cifs_debug.c
821
atomic_read(&total_buf_alloc_count),
fs/smb/client/cifs_debug.c
822
atomic_read(&total_small_buf_alloc_count));
fs/smb/client/cifs_debug.c
825
seq_printf(m, "Operations (MIDs): %d\n", atomic_read(&mid_count));
fs/smb/client/cifs_debug.c
845
atomic_read(&server->num_cmds[j]),
fs/smb/client/cifs_debug.c
850
if (atomic_read(&server->smb2slowcmd[j])) {
fs/smb/client/cifs_debug.c
853
atomic_read(&server->smb2slowcmd[j]),
fs/smb/client/cifs_debug.c
867
atomic_read(&tcon->num_smbs_sent),
fs/smb/client/cifsglob.h
1631
return atomic_read(&cifs_sb->mnt_cifs_flags);
fs/smb/client/connect.c
1404
atomic_read(&mid_count));
fs/smb/client/connect.c
4471
atomic_read(&tlink->tl_count) != 0 ||
fs/smb/client/dfs_cache.c
1355
atomic_read(&dfs_cache_ttl) * HZ);
fs/smb/client/dfs_cache.c
468
if (atomic_read(&cache_count) >= CACHE_MAX_ENTRIES && oldest)
fs/smb/client/dfs_cache.c
483
if (atomic_read(&cache_count) >= CACHE_MAX_ENTRIES) {
fs/smb/client/dfs_cache.c
496
ttl = min_t(int, atomic_read(&dfs_cache_ttl), ce->ttl);
fs/smb/client/dfs_cache.h
107
return atomic_read(&dfs_cache_ttl);
fs/smb/client/smb1ops.c
818
atomic_read(&tcon->stats.cifs_stats.num_oplock_brks));
fs/smb/client/smb1ops.c
820
atomic_read(&tcon->stats.cifs_stats.num_reads),
fs/smb/client/smb1ops.c
823
atomic_read(&tcon->stats.cifs_stats.num_writes),
fs/smb/client/smb1ops.c
826
atomic_read(&tcon->stats.cifs_stats.num_flushes));
fs/smb/client/smb1ops.c
828
atomic_read(&tcon->stats.cifs_stats.num_locks),
fs/smb/client/smb1ops.c
829
atomic_read(&tcon->stats.cifs_stats.num_hardlinks),
fs/smb/client/smb1ops.c
830
atomic_read(&tcon->stats.cifs_stats.num_symlinks));
fs/smb/client/smb1ops.c
832
atomic_read(&tcon->stats.cifs_stats.num_opens),
fs/smb/client/smb1ops.c
833
atomic_read(&tcon->stats.cifs_stats.num_closes),
fs/smb/client/smb1ops.c
834
atomic_read(&tcon->stats.cifs_stats.num_deletes));
fs/smb/client/smb1ops.c
836
atomic_read(&tcon->stats.cifs_stats.num_posixopens),
fs/smb/client/smb1ops.c
837
atomic_read(&tcon->stats.cifs_stats.num_posixmkdirs));
fs/smb/client/smb1ops.c
839
atomic_read(&tcon->stats.cifs_stats.num_mkdirs),
fs/smb/client/smb1ops.c
840
atomic_read(&tcon->stats.cifs_stats.num_rmdirs));
fs/smb/client/smb1ops.c
842
atomic_read(&tcon->stats.cifs_stats.num_renames),
fs/smb/client/smb1ops.c
843
atomic_read(&tcon->stats.cifs_stats.num_t2renames));
fs/smb/client/smb1ops.c
845
atomic_read(&tcon->stats.cifs_stats.num_ffirst),
fs/smb/client/smb1ops.c
846
atomic_read(&tcon->stats.cifs_stats.num_fnext),
fs/smb/client/smb1ops.c
847
atomic_read(&tcon->stats.cifs_stats.num_fclose));
fs/smb/client/smb2ops.c
1421
atomic_read(&tcon->num_local_opens),
fs/smb/client/smb2ops.c
1422
atomic_read(&tcon->num_remote_opens));
fs/smb/client/smb2ops.c
1424
atomic_read(&sent[SMB2_TREE_CONNECT_HE]),
fs/smb/client/smb2ops.c
1425
atomic_read(&failed[SMB2_TREE_CONNECT_HE]));
fs/smb/client/smb2ops.c
1427
atomic_read(&sent[SMB2_TREE_DISCONNECT_HE]),
fs/smb/client/smb2ops.c
1428
atomic_read(&failed[SMB2_TREE_DISCONNECT_HE]));
fs/smb/client/smb2ops.c
1430
atomic_read(&sent[SMB2_CREATE_HE]),
fs/smb/client/smb2ops.c
1431
atomic_read(&failed[SMB2_CREATE_HE]));
fs/smb/client/smb2ops.c
1433
atomic_read(&sent[SMB2_CLOSE_HE]),
fs/smb/client/smb2ops.c
1434
atomic_read(&failed[SMB2_CLOSE_HE]));
fs/smb/client/smb2ops.c
1436
atomic_read(&sent[SMB2_FLUSH_HE]),
fs/smb/client/smb2ops.c
1437
atomic_read(&failed[SMB2_FLUSH_HE]));
fs/smb/client/smb2ops.c
1439
atomic_read(&sent[SMB2_READ_HE]),
fs/smb/client/smb2ops.c
1440
atomic_read(&failed[SMB2_READ_HE]));
fs/smb/client/smb2ops.c
1442
atomic_read(&sent[SMB2_WRITE_HE]),
fs/smb/client/smb2ops.c
1443
atomic_read(&failed[SMB2_WRITE_HE]));
fs/smb/client/smb2ops.c
1445
atomic_read(&sent[SMB2_LOCK_HE]),
fs/smb/client/smb2ops.c
1446
atomic_read(&failed[SMB2_LOCK_HE]));
fs/smb/client/smb2ops.c
1448
atomic_read(&sent[SMB2_IOCTL_HE]),
fs/smb/client/smb2ops.c
1449
atomic_read(&failed[SMB2_IOCTL_HE]));
fs/smb/client/smb2ops.c
1451
atomic_read(&sent[SMB2_QUERY_DIRECTORY_HE]),
fs/smb/client/smb2ops.c
1452
atomic_read(&failed[SMB2_QUERY_DIRECTORY_HE]));
fs/smb/client/smb2ops.c
1454
atomic_read(&sent[SMB2_CHANGE_NOTIFY_HE]),
fs/smb/client/smb2ops.c
1455
atomic_read(&failed[SMB2_CHANGE_NOTIFY_HE]));
fs/smb/client/smb2ops.c
1457
atomic_read(&sent[SMB2_QUERY_INFO_HE]),
fs/smb/client/smb2ops.c
1458
atomic_read(&failed[SMB2_QUERY_INFO_HE]));
fs/smb/client/smb2ops.c
1460
atomic_read(&sent[SMB2_SET_INFO_HE]),
fs/smb/client/smb2ops.c
1461
atomic_read(&failed[SMB2_SET_INFO_HE]));
fs/smb/client/smb2ops.c
1463
atomic_read(&sent[SMB2_OPLOCK_BREAK_HE]),
fs/smb/client/smb2ops.c
1464
atomic_read(&failed[SMB2_OPLOCK_BREAK_HE]));
fs/smb/client/smbdirect.c
1152
if (atomic_read(&sc->recv_io.credits.count) >= sc->recv_io.credits.target)
fs/smb/client/smbdirect.c
1155
missing = (int)sc->recv_io.credits.target - atomic_read(&sc->recv_io.credits.count);
fs/smb/client/smbdirect.c
1352
atomic_read(total_credits) >= needed ||
fs/smb/client/smbdirect.c
1384
if (batch && (atomic_read(&sc->send_io.lcredits.count) <= 1)) {
fs/smb/client/smbdirect.c
1402
(batch->wr_cnt >= 16 || atomic_read(&sc->send_io.credits.count) <= 1)) {
fs/smb/client/smbdirect.c
1458
atomic_read(&sc->send_io.credits.count) == 0 &&
fs/smb/client/smbdirect.c
1459
atomic_read(&sc->recv_io.credits.count) == 0) {
fs/smb/client/smbdirect.c
1462
atomic_read(&sc->send_io.credits.count) >= 1 ||
fs/smb/client/smbdirect.c
1463
atomic_read(&sc->recv_io.credits.available) >= 1 ||
fs/smb/client/smbdirect.c
2608
atomic_read(&sc->send_io.pending.count) == 0 ||
fs/smb/client/smbdirect.c
2851
atomic_read(&sc->mr_io.ready.count) ||
fs/smb/client/smbdirect.c
724
atomic_read(&sc->recv_io.credits.count)) {
fs/smb/client/smbdirect.c
751
atomic_read(&sc->send_io.bcredits.count) == 0 &&
fs/smb/client/smbdirect.c
752
atomic_read(&sc->send_io.credits.count) == 0)
fs/smb/client/smbdirect.c
756
if (atomic_read(&sc->recv_io.credits.count) <
fs/smb/client/transport.c
69
if (atomic_read(&server->num_cmds[smb_cmd]) == 0) {
fs/smb/server/connection.c
232
wait_event(conn->req_running_q, atomic_read(&conn->req_running) < 2);
fs/smb/server/connection.c
250
if (atomic_read(&conn->req_running) >= rcount) {
fs/smb/server/connection.c
252
atomic_read(&conn->req_running) < rcount,
fs/smb/server/connection.c
338
if (atomic_read(&conn->stats.open_files_count) > 0)
fs/smb/server/connection.c
392
if (atomic_read(&conn->req_running) + 1 > max_req) {
fs/smb/server/connection.c
394
atomic_read(&conn->req_running) < max_req);
fs/smb/server/connection.c
475
ksmbd_debug(CONN, "Wait for all pending requests(%d)\n", atomic_read(&conn->r_count));
fs/smb/server/connection.c
476
wait_event(conn->r_count_q, atomic_read(&conn->r_count) == 0);
fs/smb/server/connection.c
53
atomic_read(&conn->stats.open_files_count),
fs/smb/server/connection.c
54
atomic_read(&conn->req_running),
fs/smb/server/mgmt/user_session.c
420
if (atomic_read(&sess->refcnt) <= 1 &&
fs/smb/server/mgmt/user_session.c
571
if (atomic_read(&sess->refcnt) <= 0)
fs/smb/server/oplock.c
1236
if (atomic_read(&m_opinfo->breaking_cnt))
fs/smb/server/oplock.c
1405
atomic_read(&brk_op->breaking_cnt))
fs/smb/server/oplock.c
225
return atomic_read(&fp->f_ci->sop_count);
fs/smb/server/oplock.c
227
return atomic_read(&fp->f_ci->op_count);
fs/smb/server/oplock.c
532
if (atomic_read(&opinfo->breaking_cnt))
fs/smb/server/oplock.c
536
if ((atomic_read(&ci->op_count) +
fs/smb/server/oplock.c
537
atomic_read(&ci->sop_count)) == 1) {
fs/smb/server/oplock.c
547
} else if ((atomic_read(&ci->op_count) +
fs/smb/server/oplock.c
548
atomic_read(&ci->sop_count)) > 1) {
fs/smb/server/oplock.c
865
if (atomic_read(&opinfo->breaking_cnt)) {
fs/smb/server/oplock.c
869
atomic_read(&opinfo->breaking_cnt) == 0,
fs/smb/server/smb2pdu.c
7752
if (atomic_read(&fp->f_ci->op_count) > 1)
fs/smb/server/smb2pdu.c
8779
if (!atomic_read(&opinfo->breaking_cnt)) {
fs/smb/server/transport_rdma.c
1027
if (atomic_read(&sc->recv_io.credits.count) < sc->recv_io.credits.target) {
fs/smb/server/transport_rdma.c
1054
atomic_read(&sc->send_io.bcredits.count) == 0 &&
fs/smb/server/transport_rdma.c
1055
atomic_read(&sc->send_io.credits.count) == 0)
fs/smb/server/transport_rdma.c
1134
if (atomic_read(&sc->recv_io.credits.count) >= sc->recv_io.credits.target)
fs/smb/server/transport_rdma.c
1137
missing = (int)sc->recv_io.credits.target - atomic_read(&sc->recv_io.credits.count);
fs/smb/server/transport_rdma.c
1279
atomic_read(total_credits) >= needed ||
fs/smb/server/transport_rdma.c
1311
if (send_ctx && (atomic_read(&sc->send_io.lcredits.count) <= 1)) {
fs/smb/server/transport_rdma.c
1331
(send_ctx->wr_cnt >= 16 || atomic_read(&sc->send_io.credits.count) <= 1)) {
fs/smb/server/transport_rdma.c
1532
atomic_read(&sc->send_io.credits.count) == 0 &&
fs/smb/server/transport_rdma.c
1533
atomic_read(&sc->recv_io.credits.count) == 0) {
fs/smb/server/transport_rdma.c
1536
atomic_read(&sc->send_io.credits.count) >= 1 ||
fs/smb/server/transport_rdma.c
1537
atomic_read(&sc->recv_io.credits.available) >= 1 ||
fs/smb/server/transport_rdma.c
1751
atomic_read(&sc->send_io.pending.count) == 0 ||
fs/smb/server/transport_rdma.c
2104
atomic_read(&sc->send_io.pending.count) == 0 ||
fs/smb/server/transport_rdma.c
716
if (atomic_read(&sc->send_io.credits.count) > 0)
fs/smb/server/transport_tcp.c
284
atomic_read(&active_num_conn));
fs/smb/server/vfs_cache.c
86
atomic_read(&fp->refcount));
fs/smb/server/vfs_cache.c
918
if (atomic_read(&fp->refcount) > 1 ||
fs/stat.c
57
stat->ctime.tv_nsec = (u32)atomic_read(pcn);
fs/super.c
1634
if (atomic_read(&bdev->bd_fsfreeze_count) > 0) {
fs/ubifs/journal.c
324
if (likely(atomic_read(&c->need_wait_space) == 0))
fs/ubifs/journal.c
329
if (atomic_read(&c->need_wait_space) == 0) {
fs/udf/file.c
178
atomic_read(&inode->i_writecount) == 1) {
fs/userfaultfd.c
1616
if (unlikely(atomic_read(&ctx->mmap_changing))) {
fs/userfaultfd.c
1675
if (unlikely(atomic_read(&ctx->mmap_changing))) {
fs/userfaultfd.c
1727
if (atomic_read(&ctx->mmap_changing))
fs/userfaultfd.c
1781
if (unlikely(atomic_read(&ctx->mmap_changing))) {
fs/userfaultfd.c
1841
if (unlikely(atomic_read(&ctx->mmap_changing))) {
fs/userfaultfd.c
1914
if (unlikely(atomic_read(&ctx->mmap_changing))) {
fs/userfaultfd.c
623
VM_WARN_ON_ONCE(atomic_read(&ctx->mmap_changing) < 0);
fs/userfaultfd.c
731
VM_WARN_ON_ONCE(atomic_read(&octx->mmap_changing) < 0);
fs/xfs/libxfs/xfs_group.c
162
XFS_IS_CORRUPT(mp, atomic_read(&xg->xg_ref) != 0);
fs/xfs/libxfs/xfs_group.c
175
XFS_IS_CORRUPT(mp, atomic_read(&xg->xg_active_ref) > 0);
fs/xfs/libxfs/xfs_group.c
176
XFS_IS_CORRUPT(mp, atomic_read(&xg->xg_active_ref) < 0);
fs/xfs/libxfs/xfs_group.c
43
ASSERT(atomic_read(&xg->xg_ref) >= 0);
fs/xfs/libxfs/xfs_group.c
54
ASSERT(atomic_read(&xg->xg_ref) > 0 ||
fs/xfs/libxfs/xfs_group.c
55
atomic_read(&xg->xg_active_ref) > 0);
fs/xfs/libxfs/xfs_group.c
68
ASSERT(atomic_read(&xg->xg_ref) > 0);
fs/xfs/xfs_attr_item.c
150
ASSERT(atomic_read(&attrip->attri_refcount) > 0);
fs/xfs/xfs_bmap_item.c
59
ASSERT(atomic_read(&buip->bui_refcount) > 0);
fs/xfs/xfs_bmap_item.c
99
ASSERT(atomic_read(&buip->bui_next_extent) ==
fs/xfs/xfs_buf.c
846
if (xfs_buf_is_uncached(bp) || !atomic_read(&bp->b_lru_ref))
fs/xfs/xfs_buf.c
901
if (atomic_read(&bp->b_pin_count) && (bp->b_flags & XBF_STALE))
fs/xfs/xfs_buf.c
924
if (atomic_read(&bp->b_pin_count) == 0)
fs/xfs/xfs_buf.c
930
if (atomic_read(&bp->b_pin_count) == 0)
fs/xfs/xfs_buf.h
331
if (!list_empty(&bp->b_lru) || atomic_read(&bp->b_lru_ref) > 1)
fs/xfs/xfs_buf.h
338
return atomic_read(&bp->b_pin_count);
fs/xfs/xfs_buf_item.c
210
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_buf_item.c
363
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_buf_item.c
430
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_buf_item.c
509
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_buf_item.c
83
ASSERT(atomic_read(&bip->bli_refcount) == 0);
fs/xfs/xfs_dquot.c
1376
ASSERT(atomic_read(&dqp->q_pincount) == 0);
fs/xfs/xfs_dquot_item.c
108
if (atomic_read(&dqp->q_pincount) == 0)
fs/xfs/xfs_dquot_item.c
115
wait_event(dqp->q_pinwait, (atomic_read(&dqp->q_pincount) == 0));
fs/xfs/xfs_dquot_item.c
132
if (atomic_read(&dqp->q_pincount) > 0)
fs/xfs/xfs_dquot_item.c
142
if (atomic_read(&dqp->q_pincount) > 0) {
fs/xfs/xfs_dquot_item.c
94
ASSERT(atomic_read(&dqp->q_pincount) > 0);
fs/xfs/xfs_drain.c
51
ASSERT(atomic_read(&dr->dr_count) == 0);
fs/xfs/xfs_drain.c
82
return atomic_read(&dr->dr_count) > 0;
fs/xfs/xfs_exchmaps_item.c
60
ASSERT(atomic_read(&xmi_lip->xmi_refcount) > 0);
fs/xfs/xfs_extfree_item.c
105
ASSERT(atomic_read(&efip->efi_next_extent) ==
fs/xfs/xfs_extfree_item.c
66
ASSERT(atomic_read(&efip->efi_refcount) > 0);
fs/xfs/xfs_icache.c
110
ASSERT(atomic_read(&ip->i_pincount) == 0);
fs/xfs/xfs_icache.c
172
ASSERT(atomic_read(&ip->i_pincount) == 0);
fs/xfs/xfs_inode.h
588
#define xfs_ipincount(ip) ((unsigned int) atomic_read(&ip->i_pincount))
fs/xfs/xfs_inode_item.c
724
ASSERT(atomic_read(&ip->i_pincount) > 0);
fs/xfs/xfs_log.c
1715
ASSERT(atomic_read(&iclog->ic_refcnt) == 0);
fs/xfs/xfs_log.c
2471
ASSERT(atomic_read(&iclog->ic_refcnt) == 0);
fs/xfs/xfs_log.c
2813
atomic_read(&iclog->ic_refcnt) == 0 && iclog->ic_offset == 0)) {
fs/xfs/xfs_log.c
2824
if (atomic_read(&iclog->ic_refcnt) == 0) {
fs/xfs/xfs_log.c
3003
ASSERT(atomic_read(&ticket->t_ref) > 0);
fs/xfs/xfs_log.c
3012
ASSERT(atomic_read(&ticket->t_ref) > 0);
fs/xfs/xfs_log.c
431
if (atomic_read(&iclog->ic_refcnt)) {
fs/xfs/xfs_log_cil.c
1632
int space_used = atomic_read(&cil->xc_ctx->space_used);
fs/xfs/xfs_log_cil.c
705
space_used = atomic_read(&ctx->space_used) + cilpcp->space_used + len;
fs/xfs/xfs_log_cil.c
706
if (atomic_read(&cil->xc_iclog_hdrs) > 0 ||
fs/xfs/xfs_pwork.c
119
atomic_read(&pctl->nr_work) == 0, HZ) == 0)
fs/xfs/xfs_qm.c
178
ASSERT(atomic_read(&dqp->q_pincount) == 0);
fs/xfs/xfs_qm.c
442
if (XFS_DQ_IS_DIRTY(dqp) || atomic_read(&dqp->q_pincount) > 0)
fs/xfs/xfs_refcount_item.c
100
ASSERT(atomic_read(&cuip->cui_next_extent) ==
fs/xfs/xfs_refcount_item.c
60
ASSERT(atomic_read(&cuip->cui_refcount) > 0);
fs/xfs/xfs_reflink.h
23
atomic_read(&inode->i_dio_count))
fs/xfs/xfs_rmap_item.c
60
ASSERT(atomic_read(&ruip->rui_refcount) > 0);
fs/xfs/xfs_rmap_item.c
99
ASSERT(atomic_read(&ruip->rui_next_extent) ==
fs/xfs/xfs_trace.h
1001
__entry->streams = atomic_read(&pag->pagf_fstrms);
fs/xfs/xfs_trace.h
1162
__entry->pincount = atomic_read(&ip->i_pincount);
fs/xfs/xfs_trace.h
300
__entry->refcount = atomic_read(&pag->pag_group.xg_ref);
fs/xfs/xfs_trace.h
302
atomic_read(&pag->pag_group.xg_active_ref);
fs/xfs/xfs_trace.h
339
__entry->refcount = atomic_read(&xg->xg_ref);
fs/xfs/xfs_trace.h
340
__entry->active_refcount = atomic_read(&xg->xg_active_ref);
fs/xfs/xfs_trace.h
4966
__entry->refcount = atomic_read(&iclog->ic_refcnt);
fs/xfs/xfs_trace.h
5096
atomic_read(&xg->xg_intents_drain.dr_count);
fs/xfs/xfs_trace.h
5212
__entry->pincount = atomic_read(&bp->b_pin_count);
fs/xfs/xfs_trace.h
746
__entry->pincount = atomic_read(&bp->b_pin_count);
fs/xfs/xfs_trace.h
820
__entry->pincount = atomic_read(&bp->b_pin_count);
fs/xfs/xfs_trace.h
864
__entry->pincount = atomic_read(&bp->b_pin_count);
fs/xfs/xfs_trace.h
903
__entry->bli_refcount = atomic_read(&bip->bli_refcount);
fs/xfs/xfs_trace.h
908
__entry->buf_pincount = atomic_read(&bip->bli_buf->b_pin_count);
fs/xfs/xfs_trace.h
971
__entry->streams = atomic_read(&pag->pagf_fstrms);
fs/xfs/xfs_trans_buf.c
149
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
184
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
297
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
375
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
425
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
469
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
490
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
518
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
605
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
659
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
684
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
709
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
732
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_trans_buf.c
765
ASSERT(atomic_read(&bip->bli_refcount) > 0);
fs/xfs/xfs_zone_alloc.c
483
if (atomic_read(&zi->zi_nr_free_zones) <
fs/xfs/xfs_zone_gc.c
467
if (atomic_read(&rtg->rtg_gccount)) {
fs/xfs/xfs_zone_info.c
64
full -= atomic_read(&zi->zi_nr_free_zones);
fs/xfs/xfs_zone_info.c
95
seq_printf(m, "\tfree zones: %d\n", atomic_read(&zi->zi_nr_free_zones));
fs/zonefs/sysfs.c
45
return sysfs_emit(buf, "%d\n", atomic_read(&sbi->s_wro_seq_files));
fs/zonefs/sysfs.c
57
return sysfs_emit(buf, "%d\n", atomic_read(&sbi->s_active_seq_files));
include/asm-generic/qrwlock.h
48
cnts = atomic_read(&lock->cnts);
include/asm-generic/qrwlock.h
67
cnts = atomic_read(&lock->cnts);
include/asm-generic/qspinlock.h
57
return atomic_read(&lock->val);
include/asm-generic/qspinlock.h
83
return atomic_read(&lock->val) & ~_Q_LOCKED_MASK;
include/asm-generic/qspinlock.h
92
int val = atomic_read(&lock->val);
include/asm-generic/ticket_spinlock.h
55
u32 old = atomic_read(&lock->val);
include/asm-generic/ticket_spinlock.h
66
u32 val = atomic_read(&lock->val);
include/asm-generic/ticket_spinlock.h
87
u32 val = atomic_read(&lock->val);
include/crypto/if_alg.h
219
atomic_read(&ctx->rcvused), 0);
include/drm/spsc_queue.h
62
return atomic_read(&queue->job_count);
include/kunit/run-in-irq-context.h
128
hardirq_calls = atomic_read(&state.hardirq_func_calls);
include/kunit/run-in-irq-context.h
129
softirq_calls = atomic_read(&state.softirq_func_calls);
include/kunit/run-in-irq-context.h
140
KUNIT_EXPECT_GT_MSG(test, atomic_read(&state.hardirq_func_calls), 0,
include/kunit/run-in-irq-context.h
142
KUNIT_EXPECT_GT_MSG(test, atomic_read(&state.softirq_func_calls), 0,
include/kunit/run-in-irq-context.h
36
task_calls = atomic_read(&state->task_func_calls);
include/kunit/run-in-irq-context.h
38
softirq_calls = atomic_read(&state->softirq_func_calls);
include/linux/blkdev.h
245
return atomic_read(&disk->part0->bd_openers);
include/linux/blkdev.h
708
#define blk_queue_pm_only(q) atomic_read(&(q)->pm_only)
include/linux/blkdev.h
803
return atomic_read(&bdev->__bd_flags) & BD_PARTNO;
include/linux/blkdev.h
808
return atomic_read(&bdev->__bd_flags) & flag;
include/linux/call_once.h
53
state = atomic_read(&once->state);
include/linux/closure.h
179
return atomic_read(&cl->remaining) & CLOSURE_REMAINING_MASK;
include/linux/closure.h
293
unsigned old = atomic_read(&cl->remaining);
include/linux/console.h
728
WARN_ON(!atomic_read(&ignore_console_lock_warning) && \
include/linux/context_tracking_state.h
91
return atomic_read(this_cpu_ptr(&context_tracking.state)) & CT_RCU_WATCHING_MASK;
include/linux/context_tracking_state.h
98
return atomic_read(&ct->state) & CT_RCU_WATCHING_MASK;
include/linux/firewire.h
244
return atomic_read(&device->state) == FW_DEVICE_SHUTDOWN;
include/linux/fs.h
2222
return atomic_read(&inode->i_count);
include/linux/fs.h
2851
return atomic_read(&inode->i_writecount) > 0;
include/linux/fs.h
570
return atomic_read(&mapping->i_mmap_writable) > 0;
include/linux/fscache-cache.h
189
atomic_read(&cache->object_count) == 0);
include/linux/irq_work.h
39
return atomic_read(&work->node.a_flags) & IRQ_WORK_PENDING;
include/linux/irq_work.h
44
return atomic_read(&work->node.a_flags) & IRQ_WORK_BUSY;
include/linux/irq_work.h
49
return atomic_read(&work->node.a_flags) & IRQ_WORK_HARD_IRQ;
include/linux/jbd2.h
1757
free -= atomic_read(&journal->
include/linux/jump_label.h
295
v = atomic_read(&key->enabled);
include/linux/jump_label.h
325
if (atomic_read(&key->enabled) != 0) {
include/linux/jump_label.h
326
WARN_ON_ONCE(atomic_read(&key->enabled) != 1);
include/linux/jump_label.h
336
if (atomic_read(&key->enabled) != 1) {
include/linux/jump_label.h
337
WARN_ON_ONCE(atomic_read(&key->enabled) != 0);
include/linux/kfence.h
127
if (likely(atomic_read(&kfence_allocation_gate) > 0))
include/linux/kgdb.h
343
(irqs_disabled() && (smp_processor_id() == atomic_read(&kgdb_active)))
include/linux/kvm_host.h
1011
if (atomic_read(&kvm->online_vcpus)) \
include/linux/kvm_host.h
1013
(atomic_read(&kvm->online_vcpus) - 1))
include/linux/kvm_host.h
993
int num_vcpus = atomic_read(&kvm->online_vcpus);
include/linux/maple_tree.h
632
atomic_read(&maple_tree_tests_passed), \
include/linux/maple_tree.h
633
atomic_read(&maple_tree_tests_run)); \
include/linux/maple_tree.h
648
atomic_read(&maple_tree_tests_passed), \
include/linux/maple_tree.h
649
atomic_read(&maple_tree_tests_run)); \
include/linux/maple_tree.h
665
atomic_read(&maple_tree_tests_passed), \
include/linux/maple_tree.h
666
atomic_read(&maple_tree_tests_run)); \
include/linux/maple_tree.h
681
atomic_read(&maple_tree_tests_passed), \
include/linux/maple_tree.h
682
atomic_read(&maple_tree_tests_run)); \
include/linux/maple_tree.h
699
atomic_read(&maple_tree_tests_passed), \
include/linux/maple_tree.h
700
atomic_read(&maple_tree_tests_run)); \
include/linux/maple_tree.h
718
atomic_read(&maple_tree_tests_passed), \
include/linux/maple_tree.h
719
atomic_read(&maple_tree_tests_run)); \
include/linux/mm.h
1565
return atomic_read(&folio->_entire_mapcount) + 1;
include/linux/mm.h
1571
return atomic_read(&folio->_large_mapcount) + 1;
include/linux/mm.h
1599
mapcount = atomic_read(&folio->_mapcount) + 1;
include/linux/mm.h
2358
return atomic_read(&folio->_pincount) > 0;
include/linux/mm_inline.h
522
return atomic_read(&mm->tlb_flush_pending);
include/linux/mm_inline.h
534
return atomic_read(&mm->tlb_flush_pending) > 1;
include/linux/mm_types.h
655
return atomic_read(&ptdesc->pt_share_count);
include/linux/netdevice.h
3494
return atomic_read(&ndc->drops0) + atomic_read(&ndc->drops1);
include/linux/netfs.h
551
wait_var_event(&ictx->io_count, atomic_read(&ictx->io_count) == 0);
include/linux/ns_common.h
62
return atomic_read(&ns->__ns_ref_active);
include/linux/osq_lock.h
33
return atomic_read(&lock->tail) != OSQ_UNLOCKED_VAL;
include/linux/page_ref.h
67
return atomic_read(&page->_refcount);
include/linux/pagemap.h
525
return atomic_read(&mapping->nr_thps);
include/linux/pci.h
1426
return (atomic_read(&pdev->enable_cnt) > 0);
include/linux/percpu-rwsem.h
150
return atomic_read(&sem->block);
include/linux/quotaops.h
50
WARN_ON_ONCE(!atomic_read(&dquot->dq_count));
include/linux/quotaops.h
60
if (atomic_read(&dquot->dq_count) > 0)
include/linux/ratelimit.h
32
return atomic_read(&rs->missed);
include/linux/rcuref.h
41
unsigned int c = atomic_read(&ref->refcnt);
include/linux/rcuref.h
58
unsigned int c = atomic_read(&ref->refcnt);
include/linux/refcount.h
170
return atomic_read(&r->refs);
include/linux/rhashtable.h
186
return atomic_read(&ht->nelems) > (tbl->size / 4 * 3) &&
include/linux/rhashtable.h
199
return atomic_read(&ht->nelems) < (tbl->size * 3 / 10) &&
include/linux/rhashtable.h
211
return atomic_read(&ht->nelems) > tbl->size &&
include/linux/rhashtable.h
223
return atomic_read(&ht->nelems) >= ht->max_elems;
include/linux/rio.h
300
return atomic_read(&mport->state) == RIO_DEVICE_RUNNING;
include/linux/rmap.h
186
new_mapcount_val = atomic_read(&folio->_large_mapcount) + diff;
include/linux/rmap.h
236
new_mapcount_val = atomic_read(&folio->_large_mapcount) - diff;
include/linux/rmap.h
396
VM_WARN_ON_FOLIO(atomic_read(&anon_vma->refcount) == 0, folio);
include/linux/rwbase_rt.h
31
return atomic_read(&rwb->readers) != READER_BIAS;
include/linux/rwbase_rt.h
36
return atomic_read(&rwb->readers) == WRITER_BIAS;
include/linux/rwbase_rt.h
41
return atomic_read(&rwb->readers) > 0;
include/linux/sbitmap.h
540
int old = atomic_read(index);
include/linux/sbitmap.h
558
ws = &sbq->ws[atomic_read(wait_index)];
include/linux/sched/mm.h
546
if (likely(!(atomic_read(&mm->membarrier_state) &
include/linux/skbuff.h
1308
if (atomic_read(&shinfo->dataref) == bias)
include/linux/skbuff.h
2032
(atomic_read(&skb_shinfo(skb)->dataref) & SKB_DATAREF_MASK) != 1;
include/linux/skbuff.h
2075
dataref = atomic_read(&skb_shinfo(skb)->dataref);
include/linux/swap.h
353
return atomic_read(&lru_disable_count);
include/linux/sysctl.h
156
return (void *)(unsigned long)atomic_read(&poll->event);
include/net/bluetooth/hci_core.h
1678
BT_DBG("hcon %p orig refcnt %d", conn, atomic_read(&conn->refcnt));
include/net/bluetooth/hci_core.h
1688
BT_DBG("hcon %p orig refcnt %d", conn, atomic_read(&conn->refcnt));
include/net/bluetooth/l2cap.h
829
mutex_lock_nested(&chan->lock, atomic_read(&chan->nesting));
include/net/bonding.h
109
return atomic_read(&netpoll_block_tx);
include/net/ip.h
573
val = atomic_read(&inet_sk(sk)->inet_id);
include/net/ip_fib.h
472
return atomic_read(&net->ipv4.fib_num_tclassid_users);
include/net/ip_vs.h
1403
if (atomic_read(&ctl_cp->n_control) == 0) {
include/net/ip_vs.h
1692
#define IP_VS_DFWD_METHOD(dest) (atomic_read(&(dest)->conn_flags) & \
include/net/ip_vs.h
1867
return (atomic_read(&dest->activeconns) << 8) +
include/net/ip_vs.h
1868
atomic_read(&dest->inactconns);
include/net/llc_c_ev.h
220
return atomic_read(&sk->sk_rmem_alloc) + skb->truesize <
include/net/net_namespace.h
541
return atomic_read(&net->ipv4.rt_genid);
include/net/net_namespace.h
547
return atomic_read(&net->ipv6.fib6_sernum);
include/net/net_namespace.h
580
return atomic_read(&net->fnhe_genid);
include/net/netfilter/nf_conntrack_labels.h
42
if (atomic_read(&net->ct.labels_used) == 0)
include/net/pkt_cls.h
82
return block && !atomic_read(&block->useswcnt);
include/net/request_sock.h
233
return atomic_read(&queue->qlen);
include/net/request_sock.h
238
return atomic_read(&queue->young);
include/net/sock.h
1150
unsigned int qsize = sk->sk_backlog.len + atomic_read(&sk->sk_rmem_alloc);
include/net/sock.h
1609
atomic_read(&sk->sk_rmem_alloc);
include/net/sock.h
2357
return atomic_read(&sk->sk_rmem_alloc);
include/net/sock.h
2763
DEBUG_NET_WARN_ON_ONCE(atomic_read(&sk->sk_drops));
include/net/sock.h
2766
return atomic_read(&sk->sk_drops);
include/net/tcp.h
1720
atomic_read(&sk->sk_rmem_alloc));
include/net/tcp.h
1763
return atomic_read(&sk->sk_rmem_alloc) > threshold;
include/net/tcp.h
1913
atomic_read(&sk->sk_rmem_alloc) < sk->sk_rcvbuf &&
include/net/xfrm.h
1534
return atomic_read(&x->tunnel_users);
include/scsi/scsi_device.h
722
#define scsi_get_ua_new_media_ctr(sdev) atomic_read(&sdev->ua_new_media_ctr)
include/scsi/scsi_device.h
723
#define scsi_get_ua_por_ctr(sdev) atomic_read(&sdev->ua_por_ctr)
include/sound/core.h
196
wait_event(card->power_ref_sleep, !atomic_read(&card->power_ref));
include/sound/hdaudio.h
200
return atomic_read(&codec->in_pm);
include/trace/events/bcache.h
482
__entry->blocked = atomic_read(&ca->set->prio_blocked);
include/trace/events/filelock.h
191
__entry->wcount = atomic_read(&inode->i_writecount);
include/trace/events/filelock.h
192
__entry->rcount = atomic_read(&inode->i_readcount);
include/trace/events/fscache.h
426
__entry->v_n_cookies = atomic_read(&cookie->volume->n_cookies);
include/trace/events/fscache.h
452
__entry->n_active = atomic_read(&cookie->n_active);
include/trace/events/module.h
84
__entry->refcnt = atomic_read(&mod->refcnt);
include/trace/events/neigh.h
46
__entry->entries = atomic_read(&tbl->gc_entries);
include/trace/events/page_ref.h
33
__entry->mapcount = atomic_read(&page->_mapcount);
include/trace/events/page_ref.h
82
__entry->mapcount = atomic_read(&page->_mapcount);
include/trace/events/rpcrdma.h
2216
__entry->avail = atomic_read(&rdma->sc_sq_avail);
include/trace/events/rpcrdma.h
2258
__entry->avail = atomic_read(&rdma->sc_sq_avail);
include/trace/events/rpm.h
38
__entry->usage_count = atomic_read(
include/trace/events/rpm.h
44
__entry->child_count = atomic_read(
include/trace/events/sock.h
118
__entry->rmem_alloc = atomic_read(&sk->sk_rmem_alloc);
include/trace/events/sock.h
84
__entry->rmem_alloc = atomic_read(&sk->sk_rmem_alloc);
include/trace/events/vb2.h
29
atomic_read(&q->owned_by_drv_count);
io_uring/cancel.c
576
return atomic_read(&tctx->inflight_tracked);
io_uring/io-wq.c
1027
unsigned int work_flags = atomic_read(&work->flags);
io_uring/io-wq.c
1056
!atomic_read(&acct->nr_running))) {
io_uring/io-wq.c
162
return __io_get_work_hash(atomic_read(&work->flags));
io_uring/io-wq.c
440
work_flags = atomic_read(&work->flags);
io_uring/io-wq.c
450
work_flags = atomic_read(&next->flags);
io_uring/io-wq.c
539
work_flags = atomic_read(&work->flags);
io_uring/io-wq.c
642
unsigned int work_flags = atomic_read(&work->flags);
io_uring/io-wq.h
61
return __io_wq_is_hashed(atomic_read(&work->flags));
io_uring/io_uring.c
1477
if (atomic_read(&work->flags) & IO_WQ_WORK_CANCEL) {
io_uring/io_uring.c
2300
if (tctx && !atomic_read(&tctx->in_cancel))
io_uring/io_uring.c
607
if (unlikely(atomic_read(&tctx->in_cancel)))
io_uring/io_uring.h
167
return dist >= 0 || atomic_read(&ctx->cq_timeouts) != iowq->nr_timeouts;
io_uring/poll.c
249
v = atomic_read(&req->poll_refs);
io_uring/poll.c
96
if (unlikely(atomic_read(&req->poll_refs) >= IO_POLL_REF_BIAS))
io_uring/refs.h
13
((unsigned int) atomic_read(&(req->refs)) + 127u <= 127u)
io_uring/register.c
633
atomic_set(&n.rings->sq_flags, atomic_read(&o.rings->sq_flags));
io_uring/sqpoll.c
255
wait_event(sqd->wait, !atomic_read(&sqd->park_pending));
io_uring/sqpoll.c
85
WARN_ON_ONCE(atomic_read(&sqd->park_pending));
io_uring/timeout.c
140
atomic_read(&req->ctx->cq_timeouts) + 1);
io_uring/timeout.c
152
seq = READ_ONCE(ctx->cached_cq_tail) - atomic_read(&ctx->cq_timeouts);
io_uring/timeout.c
288
atomic_read(&ctx->cq_timeouts) + 1);
io_uring/timeout.c
640
tail = data_race(ctx->cached_cq_tail) - atomic_read(&ctx->cq_timeouts);
io_uring/tw.c
136
if (unlikely(atomic_read(&tctx->in_cancel)))
io_uring/tw.c
229
nr_wait = atomic_read(&ctx->cq_wait_nr);
io_uring/wait.c
224
iowq.nr_timeouts = atomic_read(&ctx->cq_timeouts);
io_uring/waitid.c
133
WARN_ON_ONCE(!(atomic_read(&iw->refs) & IO_WAITID_REF_MASK));
io_uring/waitid.c
219
if (!(atomic_read(&iw->refs) & IO_WAITID_CANCEL_FLAG)) {
io_uring/zcrx.c
350
old = atomic_read(uref);
io_uring/zcrx.c
642
if (!atomic_read(io_get_user_counter(niov)))
kernel/async.c
214
if (!entry || atomic_read(&entry_count) > MAX_WORK) {
kernel/async.c
267
if (!entry || atomic_read(&entry_count) > MAX_WORK) {
kernel/audit.c
1285
s.lost = atomic_read(&audit_lost);
kernel/audit.c
1289
s.backlog_wait_time_actual = atomic_read(&audit_backlog_wait_time_actual);
kernel/audit.c
417
atomic_read(&audit_lost),
kernel/bpf/devmap.c
1063
usage += atomic_read((atomic_t *)&dtab->items) *
kernel/bpf/hashtab.c
2340
atomic_read(&htab->count);
kernel/bpf/hashtab.c
975
return atomic_read(&htab->count) >= htab->map.max_entries;
kernel/bpf/helpers.c
1583
if (atomic_read(&cur_t->cancelling)) {
kernel/bpf/memalloc.c
762
rcu_in_progress += atomic_read(&c->call_rcu_ttrace_in_progress);
kernel/bpf/memalloc.c
763
rcu_in_progress += atomic_read(&c->call_rcu_in_progress);
kernel/bpf/memalloc.c
777
rcu_in_progress += atomic_read(&c->call_rcu_ttrace_in_progress);
kernel/bpf/memalloc.c
778
rcu_in_progress += atomic_read(&c->call_rcu_in_progress);
kernel/bpf/memalloc.c
888
if (!atomic_read(&c->call_rcu_in_progress))
kernel/bpf/rqspinlock.c
281
val = atomic_read(&lock->val);
kernel/bpf/rqspinlock.h
32
old = atomic_read(&lock->val);
kernel/bpf/stream.c
62
if (atomic_read(&stream->capacity) >= BPF_STREAM_MAX_CAPACITY)
kernel/cgroup/cgroup-v1.c
697
atomic_read(&ss->root->nr_cgrps),
kernel/cgroup/cgroup.c
1397
BUG_ON(atomic_read(&root->nr_cgrps));
kernel/cgroup/cgroup.c
2239
BUG_ON(atomic_read(&root->nr_cgrps) != 1);
kernel/cgroup/cgroup.c
5117
if ((task->flags & PF_EXITING) && !atomic_read(&task->signal->live))
kernel/cgroup/cgroup.c
5127
!atomic_read(&task->signal->live))
kernel/cgroup/cgroup.c
7085
if (thread_group_leader(tsk) && atomic_read(&tsk->signal->live))
kernel/cgroup/debug.c
225
atomic_read(&css->online_cnt), pbuf);
kernel/cpu.c
317
sync = atomic_read(st);
kernel/cpu.c
335
sync = atomic_read(st);
kernel/cpu.c
363
int sync = atomic_read(st);
kernel/cpu.c
398
while (atomic_read(st) != SYNC_STATE_SHOULD_ONLINE)
kernel/cpu.c
405
int sync = atomic_read(st);
kernel/debug/debug_core.c
501
if (atomic_read(&kgdb_setting_breakpoint))
kernel/debug/debug_core.c
519
if (atomic_read(&kgdb_active) != raw_smp_processor_id())
kernel/debug/debug_core.c
666
if (atomic_read(&kgdb_cpu_doing_single_step) != -1 &&
kernel/debug/debug_core.c
717
(atomic_read(&masters_in_kgdb) + atomic_read(&slaves_in_kgdb)) !=
kernel/debug/debug_core.c
793
while (kgdb_do_roundup && atomic_read(&slaves_in_kgdb))
kernel/debug/debug_core.c
798
if (atomic_read(&kgdb_cpu_doing_single_step) != -1) {
kernel/debug/debug_core.c
799
int sstep_cpu = atomic_read(&kgdb_cpu_doing_single_step);
kernel/debug/debug_core.c
933
if (!kgdb_connected || atomic_read(&kgdb_active) != -1 || dbg_kdb_mode)
kernel/debug/gdbstub.c
438
tid = -atomic_read(&kgdb_active) - 2;
kernel/debug/kdb/kdb_debugger.c
40
kdb_initial_cpu = atomic_read(&kgdb_active);
kernel/debug/kdb/kdb_debugger.c
70
if (atomic_read(&kgdb_setting_breakpoint))
kernel/events/callchain.c
293
if (atomic_read(&nr_callchain_events))
kernel/events/core.c
10173
if (!atomic_read(&nr_ksymbol_events))
kernel/events/core.c
10285
if (atomic_read(&nr_ksymbol_events))
kernel/events/core.c
10292
if (!atomic_read(&nr_bpf_events))
kernel/events/core.c
10432
if (!atomic_read(&nr_text_poke_events))
kernel/events/core.c
10685
int events = atomic_read(&event->event_limit);
kernel/events/core.c
13242
if (!atomic_read(&perf_sched_count)) {
kernel/events/core.c
3950
if (atomic_read(&nr_switch_events))
kernel/events/core.c
4321
if (atomic_read(&nr_switch_events))
kernel/events/core.c
6363
if (atomic_read(&nr_mediated_pmu_vms))
kernel/events/core.c
6375
if (WARN_ON_ONCE(!atomic_read(&nr_include_guest_events)))
kernel/events/core.c
6397
if (atomic_read(&nr_include_guest_events))
kernel/events/core.c
6407
if (WARN_ON_ONCE(!atomic_read(&nr_mediated_pmu_vms)))
kernel/events/core.c
9170
if (!atomic_read(&nr_comm_events) &&
kernel/events/core.c
9171
!atomic_read(&nr_mmap_events) &&
kernel/events/core.c
9172
!atomic_read(&nr_task_events))
kernel/events/core.c
9329
if (!atomic_read(&nr_comm_events))
kernel/events/core.c
9427
if (!atomic_read(&nr_namespaces_events))
kernel/events/core.c
9535
if (!atomic_read(&nr_cgroup_events))
kernel/events/core.c
9782
if (atomic_read(&nr_build_id_events))
kernel/events/core.c
9893
if (!atomic_read(&nr_mmap_events))
kernel/events/hw_breakpoint.c
253
const int count = atomic_read(&hist->count[i]);
kernel/events/hw_breakpoint.c
270
const int count1 = atomic_read(&hist1->count[i]);
kernel/events/hw_breakpoint.c
271
const int count2 = atomic_read(&hist2->count[i]);
kernel/events/hw_breakpoint.c
911
if (atomic_read(&info->tsk_pinned.count[slot]))
kernel/events/hw_breakpoint.c
924
if (WARN_ON(atomic_read(&cpu_pinned[type].count[slot])))
kernel/events/hw_breakpoint.c
927
if (atomic_read(&tsk_pinned_all[type].count[slot]))
kernel/events/uprobes.c
1665
if (!atomic_read(&vma->vm_mm->mm_users)) /* called by mmput() ? */
kernel/exit.c
114
return sysfs_emit(page, "%d\n", atomic_read(&oops_count));
kernel/exit.c
502
if (atomic_read(&mm->mm_users) <= 1) {
kernel/exit.c
526
if (atomic_read(&mm->mm_users) <= 1)
kernel/fork.c
1171
VM_BUG_ON(atomic_read(&mm->mm_users));
kernel/fork.c
1475
if (atomic_read(&mm->mm_users) > 1) {
kernel/fork.c
3151
(fd && atomic_read(&fd->count) > 1)) {
kernel/futex/futex.h
355
return atomic_read(&hb->waiters);
kernel/futex/requeue.c
186
atomic_read(&q->requeue_state) != Q_REQUEUE_PI_WAIT,
kernel/futex/requeue.c
198
return atomic_read(&q->requeue_state);
kernel/irq/ipi-mux.c
132
en = atomic_read(&icpu->enable);
kernel/irq/ipi-mux.c
50
if (atomic_read(&icpu->bits) & ibit)
kernel/irq/ipi-mux.c
86
if (!(pending & ibit) && (atomic_read(&icpu->enable) & ibit))
kernel/irq/manage.c
111
return !atomic_read(&desc->threads_active);
kernel/irq/manage.c
127
wait_event(desc->wait_for_threads, !atomic_read(&desc->threads_active));
kernel/irq/spurious.c
287
handled = atomic_read(&desc->threads_handled);
kernel/irq_work.c
163
!(atomic_read(&work->node.a_flags) & IRQ_WORK_HARD_IRQ)) {
kernel/irq_work.c
211
flags = atomic_read(&work->node.a_flags);
kernel/irq_work.c
95
work_flags = atomic_read(&work->node.a_flags);
kernel/jump_label.c
110
int n = atomic_read(&key->enabled);
kernel/jump_label.c
141
v = atomic_read(&key->enabled);
kernel/jump_label.c
202
if (atomic_read(&key->enabled) > 0) {
kernel/jump_label.c
203
WARN_ON_ONCE(atomic_read(&key->enabled) != 1);
kernel/jump_label.c
208
if (atomic_read(&key->enabled) == 0) {
kernel/jump_label.c
233
if (atomic_read(&key->enabled) != 1) {
kernel/jump_label.c
234
WARN_ON_ONCE(atomic_read(&key->enabled) != 0);
kernel/jump_label.c
268
v = atomic_read(&key->enabled);
kernel/jump_label.c
301
val = atomic_read(&key->enabled);
kernel/kcsan/kcsan_test.c
591
KCSAN_EXPECT_READ_BARRIER(atomic_read(&dummy), false);
kernel/kcsan/kcsan_test.c
636
KCSAN_EXPECT_WRITE_BARRIER(atomic_read(&dummy), false);
kernel/kcsan/kcsan_test.c
681
KCSAN_EXPECT_RW_BARRIER(atomic_read(&dummy), false);
kernel/kprobes.c
670
atomic_read(&optimizer_state) != OPTIMIZER_ST_IDLE ||
kernel/kprobes.c
680
if (atomic_read(&optimizer_state) == 1)
kernel/kprobes.c
683
atomic_read(&optimizer_state) == OPTIMIZER_ST_FLUSHING ||
kernel/locking/locktorture.c
1191
if (atomic_read(&cxt.n_lock_torture_errors))
kernel/locking/locktorture.c
937
if (WARN_ON_ONCE(atomic_read(&lock_is_read_held)))
kernel/locking/osq_lock.c
61
if (atomic_read(&lock->tail) == curr &&
kernel/locking/percpu-rwsem.c
200
return per_cpu_sum(*sem->read_count) != 0 && !atomic_read(&sem->block);
kernel/locking/percpu-rwsem.c
86
if (atomic_read(&sem->block))
kernel/locking/qrwlock.c
76
if (!(cnts = atomic_read(&lock->cnts)) &&
kernel/locking/qspinlock.h
162
old = atomic_read(&lock->val);
kernel/locking/qspinlock_paravirt.h
137
old = atomic_read(&lock->val);
kernel/locking/qspinlock_paravirt.h
481
return (u32)(atomic_read(&lock->val) | _Q_LOCKED_VAL);
kernel/locking/qspinlock_paravirt.h
506
(unsigned long)lock, atomic_read(&lock->val));
kernel/locking/qspinlock_paravirt.h
88
int val = atomic_read(&lock->val);
kernel/locking/rwbase_rt.c
61
for (r = atomic_read(&rwb->readers); r < 0;) {
kernel/locking/rwsem.c
1524
int count = atomic_read(&sem->rwbase.readers);
kernel/module/main.c
769
return atomic_read(&mod->refcnt) - MODULE_REF_BASE;
kernel/module/stats.c
284
live_mod_count = atomic_read(&modcount);
kernel/module/stats.c
285
fkreads = atomic_read(&failed_kreads);
kernel/module/stats.c
286
fdecompress = atomic_read(&failed_decompress);
kernel/module/stats.c
287
fbecoming = atomic_read(&failed_becoming);
kernel/module/stats.c
288
floads = atomic_read(&failed_load_modules);
kernel/padata.c
488
int old_node = atomic_read(&last_used_nid);
kernel/panic.c
239
return sysfs_emit(page, "%d\n", atomic_read(&warn_count));
kernel/panic.c
475
return unlikely(atomic_read(&panic_cpu) != PANIC_CPU_INVALID);
kernel/panic.c
488
return unlikely(atomic_read(&panic_cpu) == raw_smp_processor_id());
kernel/panic.c
643
if (atomic_read(&panic_redirect_cpu) != PANIC_CPU_INVALID &&
kernel/panic.c
646
atomic_read(&panic_redirect_cpu));
kernel/power/hibernate.c
106
return !atomic_read(&hibernate_atomic);
kernel/power/main.c
107
return atomic_read(&pm_fs_sync_count) == 0;
kernel/power/swap.c
294
wait_event(hb->wait, atomic_read(&hb->count) == 0);
kernel/printk/nbcon.c
1196
if (unlikely(atomic_read(&nbcon_cpu_emergency_cnt)) ||
kernel/printk/nbcon.c
1255
if (unlikely(atomic_read(&nbcon_cpu_emergency_cnt)) ||
kernel/printk/nbcon.c
145
state->atom = atomic_read(&ACCESS_PRIVATE(con, nbcon_state));
kernel/printk/nbcon.c
1747
if (!WARN_ON_ONCE(atomic_read(&nbcon_cpu_emergency_cnt) == 0)) {
kernel/printk/printk.c
5070
return (atomic_read(&printk_cpu_sync_owner) == raw_smp_processor_id());
kernel/printk/printk.c
5083
} while (atomic_read(&printk_cpu_sync_owner) != -1);
kernel/printk/printk.c
5152
if (atomic_read(&printk_cpu_sync_nested)) {
kernel/printk/printk_safe.c
30
return atomic_read(&force_con);
kernel/rcu/rcu.h
333
if (!atomic_read(&___rfd_beenhere) && \
kernel/rcu/rcuscale.c
1108
while (atomic_read(&n_rcu_scale_reader_started) < nrealreaders)
kernel/rcu/rcuscale.c
430
if (atomic_read(&n_rcu_scale_writer_finished) < nrealwriters)
kernel/rcu/rcuscale.c
576
if (wmbp && atomic_read(&wflp->ws_inflight) < gp_async_max) {
kernel/rcu/rcuscale.c
598
atomic_read(&n_rcu_scale_writer_started) >= nrealwriters)
kernel/rcu/rcuscale.c
628
atomic_read(&n_rcu_scale_writer_finished) >= nrealwriters)
kernel/rcu/rcuscale.c
647
__func__, me, started, done, writer_done[me], atomic_read(&n_rcu_scale_writer_finished), i, jiffies - jdone);
kernel/rcu/rcuscale.c
906
while (atomic_read(&n_kfree_scale_thread_started) < kfree_nrealthreads)
kernel/rcu/rcutorture.c
1900
!atomic_read(&rcu_fwd_cb_nodelay) &&
kernel/rcu/rcutorture.c
2825
atomic_read(&n_rcu_torture_alloc),
kernel/rcu/rcutorture.c
2826
atomic_read(&n_rcu_torture_alloc_fail),
kernel/rcu/rcutorture.c
2827
atomic_read(&n_rcu_torture_free));
kernel/rcu/rcutorture.c
2829
atomic_read(&n_rcu_torture_mberror),
kernel/rcu/rcutorture.c
2830
atomic_read(&n_rcu_torture_mbchk_fail), atomic_read(&n_rcu_torture_mbchk_tries),
kernel/rcu/rcutorture.c
2850
if (atomic_read(&n_rcu_torture_mberror) ||
kernel/rcu/rcutorture.c
2851
atomic_read(&n_rcu_torture_mbchk_fail) ||
kernel/rcu/rcutorture.c
2856
WARN_ON_ONCE(atomic_read(&n_rcu_torture_mberror));
kernel/rcu/rcutorture.c
2857
WARN_ON_ONCE(atomic_read(&n_rcu_torture_mbchk_fail));
kernel/rcu/rcutorture.c
2877
pr_cont(" %d", atomic_read(&rcu_torture_wcount[i]));
kernel/rcu/rcutorture.c
3764
atomic_read(&barrier_cbs_count) == 0 ||
kernel/rcu/rcutorture.c
3770
if (atomic_read(&barrier_cbs_invoked) != n_barrier_cbs) {
kernel/rcu/rcutorture.c
3773
atomic_read(&barrier_cbs_invoked),
kernel/rcu/rcutorture.c
3783
} while (atomic_read(&barrier_cbs_invoked) !=
kernel/rcu/rcutorture.c
3789
atomic_read(&barrier_cbs_invoked));
kernel/rcu/rcutorture.c
4234
if (atomic_read(&n_rcu_torture_error) || n_rcu_torture_barrier_error)
kernel/rcu/rcutorture.c
471
if (!atomic_read(&rcu_fwd_cb_nodelay) &&
kernel/rcu/refscale.c
1244
wait_event(rt->wq, (atomic_read(&nreaders_exp) && smp_load_acquire(&rt->start_reader)) ||
kernel/rcu/refscale.c
1291
me, exp_idx, atomic_read(&nreaders_exp));
kernel/rcu/refscale.c
1380
while (atomic_read(&n_init) < nreaders + 1)
kernel/rcu/refscale.c
1406
!atomic_read(&nreaders_exp) || torture_must_stop());
kernel/rcu/tasks.h
789
atomic_read(&rtp->barrier_q_count));
kernel/rcu/tree.c
1367
return rgssp && atomic_read(rgssp);
kernel/rcu/tree.c
3725
atomic_read(&rcu_state.barrier_cpu_count), done);
kernel/rcu/tree.c
3979
return sprintf(buffer, "%d\n", atomic_read((atomic_t *)kp->arg));
kernel/rcu/tree_stall.h
1018
atomic_read(&warned))
kernel/rcu/tree_stall.h
1028
atomic_read(&warned)) {
kernel/rcu/update.c
156
atomic_read(&rcu_async_hurry_nesting);
kernel/rcu/update.c
196
return rcu_expedited || atomic_read(&rcu_expedited_nesting);
kernel/scftorture.c
203
if (atomic_read(&n_errs) || atomic_read(&n_mb_in_errs) ||
kernel/scftorture.c
204
atomic_read(&n_mb_out_errs) ||
kernel/scftorture.c
205
(!IS_ENABLED(CONFIG_KASAN) && atomic_read(&n_alloc_errs)))
kernel/scftorture.c
213
pr_cont("ste: %d stnmie: %d stnmoe: %d staf: %d\n", atomic_read(&n_errs),
kernel/scftorture.c
214
atomic_read(&n_mb_in_errs), atomic_read(&n_mb_out_errs),
kernel/scftorture.c
215
atomic_read(&n_alloc_errs));
kernel/scftorture.c
566
if (atomic_read(&n_errs) || atomic_read(&n_mb_in_errs) || atomic_read(&n_mb_out_errs))
kernel/sched/core.c
493
if (!atomic_read(&sched_core_count))
kernel/sched/core.c
5364
return atomic_read(&cpu_rq(cpu)->nr_iowait);
kernel/sched/cpupri.c
74
if (!atomic_read(&(vec)->count))
kernel/sched/cputime.c
229
if (atomic_read(&rq->nr_iowait) > 0)
kernel/sched/deadline.c
533
return atomic_read(&rq->rd->dlo_count);
kernel/sched/ext.c
4366
kind = atomic_read(&sch->exit_kind);
kernel/sched/ext.c
5279
WARN_ON_ONCE(atomic_read(&sch->exit_kind) == SCX_EXIT_NONE);
kernel/sched/ext.c
733
return atomic_read(&scx_enable_state_var);
kernel/sched/fair.c
12461
if ((atomic_read(nohz_flags(ilb_cpu)) & flags) == flags)
kernel/sched/fair.c
12593
nr_busy = atomic_read(&sds->nr_busy_cpus);
kernel/sched/fair.c
3626
mm_users = atomic_read(&mm->mm_users);
kernel/sched/membarrier.c
212
atomic_read(&mm->membarrier_state));
kernel/sched/membarrier.c
244
membarrier_state = atomic_read(&next_mm->membarrier_state);
kernel/sched/membarrier.c
325
if (!(atomic_read(&mm->membarrier_state) &
kernel/sched/membarrier.c
333
if (!(atomic_read(&mm->membarrier_state) &
kernel/sched/membarrier.c
339
if (!(atomic_read(&mm->membarrier_state) &
kernel/sched/membarrier.c
345
(atomic_read(&mm->mm_users) == 1 || num_online_cpus() == 1))
kernel/sched/membarrier.c
440
int membarrier_state = atomic_read(&mm->membarrier_state);
kernel/sched/membarrier.c
444
if (atomic_read(&mm->mm_users) == 1 || num_online_cpus() == 1) {
kernel/sched/membarrier.c
502
if (atomic_read(&mm->membarrier_state) &
kernel/sched/membarrier.c
542
if ((atomic_read(&mm->membarrier_state) & ready_state) == ready_state)
kernel/sched/membarrier.c
580
membarrier_state = atomic_read(&mm->membarrier_state);
kernel/sched/rt.c
341
return atomic_read(&rq->rd->rto_count);
kernel/sched/sched.h
3735
membarrier_state = atomic_read(&next_mm->membarrier_state);
kernel/sched/topology.c
1542
if (!atomic_read(&d->rd->refcount))
kernel/sched/topology.c
1585
if (atomic_read(&(*per_cpu_ptr(sdd->sds, cpu))->ref))
kernel/sched/topology.c
1588
if (atomic_read(&(*per_cpu_ptr(sdd->sg, cpu))->ref))
kernel/sched/topology.c
1591
if (atomic_read(&(*per_cpu_ptr(sdd->sgc, cpu))->ref))
kernel/seccomp.c
1362
(atomic_read(¤t->signal->live) == 1)) {
kernel/seccomp.c
640
atomic_read(&caller->seccomp.filter_count));
kernel/smp.c
223
return !!atomic_read(&n_csd_lock_stuck);
kernel/softirq.c
886
if (atomic_read(&sync_cb->cb_waiters)) {
kernel/softirq.c
921
if (!atomic_read(&t->count)) {
kernel/time/clocksource.c
438
reset_pending = atomic_read(&watchdog_reset_pending);
kernel/time/clocksource.c
477
atomic_read(&watchdog_reset_pending)) {
kernel/time/clocksource.c
491
if (atomic_read(&watchdog_reset_pending))
kernel/time/hrtimer.c
1403
if (atomic_read(&cpu_base->timer_waiters)) {
kernel/time/tick-sched.c
215
inp = atomic_read(&in_progress);
kernel/time/tick-sched.c
345
int val = atomic_read(dep);
kernel/time/tick-sched.c
618
if (atomic_read(¤t->tick_dep_mask) ||
kernel/time/tick-sched.c
619
atomic_read(¤t->signal->tick_dep_mask))
kernel/time/timer.c
1521
if (atomic_read(&base->timer_waiters)) {
kernel/time/timer_migration.c
1293
childstate.state = atomic_read(&child->migr_state);
kernel/time/timer_migration.c
1871
state.state = atomic_read(&start->migr_state);
kernel/time/timer_migration.c
481
s.state = atomic_read(&group->migr_state);
kernel/time/timer_migration.c
495
s.state = atomic_read(&group->migr_state);
kernel/time/timer_migration.c
511
s.state = atomic_read(&group->migr_state);
kernel/time/timer_migration.c
674
curstate.state = atomic_read(&group->migr_state);
kernel/time/timer_migration.c
774
childstate.state = atomic_read(&child->migr_state);
kernel/time/timer_migration.c
775
groupstate.state = atomic_read(&group->migr_state);
kernel/time/timer_migration.c
830
groupstate.state = atomic_read(&group->migr_state);
kernel/trace/fgraph.c
770
trace->overrun = atomic_read(¤t->trace_overrun);
kernel/trace/ring_buffer.c
2986
if (atomic_read(&cpu_buffer->resize_disabled)) {
kernel/trace/ring_buffer.c
3068
if (atomic_read(&cpu_buffer->resize_disabled)) {
kernel/trace/ring_buffer.c
3112
if (atomic_read(&buffer->record_disabled)) {
kernel/trace/ring_buffer.c
4711
if (unlikely(atomic_read(&buffer->record_disabled)))
kernel/trace/ring_buffer.c
4721
if (unlikely(atomic_read(&cpu_buffer->record_disabled)))
kernel/trace/ring_buffer.c
4858
if (atomic_read(&buffer->record_disabled))
kernel/trace/ring_buffer.c
4868
if (atomic_read(&cpu_buffer->record_disabled))
kernel/trace/ring_buffer.c
4959
rd = atomic_read(&buffer->record_disabled);
kernel/trace/ring_buffer.c
4982
rd = atomic_read(&buffer->record_disabled);
kernel/trace/ring_buffer.c
4997
return !atomic_read(&buffer->record_disabled);
kernel/trace/ring_buffer.c
5013
return !(atomic_read(&buffer->record_disabled) & RB_BUFFER_OFF);
kernel/trace/ring_buffer.c
5031
!atomic_read(&cpu_buffer->record_disabled);
kernel/trace/ring_buffer.c
6295
if (!(atomic_read(&cpu_buffer->resize_disabled) & RESET_BIT))
kernel/trace/ring_buffer.c
6434
if (atomic_read(&buffer_a->record_disabled))
kernel/trace/ring_buffer.c
6437
if (atomic_read(&buffer_b->record_disabled))
kernel/trace/ring_buffer.c
6440
if (atomic_read(&cpu_buffer_a->record_disabled))
kernel/trace/ring_buffer.c
6443
if (atomic_read(&cpu_buffer_b->record_disabled))
kernel/trace/ring_buffer.c
6465
if (atomic_read(&buffer_a->resizing))
kernel/trace/ring_buffer.c
6467
if (atomic_read(&buffer_b->resizing))
kernel/trace/trace_dynevent.c
46
if (WARN_ON_ONCE(atomic_read(&call->refcnt) <= 0)) {
kernel/trace/trace_dynevent.c
56
return atomic_read(&call->refcnt) != 0;
kernel/trace/trace_events.c
1893
if (atomic_read(&file->sm_ref) != 0)
kernel/trace/trace_events.c
789
bool soft_mode = atomic_read(&file->sm_ref) != 0;
kernel/trace/trace_recursion_record.c
109
i = atomic_read(&nr_records);
kernel/trace/trace_recursion_record.c
127
index = atomic_read(&nr_records);
kernel/trace/trace_recursion_record.c
146
index = atomic_read(&nr_records);
kernel/trace/trace_recursion_record.c
36
i = atomic_read(&nr_records);
kernel/umh.c
311
atomic_read(&running_helpers) == 0,
kernel/watchdog.c
164
int hrint = atomic_read(&per_cpu(hrtimer_interrupts, cpu));
kernel/workqueue.c
1708
int old = atomic_read(&nna->nr);
kernel/workqueue.c
3948
WARN_ON_ONCE(atomic_read(&wq->nr_pwqs_to_flush));
lib/closure.c
105
if (atomic_read(&cl->remaining) & CLOSURE_WAITING)
lib/closure.c
206
unsigned old, new, v = atomic_read(&cl->remaining);
lib/closure.c
266
int r = atomic_read(&cl->remaining);
lib/debugobjects.c
396
if (!pool_must_refill(&pool_global) && atomic_read(&cpus_allocating))
lib/fault-inject.c
152
if (atomic_read(&attr->times) == 0)
lib/fault-inject.c
158
if (atomic_read(&attr->space) > size) {
lib/fault-inject.c
176
if (atomic_read(&attr->times) != -1)
lib/fault-inject.c
292
return snprintf(page, PAGE_SIZE, "%d\n", atomic_read(&val));
lib/fault-inject.c
70
atomic_read(&attr->space),
lib/fault-inject.c
71
atomic_read(&attr->times));
lib/is_single_threaded.c
22
if (atomic_read(&task->signal->live) != 1)
lib/is_single_threaded.c
25
if (atomic_read(&mm->mm_users) == 1)
lib/ratelimit.c
62
atomic_read(&rs->rs_n_left) > 0 && atomic_dec_return(&rs->rs_n_left) >= 0)
lib/ratelimit.c
93
if (atomic_read(&rs->rs_n_left) > 0 && atomic_dec_return(&rs->rs_n_left) >= 0)
lib/rcuref.c
194
unsigned int cnt = atomic_read(&ref->refcnt);
lib/refcount.c
76
unsigned int new, val = atomic_read(&r->refs);
lib/rhashtable.c
399
unsigned int nelems = atomic_read(&ht->nelems);
lib/sbitmap.c
588
if (!atomic_read(&sbq->ws_active))
lib/sbitmap.c
591
wake_index = atomic_read(&sbq->wake_index);
lib/sbitmap.c
611
if (wake_index != atomic_read(&sbq->wake_index))
lib/sbitmap.c
620
if (!atomic_read(&sbq->ws_active))
lib/sbitmap.c
624
wakeups = atomic_read(&sbq->wakeup_cnt);
lib/sbitmap.c
627
if (atomic_read(&sbq->completion_cnt) - wakeups < wake_batch)
lib/sbitmap.c
713
wake_index = atomic_read(&sbq->wake_index);
lib/sbitmap.c
743
seq_printf(m, "wake_index=%d\n", atomic_read(&sbq->wake_index));
lib/sbitmap.c
744
seq_printf(m, "ws_active=%d\n", atomic_read(&sbq->ws_active));
lib/test_lockup.c
604
atomic_read(&alloc_pages_failed));
lib/test_maple_tree.c
34
atomic_read(&maple_tree_tests_passed), \
lib/test_maple_tree.c
35
atomic_read(&maple_tree_tests_run)); \
lib/test_maple_tree.c
3973
atomic_read(&maple_tree_tests_passed),
lib/test_maple_tree.c
3974
atomic_read(&maple_tree_tests_run));
lib/test_maple_tree.c
3975
if (atomic_read(&maple_tree_tests_run) ==
lib/test_maple_tree.c
3976
atomic_read(&maple_tree_tests_passed))
lib/test_ref_tracker.c
99
while (!atomic_read(&test_ref_timer_done))
lib/test_rhashtable.c
203
total, atomic_read(&ht->nelems), entries, chain_len);
lib/test_rhashtable.c
205
if (total != atomic_read(&ht->nelems) || total != entries)
lib/test_rhashtable.c
624
if (wait_event_interruptible(startup_wait, atomic_read(&startup_count) == -1)) {
lib/test_rhashtable.c
779
if (wait_event_interruptible(startup_wait, atomic_read(&startup_count) == 0))
mm/debug.c
204
mm->pgd, atomic_read(&mm->mm_users),
mm/debug.c
205
atomic_read(&mm->mm_count),
mm/debug.c
228
atomic_read(&mm->tlb_flush_pending),
mm/debug.c
74
int mapcount = atomic_read(&page->_mapcount) + 1;
mm/debug.c
87
pincount = atomic_read(&folio->_pincount);
mm/gup.c
223
WARN_ON_ONCE(atomic_read(&folio->_pincount) < 1);
mm/huge_memory.c
283
return atomic_read(&huge_zero_refcount) == 1 ? HPAGE_PMD_NR : 0;
mm/huge_memory.c
3510
VM_BUG_ON_PAGE(atomic_read(&new_folio->_mapcount) != -1, new_head);
mm/internal.h
130
return atomic_read(&folio->_nr_pages_mapped) & FOLIO_PAGES_MAPPED;
mm/internal.h
1466
return atomic_read(&vma->vm_mm->mm_users) == 1;
mm/internal.h
484
int nr_throttled = atomic_read(&pgdat->nr_writeback_throttled);
mm/kasan/kasan_test_c.c
777
KUNIT_EXPECT_KASAN_FAIL_READ(test, atomic_read(unsafe));
mm/kfence/core.c
239
if (!atomic_read(&alloc_covered[alloc_stack_hash & ALLOC_COVERED_MASK]))
mm/kfence/core.c
903
atomic_read(&kfence_allocation_gate) > 0 ||
mm/khugepaged.c
392
return atomic_read(&mm->mm_users) == 0;
mm/kmemleak.c
1712
if (atomic_read(&object->use_count) > 1) {
mm/kmemleak.c
1714
atomic_read(&object->use_count));
mm/kmemleak.c
819
WARN_ON(atomic_read(&object->use_count) < 1);
mm/ksm.c
607
return atomic_read(&mm->mm_users) == 0;
mm/memcontrol.c
1045
gen = atomic_read(&iter->generation);
mm/memcontrol.c
167
nr_bytes = atomic_read(&objcg->nr_charged_bytes);
mm/memcontrol.c
3100
stock->nr_bytes = atomic_read(&objcg->nr_charged_bytes)
mm/memcontrol.c
3506
atomic_read(&frn->done.cnt) == 1) {
mm/memcontrol.c
3553
atomic_read(&frn->done.cnt) == 1) {
mm/memcontrol.c
4064
if (atomic_read(&memcg->kmem_stat)) {
mm/memcontrol.c
4081
if (atomic_read(&pn->slab_reclaimable)) {
mm/memcontrol.c
4089
if (atomic_read(&pn->slab_unreclaimable)) {
mm/memcontrol.c
4164
if (atomic_read(&memcg->vmstats->stats_updates))
mm/memcontrol.c
563
return atomic_read(&vmstats->stats_updates) >
mm/memcontrol.c
603
trace_memcg_flush_stats(memcg, atomic_read(&memcg->vmstats->stats_updates),
mm/migrate.c
965
if (atomic_read(&bh->b_count)) {
mm/mmu_notifier.c
603
BUG_ON(atomic_read(&mm->mm_users) <= 0);
mm/mmu_notifier.c
670
BUG_ON(atomic_read(&mm->mm_users) <= 0);
mm/mmu_notifier.c
799
BUG_ON(atomic_read(&mm->mm_count) <= 0);
mm/mmu_notifier.c
832
BUG_ON(atomic_read(&mm->mm_count) <= 0);
mm/mmu_notifier.c
908
if (WARN_ON(atomic_read(&mm->mm_users) <= 0))
mm/oom_kill.c
843
!atomic_read(&oom_victims), timeout);
mm/oom_kill.c
905
if (atomic_read(&mm->mm_users) <= 1)
mm/page-writeback.c
1540
!atomic_read(&wb->writeback_inodes)) {
mm/page_alloc.c
1074
if (unlikely(atomic_read(&page->_mapcount) != -1))
mm/page_alloc.c
1093
if (unlikely(atomic_read(&page->_mapcount) != -1))
mm/page_alloc.c
1152
unlikely(atomic_read(&folio->_nr_pages_mapped))) {
mm/page_alloc.c
1167
if (unlikely(atomic_read(&folio->_entire_mapcount) + 1)) {
mm/page_alloc.c
1171
if (unlikely(atomic_read(&folio->_pincount))) {
mm/page_alloc.c
1184
if (unlikely(atomic_read(&folio->_entire_mapcount) + 1)) {
mm/page_alloc.c
1188
if (unlikely(atomic_read(&folio->_pincount))) {
mm/page_reporting.c
66
state = atomic_read(&prdev->state);
mm/page_table_check.c
117
BUG_ON(atomic_read(&ptc->file_map_count));
mm/page_table_check.c
120
BUG_ON(atomic_read(&ptc->anon_map_count));
mm/page_table_check.c
142
BUG_ON(atomic_read(&ptc->anon_map_count));
mm/page_table_check.c
143
BUG_ON(atomic_read(&ptc->file_map_count));
mm/page_table_check.c
82
BUG_ON(atomic_read(&ptc->file_map_count));
mm/page_table_check.c
85
BUG_ON(atomic_read(&ptc->anon_map_count));
mm/rmap.c
112
VM_BUG_ON(atomic_read(&anon_vma->refcount));
mm/rmap.c
1545
atomic_read(&folio->_mapcount) > 0, folio);
mm/rmap.c
1559
VM_WARN_ON_FOLIO(atomic_read(&cur_page->_mapcount) > 0 &&
mm/rmap.c
1809
partially_mapped = nr && atomic_read(mapped);
mm/rmap.c
760
batch = atomic_read(&mm->tlb_flush_batched);
mm/rmap.c
812
int batch = atomic_read(&mm->tlb_flush_batched);
mm/rmap.c
959
if ((!atomic_read(&vma->vm_mm->mm_users) ||
mm/secretmem.c
234
if (atomic_read(&secretmem_users) < 0)
mm/secretmem.c
47
return !!atomic_read(&secretmem_users);
mm/shmem.c
1419
!atomic_read(&info->stop_eviction));
mm/shmem.c
1422
if (!atomic_read(&info->stop_eviction))
mm/slab_common.c
1639
int sum = atomic_read(&krcp->head_count);
mm/slab_common.c
1643
sum += atomic_read(&krcp->bulk_count[i]);
mm/slab_common.c
1802
nr_pages = atomic_read(&krcp->backoff_page_cache_fill) ?
mm/slab_common.c
1905
if (atomic_read(&krcp->backoff_page_cache_fill)) {
mm/swap_cgroup.c
28
unsigned int old_ids = atomic_read(&map[offset / ID_PER_SC].ids);
mm/swap_cgroup.c
43
unsigned int new_ids, old_ids = atomic_read(&sc->ids);
mm/swap_state.c
431
return READ_ONCE(enable_vma_readahead) && !atomic_read(&nr_rotate_swap);
mm/swap_state.c
694
atomic_read(&last_readahead_pages));
mm/swapfile.c
2916
if (seq->poll_event != atomic_read(&proc_poll_event)) {
mm/swapfile.c
2917
seq->poll_event = atomic_read(&proc_poll_event);
mm/swapfile.c
3015
seq->poll_event = atomic_read(&proc_poll_event);
mm/userfaultfd.c
1792
if (likely(atomic_read(&ctx->mmap_changing)))
mm/userfaultfd.c
563
if (atomic_read(&ctx->mmap_changing))
mm/userfaultfd.c
750
if (atomic_read(&ctx->mmap_changing))
mm/userfaultfd.c
965
if (atomic_read(&ctx->mmap_changing))
mm/vmscan.c
7429
return atomic_read(&pgdat->kswapd_failures) >= MAX_RECLAIM_RETRIES;
net/9p/trans_rdma.c
324
if (unlikely(atomic_read(&rdma->excess_rc) > 0)) {
net/9p/trans_virtio.c
327
if (atomic_read(&vp_pinned) >= chan->p9_max_pages) {
net/9p/trans_virtio.c
329
(atomic_read(&vp_pinned) < chan->p9_max_pages));
net/atm/atm_misc.c
18
if (atomic_read(&sk_atm(vcc)->sk_rmem_alloc) <= sk_atm(vcc)->sk_rcvbuf)
net/atm/atm_misc.c
33
if (atomic_read(&sk->sk_rmem_alloc) <= sk->sk_rcvbuf) {
net/atm/atm_misc.c
90
#define __HANDLE_ITEM(i) to->i = atomic_read(&from->i)
net/atm/br2684.c
264
if (unlikely(atomic_read(&brvcc->qspace) > 0))
net/atm/br2684.c
278
if (atomic_read(&brvcc->qspace) > 0)
net/atm/common.c
559
pr_debug("%d -= %d\n", atomic_read(&sk->sk_rmem_alloc),
net/atm/common.c
80
if (atomic_read(&sk->sk_rmem_alloc))
net/atm/common.c
82
__func__, atomic_read(&sk->sk_rmem_alloc));
net/atm/proc.c
48
atomic_read(&stats->tx), atomic_read(&stats->tx_err),
net/atm/proc.c
49
atomic_read(&stats->rx), atomic_read(&stats->rx_err),
net/atm/proc.c
50
atomic_read(&stats->rx_drop));
net/atm/resources.c
163
#define __HANDLE_ITEM(i) to->i = atomic_read(&from->i)
net/ax25/af_ax25.c
270
atomic_read(&s->sk->sk_rmem_alloc) <= s->sk->sk_rcvbuf) {
net/ax25/ax25_ds_timer.c
124
if (atomic_read(&sk->sk_rmem_alloc) <
net/ax25/ax25_in.c
263
if (atomic_read(&sk->sk_rmem_alloc) >=
net/ax25/ax25_std_timer.c
62
if (atomic_read(&sk->sk_rmem_alloc) <
net/batman-adv/bat_iv_ogm.c
1163
tq_iface_hop_penalty -= atomic_read(&if_incoming->hop_penalty);
net/batman-adv/bat_iv_ogm.c
1587
if_incoming_seqno = atomic_read(&if_incoming->bat_iv.ogm_seqno);
net/batman-adv/bat_iv_ogm.c
1695
if (atomic_read(&bat_priv->mesh_state) == BATADV_MESH_DEACTIVATING) {
net/batman-adv/bat_iv_ogm.c
2273
switch (atomic_read(&bat_priv->gw.sel_class)) {
net/batman-adv/bat_iv_ogm.c
2333
if (atomic_read(&bat_priv->gw.sel_class) <= 2)
net/batman-adv/bat_iv_ogm.c
2368
if ((atomic_read(&bat_priv->gw.sel_class) > 3) &&
net/batman-adv/bat_iv_ogm.c
2369
(orig_tq_avg - gw_tq_avg < atomic_read(&bat_priv->gw.sel_class)))
net/batman-adv/bat_iv_ogm.c
282
msecs = atomic_read(&bat_priv->orig_interval) - BATADV_JITTER;
net/batman-adv/bat_iv_ogm.c
297
int hop_penalty = atomic_read(&bat_priv->hop_penalty);
net/batman-adv/bat_iv_ogm.c
561
if (atomic_read(&bat_priv->aggregated_ogms))
net/batman-adv/bat_iv_ogm.c
651
if (atomic_read(&bat_priv->aggregated_ogms) && !own_packet) {
net/batman-adv/bat_iv_ogm.c
677
if (!own_packet && atomic_read(&bat_priv->aggregated_ogms))
net/batman-adv/bat_iv_ogm.c
832
seqno = (u32)atomic_read(&hard_iface->bat_iv.ogm_seqno);
net/batman-adv/bat_v.c
605
threshold = atomic_read(&bat_priv->gw.sel_class);
net/batman-adv/bat_v_elp.c
100
throughput = atomic_read(&hard_iface->bat_v.throughput_override);
net/batman-adv/bat_v_elp.c
307
if (atomic_read(&bat_priv->mesh_state) == BATADV_MESH_DEACTIVATING)
net/batman-adv/bat_v_elp.c
324
elp_packet->seqno = htonl(atomic_read(&hard_iface->bat_v.elp_seqno));
net/batman-adv/bat_v_elp.c
325
elp_interval = atomic_read(&hard_iface->bat_v.elp_interval);
net/batman-adv/bat_v_elp.c
331
atomic_read(&hard_iface->bat_v.elp_seqno));
net/batman-adv/bat_v_elp.c
65
msecs = atomic_read(&hard_iface->bat_v.elp_interval) - BATADV_JITTER;
net/batman-adv/bat_v_ogm.c
108
msecs = atomic_read(&bat_priv->orig_interval) - BATADV_JITTER;
net/batman-adv/bat_v_ogm.c
242
if (!atomic_read(&bat_priv->aggregated_ogms)) {
net/batman-adv/bat_v_ogm.c
273
if (atomic_read(&bat_priv->mesh_state) == BATADV_MESH_DEACTIVATING)
net/batman-adv/bat_v_ogm.c
297
ogm_packet->seqno = htonl(atomic_read(&bat_priv->bat_v.ogm_seqno));
net/batman-adv/bat_v_ogm.c
476
int if_hop_penalty = atomic_read(&if_incoming->hop_penalty);
net/batman-adv/bat_v_ogm.c
477
int hop_penalty = atomic_read(&bat_priv->hop_penalty);
net/batman-adv/bridge_loop_avoidance.c
1251
if (atomic_read(&backbone_gw->request_sent))
net/batman-adv/bridge_loop_avoidance.c
1343
if (!atomic_read(&bat_priv->bridge_loop_avoidance))
net/batman-adv/bridge_loop_avoidance.c
1447
if (!atomic_read(&bat_priv->bridge_loop_avoidance))
net/batman-adv/bridge_loop_avoidance.c
1495
if (atomic_read(&backbone_gw->request_sent) == 0)
net/batman-adv/bridge_loop_avoidance.c
1739
if (!atomic_read(&bat_priv->bridge_loop_avoidance))
net/batman-adv/bridge_loop_avoidance.c
1777
if (!atomic_read(&orig_node->bat_priv->bridge_loop_avoidance))
net/batman-adv/bridge_loop_avoidance.c
1909
if (!atomic_read(&bat_priv->bridge_loop_avoidance))
net/batman-adv/bridge_loop_avoidance.c
1915
if (unlikely(atomic_read(&bat_priv->bla.num_requests)))
net/batman-adv/bridge_loop_avoidance.c
2041
if (!atomic_read(&bat_priv->bridge_loop_avoidance))
net/batman-adv/bridge_loop_avoidance.c
2049
if (unlikely(atomic_read(&bat_priv->bla.num_requests)))
net/batman-adv/bridge_loop_avoidance.c
2207
cb->seq = atomic_read(&hash->generation) << 1 | 1;
net/batman-adv/bridge_loop_avoidance.c
2368
cb->seq = atomic_read(&hash->generation) << 1 | 1;
net/batman-adv/bridge_loop_avoidance.c
2461
if (!atomic_read(&bat_priv->bridge_loop_avoidance))
net/batman-adv/bridge_loop_avoidance.c
652
if (!atomic_read(&backbone_gw->request_sent)) {
net/batman-adv/bridge_loop_avoidance.c
875
if (atomic_read(&backbone_gw->request_sent)) {
net/batman-adv/distributed-arp-table.c
1138
if (!atomic_read(&bat_priv->distributed_arp_table))
net/batman-adv/distributed-arp-table.c
1233
if (!atomic_read(&bat_priv->distributed_arp_table))
net/batman-adv/distributed-arp-table.c
1295
if (!atomic_read(&bat_priv->distributed_arp_table))
net/batman-adv/distributed-arp-table.c
1343
if (!atomic_read(&bat_priv->distributed_arp_table))
net/batman-adv/distributed-arp-table.c
1713
if (!atomic_read(&bat_priv->distributed_arp_table))
net/batman-adv/distributed-arp-table.c
1743
if (!atomic_read(&bat_priv->distributed_arp_table))
net/batman-adv/distributed-arp-table.c
1788
if (!atomic_read(&bat_priv->distributed_arp_table))
net/batman-adv/distributed-arp-table.c
742
dat_mode = atomic_read(&bat_priv->distributed_arp_table);
net/batman-adv/distributed-arp-table.c
906
cb->seq = atomic_read(&hash->generation) << 1 | 1;
net/batman-adv/gateway_client.c
173
if (atomic_read(&bat_priv->gw.mode) != BATADV_GW_MODE_CLIENT)
net/batman-adv/gateway_client.c
205
if (atomic_read(&bat_priv->gw.mode) != BATADV_GW_MODE_CLIENT)
net/batman-adv/gateway_client.c
701
switch (atomic_read(&bat_priv->gw.mode)) {
net/batman-adv/gateway_common.c
31
gw_mode = atomic_read(&bat_priv->gw.mode);
net/batman-adv/gateway_common.c
39
down = atomic_read(&bat_priv->gw.bandwidth_down);
net/batman-adv/gateway_common.c
40
up = atomic_read(&bat_priv->gw.bandwidth_up);
net/batman-adv/gateway_common.c
86
atomic_read(&bat_priv->gw.mode) == BATADV_GW_MODE_CLIENT)
net/batman-adv/hard-interface.c
597
if (atomic_read(&bat_priv->fragmentation) == 0)
net/batman-adv/hard-interface.c
749
if (atomic_read(&bat_priv->fragmentation) &&
net/batman-adv/hard-interface.c
756
if (!atomic_read(&bat_priv->fragmentation) &&
net/batman-adv/log.h
79
if (atomic_read(&__batpriv->log_level) & (type) && \
net/batman-adv/main.c
443
if (atomic_read(&bat_priv->mesh_state) != BATADV_MESH_ACTIVE)
net/batman-adv/main.c
616
ap_isolation_enabled = atomic_read(&vlan->ap_isolation);
net/batman-adv/mesh-interface.c
126
if (atomic_read(&bat_priv->mesh_state) != BATADV_MESH_ACTIVE)
net/batman-adv/mesh-interface.c
193
if (atomic_read(&bat_priv->mesh_state) != BATADV_MESH_ACTIVE)
net/batman-adv/mesh-interface.c
258
gw_mode = atomic_read(&bat_priv->gw.mode);
net/batman-adv/multicast.c
1102
if (!atomic_read(&bat_priv->multicast_mode))
net/batman-adv/multicast.c
1137
return atomic_read(&bat_priv->mcast.num_want_all_ipv4);
net/batman-adv/multicast.c
1139
return atomic_read(&bat_priv->mcast.num_want_all_ipv6);
net/batman-adv/multicast.c
1161
return atomic_read(&bat_priv->mcast.num_want_all_rtr4);
net/batman-adv/multicast.c
1163
return atomic_read(&bat_priv->mcast.num_want_all_rtr6);
net/batman-adv/multicast.c
1201
if (!atomic_read(&bat_priv->mcast.num_no_mc_ptype_capa) &&
net/batman-adv/multicast.c
1207
if (count <= atomic_read(&bat_priv->multicast_fanout))
net/batman-adv/multicast.c
1244
atomic_read(&bat_priv->mcast.num_want_all_unsnoopables);
net/batman-adv/multicast.c
136
if (in6_dev && atomic_read(&in6_dev->cnf.mc_forwarding))
net/batman-adv/multicast.c
2031
cb->seq = atomic_read(&hash->generation) << 1 | 1;
net/batman-adv/netlink.c
1023
!!atomic_read(&vlan->ap_isolation)))
net/batman-adv/netlink.c
182
ap_isolation = atomic_read(&vlan->ap_isolation);
net/batman-adv/netlink.c
244
(u8)atomic_read(&bat_priv->tt.vn)))
net/batman-adv/netlink.c
270
!!atomic_read(&bat_priv->aggregated_ogms)))
net/batman-adv/netlink.c
285
!!atomic_read(&bat_priv->bonding)))
net/batman-adv/netlink.c
290
!!atomic_read(&bat_priv->bridge_loop_avoidance)))
net/batman-adv/netlink.c
296
!!atomic_read(&bat_priv->distributed_arp_table)))
net/batman-adv/netlink.c
301
!!atomic_read(&bat_priv->fragmentation)))
net/batman-adv/netlink.c
305
atomic_read(&bat_priv->gw.bandwidth_down)))
net/batman-adv/netlink.c
309
atomic_read(&bat_priv->gw.bandwidth_up)))
net/batman-adv/netlink.c
313
atomic_read(&bat_priv->gw.mode)))
net/batman-adv/netlink.c
322
atomic_read(&bat_priv->gw.sel_class)))
net/batman-adv/netlink.c
327
atomic_read(&bat_priv->hop_penalty)))
net/batman-adv/netlink.c
332
atomic_read(&bat_priv->log_level)))
net/batman-adv/netlink.c
338
!atomic_read(&bat_priv->multicast_mode)))
net/batman-adv/netlink.c
342
atomic_read(&bat_priv->multicast_fanout)))
net/batman-adv/netlink.c
347
atomic_read(&bat_priv->orig_interval)))
net/batman-adv/netlink.c
812
atomic_read(&hard_iface->hop_penalty)))
net/batman-adv/netlink.c
817
atomic_read(&hard_iface->bat_v.elp_interval)))
net/batman-adv/netlink.c
821
atomic_read(&hard_iface->bat_v.throughput_override)))
net/batman-adv/routing.c
546
if (!(recv_if == BATADV_IF_DEFAULT && atomic_read(&bat_priv->bonding)))
net/batman-adv/routing.c
757
orig_ttvn = (u8)atomic_read(&bat_priv->tt.vn);
net/batman-adv/routing.c
768
orig_ttvn = (u8)atomic_read(&orig_node->last_ttvn);
net/batman-adv/routing.c
837
curr_ttvn = (u8)atomic_read(&bat_priv->tt.vn);
net/batman-adv/routing.c
848
curr_ttvn = (u8)atomic_read(&orig_node->last_ttvn);
net/batman-adv/send.c
1050
if (atomic_read(&bat_priv->mesh_state) == BATADV_MESH_DEACTIVATING) {
net/batman-adv/send.c
188
if (atomic_read(&bat_priv->fragmentation) &&
net/batman-adv/send.c
225
u8 ttvn = (u8)atomic_read(&orig_node->last_ttvn);
net/batman-adv/tp_meter.c
471
if (unlikely(atomic_read(&tp_vars->sending) == 0))
net/batman-adv/tp_meter.c
491
if (atomic_read(&tp_vars->sending) == 0)
net/batman-adv/tp_meter.c
515
atomic_read(&tp_vars->last_acked));
net/batman-adv/tp_meter.c
522
tp_vars->last_sent = atomic_read(&tp_vars->last_acked);
net/batman-adv/tp_meter.c
650
if (unlikely(atomic_read(&tp_vars->sending) == 0))
net/batman-adv/tp_meter.c
655
(u32)atomic_read(&tp_vars->last_acked)))
net/batman-adv/tp_meter.c
677
if (atomic_read(&tp_vars->last_acked) == recv_ack) {
net/batman-adv/tp_meter.c
679
if (atomic_read(&tp_vars->dup_acks) != 3)
net/batman-adv/tp_meter.c
712
atomic64_add(recv_ack - atomic_read(&tp_vars->last_acked),
net/batman-adv/tp_meter.c
745
if (recv_ack - atomic_read(&tp_vars->last_acked) >= mss)
net/batman-adv/tp_meter.c
771
win_limit = atomic_read(&tp_vars->last_acked) + tp_vars->cwnd;
net/batman-adv/tp_meter.c
849
while (atomic_read(&tp_vars->sending) != 0) {
net/batman-adv/translation-table.c
1110
cb->seq = atomic_read(&hash->generation) << 1 | 1;
net/batman-adv/translation-table.c
1789
last_ttvn = atomic_read(&orig->orig_node->last_ttvn);
net/batman-adv/translation-table.c
280
count = atomic_read(&tt_global_entry->orig_list_count);
net/batman-adv/translation-table.c
2980
orig_ttvn = (u8)atomic_read(&req_dst_orig_node->last_ttvn);
net/batman-adv/translation-table.c
3036
if (tt_len > atomic_read(&bat_priv->packet_size_max)) {
net/batman-adv/translation-table.c
3102
my_ttvn = (u8)atomic_read(&bat_priv->tt.vn);
net/batman-adv/translation-table.c
3141
req_ttvn = (u8)atomic_read(&bat_priv->tt.vn);
net/batman-adv/translation-table.c
3657
(u8)atomic_read(&bat_priv->tt.vn));
net/batman-adv/translation-table.c
3697
if (!atomic_read(&vlan->ap_isolation))
net/batman-adv/translation-table.c
3736
u8 orig_ttvn = (u8)atomic_read(&orig_node->last_ttvn);
net/batman-adv/translation-table.c
3878
atomic_read(&orig_node->last_ttvn)))
net/batman-adv/translation-table.c
3899
int packet_size_max = atomic_read(&bat_priv->packet_size_max);
net/batman-adv/translation-table.c
4092
atomic_read(&orig_node->last_ttvn) + 1);
net/batman-adv/translation-table.c
521
tt_local_entries += atomic_read(&vlan->tt.num_entries);
net/batman-adv/translation-table.c
651
packet_size_max = atomic_read(&bat_priv->packet_size_max);
net/batman-adv/translation-table.c
677
(u8)atomic_read(&bat_priv->tt.vn));
net/batman-adv/translation-table.c
810
num_entries += atomic_read(&vlan->tt.num_entries);
net/batman-adv/translation-table.c
834
(*tt_data)->ttvn = atomic_read(&orig_node->last_ttvn);
net/batman-adv/translation-table.c
889
vlan_entries = atomic_read(&vlan->tt.num_entries);
net/batman-adv/translation-table.c
913
(*tt_data)->ttvn = atomic_read(&bat_priv->tt.vn);
net/batman-adv/translation-table.c
918
vlan_entries = atomic_read(&vlan->tt.num_entries);
net/bluetooth/6lowpan.c
145
int count = atomic_read(&dev->peer_count);
net/bluetooth/6lowpan.c
811
if (!err && last && dev && !atomic_read(&dev->peer_count)) {
net/bluetooth/bnep/core.c
493
if (atomic_read(&s->terminate))
net/bluetooth/cmtp/core.c
291
if (atomic_read(&session->terminate))
net/bluetooth/hci_conn.c
563
int refcnt = atomic_read(&conn->refcnt);
net/bluetooth/hci_core.c
3052
if (atomic_read(&hdev->promisc)) {
net/bluetooth/hci_core.c
4045
if (atomic_read(&hdev->promisc)) {
net/bluetooth/hci_core.c
4145
atomic_read(&hdev->cmd_cnt), skb_queue_len(&hdev->cmd_q));
net/bluetooth/hci_core.c
4148
if (atomic_read(&hdev->cmd_cnt)) {
net/bluetooth/hci_event.c
4321
if (atomic_read(&hdev->cmd_cnt) && !skb_queue_empty(&hdev->cmd_q))
net/bluetooth/hci_event.c
4442
if (atomic_read(&hdev->cmd_cnt) && !skb_queue_empty(&hdev->cmd_q))
net/bluetooth/hci_sock.c
365
if (!atomic_read(&monitor_promisc))
net/bluetooth/hci_sock.c
815
if (atomic_read(&monitor_promisc)) {
net/bluetooth/hidp/core.c
1065
while (atomic_read(&session->state) <= HIDP_SESSION_IDLING)
net/bluetooth/hidp/core.c
1067
atomic_read(&session->state) > HIDP_SESSION_IDLING);
net/bluetooth/hidp/core.c
108
if (atomic_read(&session->terminate))
net/bluetooth/hidp/core.c
1181
atomic_read(&session->state) > HIDP_SESSION_PREPARING)
net/bluetooth/hidp/core.c
1214
if (atomic_read(&session->terminate))
net/bluetooth/hidp/core.c
244
if (atomic_read(&session->terminate))
net/bluetooth/hidp/core.c
276
!atomic_read(&session->terminate)) {
net/bluetooth/hidp/core.c
281
|| atomic_read(&session->terminate),
net/bluetooth/hidp/core.c
351
!atomic_read(&session->terminate)) {
net/bluetooth/hidp/core.c
356
|| atomic_read(&session->terminate),
net/bluetooth/l2cap_sock.c
1173
ssize_t avail = sk->sk_rcvbuf - atomic_read(&sk->sk_rmem_alloc);
net/bluetooth/l2cap_sock.c
1264
atomic_read(&sk->sk_rmem_alloc) <= sk->sk_rcvbuf >> 1)
net/bluetooth/l2cap_sock.c
1618
lock_sock_nested(sk, atomic_read(&chan->nesting));
net/bluetooth/rfcomm/sock.c
60
if (atomic_read(&sk->sk_rmem_alloc) >= sk->sk_rcvbuf)
net/bluetooth/rfcomm/sock.c
613
if (atomic_read(&sk->sk_rmem_alloc) <= (sk->sk_rcvbuf >> 2))
net/bluetooth/rfcomm/tty.c
357
int pending = 40 - atomic_read(&dev->wmem_alloc);
net/bridge/br_fdb.c
395
int n_learned = atomic_read(&br->fdb_n_learned);
net/bridge/br_multicast.c
1286
if (atomic_read(&br->mdb_hash_tbl.nelems) >= br->hash_max) {
net/bridge/br_netlink.c
1688
atomic_read(&br->fdb_n_learned)) ||
net/caif/caif_socket.c
128
if (atomic_read(&sk->sk_rmem_alloc) + skb->truesize >=
net/caif/caif_socket.c
131
atomic_read(&cf_sk->sk.sk_rmem_alloc),
net/caif/caif_socket.c
257
if (atomic_read(&sk->sk_rmem_alloc) <= sk_rcvbuf_lowwater(cf_sk)) {
net/caif/cfctrl.c
134
req->sequence_no = atomic_read(&ctrl->req_seq_no);
net/can/j1939/socket.c
67
return atomic_read(&jsk->skb_pending);
net/ceph/debugfs.c
356
atomic_read(&osdc->num_requests),
net/ceph/debugfs.c
357
atomic_read(&osdc->num_homeless));
net/ceph/messenger.c
352
if (atomic_read(&con->msgr->stopping)) {
net/ceph/osd_client.c
3502
if (atomic_read(&osdc->num_homeless) || !list_empty(&slow_osds))
net/ceph/osd_client.c
5290
WARN_ON(atomic_read(&osdc->num_requests));
net/ceph/osd_client.c
5291
WARN_ON(atomic_read(&osdc->num_homeless));
net/ceph/osdmap.c
1059
if (atomic_read(&wsm->total_ws) > cpus) {
net/ceph/osdmap.c
1064
if (atomic_read(&wsm->total_ws) > cpus && !wsm->free_ws)
net/ceph/osdmap.c
1082
WARN_ON(atomic_read(&wsm->total_ws) < 1);
net/core/bpf_sk_storage.c
291
atomic_read(&sk->sk_omem_alloc) + size < optmem_max) {
net/core/dev.c
2386
int wanted = atomic_read(&netstamp_wanted);
net/core/dev.c
2403
int wanted = atomic_read(&netstamp_wanted);
net/core/filter.c
1248
atomic_read(&sk->sk_omem_alloc) + filter_size < optmem_max) {
net/core/neighbour.c
1001
if (atomic_read(&tbl->entries) < READ_ONCE(tbl->gc_thresh1))
net/core/neighbour.c
1166
atomic_read(&neigh->probes) >= neigh_max_probes(neigh)) {
net/core/neighbour.c
1901
if (atomic_read(&tbl->entries))
net/core/neighbour.c
2278
.ndtc_entries = atomic_read(&tbl->entries),
net/core/neighbour.c
255
int max_clean = atomic_read(&tbl->gc_entries) -
net/core/neighbour.c
2692
if (nla_put_u32(skb, NDA_PROBES, atomic_read(&neigh->probes)) ||
net/core/neighbour.c
3534
atomic_read(&tbl->entries),
net/core/neighbour.c
693
if (atomic_read(&tbl->entries) > (1 << nht->hash_shift))
net/core/net-sysfs.c
466
atomic_read(&netdev->carrier_up_count) +
net/core/net-sysfs.c
467
atomic_read(&netdev->carrier_down_count));
net/core/net-sysfs.c
477
return sysfs_emit(buf, fmt_dec, atomic_read(&netdev->carrier_up_count));
net/core/net-sysfs.c
487
return sysfs_emit(buf, fmt_dec, atomic_read(&netdev->carrier_down_count));
net/core/page_pool.c
686
u32 release_cnt = atomic_read(&pool->pages_state_release_cnt);
net/core/rtnetlink.c
2119
atomic_read(&dev->carrier_up_count) +
net/core/rtnetlink.c
2120
atomic_read(&dev->carrier_down_count)) ||
net/core/rtnetlink.c
2122
atomic_read(&dev->carrier_up_count)) ||
net/core/rtnetlink.c
2124
atomic_read(&dev->carrier_down_count)) ||
net/core/rtnetlink.c
664
if (!atomic_read(&dev_unreg_count))
net/core/skbuff.c
1793
next = (u32)atomic_read(&sk->sk_zckey);
net/core/skbuff.c
5470
if (atomic_read(&sk->sk_rmem_alloc) + skb->truesize >=
net/core/skbuff.c
5579
serr->ee.ee_data -= atomic_read(&sk->sk_tskey);
net/core/skmsg.c
535
if (atomic_read(&sk->sk_rmem_alloc) > sk->sk_rcvbuf)
net/core/sock.c
2365
if (atomic_read(&sk->sk_omem_alloc))
net/core/sock.c
2367
__func__, atomic_read(&sk->sk_omem_alloc));
net/core/sock.c
2852
if (atomic_read(&sk->sk_omem_alloc) + SKB_TRUESIZE(size) >
net/core/sock.c
2874
atomic_read(&sk->sk_omem_alloc) + size < optmem_max) {
net/core/sock.c
3330
if (atomic_read(&sk->sk_rmem_alloc) < sk_get_rmem0(sk, prot))
net/core/sock.c
3361
atomic_read(&sk->sk_rmem_alloc) +
net/core/sock.c
3461
atomic_read(&sk->sk_rmem_alloc));
net/core/sock.c
4022
mem[SK_MEMINFO_OPTMEM] = atomic_read(&sk->sk_omem_alloc);
net/core/sock.c
493
if (atomic_read(&sk->sk_rmem_alloc) >= READ_ONCE(sk->sk_rcvbuf)) {
net/core/sock_map.c
1433
usage += atomic_read(&htab->count) * (u64)htab->elem_size;
net/ipv4/af_inet.c
160
WARN_ON_ONCE(atomic_read(&sk->sk_rmem_alloc));
net/ipv4/arp.c
340
int probes = atomic_read(&neigh->probes);
net/ipv4/devinet.c
1917
u32 res = atomic_read(&net->ipv4.dev_addr_genid) +
net/ipv4/fib_semantics.c
1300
WRITE_ONCE(nh->nh_saddr_genid, atomic_read(&net->ipv4.dev_addr_genid));
net/ipv4/fib_semantics.c
1317
atomic_read(&net->ipv4.dev_addr_genid))
net/ipv4/fib_semantics.c
2187
nh_upper_bound = atomic_read(&nexthop_nh->fib_nh_upper_bound);
net/ipv4/icmp.c
244
if (atomic_read(&net->ipv4.icmp_global_credit) > 0)
net/ipv4/icmp.c
259
old = atomic_read(&net->ipv4.icmp_global_credit);
net/ipv4/ipmr.c
1170
if (atomic_read(&mrt->cache_resolve_queue_len) == 1)
net/ipv4/ipmr.c
1333
if (atomic_read(&mrt->cache_resolve_queue_len) != 0) {
net/ipv4/ipmr.c
2899
u32 queue_len = atomic_read(&mrt->cache_resolve_queue_len);
net/ipv4/nexthop.c
1432
if (hash > atomic_read(&nhge->hthr.upper_bound))
net/ipv4/nexthop.c
1463
if (hash > atomic_read(&nhge->hthr.upper_bound))
net/ipv4/proc.c
73
atomic_read(&net->ipv4.fqdir->rhashtable.nelems),
net/ipv4/raw.c
179
if (atomic_read(&sk->sk_rmem_alloc) >=
net/ipv4/tcp.c
1586
!atomic_read(&sk->sk_rmem_alloc)))
net/ipv4/tcp_fastopen.c
616
tfo_da_times = atomic_read(&sock_net(sk)->ipv4.tfo_active_disable_times);
net/ipv4/tcp_fastopen.c
664
atomic_read(&sock_net(sk)->ipv4.tfo_active_disable_times)) {
net/ipv4/tcp_input.c
5379
unsigned int rmem = atomic_read(&sk->sk_rmem_alloc);
net/ipv4/tcp_input.c
6017
if (!atomic_read(&sk->sk_rmem_alloc))
net/ipv4/tcp_input.c
793
min(atomic_read(&sk->sk_rmem_alloc), rmem2));
net/ipv4/tcp_input.c
795
if (atomic_read(&sk->sk_rmem_alloc) > sk->sk_rcvbuf)
net/ipv4/udp.c
1713
rmem = atomic_read(&sk->sk_rmem_alloc);
net/ipv4/udp.c
1719
rmem += atomic_read(&udp_prod_queue->rmem_alloc);
net/ipv6/addrconf.c
564
atomic_read(&devconf->mc_forwarding)) < 0)
net/ipv6/addrconf.c
5686
array[DEVCONF_MC_FORWARDING] = atomic_read(&cnf->mc_forwarding);
net/ipv6/addrconf.c
723
u32 res = atomic_read(&net->ipv6.dev_addr_genid) +
net/ipv6/ip6_fib.c
94
int new, old = atomic_read(&net->ipv6.fib6_sernum);
net/ipv6/ip6_flowlabel.c
158
if (atomic_read(&fl->users) == 0) {
net/ipv6/ip6_flowlabel.c
175
if (!sched && atomic_read(&fl_size))
net/ipv6/ip6_flowlabel.c
196
atomic_read(&fl->users) == 0) {
net/ipv6/ip6_flowlabel.c
467
int room = FL_MAX_SIZE - atomic_read(&fl_size);
net/ipv6/ip6_flowlabel.c
846
atomic_read(&fl->users),
net/ipv6/ip6_input.c
536
if (atomic_read(&dev_net_rcu(skb->dev)->ipv6.devconf_all->mc_forwarding) &&
net/ipv6/ip6mr.c
1571
if (atomic_read(&mrt->cache_resolve_queue_len) != 0) {
net/ipv6/ip6mr.c
1622
if (!devconf || !atomic_read(&devconf->mc_forwarding))
net/ipv6/ndisc.c
744
int probes = atomic_read(&neigh->probes);
net/ipv6/proc.c
47
atomic_read(&net->ipv6.fqdir->rhashtable.nelems),
net/ipv6/raw.c
163
if (atomic_read(&sk->sk_rmem_alloc) >=
net/ipv6/route.c
3380
val = atomic_read(&net->ipv6.ip6_rt_gc_expire);
net/ipv6/route.c
472
if (hash <= atomic_read(&first->fib6_nh->fib_nh_upper_bound)) {
net/ipv6/route.c
484
nh_upper_bound = atomic_read(&nh->fib_nh_upper_bound);
net/ipv6/route.c
6506
atomic_read(&net->ipv6.rt6_stats->fib_rt_alloc),
net/iucv/af_iucv.c
1294
if (atomic_read(&iucv->msg_recv) > iucv->msglimit) {
net/iucv/af_iucv.c
1317
if (atomic_read(&iucv->msg_recv) >=
net/iucv/af_iucv.c
170
return (atomic_read(&iucv->skbs_in_xmit) < iucv->path->msglim);
net/iucv/af_iucv.c
1709
len = atomic_read(&sk->sk_rmem_alloc);
net/iucv/af_iucv.c
172
return ((atomic_read(&iucv->msg_sent) < iucv->msglimit_peer) &&
net/iucv/af_iucv.c
173
(atomic_read(&iucv->pendings) <= 0));
net/iucv/af_iucv.c
1770
if (atomic_read(&iucv->skbs_in_xmit) == 0) {
net/iucv/af_iucv.c
211
confirm_recv = atomic_read(&iucv->msg_recv);
net/iucv/af_iucv.c
2169
if (atomic_read(&iucv->skbs_in_xmit) == 0) {
net/iucv/af_iucv.c
256
WARN_ON(atomic_read(&iucv->msg_recv) < 0);
net/iucv/af_iucv.c
286
WARN_ON(atomic_read(&sk->sk_rmem_alloc));
net/iucv/af_iucv.c
403
if (!err && atomic_read(&iucv->skbs_in_xmit) > 0) {
net/kcm/kcmproc.c
150
atomic_read(&psock->sk->sk_rmem_alloc),
net/kcm/kcmsock.c
196
if (atomic_read(&sk->sk_rmem_alloc) >= sk->sk_rcvbuf)
net/key/af_key.c
107
WARN_ON(atomic_read(&sk->sk_rmem_alloc));
net/key/af_key.c
196
if (atomic_read(&sk->sk_rmem_alloc) > sk->sk_rcvbuf)
net/key/af_key.c
3095
if (atomic_read(&net_pfkey->socks_nr) == 0)
net/key/af_key.c
3763
3 * atomic_read(&sk->sk_rmem_alloc) <= sk->sk_rcvbuf)
net/key/af_key.c
76
if (3 * atomic_read(&sk->sk_rmem_alloc) <= 2 * sk->sk_rcvbuf)
net/llc/llc_conn.c
941
__func__, atomic_read(&llc_sock_nr));
net/llc/llc_conn.c
993
atomic_read(&llc_sock_nr));
net/llc/llc_conn.c
997
__func__, atomic_read(&llc_sock_nr));
net/mac80211/debugfs.c
214
atomic_read(&local->aql_ac_pending_airtime[IEEE80211_AC_VO]),
net/mac80211/debugfs.c
215
atomic_read(&local->aql_ac_pending_airtime[IEEE80211_AC_VI]),
net/mac80211/debugfs.c
216
atomic_read(&local->aql_ac_pending_airtime[IEEE80211_AC_BE]),
net/mac80211/debugfs.c
217
atomic_read(&local->aql_ac_pending_airtime[IEEE80211_AC_BK]),
net/mac80211/debugfs.c
218
atomic_read(&local->aql_total_pending_airtime));
net/mac80211/debugfs_netdev.c
204
return scnprintf(buf, buflen, "%d\n", atomic_read(&data->field));\
net/mac80211/debugfs_sta.c
261
q_depth[ac] = atomic_read(&sta->airtime[ac].aql_tx_pending);
net/mac80211/ieee80211_i.h
1914
return atomic_read(&sdata->u.ap.num_mcast_sta);
net/mac80211/ieee80211_i.h
1916
return atomic_read(&sdata->u.vlan.num_mcast_sta);
net/mac80211/led.c
14
if (!atomic_read(&local->assoc_led_active))
net/mac80211/led.c
24
if (!atomic_read(&local->radio_led_active))
net/mac80211/led.h
16
if (!atomic_read(&local->rx_led_active))
net/mac80211/led.h
25
if (!atomic_read(&local->tx_led_active))
net/mac80211/led.h
74
if (atomic_read(&local->tpt_led_active))
net/mac80211/led.h
83
if (atomic_read(&local->tpt_led_active))
net/mac80211/main.c
43
if (atomic_read(&local->iff_allmultis))
net/mac80211/mesh.c
297
neighbors = atomic_read(&ifmsh->estab_plinks);
net/mac80211/mesh.h
399
atomic_read(&sdata->u.mesh.estab_plinks);
net/mac80211/mesh_pathtbl.c
486
if (atomic_read(&cache->rht.nelems) >= MESH_FAST_TX_CACHE_MAX_SIZE)
net/mac80211/mesh_pathtbl.c
614
if (atomic_read(&cache->rht.nelems) < MESH_FAST_TX_CACHE_THRESHOLD_SIZE)
net/mac80211/tx.c
4024
return air_info->deficit - atomic_read(&air_info->aql_tx_pending);
net/mac80211/tx.c
4175
if (atomic_read(&sta->airtime[txq->ac].aql_tx_pending) <
net/mac80211/tx.c
4179
if (atomic_read(&local->aql_total_pending_airtime) <
net/mac80211/tx.c
4181
atomic_read(&sta->airtime[txq->ac].aql_tx_pending) <
net/mac80211/tx.c
420
if (!atomic_read(&ps->num_sta_ps) && skb_queue_empty(&ps->bc_buf))
net/mac80211/tx.c
4205
return atomic_read(&local->aql_ac_pending_airtime[ac]) < aql_limit;
net/mac80211/tx.c
5012
if (atomic_read(&ps->num_sta_ps) > 0)
net/mac80211/util.c
372
(ps && atomic_read(&ps->num_sta_ps)) || ac != vif->txq->ac)
net/mac802154/tx.c
117
wait_event(local->phy->sync_txq, !atomic_read(&local->phy->ongoing_txs));
net/mptcp/ctrl.c
481
disable_times = atomic_read(&pernet->active_disable_times);
net/mptcp/ctrl.c
503
if (atomic_read(&pernet->active_disable_times)) {
net/mptcp/protocol.c
4171
atomic_read(&sk->sk_rmem_alloc)) -
net/mptcp/protocol.c
4173
atomic_read(&sk->sk_rmem_alloc));
net/netfilter/ipset/ip_set_hash_gen.h
1024
if (atomic_dec_and_test(&t->uref) && atomic_read(&t->ref)) {
net/netfilter/ipset/ip_set_hash_gen.h
1085
if (atomic_read(&t->ref) && ext->target) {
net/netfilter/ipset/ip_set_hash_gen.h
1136
if (atomic_dec_and_test(&t->uref) && atomic_read(&t->ref)) {
net/netfilter/ipset/ip_set_hash_gen.h
1341
if (atomic_dec_and_test(&t->uref) && atomic_read(&t->ref)) {
net/netfilter/ipset/ip_set_hash_gen.h
585
if (atomic_dec_and_test(&t->uref) && atomic_read(&t->ref)) {
net/netfilter/ipset/ip_set_hash_gen.h
997
if (atomic_read(&t->ref) && ext->target) {
net/netfilter/ipvs/ip_vs_app.c
584
atomic_read(&inc->usecnt),
net/netfilter/ipvs/ip_vs_conn.c
1023
if (unlikely(pd && atomic_read(&pd->appcnt)))
net/netfilter/ipvs/ip_vs_conn.c
1285
i = atomic_read(&cp->in_pkts);
net/netfilter/ipvs/ip_vs_conn.c
1321
if (atomic_read(&cp->n_control))
net/netfilter/ipvs/ip_vs_conn.c
1388
if (atomic_read(&cp->n_control))
net/netfilter/ipvs/ip_vs_conn.c
1393
if (cp_c && !atomic_read(&cp_c->n_control)) {
net/netfilter/ipvs/ip_vs_conn.c
1404
if (atomic_read(&ipvs->conn_count) != 0) {
net/netfilter/ipvs/ip_vs_conn.c
1427
if (atomic_read(&cp->n_control))
net/netfilter/ipvs/ip_vs_conn.c
1433
if (cp_c && !atomic_read(&cp_c->n_control)) {
net/netfilter/ipvs/ip_vs_conn.c
297
if (!cp && atomic_read(&ip_vs_conn_no_cport_cnt)) {
net/netfilter/ipvs/ip_vs_conn.c
574
return atomic_read(&dest->activeconns)
net/netfilter/ipvs/ip_vs_conn.c
575
+ atomic_read(&dest->inactconns);
net/netfilter/ipvs/ip_vs_conn.c
595
conn_flags = atomic_read(&dest->conn_flags);
net/netfilter/ipvs/ip_vs_conn.c
692
if (pd && atomic_read(&pd->appcnt))
net/netfilter/ipvs/ip_vs_conn.c
755
(atomic_read(&dest->weight) == 0);
net/netfilter/ipvs/ip_vs_conn.c
854
if (atomic_read(&cp->n_control))
net/netfilter/ipvs/ip_vs_conn.c
870
if (has_ref && !atomic_read(&ct->n_control) &&
net/netfilter/ipvs/ip_vs_conn.c
907
atomic_read(&cp->n_control));
net/netfilter/ipvs/ip_vs_core.c
1403
if (atomic_read(&ipvs->conn_out_counter)) {
net/netfilter/ipvs/ip_vs_core.c
1998
unlikely(!atomic_read(&cp->dest->weight))) {
net/netfilter/ipvs/ip_vs_core.c
2004
if (!atomic_read(&cp->n_control)) {
net/netfilter/ipvs/ip_vs_core.c
2018
if (!atomic_read(&cp->n_control))
net/netfilter/ipvs/ip_vs_core.c
2297
ipvs->gen = atomic_read(&ipvs_netns_cnt);
net/netfilter/ipvs/ip_vs_ctl.c
235
if (atomic_read(&ipvs->dropentry))
net/netfilter/ipvs/ip_vs_ctl.c
2453
ip_vs_fwd_name(atomic_read(&dest->conn_flags)),
net/netfilter/ipvs/ip_vs_ctl.c
2454
atomic_read(&dest->weight),
net/netfilter/ipvs/ip_vs_ctl.c
2455
atomic_read(&dest->activeconns),
net/netfilter/ipvs/ip_vs_ctl.c
2456
atomic_read(&dest->inactconns));
net/netfilter/ipvs/ip_vs_ctl.c
246
int genid_done = atomic_read(&ipvs->est_genid_done);
net/netfilter/ipvs/ip_vs_ctl.c
2464
ip_vs_fwd_name(atomic_read(&dest->conn_flags)),
net/netfilter/ipvs/ip_vs_ctl.c
2465
atomic_read(&dest->weight),
net/netfilter/ipvs/ip_vs_ctl.c
2466
atomic_read(&dest->activeconns),
net/netfilter/ipvs/ip_vs_ctl.c
2467
atomic_read(&dest->inactconns));
net/netfilter/ipvs/ip_vs_ctl.c
253
genid = atomic_read(&ipvs->est_genid);
net/netfilter/ipvs/ip_vs_ctl.c
2948
entry.conn_flags = atomic_read(&dest->conn_flags);
net/netfilter/ipvs/ip_vs_ctl.c
2949
entry.weight = atomic_read(&dest->weight);
net/netfilter/ipvs/ip_vs_ctl.c
2952
entry.activeconns = atomic_read(&dest->activeconns);
net/netfilter/ipvs/ip_vs_ctl.c
2953
entry.inactconns = atomic_read(&dest->inactconns);
net/netfilter/ipvs/ip_vs_ctl.c
2954
entry.persistconns = atomic_read(&dest->persistconns);
net/netfilter/ipvs/ip_vs_ctl.c
3542
(atomic_read(&dest->conn_flags) &
net/netfilter/ipvs/ip_vs_ctl.c
3545
atomic_read(&dest->weight)) ||
net/netfilter/ipvs/ip_vs_ctl.c
3555
atomic_read(&dest->activeconns)) ||
net/netfilter/ipvs/ip_vs_ctl.c
3557
atomic_read(&dest->inactconns)) ||
net/netfilter/ipvs/ip_vs_ctl.c
3559
atomic_read(&dest->persistconns)) ||
net/netfilter/ipvs/ip_vs_ctl.c
477
atomic_read(&ipvs->ftpsvc_counter) &&
net/netfilter/ipvs/ip_vs_ctl.c
487
&& atomic_read(&ipvs->nullsvc_counter)) {
net/netfilter/ipvs/ip_vs_dh.c
224
|| atomic_read(&dest->weight) <= 0
net/netfilter/ipvs/ip_vs_est.c
738
int genid = atomic_read(&ipvs->est_genid);
net/netfilter/ipvs/ip_vs_est.c
889
if (genid == atomic_read(&ipvs->est_genid))
net/netfilter/ipvs/ip_vs_fo.c
33
atomic_read(&dest->weight) > hw) {
net/netfilter/ipvs/ip_vs_fo.c
35
hw = atomic_read(&dest->weight);
net/netfilter/ipvs/ip_vs_fo.c
43
atomic_read(&hweight->activeconns),
net/netfilter/ipvs/ip_vs_fo.c
44
atomic_read(&hweight->weight));
net/netfilter/ipvs/ip_vs_lblc.c
310
if (atomic_read(&tbl->entries) <= tbl->max_size) {
net/netfilter/ipvs/ip_vs_lblc.c
315
goal = (atomic_read(&tbl->entries) - tbl->max_size)*4/3;
net/netfilter/ipvs/ip_vs_lblc.c
419
if (atomic_read(&dest->weight) > 0) {
net/netfilter/ipvs/ip_vs_lblc.c
436
if ((__s64)loh * atomic_read(&dest->weight) >
net/netfilter/ipvs/ip_vs_lblc.c
437
(__s64)doh * atomic_read(&least->weight)) {
net/netfilter/ipvs/ip_vs_lblc.c
447
atomic_read(&least->activeconns),
net/netfilter/ipvs/ip_vs_lblc.c
449
atomic_read(&least->weight), loh);
net/netfilter/ipvs/ip_vs_lblc.c
462
if (atomic_read(&dest->activeconns) > atomic_read(&dest->weight)) {
net/netfilter/ipvs/ip_vs_lblc.c
466
if (atomic_read(&d->activeconns)*2
net/netfilter/ipvs/ip_vs_lblc.c
467
< atomic_read(&d->weight)) {
net/netfilter/ipvs/ip_vs_lblc.c
506
atomic_read(&dest->weight) > 0 && !is_overloaded(dest, svc))
net/netfilter/ipvs/ip_vs_lblcr.c
172
if ((atomic_read(&least->weight) > 0)
net/netfilter/ipvs/ip_vs_lblcr.c
188
if (((__s64)loh * atomic_read(&dest->weight) >
net/netfilter/ipvs/ip_vs_lblcr.c
189
(__s64)doh * atomic_read(&least->weight))
net/netfilter/ipvs/ip_vs_lblcr.c
201
atomic_read(&least->activeconns),
net/netfilter/ipvs/ip_vs_lblcr.c
203
atomic_read(&least->weight), loh);
net/netfilter/ipvs/ip_vs_lblcr.c
221
if (atomic_read(&most->weight) > 0) {
net/netfilter/ipvs/ip_vs_lblcr.c
234
if (((__s64)moh * atomic_read(&dest->weight) <
net/netfilter/ipvs/ip_vs_lblcr.c
235
(__s64)doh * atomic_read(&most->weight))
net/netfilter/ipvs/ip_vs_lblcr.c
236
&& (atomic_read(&dest->weight) > 0)) {
net/netfilter/ipvs/ip_vs_lblcr.c
246
atomic_read(&most->activeconns),
net/netfilter/ipvs/ip_vs_lblcr.c
248
atomic_read(&most->weight), moh);
net/netfilter/ipvs/ip_vs_lblcr.c
474
if (atomic_read(&tbl->entries) <= tbl->max_size) {
net/netfilter/ipvs/ip_vs_lblcr.c
479
goal = (atomic_read(&tbl->entries) - tbl->max_size)*4/3;
net/netfilter/ipvs/ip_vs_lblcr.c
583
if (atomic_read(&dest->weight) > 0) {
net/netfilter/ipvs/ip_vs_lblcr.c
600
if ((__s64)loh * atomic_read(&dest->weight) >
net/netfilter/ipvs/ip_vs_lblcr.c
601
(__s64)doh * atomic_read(&least->weight)) {
net/netfilter/ipvs/ip_vs_lblcr.c
611
atomic_read(&least->activeconns),
net/netfilter/ipvs/ip_vs_lblcr.c
613
atomic_read(&least->weight), loh);
net/netfilter/ipvs/ip_vs_lblcr.c
626
if (atomic_read(&dest->activeconns) > atomic_read(&dest->weight)) {
net/netfilter/ipvs/ip_vs_lblcr.c
630
if (atomic_read(&d->activeconns)*2
net/netfilter/ipvs/ip_vs_lblcr.c
631
< atomic_read(&d->weight)) {
net/netfilter/ipvs/ip_vs_lblcr.c
662
if (atomic_read(&en->set.size) > 1 &&
net/netfilter/ipvs/ip_vs_lblcr.c
666
if (atomic_read(&en->set.size) > 1) {
net/netfilter/ipvs/ip_vs_lc.c
42
atomic_read(&dest->weight) == 0)
net/netfilter/ipvs/ip_vs_lc.c
58
atomic_read(&least->activeconns),
net/netfilter/ipvs/ip_vs_lc.c
59
atomic_read(&least->inactconns));
net/netfilter/ipvs/ip_vs_mh.c
150
lw = atomic_read(&dest->last_weight);
net/netfilter/ipvs/ip_vs_mh.c
327
weight = atomic_read(&dest->last_weight);
net/netfilter/ipvs/ip_vs_mh.c
355
new_weight = atomic_read(&dest->last_weight);
net/netfilter/ipvs/ip_vs_mh.c
82
return atomic_read(&dest->weight) <= 0 ||
net/netfilter/ipvs/ip_vs_nq.c
106
atomic_read(&least->activeconns),
net/netfilter/ipvs/ip_vs_nq.c
108
atomic_read(&least->weight), loh);
net/netfilter/ipvs/ip_vs_nq.c
44
return atomic_read(&dest->activeconns) + 1;
net/netfilter/ipvs/ip_vs_nq.c
76
!atomic_read(&dest->weight))
net/netfilter/ipvs/ip_vs_nq.c
82
if (atomic_read(&dest->activeconns) == 0) {
net/netfilter/ipvs/ip_vs_nq.c
89
((__s64)loh * atomic_read(&dest->weight) >
net/netfilter/ipvs/ip_vs_nq.c
90
(__s64)doh * atomic_read(&least->weight))) {
net/netfilter/ipvs/ip_vs_ovf.c
35
w = atomic_read(&dest->weight);
net/netfilter/ipvs/ip_vs_ovf.c
37
atomic_read(&dest->activeconns) > w ||
net/netfilter/ipvs/ip_vs_ovf.c
50
atomic_read(&h->activeconns),
net/netfilter/ipvs/ip_vs_ovf.c
51
atomic_read(&h->weight));
net/netfilter/ipvs/ip_vs_rr.c
70
atomic_read(&dest->weight) > 0)
net/netfilter/ipvs/ip_vs_rr.c
93
atomic_read(&dest->activeconns),
net/netfilter/ipvs/ip_vs_rr.c
94
refcount_read(&dest->refcnt), atomic_read(&dest->weight));
net/netfilter/ipvs/ip_vs_sed.c
107
atomic_read(&least->activeconns),
net/netfilter/ipvs/ip_vs_sed.c
109
atomic_read(&least->weight), loh);
net/netfilter/ipvs/ip_vs_sed.c
48
return atomic_read(&dest->activeconns) + 1;
net/netfilter/ipvs/ip_vs_sed.c
79
atomic_read(&dest->weight) > 0) {
net/netfilter/ipvs/ip_vs_sed.c
96
if ((__s64)loh * atomic_read(&dest->weight) >
net/netfilter/ipvs/ip_vs_sed.c
97
(__s64)doh * atomic_read(&least->weight)) {
net/netfilter/ipvs/ip_vs_sh.c
191
atomic_read(&dest->weight));
net/netfilter/ipvs/ip_vs_sh.c
194
if (++d_count >= atomic_read(&dest->weight)) {
net/netfilter/ipvs/ip_vs_sh.c
75
return atomic_read(&dest->weight) <= 0 ||
net/netfilter/ipvs/ip_vs_twos.c
110
ntohs(choice1->port), atomic_read(&choice1->activeconns),
net/netfilter/ipvs/ip_vs_twos.c
112
atomic_read(&choice1->weight));
net/netfilter/ipvs/ip_vs_twos.c
56
weight = atomic_read(&dest->weight);
net/netfilter/ipvs/ip_vs_twos.c
81
weight = atomic_read(&dest->weight);
net/netfilter/ipvs/ip_vs_wlc.c
51
atomic_read(&dest->weight) > 0) {
net/netfilter/ipvs/ip_vs_wlc.c
68
if ((__s64)loh * atomic_read(&dest->weight) >
net/netfilter/ipvs/ip_vs_wlc.c
69
(__s64)doh * atomic_read(&least->weight)) {
net/netfilter/ipvs/ip_vs_wlc.c
79
atomic_read(&least->activeconns),
net/netfilter/ipvs/ip_vs_wlc.c
81
atomic_read(&least->weight), loh);
net/netfilter/ipvs/ip_vs_wrr.c
180
atomic_read(&dest->weight) >= mark->cw)
net/netfilter/ipvs/ip_vs_wrr.c
214
atomic_read(&dest->activeconns),
net/netfilter/ipvs/ip_vs_wrr.c
216
atomic_read(&dest->weight));
net/netfilter/ipvs/ip_vs_wrr.c
75
weight = atomic_read(&dest->weight);
net/netfilter/ipvs/ip_vs_wrr.c
96
new_weight = atomic_read(&dest->weight);
net/netfilter/nf_conntrack_core.c
1590
if (atomic_read(&cnet->count) < nf_conntrack_max95)
net/netfilter/nf_conntrack_core.c
2389
if (atomic_read(&cnet->count) == 0)
net/netfilter/nf_conntrack_core.c
2417
if (atomic_read(&cnet->count) == 0)
net/netfilter/nf_conntrack_core.c
2507
if (atomic_read(&cnet->count) != 0)
net/netfilter/nf_conntrack_core.c
848
return !ext || ext->gen_id == atomic_read(&nf_conntrack_ext_genid);
net/netfilter/nf_conntrack_core.c
856
if (ext->gen_id != atomic_read(&nf_conntrack_ext_genid))
net/netfilter/nf_conntrack_extend.c
123
new->gen_id = atomic_read(&nf_conntrack_ext_genid);
net/netfilter/nf_conntrack_extend.c
138
unsigned int gen_id = atomic_read(&nf_conntrack_ext_genid);
net/netfilter/nf_conntrack_standalone.c
531
return atomic_read(&cnet->count);
net/netfilter/nf_nat_masquerade.c
108
if (atomic_read(&masq_worker_count) > MAX_MASQ_WORKER_COUNT)
net/netfilter/nf_tables_api.c
4882
nelems = nft_set_userspace_size(set->ops, atomic_read(&set->nelems));
net/netfilter/nf_tables_api.c
5719
atomic_read(&set->nelems) > 0)) {
net/netfilter/nft_numgen.c
29
oval = atomic_read(priv->counter);
net/netfilter/nft_set_rbtree.c
580
u32 nelems = atomic_read(&set->nelems) - set->ndeact;
net/netfilter/xt_statistic.c
42
oval = atomic_read(&info->master->count);
net/netlabel/netlabel_kapi.c
960
return (atomic_read(&netlabel_mgmt_protocount) > 0);
net/netlink/af_netlink.c
1237
rmem = atomic_read(&sk->sk_rmem_alloc);
net/netlink/af_netlink.c
1975
atomic_read(&sk->sk_rmem_alloc) <= sk->sk_rcvbuf / 2) {
net/netlink/af_netlink.c
402
WARN_ON(atomic_read(&sk->sk_rmem_alloc));
net/netlink/af_netlink.c
420
if (atomic_read(&nl_table_users)) {
net/netlink/af_netlink.c
426
if (atomic_read(&nl_table_users) == 0)
net/netlink/genetlink.c
870
atomic_read(&genl_sk_destructing_cnt) == 0);
net/netrom/nr_timer.c
136
if (atomic_read(&sk->sk_rmem_alloc) < (sk->sk_rcvbuf / 2) &&
net/nfc/llcp_core.c
503
pr_debug("%d clients\n", atomic_read(client_cnt));
net/nfc/nci/core.c
1025
if (atomic_read(&ndev->state) == NCI_LISTEN_ACTIVE ||
net/nfc/nci/core.c
1026
atomic_read(&ndev->state) == NCI_DISCOVERY) {
net/nfc/nci/core.c
1530
pr_debug("credits_cnt %d\n", atomic_read(&conn_info->credits_cnt));
net/nfc/nci/core.c
1533
while (atomic_read(&conn_info->credits_cnt)) {
net/nfc/nci/core.c
1540
if (atomic_read(&conn_info->credits_cnt) !=
net/nfc/nci/core.c
1616
pr_debug("cmd_cnt %d\n", atomic_read(&ndev->cmd_cnt));
net/nfc/nci/core.c
1619
if (atomic_read(&ndev->cmd_cnt)) {
net/nfc/nci/core.c
835
if ((atomic_read(&ndev->state) == NCI_DISCOVERY) ||
net/nfc/nci/core.c
836
(atomic_read(&ndev->state) == NCI_W4_ALL_DISCOVERIES)) {
net/nfc/nci/core.c
846
if ((atomic_read(&ndev->state) == NCI_W4_HOST_SELECT) ||
net/nfc/nci/core.c
847
(atomic_read(&ndev->state) == NCI_POLL_ACTIVE)) {
net/nfc/nci/core.c
886
if ((atomic_read(&ndev->state) != NCI_DISCOVERY) &&
net/nfc/nci/core.c
887
(atomic_read(&ndev->state) != NCI_W4_ALL_DISCOVERIES)) {
net/nfc/nci/core.c
908
if ((atomic_read(&ndev->state) != NCI_W4_HOST_SELECT) &&
net/nfc/nci/core.c
909
(atomic_read(&ndev->state) != NCI_POLL_ACTIVE)) {
net/nfc/nci/core.c
942
if (atomic_read(&ndev->state) == NCI_W4_HOST_SELECT) {
net/nfc/nci/core.c
990
if (atomic_read(&ndev->state) == NCI_POLL_ACTIVE) {
net/nfc/nci/ntf.c
112
if (atomic_read(&ndev->state) == NCI_W4_HOST_SELECT) {
net/nfc/nci/ntf.c
860
if (atomic_read(&ndev->state) == NCI_DISCOVERY) {
net/nfc/nci/rsp.c
237
(atomic_read(&ndev->state) != NCI_POLL_ACTIVE)) {
net/packet/af_packet.c
1277
int avail = rcvbuf - atomic_read(&sk->sk_rmem_alloc)
net/packet/af_packet.c
1328
WARN_ON(atomic_read(&sk->sk_rmem_alloc));
net/packet/af_packet.c
2160
if (atomic_read(&sk->sk_rmem_alloc) >= sk->sk_rcvbuf)
net/packet/af_packet.c
2315
atomic_read(&sk->sk_rmem_alloc) < sk->sk_rcvbuf) {
net/packet/af_packet.c
2378
if (atomic_read(&po->tp_drops))
net/packet/af_packet.c
4732
atomic_read(&s->sk_rmem_alloc),
net/packet/af_packet.c
789
if (atomic_read(&po->tp_drops))
net/phonet/pep.c
1185
done = atomic_read(&pn->tx_credits);
net/phonet/pep.c
1199
done = sk_wait_event(sk, &timeo, atomic_read(&pn->tx_credits), &wait);
net/phonet/pep.c
1221
return atomic_read(&pn->tx_credits);
net/phonet/socket.c
349
atomic_read(&pn->tx_credits))
net/rds/cong.c
284
unsigned long gen = atomic_read(&rds_cong_generation);
net/rds/connection.c
403
atomic_read(&cp->cp_state));
net/rds/connection.c
435
atomic_read(&cp->cp_state));
net/rds/connection.c
791
atomic_read(&cp->cp_state) == RDS_CONN_CONNECTING,
net/rds/connection.c
794
atomic_read(&cp->cp_state) == RDS_CONN_UP,
net/rds/connection.c
816
atomic_read(&cp->cp_state) == RDS_CONN_CONNECTING,
net/rds/connection.c
819
atomic_read(&cp->cp_state) == RDS_CONN_UP,
net/rds/ib.c
324
iinfo->cache_allocs = atomic_read(&ic->i_cache_allocs);
net/rds/ib.c
361
iinfo6->cache_allocs = atomic_read(&ic->i_cache_allocs);
net/rds/ib.c
507
atomic_read(&rds_ib_unloading) != 0);
net/rds/ib_cm.c
1083
(atomic_read(&ic->i_signaled_sends) == 0) &&
net/rds/ib_cm.c
1084
(atomic_read(&ic->i_fastreg_inuse_count) == 0) &&
net/rds/ib_cm.c
1085
(atomic_read(&ic->i_fastreg_wrs) == RDS_IB_DEFAULT_FR_WR));
net/rds/ib_cm.c
222
(atomic_read(&ic->i_credits));
net/rds/ib_cm.c
290
if (atomic_read(&ic->i_cq_quiesce))
net/rds/ib_cm.c
335
if (atomic_read(&ic->i_cq_quiesce))
net/rds/ib_frmr.c
118
if (atomic_read(&pool->free_pinned) >= pool->max_free_pinned ||
net/rds/ib_frmr.c
119
atomic_read(&pool->dirty_count) >= pool->max_items / 5)
net/rds/ib_frmr.c
92
if (atomic_read(&pool->item_count) > pool->max_items_soft)
net/rds/ib_rdma.c
281
item_count = atomic_read(&pool->item_count);
net/rds/ib_rdma.c
505
if (atomic_read(&pool->free_pinned) >= pool->max_free_pinned ||
net/rds/ib_rdma.c
506
atomic_read(&pool->dirty_count) >= pool->max_items / 5)
net/rds/ib_rdma.c
639
WARN_ON(atomic_read(&pool->item_count));
net/rds/ib_rdma.c
640
WARN_ON(atomic_read(&pool->free_pinned));
net/rds/ib_recv.c
1091
WARN_ON(atomic_read(&rds_ib_allocation));
net/rds/ib_ring.c
78
diff = ring->w_alloc_ctr - (u32) atomic_read(&ring->w_free_ctr);
net/rds/ib_send.c
234
BUG_ON(atomic_read(&ic->i_signaled_sends) < 0);
net/rds/ib_send.c
367
oldval = newval = atomic_read(&ic->i_credits);
net/rds/ib_send.c
417
IB_GET_SEND_CREDITS(atomic_read(&ic->i_credits)),
net/rds/ib_send.c
450
if (IB_GET_POST_CREDITS(atomic_read(&ic->i_credits)) >= 16)
net/rds/loop.c
55
return atomic_read(&rds_loop_unloading) != 0;
net/rds/rds.h
844
return atomic_read(&cp->cp_state);
net/rds/rds.h
857
return atomic_read(&cp->cp_state) == RDS_CONN_UP;
net/rds/rds.h
863
return atomic_read(&cp->cp_state) == RDS_CONN_DOWN;
net/rds/rds.h
876
return atomic_read(&cp->cp_state) == RDS_CONN_CONNECTING;
net/rds/tcp.c
429
return atomic_read(&rds_tcp_unloading) != 0;
net/rds/threads.c
80
atomic_read(&cp->cp_state));
net/rfkill/core.c
1124
if (!atomic_read(&rfkill_input_disabled))
net/rfkill/core.c
448
if (atomic_read(&rfkill_input_disabled))
net/rfkill/core.c
473
if (atomic_read(&rfkill_input_disabled))
net/rfkill/core.c
501
if (atomic_read(&rfkill_input_disabled))
net/rfkill/core.c
520
if (atomic_read(&rfkill_input_disabled))
net/rose/rose_in.c
183
if (atomic_read(&sk->sk_rmem_alloc) >
net/rose/rose_timer.c
146
if (atomic_read(&sk->sk_rmem_alloc) < (sk->sk_rcvbuf / 2) &&
net/rxrpc/af_rxrpc.c
1130
ASSERTCMP(atomic_read(&rxrpc_n_rx_skbs), ==, 0);
net/rxrpc/ar-internal.h
1325
u = atomic_read(&local->active_users);
net/rxrpc/call_object.c
752
wait_var_event(&rxnet->nr_calls, !atomic_read(&rxnet->nr_calls));
net/rxrpc/conn_client.c
234
limit = umax(atomic_read(&rxnet->nr_conns) * 4, 1024);
net/rxrpc/conn_client.c
757
nr_conns = atomic_read(&local->rxnet->nr_client_conns);
net/rxrpc/conn_object.c
225
if (atomic_read(&conn->active) >= 0 &&
net/rxrpc/conn_object.c
404
ASSERTCMP(atomic_read(&conn->active), >=, 0);
net/rxrpc/conn_object.c
405
if (likely(atomic_read(&conn->active) > 0))
net/rxrpc/conn_object.c
417
conn->debug_id, atomic_read(&conn->active),
net/rxrpc/conn_object.c
455
ASSERTCMP(atomic_read(&conn->active), ==, -1);
net/rxrpc/conn_object.c
493
wait_var_event(&rxnet->nr_conns, !atomic_read(&rxnet->nr_conns));
net/rxrpc/local_object.c
332
u = atomic_read(&local->active_users);
net/rxrpc/local_object.c
347
u = atomic_read(&local->active_users);
net/rxrpc/local_object.c
367
u = atomic_read(&local->active_users);
net/rxrpc/proc.c
183
atomic_read(&conn->active),
net/rxrpc/proc.c
258
atomic_read(&bundle->active),
net/rxrpc/proc.c
425
atomic_read(&local->active_users),
net/rxrpc/proc.c
481
atomic_read(&rxnet->stat_tx_data_send),
net/rxrpc/proc.c
482
atomic_read(&rxnet->stat_tx_data_send_frag),
net/rxrpc/proc.c
483
atomic_read(&rxnet->stat_tx_data_send_fail),
net/rxrpc/proc.c
484
atomic_read(&rxnet->stat_tx_data_send_msgsize));
net/rxrpc/proc.c
487
atomic_read(&rxnet->stat_tx_data),
net/rxrpc/proc.c
488
atomic_read(&rxnet->stat_tx_data_retrans),
net/rxrpc/proc.c
489
atomic_read(&rxnet->stat_tx_data_underflow),
net/rxrpc/proc.c
490
atomic_read(&rxnet->stat_tx_data_cwnd_reset));
net/rxrpc/proc.c
493
atomic_read(&rxnet->stat_rx_data),
net/rxrpc/proc.c
494
atomic_read(&rxnet->stat_rx_data_reqack),
net/rxrpc/proc.c
495
atomic_read(&rxnet->stat_rx_data_jumbo));
net/rxrpc/proc.c
498
atomic_read(&rxnet->stat_tx_ack_fill),
net/rxrpc/proc.c
499
atomic_read(&rxnet->stat_tx_ack_send),
net/rxrpc/proc.c
500
atomic_read(&rxnet->stat_tx_ack_skip));
net/rxrpc/proc.c
503
atomic_read(&rxnet->stat_tx_acks[RXRPC_ACK_REQUESTED]),
net/rxrpc/proc.c
504
atomic_read(&rxnet->stat_tx_acks[RXRPC_ACK_DUPLICATE]),
net/rxrpc/proc.c
505
atomic_read(&rxnet->stat_tx_acks[RXRPC_ACK_OUT_OF_SEQUENCE]),
net/rxrpc/proc.c
506
atomic_read(&rxnet->stat_tx_acks[RXRPC_ACK_EXCEEDS_WINDOW]),
net/rxrpc/proc.c
507
atomic_read(&rxnet->stat_tx_acks[RXRPC_ACK_NOSPACE]),
net/rxrpc/proc.c
508
atomic_read(&rxnet->stat_tx_acks[RXRPC_ACK_PING]),
net/rxrpc/proc.c
509
atomic_read(&rxnet->stat_tx_acks[RXRPC_ACK_PING_RESPONSE]),
net/rxrpc/proc.c
510
atomic_read(&rxnet->stat_tx_acks[RXRPC_ACK_DELAY]),
net/rxrpc/proc.c
511
atomic_read(&rxnet->stat_tx_acks[RXRPC_ACK_IDLE]));
net/rxrpc/proc.c
514
atomic_read(&rxnet->stat_rx_acks[RXRPC_ACK_REQUESTED]),
net/rxrpc/proc.c
515
atomic_read(&rxnet->stat_rx_acks[RXRPC_ACK_DUPLICATE]),
net/rxrpc/proc.c
516
atomic_read(&rxnet->stat_rx_acks[RXRPC_ACK_OUT_OF_SEQUENCE]),
net/rxrpc/proc.c
517
atomic_read(&rxnet->stat_rx_acks[RXRPC_ACK_EXCEEDS_WINDOW]),
net/rxrpc/proc.c
518
atomic_read(&rxnet->stat_rx_acks[RXRPC_ACK_NOSPACE]),
net/rxrpc/proc.c
519
atomic_read(&rxnet->stat_rx_acks[RXRPC_ACK_PING]),
net/rxrpc/proc.c
520
atomic_read(&rxnet->stat_rx_acks[RXRPC_ACK_PING_RESPONSE]),
net/rxrpc/proc.c
521
atomic_read(&rxnet->stat_rx_acks[RXRPC_ACK_DELAY]),
net/rxrpc/proc.c
522
atomic_read(&rxnet->stat_rx_acks[RXRPC_ACK_IDLE]),
net/rxrpc/proc.c
523
atomic_read(&rxnet->stat_rx_acks[0]));
net/rxrpc/proc.c
526
atomic_read(&rxnet->stat_why_req_ack[rxrpc_reqack_ack_lost]),
net/rxrpc/proc.c
527
atomic_read(&rxnet->stat_why_req_ack[rxrpc_reqack_more_rtt]),
net/rxrpc/proc.c
528
atomic_read(&rxnet->stat_why_req_ack[rxrpc_reqack_old_rtt]),
net/rxrpc/proc.c
529
atomic_read(&rxnet->stat_why_req_ack[rxrpc_reqack_app_stall]),
net/rxrpc/proc.c
530
atomic_read(&rxnet->stat_why_req_ack[rxrpc_reqack_jumbo_win]));
net/rxrpc/proc.c
533
atomic_read(&rxnet->stat_why_req_ack[rxrpc_reqack_no_srv_last]),
net/rxrpc/proc.c
534
atomic_read(&rxnet->stat_why_req_ack[rxrpc_reqack_retrans]),
net/rxrpc/proc.c
535
atomic_read(&rxnet->stat_why_req_ack[rxrpc_reqack_slow_start]),
net/rxrpc/proc.c
536
atomic_read(&rxnet->stat_why_req_ack[rxrpc_reqack_small_txwin]));
net/rxrpc/proc.c
539
atomic_read(&rxnet->stat_tx_jumbo[0]),
net/rxrpc/proc.c
540
atomic_read(&rxnet->stat_tx_jumbo[1]),
net/rxrpc/proc.c
541
atomic_read(&rxnet->stat_tx_jumbo[2]),
net/rxrpc/proc.c
542
atomic_read(&rxnet->stat_tx_jumbo[3]),
net/rxrpc/proc.c
543
atomic_read(&rxnet->stat_tx_jumbo[4]),
net/rxrpc/proc.c
544
atomic_read(&rxnet->stat_tx_jumbo[5]),
net/rxrpc/proc.c
545
atomic_read(&rxnet->stat_tx_jumbo[6]),
net/rxrpc/proc.c
546
atomic_read(&rxnet->stat_tx_jumbo[7]),
net/rxrpc/proc.c
547
atomic_read(&rxnet->stat_tx_jumbo[8]),
net/rxrpc/proc.c
548
atomic_read(&rxnet->stat_tx_jumbo[9]));
net/rxrpc/proc.c
551
atomic_read(&rxnet->stat_rx_jumbo[0]),
net/rxrpc/proc.c
552
atomic_read(&rxnet->stat_rx_jumbo[1]),
net/rxrpc/proc.c
553
atomic_read(&rxnet->stat_rx_jumbo[2]),
net/rxrpc/proc.c
554
atomic_read(&rxnet->stat_rx_jumbo[3]),
net/rxrpc/proc.c
555
atomic_read(&rxnet->stat_rx_jumbo[4]),
net/rxrpc/proc.c
556
atomic_read(&rxnet->stat_rx_jumbo[5]),
net/rxrpc/proc.c
557
atomic_read(&rxnet->stat_rx_jumbo[6]),
net/rxrpc/proc.c
558
atomic_read(&rxnet->stat_rx_jumbo[7]),
net/rxrpc/proc.c
559
atomic_read(&rxnet->stat_rx_jumbo[8]),
net/rxrpc/proc.c
560
atomic_read(&rxnet->stat_rx_jumbo[9]));
net/rxrpc/proc.c
563
atomic_read(&rxrpc_nr_txbuf),
net/rxrpc/proc.c
564
atomic_read(&rxrpc_n_rx_skbs));
net/rxrpc/proc.c
567
atomic_read(&rxnet->stat_io_loop));
net/rxrpc/skbuff.c
34
int n = atomic_read(select_skb_count(skb));
net/sched/act_api.c
1625
qstats.drops = atomic_read(&p->tcfa_drops);
net/sched/act_api.c
1626
qstats.overlimits = atomic_read(&p->tcfa_overlimits);
net/sched/act_api.c
410
if (!bind && strict && atomic_read(&p->tcfa_bindcnt) > 0)
net/sched/act_api.c
618
if (atomic_read(&p->tcfa_bindcnt) > 0)
net/sched/act_api.c
756
if (!atomic_read(&p->tcfa_bindcnt)) {
net/sched/act_bpf.c
144
.bindcnt = atomic_read(&prog->tcf_bindcnt) - bind,
net/sched/act_connmark.c
205
opt.bindcnt = atomic_read(&ci->tcf_bindcnt) - bind;
net/sched/act_csum.c
645
.bindcnt = atomic_read(&p->tcf_bindcnt) - bind,
net/sched/act_ct.c
1539
.bindcnt = atomic_read(&c->tcf_bindcnt) - bind,
net/sched/act_ctinfo.c
292
.bindcnt = atomic_read(&ci->tcf_bindcnt) - bind,
net/sched/act_gact.c
193
.bindcnt = atomic_read(&gact->tcf_bindcnt) - bind,
net/sched/act_gate.c
613
.bindcnt = atomic_read(&gact->tcf_bindcnt) - bind,
net/sched/act_ife.c
650
opt.bindcnt = atomic_read(&ife->tcf_bindcnt) - bind;
net/sched/act_mirred.c
495
.bindcnt = atomic_read(&m->tcf_bindcnt) - bind,
net/sched/act_mpls.c
337
.bindcnt = atomic_read(&m->tcf_bindcnt) - bind,
net/sched/act_nat.c
276
.bindcnt = atomic_read(&p->tcf_bindcnt) - bind,
net/sched/act_pedit.c
526
opt->bindcnt = atomic_read(&p->tcf_bindcnt) - bind;
net/sched/act_police.c
348
.bindcnt = atomic_read(&police->tcf_bindcnt) - bind,
net/sched/act_sample.c
232
.bindcnt = atomic_read(&s->tcf_bindcnt) - bind,
net/sched/act_simple.c
178
.bindcnt = atomic_read(&d->tcf_bindcnt) - bind,
net/sched/act_skbedit.c
296
.bindcnt = atomic_read(&d->tcf_bindcnt) - bind,
net/sched/act_skbmod.c
249
opt.bindcnt = atomic_read(&d->tcf_bindcnt) - bind;
net/sched/act_tunnel_key.c
724
.bindcnt = atomic_read(&t->tcf_bindcnt) - bind,
net/sched/act_vlan.c
294
.bindcnt = atomic_read(&v->tcf_bindcnt) - bind,
net/sched/cls_api.c
820
return atomic_read(&block->offloadcnt);
net/sched/em_meta.c
419
dst->value = atomic_read(&sk->sk_omem_alloc);
net/sched/sch_taprio.c
689
budget = atomic_read(&entry->budget[tc]);
net/sctp/associola.c
1420
atomic_read(&t->mtu_info));
net/sctp/associola.c
1523
rx_count = atomic_read(&asoc->rmem_alloc);
net/sctp/associola.c
1525
rx_count = atomic_read(&asoc->base.sk->sk_rmem_alloc);
net/sctp/associola.c
416
WARN_ON(atomic_read(&asoc->rmem_alloc));
net/sctp/diag.c
173
amt = atomic_read(&asoc->rmem_alloc);
net/sctp/diag.c
181
mem[SK_MEMINFO_OPTMEM] = atomic_read(&sk->sk_omem_alloc);
net/sctp/diag.c
421
r->idiag_rqueue = atomic_read(&infox->asoc->rmem_alloc);
net/sctp/objcnt.c
69
atomic_read(sctp_dbg_objcnt[i].counter));
net/sctp/proc.c
269
atomic_read(&assoc->rmem_alloc),
net/sctp/socket.c
9558
if (atomic_read(&sctp_sk(oldsk)->pd_mode)) {
net/sctp/ulpevent.c
691
rx_count = atomic_read(&asoc->rmem_alloc);
net/sctp/ulpevent.c
693
rx_count = atomic_read(&sk->sk_rmem_alloc);
net/sctp/ulpqueue.c
1052
if (sp->frag_interleave || atomic_read(&sp->pd_mode) == 0) {
net/sctp/ulpqueue.c
212
if (atomic_read(&sp->pd_mode) == 0) {
net/sctp/ulpqueue.c
477
atomic_read(&sctp_sk(asoc->base.sk)->pd_mode))
net/smc/af_smc.c
143
if (READ_ONCE(sk->sk_ack_backlog) + atomic_read(&smc->queued_smc_hs) >
net/smc/af_smc.c
2771
} else if (!atomic_read(&smc_sk(nsk)->conn.bytes_to_rcv)) {
net/smc/af_smc.c
2917
atomic_read(&smc->conn.sndbuf_space)) ||
net/smc/af_smc.c
2927
if (atomic_read(&smc->conn.sndbuf_space))
net/smc/af_smc.c
2931
if (atomic_read(&smc->conn.bytes_to_rcv))
net/smc/af_smc.c
3228
answ = atomic_read(&smc->conn.bytes_to_rcv);
net/smc/af_smc.c
3241
atomic_read(&smc->conn.sndbuf_space);
net/smc/smc_cdc.c
230
wait_event(conn->cdc_pend_tx_wq, !atomic_read(&conn->cdc_pend_tx_wr));
net/smc/smc_cdc.c
405
atomic_read(&conn->peer_rmbe_space) == conn->peer_rmbe_size) {
net/smc/smc_cdc.c
67
WARN_ON(atomic_read(&conn->cdc_pend_tx_wr) < 0);
net/smc/smc_close.c
97
if (atomic_read(&conn->bytes_to_rcv))
net/smc/smc_core.c
1655
if (atomic_read(&smcd->lgr_cnt))
net/smc/smc_core.c
1656
wait_event(smcd->lgrs_deleted, !atomic_read(&smcd->lgr_cnt));
net/smc/smc_core.c
1691
if (atomic_read(&smcibdev->lnk_cnt))
net/smc/smc_core.c
1693
!atomic_read(&smcibdev->lnk_cnt));
net/smc/smc_core.c
1695
if (atomic_read(&lgr_cnt))
net/smc/smc_core.c
1696
wait_event(lgrs_deleted, !atomic_read(&lgr_cnt));
net/smc/smc_core.c
443
atomic_read(&link->conn_cnt)))
net/smc/smc_ib.c
525
lnk_count = atomic_read(&smcibdev->lnk_cnt_by_port[port]);
net/smc/smc_ism.c
322
use_cnt = atomic_read(&smcd->lgr_cnt);
net/smc/smc_rx.c
251
!atomic_read(&conn->splice_pending);
net/smc/smc_rx.c
379
readable = atomic_read(&conn->bytes_to_rcv);
net/smc/smc_rx.c
445
splbytes = atomic_read(&conn->splice_pending);
net/smc/smc_rx.h
28
return atomic_read(&conn->bytes_to_rcv) - peeked;
net/smc/smc_tx.c
112
if (atomic_read(&conn->sndbuf_space) && !conn->urg_tx_pend)
net/smc/smc_tx.c
119
(atomic_read(&conn->sndbuf_space) &&
net/smc/smc_tx.c
153
if (atomic_read(&conn->cdc_pend_tx_wr) == 0 ||
net/smc/smc_tx.c
172
atomic_read(&conn->sndbuf_space))
net/smc/smc_tx.c
224
if (!atomic_read(&conn->sndbuf_space) || conn->urg_tx_pend) {
net/smc/smc_tx.c
235
writespace = atomic_read(&conn->sndbuf_space);
net/smc/smc_tx.c
48
if (atomic_read(&smc->conn.sndbuf_space) && sock) {
net/smc/smc_tx.c
485
rmbespace = atomic_read(&conn->peer_rmbe_space);
net/smc/smc_tx.c
637
if (unlikely(atomic_read(&conn->peer_rmbe_space) <= 0)) {
net/smc/smc_tx.c
675
!atomic_read(&conn->bytes_to_rcv))
net/smc/smc_tx.c
727
!atomic_read(&conn->bytes_to_rcv))
net/sunrpc/auth_gss/gss_krb5_mech.c
419
if (seq_send64 != atomic_read(&ctx->seq_send)) {
net/sunrpc/auth_gss/gss_krb5_mech.c
421
seq_send64, atomic_read(&ctx->seq_send));
net/sunrpc/backchannel_rqst.c
260
if (atomic_read(&xprt->bc_slot_count) >= BC_MAX_SLOTS)
net/sunrpc/cache.c
1209
if (atomic_read(&detail->writers))
net/sunrpc/clnt.c
1342
WARN_ON_ONCE(atomic_read(&task->tk_count) != 2);
net/sunrpc/clnt.c
3342
atomic_read(&clnt->cl_task_count));
net/sunrpc/clnt.c
962
atomic_read(&clnt->cl_task_count) == 0, 1 * HZ);
net/sunrpc/debugfs.c
79
atomic_read(&clnt->cl_task_count));
net/sunrpc/sched.c
1250
if (atomic_read(&task->tk_count) != 1 + !RPC_IS_ASYNC(task)) {
net/sunrpc/sched.c
901
if (!atomic_read(&xprt->swapper))
net/sunrpc/svc.c
1710
WARN_ON_ONCE(atomic_read(&task->tk_count) != 1);
net/sunrpc/svc_xprt.c
412
int nrqsts = atomic_read(&xprt->xpt_nr_rqsts);
net/sunrpc/svcsock.c
787
required = atomic_read(&svsk->sk_xprt.xpt_reserved) + serv->sv_max_mesg;
net/sunrpc/xprt.c
1088
return atomic_read(&req->rq_pin) != 0;
net/sunrpc/xprt.c
1517
if (atomic_read(&xprt->swapper))
net/sunrpc/xprtrdma/svc_rdma_rw.c
422
atomic_read(&rdma->sc_sq_avail) > cc->cc_sqecount);
net/sunrpc/xprtrdma/svc_rdma_sendto.c
379
atomic_read(&rdma->sc_sq_avail) > 0);
net/sunrpc/xprtrdma/transport.c
231
if (atomic_read(&xprt->swapper))
net/sunrpc/xprtsock.c
1288
if (atomic_read(&transport->xprt.swapper))
net/sunrpc/xprtsock.c
2123
if (atomic_read(&xprt->swapper))
net/sunrpc/xprtsock.c
2222
if (atomic_read(&xprt->swapper))
net/sunrpc/xprtsock.c
2427
if (atomic_read(&xprt->swapper))
net/sunrpc/xprtsock.c
2714
if (atomic_read(&upper_xprt->swapper))
net/tipc/core.c
121
while (atomic_read(&tn->wq_count))
net/tipc/crypto.c
1429
cur = atomic_read(&rx->peer_rx_active);
net/tipc/crypto.c
1713
if (__rx && atomic_read(&__rx->peer_rx_active) == tx_key)
net/tipc/crypto.c
1740
!atomic_read(&__rx->peer_rx_active))
net/tipc/crypto.c
2115
atomic_read(&aead->users),
net/tipc/crypto.c
2123
atomic_read(&c->peer_rx_active));
net/tipc/crypto.c
439
users = atomic_read(&tmp->users);
net/tipc/crypto.c
476
cur = atomic_read(&tmp->users);
net/tipc/socket.c
2453
lim = rcvbuf_limit(sk, skb) + atomic_read(dcnt);
net/tipc/socket.c
3902
unsigned int lim = rcvbuf_limit(sk, skb) + atomic_read(dcnt);
net/tipc/socket.c
3981
i += scnprintf(buf + i, sz - i, " %u", atomic_read(&tsk->dupl_rcvcnt));
net/tipc/topsrv.c
374
if (atomic_read(&tn->subscription_count) >= TIPC_MAX_SUBSCR) {
net/tls/tls_strp.c
197
DEBUG_NET_WARN_ON_ONCE(atomic_read(&shinfo->dataref) != 1);
net/tls/tls_strp.c
34
DEBUG_NET_WARN_ON_ONCE(atomic_read(&shinfo->dataref) != 1);
net/tls/tls_sw.c
276
DEBUG_NET_WARN_ON_ONCE(atomic_read(&ctx->decrypt_pending) < 1);
net/tls/tls_sw.c
580
DEBUG_NET_WARN_ON_ONCE(atomic_read(&ctx->encrypt_pending) < 1);
net/unix/af_unix.c
885
nr_fds += atomic_read(&u->scm_stat.nr_fds);
net/unix/af_unix.c
909
nr_fds = atomic_read(&u->scm_stat.nr_fds);
net/vmw_vsock/virtio_transport.c
380
val = atomic_read(&vsock->queued_replies);
net/wireless/scan.c
1634
!atomic_read(&bss->hold))
net/wireless/scan.c
3458
WARN_ON(atomic_read(&new->hold));
net/wireless/scan.c
477
if (atomic_read(&bss->hold))
net/wireless/scan.c
498
if (atomic_read(&bss->hold))
net/wireless/scan.c
84
if (WARN_ON(atomic_read(&bss->hold)))
net/x25/x25_in.c
294
if (atomic_read(&sk->sk_rmem_alloc) >
net/x25/x25_subr.c
377
if (atomic_read(&sk->sk_rmem_alloc) < (sk->sk_rcvbuf >> 1) &&
net/xdp/xskmap.c
93
(u64)atomic_read(&m->count) * sizeof(struct xsk_map_node);
net/xfrm/espintcp.c
18
if (atomic_read(&sk->sk_rmem_alloc) >= sk->sk_rcvbuf ||
net/xfrm/xfrm_policy.c
2891
xdst->policy_genid = atomic_read(&pols[0]->genid);
net/xfrm/xfrm_policy.c
4024
xdst->policy_genid != atomic_read(&xdst->pols[0]->genid))
rust/helpers/atomic.c
17
return atomic_read(v);
samples/vfio-mdev/mbochs.c
1345
return atomic_read(&mbochs_avail_mbytes) / type->mbytes;
samples/vfio-mdev/mbochs.c
521
int avail_mbytes = atomic_read(&mbochs_avail_mbytes);
samples/vfio-mdev/mtty.c
1319
int avail_ports = atomic_read(&mdev_avail_ports);
samples/vfio-mdev/mtty.c
1910
return atomic_read(&mdev_avail_ports) / type->nr_ports;
samples/workqueue/stall_detector/wq_stall.c
65
wait_event_idle(stall_wq_head, atomic_read(&wake_condition) != 0);
security/integrity/evm/evm_main.c
1088
atomic_read(&inode->i_writecount) == 1)
security/integrity/ima/ima_appraise.c
738
allowed_hashes = atomic_read(&ima_setxattr_allowed_hash_algorithms);
security/integrity/ima/ima_main.c
149
if (atomic_read(&inode->i_readcount) && IS_IMA(inode)) {
security/integrity/ima/ima_main.c
193
if (atomic_read(&inode->i_writecount) == 1) {
security/keys/proc.c
315
atomic_read(&user->nkeys),
security/keys/proc.c
316
atomic_read(&user->nikeys),
security/landlock/tsync.c
133
err = atomic_read(&ctx->preparation_error);
security/landlock/tsync.c
600
!atomic_read(&shared_ctx.preparation_error));
security/landlock/tsync.c
618
return atomic_read(&shared_ctx.preparation_error);
security/selinux/avc.c
170
atomic_read(&selinux_avc.avc_cache.active_nodes),
security/selinux/hooks.c
170
atomic_read(&selinux_secmark_refcount));
security/selinux/include/xfrm.h
38
return (atomic_read(&selinux_xfrm_refcount) > 0);
security/tomoyo/common.c
2218
if (atomic_read(&tomoyo_query_observers))
security/tomoyo/common.c
2262
!atomic_read(&tomoyo_query_observers), HZ))
security/tomoyo/common.c
2499
atomic_read(&tomoyo_stat_updated[i]));
security/tomoyo/condition.c
420
atomic_read(&ptr->head.users) == TOMOYO_GC_IN_PROGRESS)
security/tomoyo/gc.c
451
if (atomic_read(&container_of
security/tomoyo/gc.c
540
if (!domain->is_deleted || atomic_read(&domain->users))
security/tomoyo/gc.c
557
if (atomic_read(&ptr->users) > 0)
security/tomoyo/gc.c
583
atomic_read(&group->head.users) > 0)
security/tomoyo/gc.c
598
if (atomic_read(&ptr->users) > 0)
security/tomoyo/memory.c
114
atomic_read(&group->head.users) == TOMOYO_GC_IN_PROGRESS)
security/tomoyo/memory.c
168
atomic_read(&ptr->head.users) == TOMOYO_GC_IN_PROGRESS)
sound/core/oss/pcm_oss.c
1007
if (atomic_read(&substream->mmap_count) ||
sound/core/oss/pcm_oss.c
1016
if (atomic_read(&substream->mmap_count) ||
sound/core/oss/pcm_oss.c
1399
if (atomic_read(&substream->mmap_count))
sound/core/oss/pcm_oss.c
1509
if (atomic_read(&substream->mmap_count))
sound/core/oss/pcm_oss.c
1664
if (atomic_read(&substream->mmap_count))
sound/core/oss/pcm_oss.c
1848
if (atomic_read(&substream->mmap_count))
sound/core/oss/pcm_oss.c
2093
if (atomic_read(&psubstream->mmap_count))
sound/core/oss/pcm_oss.c
2223
if (atomic_read(&substream->mmap_count)) {
sound/core/oss/pcm_oss.c
2810
if (atomic_read(&substream->mmap_count))
sound/core/oss/pcm_oss.c
2821
if (atomic_read(&substream->mmap_count))
sound/core/oss/pcm_oss.c
699
if (atomic_read(&substream->mmap_count)) {
sound/core/oss/pcm_oss.c
824
if (atomic_read(&runtime->oss.rw_ref)) {
sound/core/oss/pcm_oss.c
872
if (atomic_read(&substream->mmap_count))
sound/core/oss/pcm_oss.c
881
if (atomic_read(&substream->mmap_count))
sound/core/pcm_native.c
774
atomic_read(&substream->mmap_count))
sound/core/pcm_native.c
935
if (atomic_read(&substream->mmap_count))
sound/core/seq/seq_clientmgr.c
1831
if (atomic_read(&client->pool->counter))
sound/core/seq/seq_clientmgr.c
410
if (atomic_read(&fifo->overflow) > 0) {
sound/core/seq/seq_clientmgr.c
701
if (atomic_read(&subs->ref_count) != 2)
sound/core/seq/seq_lock.c
16
if (atomic_read(lockp) < 0) {
sound/core/seq/seq_lock.c
17
pr_warn("ALSA: seq_lock: lock trouble [counter = %d] in %s:%d\n", atomic_read(lockp), file, line);
sound/core/seq/seq_lock.c
20
while (atomic_read(lockp) > 0) {
sound/core/seq/seq_lock.c
22
pr_warn("ALSA: seq_lock: waiting [%d left] in %s:%d\n", atomic_read(lockp), file, line);
sound/core/seq/seq_memory.c
24
return pool->total_elements - atomic_read(&pool->counter);
sound/core/seq/seq_memory.c
317
used = atomic_read(&pool->counter);
sound/core/seq/seq_memory.c
493
while (atomic_read(&pool->counter) > 0)
sound/core/seq/seq_memory.c
558
snd_iprintf(buffer, "%sCells in use : %d\n", space, atomic_read(&pool->counter));
sound/core/seq/seq_memory.h
77
return pool ? pool->total_elements - atomic_read(&pool->counter) : 0;
sound/drivers/dummy.c
376
if (!atomic_read(&dpcm->running))
sound/drivers/dummy.c
383
if (!atomic_read(&dpcm->running))
sound/drivers/pcsp/pcsp_input.c
48
if (atomic_read(&pcsp_chip.timer_active) || !pcsp_chip.pcspkr)
sound/drivers/pcsp/pcsp_lib.c
134
if (!atomic_read(&chip->timer_active) || !chip->playback_substream)
sound/drivers/pcsp/pcsp_lib.c
157
if (atomic_read(&chip->timer_active)) {
sound/drivers/pcsp/pcsp_lib.c
178
if (!atomic_read(&chip->timer_active))
sound/drivers/pcsp/pcsp_lib.c
311
if (atomic_read(&chip->timer_active)) {
sound/drivers/pcsp/pcsp_lib.c
32
if (atomic_read(&pcsp_chip.timer_active)) {
sound/isa/gus/gus_pcm.c
675
if (!wait_event_timeout(pcmp->sleep, (atomic_read(&pcmp->dma_count) <= 0), 2*HZ))
sound/pci/mixart/mixart.c
463
while (atomic_read(&mgr->msg_processed) > 0) {
sound/pci/ymfpci/ymfpci_main.c
183
if (atomic_read(&chip->interrupt_sleep_count)) {
sound/pci/ymfpci/ymfpci_main.c
782
if (atomic_read(&chip->interrupt_sleep_count)) {
sound/soc/atmel/mchp-pdmc.c
172
if (atomic_read(&dd->busy_stream))
sound/soc/atmel/mchp-pdmc.c
201
if (atomic_read(&dd->busy_stream))
sound/soc/codecs/hda.c
182
WARN_ON(atomic_read(&hdev->dev.power.usage_count) != 1 ||
sound/soc/codecs/hda.c
304
WARN_ON(atomic_read(&hdev->dev.power.usage_count) != 1 ||
sound/soc/codecs/hdac_hdmi.c
1614
if (atomic_read(&hdev->in_pm))
sound/soc/codecs/wcd937x.c
1112
if (atomic_read(&wcd937x->ana_clk_count) <= 0) {
sound/soc/codecs/wcd937x.c
302
if (atomic_read(&wcd937x->rx_clk_cnt))
sound/soc/codecs/wcd937x.c
322
if (!atomic_read(&wcd937x->rx_clk_cnt)) {
sound/soc/codecs/wcd937x.c
956
if (atomic_read(&wcd937x->ana_clk_count) <= 0) {
sound/soc/fsl/imx-pcm-fiq.c
134
if (!atomic_read(&iprtd->playing) &&
sound/soc/fsl/imx-pcm-fiq.c
135
!atomic_read(&iprtd->capturing))
sound/soc/fsl/imx-pcm-fiq.c
51
if (!atomic_read(&iprtd->playing) && !atomic_read(&iprtd->capturing))
sound/soc/intel/atom/sst/sst_drv_interface.c
35
#define GET_USAGE_COUNT(dev) (atomic_read(&dev->power.usage_count))
sound/soc/intel/avs/ipc.c
47
if (atomic_read(&adev->ipc->d0ix_disable_depth))
sound/soc/intel/avs/ipc.c
65
if (!atomic_read(&ipc->d0ix_disable_depth)) {
sound/soc/qcom/qdsp6/q6apm.c
489
return (int)atomic_read(&data->hw_ptr);
sound/soc/qcom/qdsp6/q6asm.c
595
return (int)atomic_read(&data->hw_ptr);
sound/usb/card.c
1109
!atomic_read(&chip->usage_count));
sound/usb/card.c
1168
if (atomic_read(&chip->shutdown)) {
sound/usb/card.c
1197
if (atomic_read(&chip->shutdown))
sound/usb/card.c
1220
if (atomic_read(&chip->shutdown))
sound/usb/card.c
611
if (!atomic_read(&chip->shutdown))
sound/usb/card.c
953
if (atomic_read(&usb_chip[i]->shutdown)) {
sound/usb/endpoint.c
1001
alive = atomic_read(&ep->submitted_urbs);
sound/usb/endpoint.c
1039
if (!force && atomic_read(&ep->running))
sound/usb/endpoint.c
1325
if (atomic_read(&clock->locked))
sound/usb/endpoint.c
135
return atomic_read(&ep->state) == EP_STATE_RUNNING;
sound/usb/endpoint.c
1562
if (atomic_read(&ep->chip->shutdown))
sound/usb/endpoint.c
1570
atomic_read(&ep->running));
sound/usb/endpoint.c
1623
if (!atomic_read(&ep->chip->shutdown))
sound/usb/endpoint.c
1628
if (!atomic_read(&ep->chip->shutdown))
sound/usb/endpoint.c
1683
atomic_read(&ep->running));
sound/usb/endpoint.c
1685
if (snd_BUG_ON(!atomic_read(&ep->running)))
sound/usb/endpoint.c
1766
atomic_read(&ep->running)) {
sound/usb/endpoint.c
515
if (!atomic_read(&ep->chip->shutdown))
sound/usb/endpoint.c
520
if (!atomic_read(&ep->chip->shutdown)) {
sound/usb/endpoint.c
552
if (unlikely(atomic_read(&ep->chip->shutdown)))
sound/usb/endpoint.c
593
if (!atomic_read(&ep->chip->shutdown))
sound/usb/endpoint.c
600
if (!atomic_read(&ep->chip->shutdown)) {
sound/usb/endpoint.c
924
if (unlikely(atomic_read(&chip->shutdown)))
sound/usb/endpoint.c
997
if (atomic_read(&ep->state) != EP_STATE_STOPPING)
sound/usb/midi.c
2443
if (!atomic_read(&urb->use_count)) {
sound/usb/midi.c
349
if (atomic_read(&in->urbs[j]->use_count))
sound/usb/midi2.c
166
if (urb->status >= 0 && atomic_read(&ep->running))
sound/usb/pcm.c
78
if (atomic_read(&subs->stream->chip->shutdown))
sound/usb/proc.c
36
if (!atomic_read(&chip->shutdown))
sound/usb/proc.c
43
if (!atomic_read(&chip->shutdown))
sound/usb/qcom/qc_audio_offload.c
1437
if (!atomic_read(&uadev[card_num].in_use)) {
sound/usb/qcom/qc_audio_offload.c
1573
if (!subs || !chip || atomic_read(&chip->shutdown)) {
sound/usb/qcom/qc_audio_offload.c
1580
if (atomic_read(&chip->shutdown) || !subs->stream || !subs->stream->pcm ||
sound/usb/qcom/qc_audio_offload.c
1661
if (atomic_read(&uadev[pcm_card_num].in_use))
sound/usb/qcom/qc_audio_offload.c
221
if (!chip || atomic_read(&chip->shutdown))
sound/usb/qcom/qc_audio_offload.c
355
if (atomic_read(&dev->in_use)) {
sound/usb/qcom/qc_audio_offload.c
373
!atomic_read(&dev->in_use),
sound/usb/qcom/qc_audio_offload.c
762
if (!atomic_read(&uadev[idx].in_use))
sound/usb/qcom/qc_audio_offload.c
773
if (!subs || !chip || atomic_read(&chip->shutdown)) {
sound/usb/qcom/qc_audio_offload.c
970
if (!atomic_read(&chip->shutdown)) {
sound/usb/usx2y/us144mkii.c
387
if (atomic_read(&tascam->midi_in_active))
sound/usb/usx2y/us144mkii.c
389
if (atomic_read(&tascam->midi_out_active))
sound/usb/usx2y/us144mkii_capture.c
177
while (atomic_read(&tascam->capture_active)) {
sound/usb/usx2y/us144mkii_capture.c
225
if (atomic_read(&tascam->capture_active)) {
sound/usb/usx2y/us144mkii_capture.c
276
if (!tascam || !atomic_read(&tascam->capture_active))
sound/usb/usx2y/us144mkii_capture.c
81
if (!atomic_read(&tascam->capture_active))
sound/usb/usx2y/us144mkii_midi.c
201
if (atomic_read(&tascam->midi_out_active))
sound/usb/usx2y/us144mkii_midi.c
225
if (!substream || !atomic_read(&tascam->midi_out_active))
sound/usb/usx2y/us144mkii_midi.c
57
if (atomic_read(&tascam->midi_in_active) &&
sound/usb/usx2y/us144mkii_pcm.c
274
if (!atomic_read(&tascam->playback_active)) {
sound/usb/usx2y/us144mkii_pcm.c
283
if (atomic_read(&tascam->playback_active)) {
sound/usb/usx2y/us144mkii_pcm.c
296
if (atomic_read(&tascam->active_urbs) > 0) {
sound/usb/usx2y/us144mkii_playback.c
127
if (!atomic_read(&tascam->playback_active))
sound/usb/usx2y/us144mkii_playback.c
176
if (!tascam || !atomic_read(&tascam->playback_active))
sound/usb/usx2y/us144mkii_playback.c
279
if (!tascam || !atomic_read(&tascam->playback_active))
sound/usb/usx2y/us144mkii_playback.c
397
if (atomic_read(&tascam->capture_active))
sound/usb/usx2y/us144mkii_playback.c
413
if (atomic_read(&tascam->capture_active) && capture_rt &&
sound/usb/usx2y/usbusx2yaudio.c
116
if (atomic_read(&subs->state) >= STATE_PRERUNNING) {
sound/usb/usx2y/usbusx2yaudio.c
188
state = atomic_read(&playbacksubs->state);
sound/usb/usx2y/usbusx2yaudio.c
217
state = atomic_read(&capsubs->state);
sound/usb/usx2y/usbusx2yaudio.c
244
__func__, s, subs, atomic_read(&subs->state));
sound/usb/usx2y/usbusx2yaudio.c
251
if (atomic_read(&subs->state) >= STATE_PRERUNNING)
sound/usb/usx2y/usbusx2yaudio.c
281
if (unlikely(atomic_read(&subs->state) < STATE_PREPARED)) {
sound/usb/usx2y/usbusx2yaudio.c
301
atomic_read(&capsubs->state) >= STATE_PREPARED &&
sound/usb/usx2y/usbusx2yaudio.c
303
atomic_read(&playbacksubs->state) < STATE_PREPARED)) {
sound/usb/usx2y/usbusx2yaudio.c
470
if (subs && atomic_read(&subs->state) >= STATE_PREPARED)
sound/usb/usx2y/usbusx2yaudio.c
505
if (atomic_read(&subs->state) != STATE_PREPARED)
sound/usb/usx2y/usbusx2yaudio.c
536
if (atomic_read(&subs->state) == STATE_PREPARED &&
sound/usb/usx2y/usbusx2yaudio.c
537
atomic_read(&subs->usx2y->subs[SNDRV_PCM_STREAM_CAPTURE]->state) >= STATE_PREPARED) {
sound/usb/usx2y/usbusx2yaudio.c
545
if (atomic_read(&subs->state) >= STATE_PRERUNNING)
sound/usb/usx2y/usbusx2yaudio.c
808
if (atomic_read(&playback_subs->state) < STATE_PREPARED) {
sound/usb/usx2y/usbusx2yaudio.c
835
if (atomic_read(&capsubs->state) < STATE_PREPARED) {
sound/usb/usx2y/usbusx2yaudio.c
853
if (subs != capsubs && atomic_read(&subs->state) < STATE_PREPARED)
sound/usb/usx2y/usx2yhwdeppcm.c
129
if (atomic_read(&subs->state) != STATE_RUNNING)
sound/usb/usx2y/usx2yhwdeppcm.c
174
state = atomic_read(&playbacksubs->state);
sound/usb/usx2y/usx2yhwdeppcm.c
203
state = atomic_read(&capsubs->state);
sound/usb/usx2y/usx2yhwdeppcm.c
236
if (unlikely(atomic_read(&subs->state) < STATE_PREPARED)) {
sound/usb/usx2y/usx2yhwdeppcm.c
253
if (capsubs->completed_urb && atomic_read(&capsubs->state) >= STATE_PREPARED &&
sound/usb/usx2y/usx2yhwdeppcm.c
255
(playbacksubs->completed_urb || atomic_read(&playbacksubs->state) < STATE_PREPARED)) {
sound/usb/usx2y/usx2yhwdeppcm.c
388
if (atomic_read(&playback_subs->state) < STATE_PREPARED) {
sound/usb/usx2y/usx2yhwdeppcm.c
435
if (subs && atomic_read(&subs->state) >= STATE_PREPARED)
sound/usb/usx2y/usx2yhwdeppcm.c
477
if (atomic_read(&subs->state) != STATE_PREPARED)
sound/usb/usx2y/usx2yhwdeppcm.c
519
if (atomic_read(&capsubs->state) < STATE_PREPARED) {
sound/usb/usx2y/usx2yhwdeppcm.c
540
if (atomic_read(&subs->state) < STATE_PREPARED) {
sound/xen/xen_snd_front_alsa.c
581
new_hw_ptr = (snd_pcm_uframes_t)atomic_read(&stream->hw_ptr);
sound/xen/xen_snd_front_alsa.c
596
return (snd_pcm_uframes_t)atomic_read(&stream->hw_ptr);
tools/include/linux/atomic.h
27
int c = atomic_read(v);
tools/include/linux/refcount.h
128
unsigned int old, new, val = atomic_read(&r->refs);
tools/include/linux/refcount.h
70
return atomic_read(&r->refs);
tools/include/linux/refcount.h
83
unsigned int old, new, val = atomic_read(&r->refs);
tools/perf/bench/synthesize.c
195
update_stats(&event_stats, atomic_read(&event_count));
tools/perf/bench/synthesize.c
93
update_stats(&event_stats, atomic_read(&event_count));
tools/testing/selftests/bpf/progs/bpf_arena_spin_lock.h
152
old = atomic_read(&lock->val);
tools/testing/selftests/bpf/progs/bpf_arena_spin_lock.h
212
old = atomic_read(&lock->val);
tools/testing/selftests/bpf/progs/bpf_arena_spin_lock.h
235
int val = atomic_read(&lock->val);
tools/testing/selftests/bpf/test_kmods/bpf_test_rqspinlock.c
173
int ready = atomic_read(&rqsl_ready_cpus);
tools/testing/selftests/kvm/arm64/arch_timer_edge_cases.c
120
int h = atomic_read(&shared_data.handled);
tools/testing/selftests/kvm/arm64/arch_timer_edge_cases.c
225
for (h = atomic_read(&shared_data.handled); h == atomic_read(&shared_data.handled);) {
tools/testing/selftests/kvm/arm64/arch_timer_edge_cases.c
247
h = atomic_read(&shared_data.handled);
tools/testing/selftests/kvm/arm64/arch_timer_edge_cases.c
250
while (h == atomic_read(&shared_data.handled)) {
tools/testing/selftests/kvm/mmu_stress_test.c
158
if (atomic_read(&nr_ro_faults) == nr_vcpus) {
tools/testing/selftests/kvm/mmu_stress_test.c
238
rendezvoused = atomic_read(&rendezvous);
tools/testing/selftests/kvm/mmu_stress_test.c
244
rendezvoused = atomic_read(&rendezvous);
tools/testing/selftests/kvm/mmu_stress_test.c
89
int orig = atomic_read(&rendezvous);
tools/testing/selftests/kvm/mmu_stress_test.c
93
while (atomic_read(&rendezvous) > 0)
tools/testing/selftests/kvm/mmu_stress_test.c
97
while (atomic_read(&rendezvous) < 0)
tools/testing/selftests/kvm/rseq_test.c
279
snapshot = atomic_read(&seq_cnt) & ~1;
tools/testing/selftests/kvm/rseq_test.c
292
} while (snapshot != atomic_read(&seq_cnt));
tools/testing/selftests/kvm/x86/kvm_buslock_test.c
126
TEST_ASSERT_EQ(atomic_read(val), bus_locks + host_cpu_is_intel);
tools/testing/selftests/net/bench/page_pool/bench_page_pool_simple.c
76
loops_cnt = atomic_read(&cnt);
tools/testing/selftests/net/bench/page_pool/time_bench.c
371
while (atomic_read(&sync->nr_tests_running) < running) {
tools/testing/selftests/net/bench/page_pool/time_bench.c
379
while (atomic_read(&sync->nr_tests_running)) {
virt/kvm/kvm_main.c
1722
atomic_read(&kvm->nr_memslots_dirty_logging) + change);
virt/kvm/kvm_main.c
3973
nr_vcpus = atomic_read(&kvm->online_vcpus);
virt/kvm/kvm_main.c
4225
vcpu->vcpu_idx = atomic_read(&kvm->online_vcpus);
virt/kvm/kvm_main.c
4393
if (likely(vcpu->vcpu_idx < atomic_read(&kvm->online_vcpus)))