arch/arc/include/asm/arcregs.h
341
unsigned int pad:16, entries:8, ver:8;
arch/arc/include/asm/arcregs.h
343
unsigned int ver:8, entries:8, pad:16;
arch/arc/kernel/setup.c
238
lpb.entries, IS_DISABLED_RUN(!ctl));
arch/arc/kernel/stacktrace.c
193
trace->entries[trace->nr_entries++] = address;
arch/arc/kernel/stacktrace.c
211
trace->entries[trace->nr_entries++] = address;
arch/arm/kernel/unwind.c
215
if (ctrl->entries <= 0) {
arch/arm/kernel/unwind.c
224
ctrl->entries--;
arch/arm/kernel/unwind.c
373
ctrl->entries = 0;
arch/arm/kernel/unwind.c
473
ctrl.entries = 1;
arch/arm/kernel/unwind.c
476
ctrl.entries = 1 + ((*ctrl.insn & 0x00ff0000) >> 16);
arch/arm/kernel/unwind.c
495
while (ctrl.entries > 0) {
arch/arm/kernel/unwind.c
65
int entries; /* number of entries left to interpret */
arch/arm64/kvm/vgic/vgic-irqfd.c
138
struct kvm_irq_routing_entry *entries;
arch/arm64/kvm/vgic/vgic-irqfd.c
143
entries = kzalloc_objs(*entries, nr, GFP_KERNEL_ACCOUNT);
arch/arm64/kvm/vgic/vgic-irqfd.c
144
if (!entries)
arch/arm64/kvm/vgic/vgic-irqfd.c
148
entries[i].gsi = i;
arch/arm64/kvm/vgic/vgic-irqfd.c
149
entries[i].type = KVM_IRQ_ROUTING_IRQCHIP;
arch/arm64/kvm/vgic/vgic-irqfd.c
150
entries[i].u.irqchip.irqchip = 0;
arch/arm64/kvm/vgic/vgic-irqfd.c
151
entries[i].u.irqchip.pin = i;
arch/arm64/kvm/vgic/vgic-irqfd.c
153
ret = kvm_set_irq_routing(kvm, entries, nr, 0);
arch/arm64/kvm/vgic/vgic-irqfd.c
154
kfree(entries);
arch/csky/kernel/stacktrace.c
132
trace->entries[trace->nr_entries++] = pc;
arch/hexagon/kernel/stacktrace.c
39
trace->entries[trace->nr_entries++] = frame->rets;
arch/loongarch/kvm/intc/pch_pic.c
403
struct kvm_irq_routing_entry *entries;
arch/loongarch/kvm/intc/pch_pic.c
405
entries = kzalloc_objs(*entries, nr);
arch/loongarch/kvm/intc/pch_pic.c
406
if (!entries)
arch/loongarch/kvm/intc/pch_pic.c
410
entries[i].gsi = i;
arch/loongarch/kvm/intc/pch_pic.c
411
entries[i].type = KVM_IRQ_ROUTING_IRQCHIP;
arch/loongarch/kvm/intc/pch_pic.c
412
entries[i].u.irqchip.irqchip = 0;
arch/loongarch/kvm/intc/pch_pic.c
413
entries[i].u.irqchip.pin = i;
arch/loongarch/kvm/intc/pch_pic.c
415
ret = kvm_set_irq_routing(kvm, entries, nr, 0);
arch/loongarch/kvm/intc/pch_pic.c
416
kfree(entries);
arch/microblaze/kernel/unwind.c
229
trace->entries[trace->nr_entries++] = pc;
arch/mips/alchemy/common/dbdma.c
391
u32 au1xxx_dbdma_ring_alloc(u32 chanid, int entries)
arch/mips/alchemy/common/dbdma.c
415
desc_base = (u32) kmalloc_objs(au1x_ddma_desc_t, entries,
arch/mips/alchemy/common/dbdma.c
426
i = entries * sizeof(au1x_ddma_desc_t);
arch/mips/alchemy/common/dbdma.c
564
for (i = 0; i < entries; i++) {
arch/mips/generic/yamon-dt.c
51
unsigned int entries = 0;
arch/mips/generic/yamon-dt.c
54
if (entries >= max_entries) {
arch/mips/generic/yamon-dt.c
67
++entries;
arch/mips/generic/yamon-dt.c
72
return entries;
arch/mips/include/asm/mach-au1x00/au1xxx_dbdma.h
358
u32 au1xxx_dbdma_ring_alloc(u32 chanid, int entries);
arch/mips/kernel/stacktrace.c
30
trace->entries[trace->nr_entries++] = addr;
arch/mips/kernel/stacktrace.c
58
trace->entries[trace->nr_entries++] = pc;
arch/mips/mti-malta/malta-dtshim.c
108
entries++;
arch/mips/mti-malta/malta-dtshim.c
121
entries++;
arch/mips/mti-malta/malta-dtshim.c
127
BUG_ON(entries > MAX_MEM_ARRAY_ENTRIES);
arch/mips/mti-malta/malta-dtshim.c
128
return entries;
arch/mips/mti-malta/malta-dtshim.c
74
unsigned entries;
arch/mips/mti-malta/malta-dtshim.c
76
entries = 1;
arch/openrisc/kernel/stacktrace.c
39
trace->entries[trace->nr_entries++] = addr;
arch/openrisc/kernel/stacktrace.c
65
trace->entries[trace->nr_entries++] = addr;
arch/parisc/include/asm/pdc.h
75
struct pdc_memory_table *tbl, unsigned long entries);
arch/parisc/kernel/firmware.c
1198
struct pdc_memory_table *tbl, unsigned long entries)
arch/parisc/kernel/firmware.c
1204
retval = mem_pdc_call(PDC_MEM, PDC_MEM_TABLE, __pa(pdc_result), __pa(pdc_result2), entries);
arch/parisc/kernel/firmware.c
1207
memcpy(tbl, pdc_result2, entries * sizeof(*tbl));
arch/parisc/kernel/firmware.c
1772
unsigned long flags, entries;
arch/parisc/kernel/firmware.c
1782
entries = min(pdc_result[0], max_entries);
arch/parisc/kernel/firmware.c
1783
pret->pdt_entries = entries;
arch/parisc/kernel/firmware.c
1784
pret->actual_count_bytes = entries * sizeof(unsigned long);
arch/parisc/kernel/firmware.c
1805
unsigned long flags, entries;
arch/parisc/kernel/firmware.c
1813
entries = min(pdc_result[0], count);
arch/parisc/kernel/firmware.c
1814
pret->actual_count_bytes = entries;
arch/parisc/kernel/firmware.c
1815
pret->pdt_entries = entries / sizeof(unsigned long);
arch/parisc/kernel/inventory.c
323
int entries;
arch/parisc/kernel/inventory.c
345
entries = actual_len / sizeof(struct pdc_pat_pd_addr_map_entry);
arch/parisc/kernel/inventory.c
347
if (entries > PAT_MAX_RANGES) {
arch/parisc/kernel/inventory.c
360
for (i = 0; i < entries; i++,mtbl_ptr++) {
arch/parisc/kernel/inventory.c
417
int entries;
arch/parisc/kernel/inventory.c
440
entries = (int)r_addr.entries_returned;
arch/parisc/kernel/inventory.c
445
for (i = 0; i < entries; i++,mtbl_ptr++) {
arch/parisc/kernel/pdt.c
153
unsigned long entries;
arch/parisc/kernel/pdt.c
177
entries = pdt_status.pdt_entries;
arch/parisc/kernel/pdt.c
178
if (WARN_ON(entries > MAX_PDT_ENTRIES))
arch/parisc/kernel/pdt.c
179
entries = pdt_status.pdt_entries = MAX_PDT_ENTRIES;
arch/parisc/kernel/pdt.c
190
if (entries == 0) {
arch/parisc/kernel/pdt.c
200
entries);
arch/powerpc/include/asm/mmu_context.h
26
unsigned long ua, unsigned long entries,
arch/powerpc/include/asm/mmu_context.h
29
unsigned long entries, unsigned long dev_hpa,
arch/powerpc/include/asm/mmu_context.h
37
unsigned long ua, unsigned long entries);
arch/powerpc/kernel/traps.c
2315
struct ppc_emulated_entry *entries = (void *)&ppc_emulated;
arch/powerpc/kernel/traps.c
2322
for (i = 0; i < sizeof(ppc_emulated)/sizeof(*entries); i++)
arch/powerpc/kernel/traps.c
2323
debugfs_create_u32(entries[i].name, 0644, dir,
arch/powerpc/kernel/traps.c
2324
(u32 *)&entries[i].val.counter);
arch/powerpc/kvm/book3s.c
1048
struct kvm_kernel_irq_routing_entry *entries, int gsi)
arch/powerpc/kvm/book3s.c
1050
entries->gsi = gsi;
arch/powerpc/kvm/book3s.c
1051
entries->type = KVM_IRQ_ROUTING_IRQCHIP;
arch/powerpc/kvm/book3s.c
1052
entries->set = kvmppc_book3s_set_irq;
arch/powerpc/kvm/book3s.c
1053
entries->irqchip.irqchip = 0;
arch/powerpc/kvm/book3s.c
1054
entries->irqchip.pin = gsi;
arch/powerpc/kvm/e500.h
52
int entries, ways, sets;
arch/powerpc/kvm/e500_mmu.c
153
int size = vcpu_e500->gtlb_params[1].entries;
arch/powerpc/kvm/e500_mmu.c
233
for (esel = 0; esel < vcpu_e500->gtlb_params[0].entries; esel++)
arch/powerpc/kvm/e500_mmu.c
236
for (esel = 0; esel < vcpu_e500->gtlb_params[1].entries; esel++)
arch/powerpc/kvm/e500_mmu.c
258
for (esel = 0; esel < vcpu_e500->gtlb_params[tlbsel].entries;
arch/powerpc/kvm/e500_mmu.c
282
for (esel = 0; esel < vcpu_e500->gtlb_params[tlbsel].entries; esel++) {
arch/powerpc/kvm/e500_mmu.c
73
esel &= vcpu_e500->gtlb_params[tlbsel].entries - 1;
arch/powerpc/kvm/e500_mmu.c
824
vcpu_e500->gtlb_params[0].entries = params.tlb_sizes[0];
arch/powerpc/kvm/e500_mmu.c
825
vcpu_e500->gtlb_params[1].entries = params.tlb_sizes[1];
arch/powerpc/kvm/e500_mmu.c
83
int size = vcpu_e500->gtlb_params[tlbsel].entries;
arch/powerpc/kvm/e500_mmu.c
875
vcpu->arch.tlbcfg[0] |= params[0].entries;
arch/powerpc/kvm/e500_mmu.c
880
vcpu->arch.tlbcfg[1] |= params[1].entries;
arch/powerpc/kvm/e500_mmu.c
905
vcpu_e500->gtlb_params[0].entries = KVM_E500_TLB0_SIZE;
arch/powerpc/kvm/e500_mmu.c
906
vcpu_e500->gtlb_params[1].entries = KVM_E500_TLB1_SIZE;
arch/powerpc/kvm/e500_mmu.c
924
vcpu_e500->gtlb_params[0].entries);
arch/powerpc/kvm/e500_mmu.c
929
vcpu_e500->gtlb_params[1].entries);
arch/powerpc/kvm/e500_mmu.c
933
vcpu_e500->g2h_tlb1_map = kcalloc(vcpu_e500->gtlb_params[1].entries,
arch/powerpc/kvm/e500_mmu_host.c
275
sizeof(u64) * vcpu_e500->gtlb_params[1].entries);
arch/powerpc/kvm/e500_mmu_host.c
278
sizeof(unsigned int) * host_tlb_params[1].entries);
arch/powerpc/kvm/e500_mmu_host.c
287
for (i = 0; i < vcpu_e500->gtlb_params[tlbsel].entries; i++)
arch/powerpc/kvm/e500_mmu_host.c
38
#define to_htlb1_esel(esel) (host_tlb_params[1].entries - (esel) - 1)
arch/powerpc/kvm/e500_mmu_host.c
45
return host_tlb_params[1].entries - tlbcam_index - 1;
arch/powerpc/kvm/e500_mmu_host.c
713
host_tlb_params[0].entries = mfspr(SPRN_TLB0CFG) & TLBnCFG_N_ENTRY;
arch/powerpc/kvm/e500_mmu_host.c
714
host_tlb_params[1].entries = mfspr(SPRN_TLB1CFG) & TLBnCFG_N_ENTRY;
arch/powerpc/kvm/e500_mmu_host.c
721
if (host_tlb_params[0].entries == 0 ||
arch/powerpc/kvm/e500_mmu_host.c
722
host_tlb_params[1].entries == 0) {
arch/powerpc/kvm/e500_mmu_host.c
729
host_tlb_params[1].ways = host_tlb_params[1].entries;
arch/powerpc/kvm/e500_mmu_host.c
731
if (!is_power_of_2(host_tlb_params[0].entries) ||
arch/powerpc/kvm/e500_mmu_host.c
733
host_tlb_params[0].entries < host_tlb_params[0].ways ||
arch/powerpc/kvm/e500_mmu_host.c
736
__func__, host_tlb_params[0].entries,
arch/powerpc/kvm/e500_mmu_host.c
742
host_tlb_params[0].entries / host_tlb_params[0].ways;
arch/powerpc/kvm/e500_mmu_host.c
744
vcpu_e500->h2g_tlb1_rmap = kcalloc(host_tlb_params[1].entries,
arch/powerpc/mm/book3s64/iommu_api.c
102
chunk = min(chunk, entries);
arch/powerpc/mm/book3s64/iommu_api.c
103
for (entry = 0; entry < entries; entry += chunk) {
arch/powerpc/mm/book3s64/iommu_api.c
104
unsigned long n = min(entries - entry, chunk);
arch/powerpc/mm/book3s64/iommu_api.c
118
if (pinned != entries) {
arch/powerpc/mm/book3s64/iommu_api.c
128
mem->entries = entries;
arch/powerpc/mm/book3s64/iommu_api.c
135
if ((mem2->ua < (ua + (entries << PAGE_SHIFT))) &&
arch/powerpc/mm/book3s64/iommu_api.c
137
(mem2->entries << PAGE_SHIFT)))) {
arch/powerpc/mm/book3s64/iommu_api.c
151
for (i = 0; i < entries; ++i) {
arch/powerpc/mm/book3s64/iommu_api.c
186
long mm_iommu_new(struct mm_struct *mm, unsigned long ua, unsigned long entries,
arch/powerpc/mm/book3s64/iommu_api.c
189
return mm_iommu_do_alloc(mm, ua, entries, MM_IOMMU_TABLE_INVALID_HPA,
arch/powerpc/mm/book3s64/iommu_api.c
195
unsigned long entries, unsigned long dev_hpa,
arch/powerpc/mm/book3s64/iommu_api.c
198
return mm_iommu_do_alloc(mm, ua, entries, dev_hpa, pmem);
arch/powerpc/mm/book3s64/iommu_api.c
210
for (i = 0; i < mem->entries; ++i) {
arch/powerpc/mm/book3s64/iommu_api.c
274
unlock_entries = mem->entries;
arch/powerpc/mm/book3s64/iommu_api.c
297
(mem->entries << PAGE_SHIFT))) {
arch/powerpc/mm/book3s64/iommu_api.c
309
unsigned long ua, unsigned long entries)
arch/powerpc/mm/book3s64/iommu_api.c
317
if ((mem->ua == ua) && (mem->entries == entries)) {
arch/powerpc/mm/book3s64/iommu_api.c
336
if (entry >= mem->entries)
arch/powerpc/mm/book3s64/iommu_api.c
34
u64 entries; /* number of entries in hpas/hpages[] */
arch/powerpc/mm/book3s64/iommu_api.c
365
end = mem->dev_hpa + (mem->entries << PAGE_SHIFT);
arch/powerpc/mm/book3s64/iommu_api.c
57
unsigned long entries, unsigned long dev_hpa,
arch/powerpc/mm/book3s64/iommu_api.c
66
ret = account_locked_vm(mm, entries, true);
arch/powerpc/mm/book3s64/iommu_api.c
70
locked_entries = entries;
arch/powerpc/mm/book3s64/iommu_api.c
80
mem->pageshift = __ffs(dev_hpa | (entries << PAGE_SHIFT));
arch/powerpc/mm/book3s64/iommu_api.c
91
mem->pageshift = __ffs(ua | (entries << PAGE_SHIFT));
arch/powerpc/mm/book3s64/iommu_api.c
92
mem->hpas = vzalloc(array_size(entries, sizeof(mem->hpas[0])));
arch/powerpc/platforms/powernv/pci-ioda-tce.c
262
unsigned int entries = 1UL << (shift - 3);
arch/powerpc/platforms/powernv/pci-ioda-tce.c
274
for (i = 0; i < entries; ++i) {
arch/powerpc/sysdev/fsl_rio.h
137
void *dev_id, int mbox, int entries);
arch/powerpc/sysdev/fsl_rio.h
140
void *dev_id, int mbox, int entries);
arch/powerpc/sysdev/fsl_rmu.c
720
fsl_open_outb_mbox(struct rio_mport *mport, void *dev_id, int mbox, int entries)
arch/powerpc/sysdev/fsl_rmu.c
726
if ((entries < RIO_MIN_TX_RING_SIZE) ||
arch/powerpc/sysdev/fsl_rmu.c
727
(entries > RIO_MAX_TX_RING_SIZE) || (!is_power_of_2(entries))) {
arch/powerpc/sysdev/fsl_rmu.c
734
rmu->msg_tx_ring.size = entries;
arch/powerpc/sysdev/fsl_rmu.c
793
((get_bitmask_order(entries) - 2) << 12));
arch/powerpc/sysdev/fsl_rmu.c
852
fsl_open_inb_mbox(struct rio_mport *mport, void *dev_id, int mbox, int entries)
arch/powerpc/sysdev/fsl_rmu.c
858
if ((entries < RIO_MIN_RX_RING_SIZE) ||
arch/powerpc/sysdev/fsl_rmu.c
859
(entries > RIO_MAX_RX_RING_SIZE) || (!is_power_of_2(entries))) {
arch/powerpc/sysdev/fsl_rmu.c
866
rmu->msg_rx_ring.size = entries;
arch/powerpc/sysdev/fsl_rmu.c
907
setbits32(&rmu->msg_regs->imr, (get_bitmask_order(entries) - 2) << 12);
arch/s390/boot/ipl_report.c
25
for (entry = rb->entries; \
arch/s390/include/asm/chsc.h
69
struct chsc_pnso_naid_l2 entries[];
arch/s390/include/uapi/asm/ipl.h
186
struct ipl_rb_certificate_entry entries[];
arch/s390/include/uapi/asm/ipl.h
206
struct ipl_rb_component_entry entries[];
arch/s390/include/uapi/asm/qeth.h
107
char *entries;
arch/s390/kvm/gaccess.c
1282
struct guest_fault *entries;
arch/s390/kvm/gaccess.c
1295
entries = get_entries(w);
arch/s390/kvm/gaccess.c
1300
return kvm_s390_get_guest_page(kvm, entries + LEVEL_MEM, gpa_to_gfn(saddr), false);
arch/s390/kvm/gaccess.c
1330
rc = kvm_s390_get_guest_page_and_read_gpa(kvm, entries + w->level,
arch/s390/kvm/gaccess.c
1347
rc = kvm_s390_get_guest_page_and_read_gpa(kvm, entries + w->level,
arch/s390/kvm/gaccess.c
1364
rc = kvm_s390_get_guest_page_and_read_gpa(kvm, entries + w->level,
arch/s390/kvm/gaccess.c
1387
rc = kvm_s390_get_guest_page_and_read_gpa(kvm, entries + w->level,
arch/s390/kvm/gaccess.c
1406
rc = kvm_s390_get_guest_page_and_read_gpa(kvm, entries + w->level,
arch/s390/kvm/gaccess.c
1419
return kvm_s390_get_guest_page(kvm, entries + LEVEL_MEM, table.pte.pfra, wr);
arch/s390/kvm/gaccess.c
1508
struct guest_fault *entries;
arch/s390/kvm/gaccess.c
1516
entries = get_entries(w);
arch/s390/kvm/gaccess.c
1544
rc = gmap_protect_rmap(mc, sg, entries[i].gfn, gpa_to_gfn(saddr),
arch/s390/kvm/gaccess.c
1545
entries[i].pfn, i + 1, entries[i].writable);
arch/s390/kvm/gaccess.c
1552
rc = dat_entry_walk(NULL, entries[LEVEL_MEM].gfn, sg->parent->asce, DAT_WALK_LEAF,
arch/s390/kvm/gaccess.c
1570
rc = dat_entry_walk(mc, entries[LEVEL_MEM].gfn, sg->parent->asce,
arch/s390/kvm/gaccess.c
1579
return _do_shadow_pte(sg, saddr, ptep_h, ptep, entries + LEVEL_MEM, w->p);
arch/s390/kvm/gaccess.c
1580
return _do_shadow_crste(sg, saddr, host, table, entries + LEVEL_MEM, w->p);
arch/s390/kvm/priv.c
1146
int r1, r2, nappended, entries;
arch/s390/kvm/priv.c
1159
entries = (vcpu->arch.sie_block->cbrlo & ~PAGE_MASK) >> 3;
arch/s390/kvm/priv.c
1173
cbrlo[entries] = gfn << PAGE_SHIFT;
arch/s390/kvm/priv.c
1209
int entries = (vcpu->arch.sie_block->cbrlo & ~PAGE_MASK) >> 3;
arch/s390/kvm/priv.c
1213
VCPU_EVENT(vcpu, 4, "ESSA: release %d pages", entries);
arch/s390/kvm/priv.c
1256
entries += i;
arch/s390/kvm/priv.c
1264
_essa_clear_cbrl(vcpu, cbrlo, entries);
arch/sh/include/asm/processor.h
65
unsigned int entries;
arch/sh/kernel/stacktrace.c
34
trace->entries[trace->nr_entries++] = addr;
arch/sh/kernel/stacktrace.c
66
trace->entries[trace->nr_entries++] = addr;
arch/sparc/kernel/stacktrace.c
58
trace->entries[trace->nr_entries++] = pc;
arch/sparc/kernel/stacktrace.c
68
trace->entries[trace->nr_entries++] = pc;
arch/um/kernel/stacktrace.c
53
trace->entries[trace->nr_entries++] = address;
arch/x86/boot/startup/sme.c
250
unsigned long entries = 0, tables = 0;
arch/x86/boot/startup/sme.c
267
entries += (DIV_ROUND_UP(len, PGDIR_SIZE) + 1) * sizeof(p4d_t) * PTRS_PER_P4D;
arch/x86/boot/startup/sme.c
268
entries += (DIV_ROUND_UP(len, P4D_SIZE) + 1) * sizeof(pud_t) * PTRS_PER_PUD;
arch/x86/boot/startup/sme.c
269
entries += (DIV_ROUND_UP(len, PUD_SIZE) + 1) * sizeof(pmd_t) * PTRS_PER_PMD;
arch/x86/boot/startup/sme.c
270
entries += 2 * sizeof(pte_t) * PTRS_PER_PTE;
arch/x86/boot/startup/sme.c
278
tables += DIV_ROUND_UP(entries, PGDIR_SIZE) * sizeof(p4d_t) * PTRS_PER_P4D;
arch/x86/boot/startup/sme.c
279
tables += DIV_ROUND_UP(entries, P4D_SIZE) * sizeof(pud_t) * PTRS_PER_PUD;
arch/x86/boot/startup/sme.c
280
tables += DIV_ROUND_UP(entries, PUD_SIZE) * sizeof(pmd_t) * PTRS_PER_PMD;
arch/x86/boot/startup/sme.c
282
return entries + tables;
arch/x86/coco/sev/core.c
199
e = &desc->entries[i];
arch/x86/coco/sev/core.c
242
e = &desc->entries[i];
arch/x86/coco/sev/core.c
332
e = data->entries;
arch/x86/coco/sev/core.c
337
while (vaddr < vaddr_end && i < ARRAY_SIZE(data->entries)) {
arch/x86/coco/sev/svsm.c
137
e = &desc->entries[desc_entry];
arch/x86/events/amd/core.c
925
static int amd_pmu_v2_snapshot_branch_stack(struct perf_branch_entry *entries, unsigned int cnt)
arch/x86/events/amd/core.c
942
memcpy(entries, cpuc->lbr_entries, sizeof(struct perf_branch_entry) * cnt);
arch/x86/events/core.c
2905
desc = &ldt->entries[idx];
arch/x86/events/intel/core.c
2558
__intel_pmu_snapshot_branch_stack(struct perf_branch_entry *entries,
arch/x86/events/intel/core.c
2566
memcpy(entries, cpuc->lbr_entries, sizeof(struct perf_branch_entry) * cnt);
arch/x86/events/intel/core.c
2573
intel_pmu_snapshot_branch_stack(struct perf_branch_entry *entries, unsigned int cnt)
arch/x86/events/intel/core.c
2582
return __intel_pmu_snapshot_branch_stack(entries, cnt, flags);
arch/x86/events/intel/core.c
2586
intel_pmu_snapshot_arch_branch_stack(struct perf_branch_entry *entries, unsigned int cnt)
arch/x86/events/intel/core.c
2595
return __intel_pmu_snapshot_branch_stack(entries, cnt, flags);
arch/x86/events/intel/lbr.c
1009
intel_pmu_store_lbr(cpuc, xsave->lbr.entries);
arch/x86/events/intel/lbr.c
393
struct lbr_entry *entries = task_ctx->entries;
arch/x86/events/intel/lbr.c
397
if (!entries[x86_pmu.lbr_nr - 1].from)
arch/x86/events/intel/lbr.c
401
if (!entries[i].from)
arch/x86/events/intel/lbr.c
403
wrlbr_all(&entries[i], i, true);
arch/x86/events/intel/lbr.c
485
struct lbr_entry *entries = task_ctx->entries;
arch/x86/events/intel/lbr.c
489
if (!rdlbr_all(&entries[i], i, true))
arch/x86/events/intel/lbr.c
495
entries[x86_pmu.lbr_nr - 1].from = 0;
arch/x86/events/intel/lbr.c
902
struct lbr_entry *entries)
arch/x86/events/intel/lbr.c
910
lbr = entries ? &entries[i] : NULL;
arch/x86/events/perf_event.h
1065
struct lbr_entry entries[];
arch/x86/include/asm/cpuid/types.h
111
short entries;
arch/x86/include/asm/desc.h
126
static inline void paravirt_alloc_ldt(struct desc_struct *ldt, unsigned entries)
arch/x86/include/asm/desc.h
130
static inline void paravirt_free_ldt(struct desc_struct *ldt, unsigned entries)
arch/x86/include/asm/desc.h
192
static inline void native_set_ldt(const void *addr, unsigned int entries)
arch/x86/include/asm/desc.h
194
if (likely(entries == 0))
arch/x86/include/asm/desc.h
201
entries * LDT_ENTRY_SIZE - 1);
arch/x86/include/asm/e820/types.h
87
struct e820_entry entries[E820_MAX_ENTRIES];
arch/x86/include/asm/fpu/types.h
298
struct lbr_entry entries[];
arch/x86/include/asm/io_apic.h
36
entries : 8,
arch/x86/include/asm/kvm_host.h
691
DECLARE_KFIFO(entries, u64, KVM_HV_TLB_FLUSH_FIFO_SIZE);
arch/x86/include/asm/mmu_context.h
37
struct desc_struct *entries;
arch/x86/include/asm/paravirt.h
211
static inline void paravirt_alloc_ldt(struct desc_struct *ldt, unsigned entries)
arch/x86/include/asm/paravirt.h
213
PVOP_VCALL2(pv_ops, cpu.alloc_ldt, ldt, entries);
arch/x86/include/asm/paravirt.h
216
static inline void paravirt_free_ldt(struct desc_struct *ldt, unsigned entries)
arch/x86/include/asm/paravirt.h
218
PVOP_VCALL2(pv_ops, cpu.free_ldt, ldt, entries);
arch/x86/include/asm/paravirt.h
233
static inline void set_ldt(const void *addr, unsigned entries)
arch/x86/include/asm/paravirt.h
235
PVOP_VCALL2(pv_ops, cpu.set_ldt, addr, entries);
arch/x86/include/asm/paravirt_types.h
48
void (*set_ldt)(const void *desc, unsigned entries);
arch/x86/include/asm/paravirt_types.h
58
void (*alloc_ldt)(struct desc_struct *ldt, unsigned entries);
arch/x86/include/asm/paravirt_types.h
59
void (*free_ldt)(struct desc_struct *ldt, unsigned entries);
arch/x86/include/asm/sev-common.h
176
struct psc_entry entries[VMGEXIT_PSC_MAX_ENTRY];
arch/x86/include/asm/xen/hypercall.h
280
HYPERVISOR_set_gdt(unsigned long *frame_list, int entries)
arch/x86/include/asm/xen/hypercall.h
282
return _hypercall2(int, set_gdt, frame_list, entries);
arch/x86/include/uapi/asm/kvm.h
200
struct kvm_msr_entry entries[];
arch/x86/include/uapi/asm/kvm.h
248
struct kvm_cpuid_entry entries[];
arch/x86/include/uapi/asm/kvm.h
270
struct kvm_cpuid_entry2 entries[];
arch/x86/kernel/apic/io_apic.c
1196
apic_dbg("....... : max redirection entries: %02X\n", reg_01.bits.entries);
arch/x86/kernel/apic/io_apic.c
1222
io_apic_print_entries(ioapic_idx, reg_01.bits.entries);
arch/x86/kernel/apic/io_apic.c
2355
return reg_01.bits.entries + 1;
arch/x86/kernel/apic/io_apic.c
2678
int idx, ioapic, entries;
arch/x86/kernel/apic/io_apic.c
2717
entries = io_apic_get_redir_entries(idx);
arch/x86/kernel/apic/io_apic.c
2718
gsi_end = gsi_base + entries - 1;
arch/x86/kernel/apic/io_apic.c
2757
ioapics[idx].nr_registers = entries;
arch/x86/kernel/cpu/cpuid_0x2_table.c
18
.entries = (_entries), \
arch/x86/kernel/cpu/intel.c
651
short entries = desc->entries;
arch/x86/kernel/cpu/intel.c
655
tlb_lli_4k = max(tlb_lli_4k, entries);
arch/x86/kernel/cpu/intel.c
656
tlb_lld_4k = max(tlb_lld_4k, entries);
arch/x86/kernel/cpu/intel.c
659
tlb_lli_4k = max(tlb_lli_4k, entries);
arch/x86/kernel/cpu/intel.c
660
tlb_lld_4k = max(tlb_lld_4k, entries);
arch/x86/kernel/cpu/intel.c
661
tlb_lli_2m = max(tlb_lli_2m, entries);
arch/x86/kernel/cpu/intel.c
662
tlb_lld_2m = max(tlb_lld_2m, entries);
arch/x86/kernel/cpu/intel.c
663
tlb_lli_4m = max(tlb_lli_4m, entries);
arch/x86/kernel/cpu/intel.c
664
tlb_lld_4m = max(tlb_lld_4m, entries);
arch/x86/kernel/cpu/intel.c
667
tlb_lli_4k = max(tlb_lli_4k, entries);
arch/x86/kernel/cpu/intel.c
668
tlb_lli_2m = max(tlb_lli_2m, entries);
arch/x86/kernel/cpu/intel.c
669
tlb_lli_4m = max(tlb_lli_4m, entries);
arch/x86/kernel/cpu/intel.c
672
tlb_lli_4k = max(tlb_lli_4k, entries);
arch/x86/kernel/cpu/intel.c
675
tlb_lli_4m = max(tlb_lli_4m, entries);
arch/x86/kernel/cpu/intel.c
678
tlb_lli_2m = max(tlb_lli_2m, entries);
arch/x86/kernel/cpu/intel.c
679
tlb_lli_4m = max(tlb_lli_4m, entries);
arch/x86/kernel/cpu/intel.c
683
tlb_lld_4k = max(tlb_lld_4k, entries);
arch/x86/kernel/cpu/intel.c
687
tlb_lld_4m = max(tlb_lld_4m, entries);
arch/x86/kernel/cpu/intel.c
691
tlb_lld_2m = max(tlb_lld_2m, entries);
arch/x86/kernel/cpu/intel.c
692
tlb_lld_4m = max(tlb_lld_4m, entries);
arch/x86/kernel/cpu/intel.c
695
tlb_lld_4k = max(tlb_lld_4k, entries);
arch/x86/kernel/cpu/intel.c
696
tlb_lld_4m = max(tlb_lld_4m, entries);
arch/x86/kernel/cpu/intel.c
703
tlb_lld_1g = max(tlb_lld_1g, entries);
arch/x86/kernel/e820.c
1120
struct e820_entry *entry = e820_table->entries + idx;
arch/x86/kernel/e820.c
1147
struct e820_entry *entry = e820_table_kexec->entries + idx;
arch/x86/kernel/e820.c
117
struct e820_entry *entry = &e820_table->entries[idx];
arch/x86/kernel/e820.c
1209
struct e820_entry *entry = &e820_table->entries[idx];
arch/x86/kernel/e820.c
1331
struct e820_entry *entry = &e820_table->entries[idx];
arch/x86/kernel/e820.c
170
if (idx >= ARRAY_SIZE(table->entries)) {
arch/x86/kernel/e820.c
176
entry_new = table->entries + idx;
arch/x86/kernel/e820.c
211
struct e820_entry *entry = e820_table->entries + idx;
arch/x86/kernel/e820.c
346
struct e820_entry *entries = table->entries;
arch/x86/kernel/e820.c
347
u32 max_nr_entries = ARRAY_SIZE(table->entries);
arch/x86/kernel/e820.c
361
if (entries[idx].addr + entries[idx].size < entries[idx].addr)
arch/x86/kernel/e820.c
375
if (entries[idx].size != 0) {
arch/x86/kernel/e820.c
376
change_point[chg_idx]->addr = entries[idx].addr;
arch/x86/kernel/e820.c
377
change_point[chg_idx++]->entry = &entries[idx];
arch/x86/kernel/e820.c
378
change_point[chg_idx]->addr = entries[idx].addr + entries[idx].size;
arch/x86/kernel/e820.c
379
change_point[chg_idx++]->entry = &entries[idx];
arch/x86/kernel/e820.c
438
memcpy(entries, new_entries, new_nr_entries*sizeof(*entries));
arch/x86/kernel/e820.c
449
__init static int append_e820_table(struct boot_e820_entry *entries, u32 nr_entries)
arch/x86/kernel/e820.c
451
struct boot_e820_entry *entry = entries;
arch/x86/kernel/e820.c
491
struct e820_entry *entry = &table->entries[idx];
arch/x86/kernel/e820.c
566
struct e820_entry *entry = &e820_table->entries[idx];
arch/x86/kernel/e820.c
635
entry = e820_table->entries + idx;
arch/x86/kernel/e820.c
729
size = offsetof(struct e820_table, entries) + sizeof(struct e820_entry)*e820_table->nr_entries;
arch/x86/kernel/e820.c
734
size = offsetof(struct e820_table, entries) + sizeof(struct e820_entry)*e820_table_kexec->nr_entries;
arch/x86/kernel/e820.c
739
size = offsetof(struct e820_table, entries) + sizeof(struct e820_entry)*e820_table_firmware->nr_entries;
arch/x86/kernel/e820.c
753
int entries;
arch/x86/kernel/e820.c
758
entries = sdata->len / sizeof(*extmap);
arch/x86/kernel/e820.c
761
append_e820_table(extmap, entries);
arch/x86/kernel/e820.c
786
struct e820_entry *entry = &e820_table->entries[idx];
arch/x86/kernel/e820.c
810
struct e820_entry *entry = &e820_table->entries[idx];
arch/x86/kernel/e820.c
82
struct e820_entry *entry = &table->entries[idx];
arch/x86/kernel/e820.c
863
struct e820_entry *entry = &e820_table->entries[idx];
arch/x86/kernel/kexec-bzimage64.c
117
memcpy(¶ms->e820_table, &e820_table_kexec->entries, nr_e820_entries*sizeof(struct e820_entry));
arch/x86/kernel/kvm.c
989
struct e820_entry *entry = &e820_table->entries[i];
arch/x86/kernel/ldt.c
171
new_ldt->entries = __vmalloc(alloc_size, GFP_KERNEL_ACCOUNT | __GFP_ZERO);
arch/x86/kernel/ldt.c
173
new_ldt->entries = (void *)get_zeroed_page(GFP_KERNEL_ACCOUNT);
arch/x86/kernel/ldt.c
175
if (!new_ldt->entries) {
arch/x86/kernel/ldt.c
307
is_vmalloc = is_vmalloc_addr(ldt->entries);
arch/x86/kernel/ldt.c
313
const void *src = (char *)ldt->entries + offset;
arch/x86/kernel/ldt.c
418
paravirt_alloc_ldt(ldt->entries, ldt->nr_entries);
arch/x86/kernel/ldt.c
439
paravirt_free_ldt(ldt->entries, ldt->nr_entries);
arch/x86/kernel/ldt.c
441
vfree_atomic(ldt->entries);
arch/x86/kernel/ldt.c
443
free_page((unsigned long)ldt->entries);
arch/x86/kernel/ldt.c
469
memcpy(new_ldt->entries, old_mm->context.ldt->entries,
arch/x86/kernel/ldt.c
522
if (copy_to_user(ptr, mm->context.ldt->entries, entries_size)) {
arch/x86/kernel/ldt.c
632
memcpy(new_ldt->entries, old_ldt->entries, old_nr_entries * LDT_ENTRY_SIZE);
arch/x86/kernel/ldt.c
634
new_ldt->entries[ldt_info.entry_number] = ldt;
arch/x86/kernel/ldt.c
83
set_ldt(ldt->entries, ldt->nr_entries);
arch/x86/kernel/process_64.c
445
base = get_desc_base(ldt->entries + idx);
arch/x86/kernel/resource.c
39
entry = &e820_table->entries[i];
arch/x86/kernel/step.c
43
desc = &child->mm->context.ldt->entries[seg];
arch/x86/kernel/tboot.c
203
if (e820_table->entries[i].type != E820_TYPE_RAM)
arch/x86/kernel/tboot.c
206
add_mac_region(e820_table->entries[i].addr, e820_table->entries[i].size);
arch/x86/kvm/cpuid.c
106
e = &entries[i];
arch/x86/kvm/cpuid.c
1314
struct kvm_cpuid_entry2 *entries;
arch/x86/kvm/cpuid.c
1324
return &array->entries[array->nent++];
arch/x86/kvm/cpuid.c
1412
array->nent += cpuid_func_emulated(&array->entries[array->nent], func, false);
arch/x86/kvm/cpuid.c
1939
limit = array->entries[array->nent - 1].eax;
arch/x86/kvm/cpuid.c
1949
static bool sanity_check_entries(struct kvm_cpuid_entry2 __user *entries,
arch/x86/kvm/cpuid.c
1967
if (copy_from_user(pad, entries[i].padding, sizeof(pad)))
arch/x86/kvm/cpuid.c
1977
struct kvm_cpuid_entry2 __user *entries,
arch/x86/kvm/cpuid.c
1994
if (sanity_check_entries(entries, cpuid->nent, type))
arch/x86/kvm/cpuid.c
1997
array.entries = kvzalloc_objs(struct kvm_cpuid_entry2, cpuid->nent);
arch/x86/kvm/cpuid.c
1998
if (!array.entries)
arch/x86/kvm/cpuid.c
2010
if (copy_to_user(entries, array.entries,
arch/x86/kvm/cpuid.c
2015
kvfree(array.entries);
arch/x86/kvm/cpuid.c
591
struct kvm_cpuid_entry __user *entries)
arch/x86/kvm/cpuid.c
601
e = vmemdup_array_user(entries, cpuid->nent, sizeof(*e));
arch/x86/kvm/cpuid.c
636
struct kvm_cpuid_entry2 __user *entries)
arch/x86/kvm/cpuid.c
645
e2 = vmemdup_array_user(entries, cpuid->nent, sizeof(*e2));
arch/x86/kvm/cpuid.c
659
struct kvm_cpuid_entry2 __user *entries)
arch/x86/kvm/cpuid.c
667
if (copy_to_user(entries, vcpu->arch.cpuid_entries,
arch/x86/kvm/cpuid.c
88
struct kvm_cpuid_entry2 *entries, int nent, u32 function, u64 index)
arch/x86/kvm/cpuid.h
22
struct kvm_cpuid_entry2 *kvm_find_cpuid_entry2(struct kvm_cpuid_entry2 *entries,
arch/x86/kvm/cpuid.h
51
struct kvm_cpuid_entry2 __user *entries,
arch/x86/kvm/cpuid.h
55
struct kvm_cpuid_entry __user *entries);
arch/x86/kvm/cpuid.h
58
struct kvm_cpuid_entry2 __user *entries);
arch/x86/kvm/cpuid.h
61
struct kvm_cpuid_entry2 __user *entries);
arch/x86/kvm/hyperv.c
1928
static int kvm_hv_get_tlb_flush_entries(struct kvm *kvm, struct kvm_hv_hcall *hc, u64 entries[])
arch/x86/kvm/hyperv.c
1930
return kvm_hv_get_hc_data(kvm, hc, hc->rep_cnt, hc->rep_cnt, entries);
arch/x86/kvm/hyperv.c
1935
u64 *entries, int count)
arch/x86/kvm/hyperv.c
1950
if (count && entries && count < kfifo_avail(&tlb_flush_fifo->entries)) {
arch/x86/kvm/hyperv.c
1951
WARN_ON(kfifo_in(&tlb_flush_fifo->entries, entries, count) != count);
arch/x86/kvm/hyperv.c
1959
kfifo_in(&tlb_flush_fifo->entries, &flush_all_entry, 1);
arch/x86/kvm/hyperv.c
1969
u64 entries[KVM_HV_TLB_FLUSH_FIFO_SIZE];
arch/x86/kvm/hyperv.c
1978
count = kfifo_out(&tlb_flush_fifo->entries, entries, KVM_HV_TLB_FLUSH_FIFO_SIZE);
arch/x86/kvm/hyperv.c
1981
if (entries[i] == KVM_HV_TLB_FLUSHALL_ENTRY)
arch/x86/kvm/hyperv.c
1988
gva = entries[i] & PAGE_MASK;
arch/x86/kvm/hyperv.c
1989
for (j = 0; j < (entries[i] & ~PAGE_MASK) + 1; j++) {
arch/x86/kvm/hyperv.c
2001
kfifo_reset_out(&tlb_flush_fifo->entries);
arch/x86/kvm/hyperv.c
2770
struct kvm_cpuid_entry2 __user *entries)
arch/x86/kvm/hyperv.c
2921
if (copy_to_user(entries, cpuid_entries,
arch/x86/kvm/hyperv.c
987
INIT_KFIFO(hv_vcpu->tlb_flush_fifo[i].entries);
arch/x86/kvm/hyperv.h
195
struct kvm_cpuid_entry2 __user *entries);
arch/x86/kvm/hyperv.h
216
kfifo_reset_out(&tlb_flush_fifo->entries);
arch/x86/kvm/svm/sev.c
3770
struct psc_entry entries[];
arch/x86/kvm/svm/sev.c
3793
struct psc_entry *entries = psc->entries;
arch/x86/kvm/svm/sev.c
3804
struct psc_entry *entry = &entries[idx];
arch/x86/kvm/svm/sev.c
3830
struct psc_entry *entries = psc->entries;
arch/x86/kvm/svm/sev.c
3866
entry_start = entries[idx];
arch/x86/kvm/svm/sev.c
3910
struct psc_entry entry = entries[idx];
arch/x86/kvm/vmx/tdx.c
210
td_init_cpuid_entry2(&caps->cpuid.entries[i], i);
arch/x86/kvm/vmx/tdx.c
2221
caps = kzalloc_flex(*caps, cpuid.entries, td_conf->num_cpuid_config);
arch/x86/kvm/vmx/tdx.c
2229
if (copy_to_user(user_caps, caps, struct_size(caps, cpuid.entries,
arch/x86/kvm/vmx/tdx.c
2257
entry = kvm_find_cpuid_entry2(cpuid->entries, cpuid->nent, 0x80000008, 0);
arch/x86/kvm/vmx/tdx.c
2298
entry = kvm_find_cpuid_entry2(cpuid->entries, cpuid->nent,
arch/x86/kvm/vmx/tdx.c
2731
struct_size(user_data, cpuid.entries, nr_user_entries));
arch/x86/kvm/vmx/tdx.c
3015
sizeof(output->entries[0]) * KVM_MAX_CPUID_ENTRIES,
arch/x86/kvm/vmx/tdx.c
3026
if (tdx_vcpu_get_cpuid_leaf(vcpu, 0, &i, &td_cpuid->entries[i])) {
arch/x86/kvm/vmx/tdx.c
3030
level = td_cpuid->entries[0].eax;
arch/x86/kvm/vmx/tdx.c
3033
tdx_vcpu_get_cpuid_leaf(vcpu, leaf, &i, &td_cpuid->entries[i]);
arch/x86/kvm/vmx/tdx.c
3036
if (tdx_vcpu_get_cpuid_leaf(vcpu, 0x80000000, &i, &td_cpuid->entries[i])) {
arch/x86/kvm/vmx/tdx.c
3040
level = td_cpuid->entries[i - 1].eax;
arch/x86/kvm/vmx/tdx.c
3043
tdx_vcpu_get_cpuid_leaf(vcpu, leaf, &i, &td_cpuid->entries[i]);
arch/x86/kvm/vmx/tdx.c
3057
if (copy_to_user(output->entries, td_cpuid->entries,
arch/x86/kvm/x86.c
4664
struct kvm_msr_entry *entries,
arch/x86/kvm/x86.c
4678
if (!fpu_loaded && is_xstate_managed_msr(vcpu, entries[i].index)) {
arch/x86/kvm/x86.c
4682
if (do_msr(vcpu, entries[i].index, &entries[i].data))
arch/x86/kvm/x86.c
4702
struct kvm_msr_entry *entries;
arch/x86/kvm/x86.c
4715
entries = memdup_user(user_msrs->entries, size);
arch/x86/kvm/x86.c
4716
if (IS_ERR(entries)) {
arch/x86/kvm/x86.c
4717
r = PTR_ERR(entries);
arch/x86/kvm/x86.c
4721
r = __msr_io(vcpu, &msrs, entries, do_msr);
arch/x86/kvm/x86.c
4723
if (writeback && copy_to_user(user_msrs->entries, entries, size))
arch/x86/kvm/x86.c
4726
kfree(entries);
arch/x86/kvm/x86.c
4766
r = kvm_get_hv_cpuid(vcpu, &cpuid, cpuid_arg->entries);
arch/x86/kvm/x86.c
5084
r = kvm_dev_ioctl_get_cpuid(&cpuid, cpuid_arg->entries,
arch/x86/kvm/x86.c
6261
r = kvm_vcpu_ioctl_set_cpuid(vcpu, &cpuid, cpuid_arg->entries);
arch/x86/kvm/x86.c
6272
cpuid_arg->entries);
arch/x86/kvm/x86.c
6283
cpuid_arg->entries);
arch/x86/lib/insn-eval.c
644
*out = ldt->entries[sel];
arch/x86/math-emu/fpu_system.h
32
ret = current->mm->context.ldt->entries[seg];
arch/x86/pci/irq.c
1347
int entries = (rt->size - sizeof(struct irq_routing_table)) /
arch/x86/pci/irq.c
1352
for (info = rt->slots; entries--; info++)
arch/x86/pci/mmconfig-shared.c
622
int entries;
arch/x86/pci/mmconfig-shared.c
631
entries = 0;
arch/x86/pci/mmconfig-shared.c
634
entries++;
arch/x86/pci/mmconfig-shared.c
637
if (entries == 0) {
arch/x86/pci/mmconfig-shared.c
643
for (i = 0; i < entries; i++) {
arch/x86/power/hibernate.c
75
int size = offsetof(struct e820_table, entries) +
arch/x86/xen/enlighten_pv.c
504
static void xen_alloc_ldt(struct desc_struct *ldt, unsigned entries)
arch/x86/xen/enlighten_pv.c
520
for (i = 0; i < entries; i += entries_per_page)
arch/x86/xen/enlighten_pv.c
524
static void xen_free_ldt(struct desc_struct *ldt, unsigned entries)
arch/x86/xen/enlighten_pv.c
529
for (i = 0; i < entries; i += entries_per_page)
arch/x86/xen/enlighten_pv.c
533
static void xen_set_ldt(const void *addr, unsigned entries)
arch/x86/xen/enlighten_pv.c
538
trace_xen_cpu_set_ldt(addr, entries);
arch/x86/xen/enlighten_pv.c
543
op->arg2.nr_ents = entries;
arch/x86/xen/multicalls.c
121
unsigned int opidx = mcdb->entries[idx].op & 0xff;
arch/x86/xen/multicalls.c
125
mcdb->entries[idx].op, b->entries[idx].result,
arch/x86/xen/multicalls.c
132
pr_cont("%lx ", mcdb->entries[idx].args[arg]);
arch/x86/xen/multicalls.c
161
memcpy(mcdb->entries, b->entries,
arch/x86/xen/multicalls.c
174
mc = &b->entries[0];
arch/x86/xen/multicalls.c
183
if (HYPERVISOR_multicall(b->entries, b->mcidx) != 0)
arch/x86/xen/multicalls.c
186
if (b->entries[i].result < 0)
arch/x86/xen/multicalls.c
196
} else if (b->entries[i].result < 0) {
arch/x86/xen/multicalls.c
199
b->entries[i].op,
arch/x86/xen/multicalls.c
200
b->entries[i].args[0],
arch/x86/xen/multicalls.c
201
b->entries[i].result);
arch/x86/xen/multicalls.c
238
ret.mc = &b->entries[b->mcidx];
arch/x86/xen/multicalls.c
263
b->entries[b->mcidx - 1].op != op)) {
arch/x86/xen/multicalls.c
273
ret.mc = &b->entries[b->mcidx - 1];
arch/x86/xen/multicalls.c
40
struct multicall_entry entries[MC_BATCH];
arch/x86/xen/multicalls.c
49
struct multicall_entry entries[MC_BATCH];
arch/x86/xen/setup.c
162
const struct e820_entry *entry = xen_e820_table.entries;
arch/x86/xen/setup.c
414
const struct e820_entry *entry = xen_e820_table.entries;
arch/x86/xen/setup.c
561
struct e820_entry *entry = xen_e820_table.entries;
arch/x86/xen/setup.c
580
entry = xen_e820_table.entries;
arch/x86/xen/setup.c
605
struct e820_entry *entry = xen_e820_table.entries;
arch/x86/xen/setup.c
641
entry = xen_e820_table.entries;
arch/x86/xen/setup.c
651
entry = xen_e820_table.entries +
arch/x86/xen/setup.c
698
entry = xen_e820_table.entries;
arch/x86/xen/setup.c
806
memmap.nr_entries = ARRAY_SIZE(xen_e820_table.entries);
arch/x86/xen/setup.c
807
set_xen_guest_handle(memmap.buffer, xen_e820_table.entries);
arch/x86/xen/setup.c
820
xen_e820_table.entries[0].addr = 0ULL;
arch/x86/xen/setup.c
821
xen_e820_table.entries[0].size = mem_end;
arch/x86/xen/setup.c
823
xen_e820_table.entries[0].size += 8ULL << 20;
arch/x86/xen/setup.c
824
xen_e820_table.entries[0].type = E820_TYPE_RAM;
arch/x86/xen/setup.c
844
xen_e820_table.entries[xen_e820_table.nr_entries].addr = IBFT_START;
arch/x86/xen/setup.c
845
xen_e820_table.entries[xen_e820_table.nr_entries].size = IBFT_END - IBFT_START;
arch/x86/xen/setup.c
846
xen_e820_table.entries[xen_e820_table.nr_entries].type = E820_TYPE_RESERVED;
arch/x86/xen/setup.c
894
addr = xen_e820_table.entries[0].addr;
arch/x86/xen/setup.c
895
size = xen_e820_table.entries[0].size;
arch/x86/xen/setup.c
900
type = xen_e820_table.entries[i].type;
arch/x86/xen/setup.c
927
addr = xen_e820_table.entries[i].addr;
arch/x86/xen/setup.c
928
size = xen_e820_table.entries[i].size;
arch/xtensa/kernel/stacktrace.c
220
trace->entries[trace->nr_entries++] = frame->pc;
drivers/accel/amdxdna/aie2_ctx.c
380
u32 width = 1, entries = 0;
drivers/accel/amdxdna/aie2_ctx.c
418
entries = (last - first) / width + 1;
drivers/accel/amdxdna/aie2_ctx.c
422
if (unlikely(!entries)) {
drivers/accel/amdxdna/aie2_ctx.c
428
hwctx->col_list = kmalloc_array(entries, sizeof(*hwctx->col_list), GFP_KERNEL);
drivers/accel/amdxdna/aie2_ctx.c
432
hwctx->col_list_len = entries;
drivers/accel/amdxdna/aie2_ctx.c
434
for (i = 1; i < entries; i++)
drivers/accel/amdxdna/aie2_ctx.c
438
entries * sizeof(*hwctx->col_list), false);
drivers/acpi/acpi_extlog.c
325
l1_percpu_entry = l1_head->entries;
drivers/acpi/acpi_extlog.c
44
u32 entries; /* Valid L1 Directory entries per logical processor */
drivers/acpi/acpi_watchdog.c
104
const struct acpi_wdat_entry *entries;
drivers/acpi/acpi_watchdog.c
128
entries = (struct acpi_wdat_entry *)(wdat + 1);
drivers/acpi/acpi_watchdog.c
129
for (i = 0; i < wdat->entries; i++) {
drivers/acpi/acpi_watchdog.c
135
gas = &entries[i].register_region;
drivers/acpi/acpi_watchdog.c
30
const struct acpi_wdat_entry *entries;
drivers/acpi/acpi_watchdog.c
33
entries = (struct acpi_wdat_entry *)(wdat + 1);
drivers/acpi/acpi_watchdog.c
34
for (i = 0; i < wdat->entries; i++) {
drivers/acpi/acpi_watchdog.c
37
gas = &entries[i].register_region;
drivers/acpi/apei/apei-base.c
163
for (i = 0; i < ctx->entries; i++) {
drivers/acpi/apei/apei-base.c
205
for (i = 0; i < ctx->entries; i++) {
drivers/acpi/apei/apei-base.c
249
ctx_unmap.entries = end;
drivers/acpi/apei/apei-base.c
48
u32 entries)
drivers/acpi/apei/apei-base.c
53
ctx->entries = entries;
drivers/acpi/apei/apei-internal.h
34
u32 entries;
drivers/acpi/apei/apei-internal.h
41
u32 entries);
drivers/acpi/apei/einj-core.c
191
EINJ_TAB_ENTRY(einj_tab), einj_tab->entries);
drivers/acpi/apei/einj-core.c
312
for (i = 0; i < einj_tab->entries; i++) {
drivers/acpi/apei/einj-core.c
940
if (einj_tab->entries !=
drivers/acpi/apei/erst.c
1008
if (erst_tab->entries !=
drivers/acpi/apei/erst.c
391
ERST_TAB_ENTRY(erst_tab), erst_tab->entries);
drivers/acpi/apei/erst.c
456
u64 *entries;
drivers/acpi/apei/erst.c
505
u64 *entries;
drivers/acpi/apei/erst.c
526
entries = erst_record_id_cache.entries;
drivers/acpi/apei/erst.c
528
if (entries[i] == id)
drivers/acpi/apei/erst.c
546
new_entries = kvmalloc_array(new_size, sizeof(entries[0]),
drivers/acpi/apei/erst.c
550
memcpy(new_entries, entries,
drivers/acpi/apei/erst.c
551
erst_record_id_cache.len * sizeof(entries[0]));
drivers/acpi/apei/erst.c
552
kvfree(entries);
drivers/acpi/apei/erst.c
553
erst_record_id_cache.entries = entries = new_entries;
drivers/acpi/apei/erst.c
556
entries[i] = id;
drivers/acpi/apei/erst.c
570
u64 *entries;
drivers/acpi/apei/erst.c
580
entries = erst_record_id_cache.entries;
drivers/acpi/apei/erst.c
582
if (entries[*pos] != APEI_ERST_INVALID_RECORD_ID)
drivers/acpi/apei/erst.c
586
*record_id = entries[*pos];
drivers/acpi/apei/erst.c
597
*record_id = erst_record_id_cache.entries[*pos];
drivers/acpi/apei/erst.c
615
u64 *entries;
drivers/acpi/apei/erst.c
620
entries = erst_record_id_cache.entries;
drivers/acpi/apei/erst.c
622
if (entries[i] == APEI_ERST_INVALID_RECORD_ID)
drivers/acpi/apei/erst.c
625
entries[wpos] = entries[i];
drivers/acpi/apei/erst.c
895
u64 *entries;
drivers/acpi/apei/erst.c
899
entries = erst_record_id_cache.entries;
drivers/acpi/apei/erst.c
901
if (entries[i] == record_id)
drivers/acpi/apei/erst.c
902
entries[i] = APEI_ERST_INVALID_RECORD_ID;
drivers/acpi/apei/erst.c
964
u64 *entries;
drivers/acpi/apei/erst.c
980
entries = erst_record_id_cache.entries;
drivers/acpi/apei/erst.c
982
if (entries[i] == record_id)
drivers/acpi/apei/erst.c
983
entries[i] = APEI_ERST_INVALID_RECORD_ID;
drivers/acpi/numa/hmat.c
433
u16 *entries;
drivers/acpi/numa/hmat.c
446
total_size = sizeof(*hmat_loc) + sizeof(*entries) * ipds * tpds +
drivers/acpi/numa/hmat.c
460
entries = (u16 *)(targs + tpds);
drivers/acpi/numa/hmat.c
464
value = hmat_normalize(entries[init * tpds + targ],
drivers/acpi/numa/hmat.c
651
u16 *entries;
drivers/acpi/numa/hmat.c
657
entries = (u16 *)(targs + tpds);
drivers/acpi/numa/hmat.c
678
return hmat_normalize(entries[idx * tpds + tdx],
drivers/acpi/processor_idle.c
1039
struct acpi_lpi_state *p, *t = curr_level->entries;
drivers/acpi/processor_idle.c
1075
kfree(curr_level->entries);
drivers/acpi/processor_idle.c
865
struct acpi_lpi_state *entries;
drivers/acpi/processor_idle.c
920
info->entries = lpi_state;
drivers/atm/idt77252.c
708
int entries;
drivers/atm/idt77252.c
713
entries = atomic_read(&scq->used);
drivers/atm/idt77252.c
714
if (entries > (SCQ_ENTRIES - 1)) {
drivers/atm/iphase.c
463
int entries;
drivers/atm/iphase.c
480
entries = rate / dev->Granularity;
drivers/atm/iphase.c
482
entries, rate, dev->Granularity);)
drivers/atm/iphase.c
483
if (entries < 1)
drivers/atm/iphase.c
485
rateLow = entries * dev->Granularity;
drivers/atm/iphase.c
486
rateHigh = (entries + 1) * dev->Granularity;
drivers/atm/iphase.c
488
entries++;
drivers/atm/iphase.c
489
if (entries > dev->CbrRemEntries) {
drivers/atm/iphase.c
492
entries, dev->CbrRemEntries);)
drivers/atm/iphase.c
497
ia_vcc->NumCbrEntry = entries;
drivers/atm/iphase.c
498
dev->sum_mcr += entries * dev->Granularity;
drivers/atm/iphase.c
503
spacing = dev->CbrTotEntries / entries;
drivers/atm/iphase.c
504
sp_mod = dev->CbrTotEntries % entries; // get modulo
drivers/atm/iphase.c
505
toBeAssigned = entries;
drivers/atm/iphase.c
513
if (toBeAssigned == entries)
drivers/atm/iphase.c
522
fracSlot = ((sp_mod + sp_mod2) / entries); // get new integer part
drivers/atm/iphase.c
523
sp_mod2 = ((sp_mod + sp_mod2) % entries); // calc new fractional part
drivers/base/test/property-entry-test.c
12
static const struct property_entry entries[] = {
drivers/base/test/property-entry-test.c
123
static const struct property_entry entries[] = {
drivers/base/test/property-entry-test.c
138
node = fwnode_create_software_node(entries, NULL);
drivers/base/test/property-entry-test.c
259
static const struct property_entry entries[] = {
drivers/base/test/property-entry-test.c
27
node = fwnode_create_software_node(entries, NULL);
drivers/base/test/property-entry-test.c
271
node = fwnode_create_software_node(entries, NULL);
drivers/base/test/property-entry-test.c
326
static const struct property_entry entries[] = {
drivers/base/test/property-entry-test.c
333
node = fwnode_create_software_node(entries, NULL);
drivers/base/test/property-entry-test.c
347
static const struct property_entry entries[] = {
drivers/base/test/property-entry-test.c
356
copy = property_entries_dup(entries);
drivers/base/test/property-entry-test.c
378
static struct property_entry entries[] = {
drivers/base/test/property-entry-test.c
388
copy = property_entries_dup(entries);
drivers/base/test/property-entry-test.c
417
const struct property_entry entries[] = {
drivers/base/test/property-entry-test.c
431
node = fwnode_create_software_node(entries, NULL);
drivers/bluetooth/btintel_pcie.c
220
struct msix_entry *entries = entry - queue;
drivers/bluetooth/btintel_pcie.c
222
return container_of(entries, struct btintel_pcie_data, msix_entries[0]);
drivers/bus/mhi/host/boot.c
321
for (i = 0; i < image_info->entries; i++, mhi_buf++)
drivers/bus/mhi/host/boot.c
33
for (i = 0; i < img_info->entries - 1; i++, mhi_buf++, bhi_vec++) {
drivers/bus/mhi/host/boot.c
355
img_info->entries = 1;
drivers/bus/mhi/host/boot.c
405
img_info->entries = segments;
drivers/bus/mhi/host/boot.c
467
ret = mhi_fw_load_bhi(mhi_cntrl, &image->mhi_buf[image->entries - 1]);
drivers/bus/mhi/host/boot.c
484
ret = mhi_fw_load_bhie(mhi_cntrl, &image->mhi_buf[image->entries - 1]);
drivers/bus/mhi/host/boot.c
645
&image_info->mhi_buf[image_info->entries - 1]);
drivers/char/xillybus/xillybus.h
35
int entries;
drivers/char/xillybus/xillybus_core.c
1934
idt_handle.entries);
drivers/char/xillybus/xillybus_core.c
429
int entries)
drivers/char/xillybus/xillybus_core.c
507
for (entry = 0; entry < entries; entry++, chandesc += 4) {
drivers/char/xillybus/xillybus_core.c
626
idt_handle->entries = len >> 2;
drivers/clk/tegra/clk-tegra124-dfll-fcpu.c
140
.entries = {
drivers/clk/tegra/clk-tegra124-dfll-fcpu.c
192
.entries = { \
drivers/clk/tegra/clk-tegra124-dfll-fcpu.c
219
.entries = { \
drivers/clk/tegra/clk-tegra124-dfll-fcpu.c
242
.entries = { \
drivers/clk/tegra/clk-tegra124-dfll-fcpu.c
265
.entries = { \
drivers/clk/tegra/clk-tegra124-dfll-fcpu.c
287
.entries = { \
drivers/clk/tegra/clk-tegra124-dfll-fcpu.c
308
.entries = { \
drivers/clk/tegra/clk-tegra124-dfll-fcpu.c
44
.entries = { \
drivers/clk/tegra/clk-tegra124-dfll-fcpu.c
79
.entries = {
drivers/clk/tegra/cvb.c
135
const struct cvb_table_freq_entry *entry = &table->entries[i];
drivers/clk/tegra/cvb.c
65
const struct cvb_table_freq_entry *entry = &table->entries[i];
drivers/clk/tegra/cvb.h
47
struct cvb_table_freq_entry entries[MAX_DVFS_FREQS];
drivers/crypto/nx/nx-common-pseries.c
187
struct nx842_slentry *entries; /* ptr to array of slentries */
drivers/crypto/nx/nx-common-pseries.c
205
entry = sl->entries;
drivers/crypto/nx/nx-common-pseries.c
341
slin.entries = (struct nx842_slentry *)workmem->slin;
drivers/crypto/nx/nx-common-pseries.c
342
slout.entries = (struct nx842_slentry *)workmem->slout;
drivers/crypto/nx/nx-common-pseries.c
358
op.in = nx842_get_pa(slin.entries);
drivers/crypto/nx/nx-common-pseries.c
370
op.out = nx842_get_pa(slout.entries);
drivers/crypto/nx/nx-common-pseries.c
473
slin.entries = (struct nx842_slentry *)workmem->slin;
drivers/crypto/nx/nx-common-pseries.c
474
slout.entries = (struct nx842_slentry *)workmem->slout;
drivers/crypto/nx/nx-common-pseries.c
490
op.in = nx842_get_pa(slin.entries);
drivers/crypto/nx/nx-common-pseries.c
502
op.out = nx842_get_pa(slout.entries);
drivers/cxl/acpi.c
231
int entries, int *qos_class)
drivers/cxl/acpi.c
251
if (!entries)
drivers/cxl/acpi.c
287
count = min(entries, pkg_entries);
drivers/cxl/acpi.c
312
struct access_coordinate *coord, int entries,
drivers/cxl/acpi.c
325
return cxl_acpi_evaluate_qtg_dsm(handle, coord, entries, qos_class);
drivers/cxl/core/cdat.c
18
int entries;
drivers/cxl/core/cdat.c
223
dent->entries = 1;
drivers/cxl/core/cdat.c
439
struct acpi_cdat_sslbe entries[];
drivers/cxl/core/cdat.c
445
int remain, entries, i;
drivers/cxl/core/cdat.c
450
if (!remain || remain % sizeof(tbl->entries[0]) ||
drivers/cxl/core/cdat.c
461
entries = remain / sizeof(tbl->entries[0]);
drivers/cxl/core/cdat.c
462
if (struct_size(tbl, entries, entries) != len)
drivers/cxl/core/cdat.c
465
for (i = 0; i < entries; i++) {
drivers/cxl/core/cdat.c
466
u16 x = le16_to_cpu((__force __le16)tbl->entries[i].portx_id);
drivers/cxl/core/cdat.c
467
u16 y = le16_to_cpu((__force __le16)tbl->entries[i].porty_id);
drivers/cxl/core/cdat.c
496
le_val = (__force __le16)tbl->entries[i].latency_or_bandwidth;
drivers/cxl/core/features.c
110
entry = entries->ent;
drivers/cxl/core/features.c
177
entries->num_features = count;
drivers/cxl/core/features.c
178
entries->num_user_features = user_feats;
drivers/cxl/core/features.c
180
return no_free_ptr(entries);
drivers/cxl/core/features.c
189
kvfree(cxlfs->entries);
drivers/cxl/core/features.c
213
cxlfs->entries = get_supported_features(cxlfs);
drivers/cxl/core/features.c
214
if (!cxlfs->entries)
drivers/cxl/core/features.c
374
if (!cxlfs || !cxlfs->entries)
drivers/cxl/core/features.c
377
for (int i = 0; i < cxlfs->entries->num_features; i++) {
drivers/cxl/core/features.c
378
feat = &cxlfs->entries->ent[i];
drivers/cxl/core/features.c
411
if (start >= cxlfs->entries->num_features)
drivers/cxl/core/features.c
414
requested = min_t(int, requested, cxlfs->entries->num_features - start);
drivers/cxl/core/features.c
428
i < cxlfs->entries->num_features; i++, pos++) {
drivers/cxl/core/features.c
432
memcpy(pos, &cxlfs->entries->ent[i], sizeof(*pos));
drivers/cxl/core/features.c
448
feat_out->supported_feats = cpu_to_le16(cxlfs->entries->num_features);
drivers/cxl/core/features.c
689
if (!cxlfs->entries->num_user_features)
drivers/cxl/core/features.c
96
struct cxl_feat_entries *entries __free(kvfree) =
drivers/cxl/core/features.c
97
kvmalloc_flex(*entries, ent, count);
drivers/cxl/core/features.c
98
if (!entries)
drivers/cxl/core/mbox.c
857
for (i = 0; i < le16_to_cpu(gsl->entries); i++) {
drivers/cxl/cxl.h
667
struct access_coordinate *coord, int entries,
drivers/cxl/cxlmem.h
583
__le16 entries;
drivers/dma/dw-edma/dw-edma-v0-debugfs.c
103
struct dw_edma_debugfs_entry *entries;
drivers/dma/dw-edma/dw-edma-v0-debugfs.c
106
entries = devm_kcalloc(dw->chip->dev, nr_entries, sizeof(*entries),
drivers/dma/dw-edma/dw-edma-v0-debugfs.c
108
if (!entries)
drivers/dma/dw-edma/dw-edma-v0-debugfs.c
112
entries[i] = ini[i];
drivers/dma/dw-edma/dw-edma-v0-debugfs.c
114
debugfs_create_file_unsafe(entries[i].name, 0444, dent,
drivers/dma/dw-edma/dw-edma-v0-debugfs.c
115
&entries[i], &fops_x32);
drivers/dma/dw-edma/dw-hdma-v0-debugfs.c
63
struct dw_hdma_debugfs_entry *entries;
drivers/dma/dw-edma/dw-hdma-v0-debugfs.c
66
entries = devm_kcalloc(dw->chip->dev, nr_entries, sizeof(*entries),
drivers/dma/dw-edma/dw-hdma-v0-debugfs.c
68
if (!entries)
drivers/dma/dw-edma/dw-hdma-v0-debugfs.c
72
entries[i] = ini[i];
drivers/dma/dw-edma/dw-hdma-v0-debugfs.c
74
debugfs_create_file_unsafe(entries[i].name, 0444, dent,
drivers/dma/dw-edma/dw-hdma-v0-debugfs.c
75
&entries[i], &fops_x32);
drivers/firewire/core-device.c
588
} *entry, entries[] = {
drivers/firewire/core-device.c
603
for (i = 0; i < ARRAY_SIZE(entries); ++i) {
drivers/firewire/core-device.c
606
entry = entries + i;
drivers/firmware/efi/esrt.c
333
struct efi_system_resource_entry_v1 *v1_entries = (void *)esrt->entries;
drivers/firmware/efi/esrt.c
54
u8 entries[];
drivers/fwctl/pds/main.c
202
ep_entry = (struct pds_fwctl_query_data_endpoint *)pdsfc->endpoints->entries;
drivers/fwctl/pds/main.c
214
struct pds_fwctl_query_data_operation *entries;
drivers/fwctl/pds/main.c
252
entries = (struct pds_fwctl_query_data_operation *)data->entries;
drivers/fwctl/pds/main.c
258
switch (entries[i].scope) {
drivers/fwctl/pds/main.c
262
entries[i].scope = FWCTL_RPC_CONFIGURATION;
drivers/fwctl/pds/main.c
265
entries[i].scope = FWCTL_RPC_DEBUG_READ_ONLY;
drivers/fwctl/pds/main.c
268
entries[i].scope = FWCTL_RPC_DEBUG_WRITE;
drivers/fwctl/pds/main.c
271
entries[i].scope = FWCTL_RPC_DEBUG_WRITE_FULL;
drivers/fwctl/pds/main.c
275
ep, le32_to_cpu(entries[i].id), entries[i].scope);
drivers/fwctl/pds/main.c
335
op_entry = (struct pds_fwctl_query_data_operation *)ep_info->operations->entries;
drivers/gpu/drm/amd/amdgpu/amdgpu.h
747
struct amdgpu_uma_carveout_option entries[MAX_UMA_OPTION_ENTRIES];
drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c
1411
voltage_table->entries[i].value =
drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c
1413
voltage_table->entries[i].smio_low =
drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.h
129
struct atom_voltage_table_entry entries[MAX_VOLTAGE_ENTRIES];
drivers/gpu/drm/amd/amdgpu/amdgpu_atomfirmware.c
325
uma_info->entries[i].memory_carved_mb = 512;
drivers/gpu/drm/amd/amdgpu/amdgpu_atomfirmware.c
327
uma_info->entries[i].memory_carved_mb = (uint32_t)opts[i].memoryCarvedGb << 10;
drivers/gpu/drm/amd/amdgpu/amdgpu_atomfirmware.c
329
uma_info->entries[i].flags = opts[i].uma_carveout_option_flags.all8;
drivers/gpu/drm/amd/amdgpu/amdgpu_atomfirmware.c
330
strscpy(uma_info->entries[i].name, opts[i].optionName, MAX_UMA_OPTION_NAME);
drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c
80
list = kvzalloc_flex(*list, entries, num_entries);
drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c
87
array = list->entries;
drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.h
58
struct amdgpu_bo_list_entry entries[] __counted_by(num_entries);
drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.h
74
for (e = list->entries; \
drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.h
75
e != &list->entries[list->num_entries]; \
drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.h
79
for (e = &list->entries[list->first_userptr]; \
drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.h
80
e != &list->entries[list->num_entries]; \
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
417
memory_carved = uma_info->entries[i].memory_carved_mb;
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
421
uma_info->entries[i].name,
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
426
uma_info->entries[i].name,
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
476
opt = &uma_info->entries[val];
drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c
1218
if (copy_to_user(u64_to_user_ptr(args->entries), bo_entries, num_bos * sizeof(*bo_entries)))
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
1094
vce_clk_table.entries[i].sclk = vce_state->sclk;
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
1095
vce_clk_table.entries[i].mclk = vce_state->mclk;
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
1096
vce_clk_table.entries[i].eclk = vce_state->evclk;
drivers/gpu/drm/amd/amdgpu/amdgpu_object.h
140
struct amdgpu_vm_bo_base entries[];
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_pt.c
193
cursor->entry = &to_amdgpu_bo_vm(cursor->entry->bo)->entries[idx];
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_pt.c
223
if (cursor->entry == &parent->entries[num_entries - 1])
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_pt.c
368
unsigned int entries;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_pt.c
383
entries = amdgpu_bo_size(bo) / 8;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_pt.c
420
r = vm->update_funcs->update(¶ms, vmbo, addr, 0, entries,
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_pt.c
467
bp.bo_ptr_size = struct_size((*vmbo), entries, num_entries);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_pt.c
648
pde = (entry - to_amdgpu_bo_vm(parent->bo)->entries) * 8;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8334
struct soc15_reg_rlcg *entries, int arr_size)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8339
if (!entries)
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c
8345
entry = &entries[i];
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5190
struct soc15_reg_rlcg *entries, int arr_size)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5195
if (!entries)
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c
5201
entry = &entries[i];
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1699
struct soc15_reg_rlcg *entries, int arr_size)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1704
if (!entries)
drivers/gpu/drm/amd/amdgpu/gfx_v9_4_3.c
1710
entry = &entries[i];
drivers/gpu/drm/amd/amdgpu/umc_v12_0.c
665
struct ras_ecc_err *entries[MAX_ECC_NUM_PER_RETIREMENT];
drivers/gpu/drm/amd/amdgpu/umc_v12_0.c
672
new_detected = radix_tree_gang_lookup_tag(ecc_tree, (void **)entries,
drivers/gpu/drm/amd/amdgpu/umc_v12_0.c
673
0, ARRAY_SIZE(entries), UMC_ECC_NEW_DETECTED_TAG);
drivers/gpu/drm/amd/amdgpu/umc_v12_0.c
675
if (!entries[i])
drivers/gpu/drm/amd/amdgpu/umc_v12_0.c
678
ret = umc_v12_0_fill_error_record(adev, entries[i], ras_error_status);
drivers/gpu/drm/amd/amdgpu/umc_v12_0.c
684
entries[i]->pa_pfn, UMC_ECC_NEW_DETECTED_TAG);
drivers/gpu/drm/amd/amdkfd/kfd_crat.c
1884
uint32_t entries = 0;
drivers/gpu/drm/amd/amdkfd/kfd_crat.c
1950
&entries,
drivers/gpu/drm/amd/amdkfd/kfd_crat.c
1955
if (entries) {
drivers/gpu/drm/amd/amdkfd/kfd_crat.c
1956
crat_table->length += (sub_type_hdr->length * entries);
drivers/gpu/drm/amd/amdkfd/kfd_crat.c
1957
crat_table->total_entries += entries;
drivers/gpu/drm/amd/amdkfd/kfd_crat.c
1960
sub_type_hdr->length * entries);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c
500
gamma->entries.red[i] = dc_fixpt_from_int(r);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c
501
gamma->entries.green[i] = dc_fixpt_from_int(g);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c
502
gamma->entries.blue[i] = dc_fixpt_from_int(b);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c
513
gamma->entries.red[i] = dc_fixpt_from_fraction(r, MAX_DRM_LUT_VALUE);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c
514
gamma->entries.green[i] = dc_fixpt_from_fraction(g, MAX_DRM_LUT_VALUE);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c
515
gamma->entries.blue[i] = dc_fixpt_from_fraction(b, MAX_DRM_LUT_VALUE);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c
531
gamma->entries.red[i] = dc_fixpt_from_fraction(lut[i].red, MAX_DRM_LUT32_VALUE);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c
532
gamma->entries.green[i] = dc_fixpt_from_fraction(lut[i].green, MAX_DRM_LUT32_VALUE);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c
533
gamma->entries.blue[i] = dc_fixpt_from_fraction(lut[i].blue, MAX_DRM_LUT32_VALUE);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_debugfs.c
913
struct dmub_debugfs_trace_entry *entries;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_debugfs.c
944
entries = (struct dmub_debugfs_trace_entry
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_debugfs.c
952
struct dmub_debugfs_trace_entry *entry = &entries[i];
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_debugfs.c
960
struct dmub_debugfs_trace_entry *entry = &entries[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
462
if (!bw_params->wm_table.entries[i].valid)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
465
ranges->reader_wm_sets[num_valid_sets].wm_inst = bw_params->wm_table.entries[i].wm_inst;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
466
ranges->reader_wm_sets[num_valid_sets].wm_type = bw_params->wm_table.entries[i].wm_type;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
477
ranges->reader_wm_sets[num_valid_sets].min_drain_clk_mhz = bw_params->clk_table.entries[i - 1].dcfclk_mhz + 1;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
479
ranges->reader_wm_sets[num_valid_sets].max_drain_clk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
578
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
667
bw_params->clk_table.entries[i].fclk_mhz = clock_table->FClocks[j].Freq;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
668
bw_params->clk_table.entries[i].memclk_mhz = clock_table->MemClocks[j].Freq;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
669
bw_params->clk_table.entries[i].voltage = clock_table->FClocks[j].Vol;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
670
bw_params->clk_table.entries[i].dcfclk_mhz = find_dcfclk_for_voltage(clock_table, clock_table->FClocks[j].Vol);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
671
bw_params->clk_table.entries[i].socclk_mhz = find_socclk_for_voltage(clock_table,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
672
bw_params->clk_table.entries[i].voltage);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
679
bw_params->wm_table.entries[i].wm_inst = i;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
682
bw_params->wm_table.entries[i].valid = false;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
686
bw_params->wm_table.entries[i].wm_type = WM_TYPE_PSTATE_CHG;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
687
bw_params->wm_table.entries[i].valid = true;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
134
&clk_mgr_base->bw_params->clk_table.entries[0].dcfclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
140
&clk_mgr_base->bw_params->clk_table.entries[0].dtbclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
145
&clk_mgr_base->bw_params->clk_table.entries[0].socclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
151
&clk_mgr_base->bw_params->clk_table.entries[0].dispclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
156
&clk_mgr_base->bw_params->clk_table.entries[0].dppclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
161
&clk_mgr_base->bw_params->clk_table.entries[0].phyclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
271
clk_mgr_base->bw_params->clk_table.entries[clk_mgr_base->bw_params->clk_table.num_entries - 1].memclk_mhz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
369
clk_mgr_base->bw_params->clk_table.entries[clk_mgr_base->bw_params->clk_table.num_entries - 1].memclk_mhz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
372
clk_mgr_base->bw_params->clk_table.entries[0].memclk_mhz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
385
clk_mgr_base->bw_params->clk_table.entries[clk_mgr_base->bw_params->clk_table.num_entries - 1].memclk_mhz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
417
&clk_mgr_base->bw_params->clk_table.entries[0].memclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
469
unsigned int i, max_phyclk_req = clk_mgr_base->bw_params->clk_table.entries[0].phyclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
98
entry_i += sizeof(clk_mgr->base.bw_params->clk_table.entries[0]);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
394
if (!bw_params->wm_table.entries[i].valid)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
397
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmSetting = bw_params->wm_table.entries[i].wm_inst;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
398
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmType = bw_params->wm_table.entries[i].wm_type;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
409
bw_params->clk_table.entries[i - 1].dcfclk_mhz + 1;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
412
bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
494
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
600
bw_params->clk_table.entries[i].fclk_mhz = clock_table->DfPstateTable[j].fclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
601
bw_params->clk_table.entries[i].memclk_mhz = clock_table->DfPstateTable[j].memclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
602
bw_params->clk_table.entries[i].voltage = clock_table->DfPstateTable[j].voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
603
bw_params->clk_table.entries[i].dcfclk_mhz = find_dcfclk_for_voltage(clock_table, clock_table->DfPstateTable[j].voltage);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
606
bw_params->clk_table.entries[i].dispclk_mhz = max_dispclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
607
bw_params->clk_table.entries[i].dppclk_mhz = max_dppclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
609
bw_params->clk_table.entries[i].fclk_mhz = clock_table->DfPstateTable[j].fclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
610
bw_params->clk_table.entries[i].memclk_mhz = clock_table->DfPstateTable[j].memclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
611
bw_params->clk_table.entries[i].voltage = clock_table->DfPstateTable[j].voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
612
bw_params->clk_table.entries[i].dcfclk_mhz = find_max_clk_value(clock_table->DcfClocks, VG_NUM_DCFCLK_DPM_LEVELS);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
613
bw_params->clk_table.entries[i].dispclk_mhz = find_max_clk_value(clock_table->DispClocks, VG_NUM_DISPCLK_DPM_LEVELS);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
614
bw_params->clk_table.entries[i].dppclk_mhz = find_max_clk_value(clock_table->DppClocks, VG_NUM_DPPCLK_DPM_LEVELS);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
620
bw_params->wm_table.entries[i].wm_inst = i;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
623
bw_params->wm_table.entries[i].valid = false;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
627
bw_params->wm_table.entries[i].wm_type = WM_TYPE_PSTATE_CHG;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
628
bw_params->wm_table.entries[i].valid = true;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
345
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
382
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
430
if (!bw_params->wm_table.entries[i].valid)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
433
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmSetting = bw_params->wm_table.entries[i].wm_inst;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
434
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmType = bw_params->wm_table.entries[i].wm_type;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
445
bw_params->clk_table.entries[i - 1].dcfclk_mhz + 1;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
448
bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
596
bw_params->clk_table.entries[i].fclk_mhz = clock_table->DfPstateTable[j].FClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
597
bw_params->clk_table.entries[i].memclk_mhz = clock_table->DfPstateTable[j].MemClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
598
bw_params->clk_table.entries[i].voltage = clock_table->DfPstateTable[j].Voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
601
bw_params->clk_table.entries[i].wck_ratio = 2;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
604
bw_params->clk_table.entries[i].wck_ratio = 4;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
607
bw_params->clk_table.entries[i].wck_ratio = 1;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
609
bw_params->clk_table.entries[i].dcfclk_mhz = find_clk_for_voltage(clock_table, clock_table->DcfClocks, clock_table->DfPstateTable[j].Voltage);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
610
bw_params->clk_table.entries[i].socclk_mhz = find_clk_for_voltage(clock_table, clock_table->SocClocks, clock_table->DfPstateTable[j].Voltage);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
611
bw_params->clk_table.entries[i].dispclk_mhz = max_dispclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
612
bw_params->clk_table.entries[i].dppclk_mhz = max_dppclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
621
bw_params->wm_table.entries[i].wm_inst = i;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
624
bw_params->wm_table.entries[i].valid = false;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
628
bw_params->wm_table.entries[i].wm_type = WM_TYPE_PSTATE_CHG;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
629
bw_params->wm_table.entries[i].valid = true;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
411
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
448
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
501
if (!bw_params->wm_table.entries[i].valid)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
504
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmSetting = bw_params->wm_table.entries[i].wm_inst;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
505
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmType = bw_params->wm_table.entries[i].wm_type;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
516
bw_params->clk_table.entries[i - 1].dcfclk_mhz + 1;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
519
bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
629
struct clk_limit_table_entry def_max = bw_params->clk_table.entries[bw_params->clk_table.num_entries - 1];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
671
if (bw_params->clk_table.entries[j].dcfclk_mhz <= clock_table->DcfClocks[i])
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
674
bw_params->clk_table.entries[i].phyclk_mhz = bw_params->clk_table.entries[j].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
675
bw_params->clk_table.entries[i].phyclk_d18_mhz = bw_params->clk_table.entries[j].phyclk_d18_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
676
bw_params->clk_table.entries[i].dtbclk_mhz = bw_params->clk_table.entries[j].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
679
bw_params->clk_table.entries[i].fclk_mhz = min_fclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
680
bw_params->clk_table.entries[i].memclk_mhz = clock_table->DfPstateTable[min_pstate].MemClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
681
bw_params->clk_table.entries[i].voltage = clock_table->DfPstateTable[min_pstate].Voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
682
bw_params->clk_table.entries[i].dcfclk_mhz = clock_table->DcfClocks[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
683
bw_params->clk_table.entries[i].socclk_mhz = clock_table->SocClocks[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
684
bw_params->clk_table.entries[i].dispclk_mhz = max_dispclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
685
bw_params->clk_table.entries[i].dppclk_mhz = max_dppclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
686
bw_params->clk_table.entries[i].wck_ratio = convert_wck_ratio(
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
695
bw_params->clk_table.entries[i].fclk_mhz = max_fclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
696
bw_params->clk_table.entries[i].memclk_mhz = clock_table->DfPstateTable[max_pstate].MemClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
697
bw_params->clk_table.entries[i].voltage = clock_table->DfPstateTable[max_pstate].Voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
698
bw_params->clk_table.entries[i].dcfclk_mhz = find_max_clk_value(clock_table->DcfClocks, NUM_DCFCLK_DPM_LEVELS);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
699
bw_params->clk_table.entries[i].socclk_mhz = find_max_clk_value(clock_table->SocClocks, NUM_SOCCLK_DPM_LEVELS);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
700
bw_params->clk_table.entries[i].dispclk_mhz = max_dispclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
701
bw_params->clk_table.entries[i].dppclk_mhz = max_dppclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
702
bw_params->clk_table.entries[i].wck_ratio = convert_wck_ratio(
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
709
bw_params->clk_table.entries[i].socclk_mhz = find_max_clk_value(clock_table->SocClocks, NUM_SOCCLK_DPM_LEVELS);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
710
bw_params->clk_table.entries[i].dispclk_mhz = find_max_clk_value(clock_table->DispClocks, NUM_DISPCLK_DPM_LEVELS);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
711
bw_params->clk_table.entries[i].dppclk_mhz = find_max_clk_value(clock_table->DppClocks, NUM_DPPCLK_DPM_LEVELS);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
713
bw_params->clk_table.entries[i].phyclk_mhz = def_max.phyclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
714
bw_params->clk_table.entries[i].phyclk_d18_mhz = def_max.phyclk_d18_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
715
bw_params->clk_table.entries[i].dtbclk_mhz = def_max.dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
722
if (!bw_params->clk_table.entries[i].fclk_mhz) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
723
bw_params->clk_table.entries[i].fclk_mhz = def_max.fclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
724
bw_params->clk_table.entries[i].memclk_mhz = def_max.memclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
725
bw_params->clk_table.entries[i].voltage = def_max.voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
727
if (!bw_params->clk_table.entries[i].dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
728
bw_params->clk_table.entries[i].dcfclk_mhz = def_max.dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
729
if (!bw_params->clk_table.entries[i].socclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
730
bw_params->clk_table.entries[i].socclk_mhz = def_max.socclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
731
if (!bw_params->clk_table.entries[i].dispclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
732
bw_params->clk_table.entries[i].dispclk_mhz = def_max.dispclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
733
if (!bw_params->clk_table.entries[i].dppclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
734
bw_params->clk_table.entries[i].dppclk_mhz = def_max.dppclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
735
if (!bw_params->clk_table.entries[i].phyclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
736
bw_params->clk_table.entries[i].phyclk_mhz = def_max.phyclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
737
if (!bw_params->clk_table.entries[i].phyclk_d18_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
738
bw_params->clk_table.entries[i].phyclk_d18_mhz = def_max.phyclk_d18_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
739
if (!bw_params->clk_table.entries[i].dtbclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
740
bw_params->clk_table.entries[i].dtbclk_mhz = def_max.dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
742
ASSERT(bw_params->clk_table.entries[i-1].dcfclk_mhz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
749
bw_params->wm_table.entries[i].wm_inst = i;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
752
bw_params->wm_table.entries[i].valid = false;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
756
bw_params->wm_table.entries[i].wm_type = WM_TYPE_PSTATE_CHG;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
757
bw_params->wm_table.entries[i].valid = true;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
257
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
305
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
342
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
390
if (!bw_params->wm_table.entries[i].valid)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
393
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmSetting = bw_params->wm_table.entries[i].wm_inst;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
394
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmType = bw_params->wm_table.entries[i].wm_type;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
405
bw_params->clk_table.entries[i - 1].dcfclk_mhz + 1;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
408
bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
488
struct clk_limit_table_entry def_max = bw_params->clk_table.entries[bw_params->clk_table.num_entries - 1];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
505
if (bw_params->clk_table.entries[j].dcfclk_mhz <= clock_table->DcfClocks[i])
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
507
bw_params->clk_table.entries[i].phyclk_mhz = bw_params->clk_table.entries[j].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
508
bw_params->clk_table.entries[i].phyclk_d18_mhz = bw_params->clk_table.entries[j].phyclk_d18_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
509
bw_params->clk_table.entries[i].dtbclk_mhz = bw_params->clk_table.entries[j].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
512
bw_params->clk_table.entries[i].fclk_mhz = clock_table->DfPstateTable[max_pstate].FClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
513
bw_params->clk_table.entries[i].memclk_mhz = clock_table->DfPstateTable[max_pstate].MemClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
514
bw_params->clk_table.entries[i].voltage = clock_table->SocVoltage[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
515
bw_params->clk_table.entries[i].dcfclk_mhz = clock_table->DcfClocks[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
516
bw_params->clk_table.entries[i].socclk_mhz = clock_table->SocClocks[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
517
bw_params->clk_table.entries[i].dispclk_mhz = clock_table->DispClocks[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
518
bw_params->clk_table.entries[i].dppclk_mhz = clock_table->DppClocks[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
519
bw_params->clk_table.entries[i].wck_ratio = 1;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
524
bw_params->clk_table.entries[i].fclk_mhz = clock_table->DfPstateTable[0].FClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
525
bw_params->clk_table.entries[i].memclk_mhz = clock_table->DfPstateTable[0].MemClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
526
bw_params->clk_table.entries[i].voltage = clock_table->DfPstateTable[0].Voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
527
bw_params->clk_table.entries[i].dcfclk_mhz = clock_table->DcfClocks[0];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
528
bw_params->clk_table.entries[i].wck_ratio = 1;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
531
bw_params->clk_table.entries[i-1].voltage = clock_table->SocVoltage[clock_table->NumSocClkLevelsEnabled - 1];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
532
bw_params->clk_table.entries[i-1].socclk_mhz = clock_table->SocClocks[clock_table->NumSocClkLevelsEnabled - 1];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
533
bw_params->clk_table.entries[i-1].dispclk_mhz = clock_table->DispClocks[clock_table->NumDispClkLevelsEnabled - 1];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
534
bw_params->clk_table.entries[i-1].dppclk_mhz = clock_table->DppClocks[clock_table->NumDispClkLevelsEnabled - 1];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
542
if (!bw_params->clk_table.entries[i].fclk_mhz) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
543
bw_params->clk_table.entries[i].fclk_mhz = def_max.fclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
544
bw_params->clk_table.entries[i].memclk_mhz = def_max.memclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
545
bw_params->clk_table.entries[i].voltage = def_max.voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
547
if (!bw_params->clk_table.entries[i].dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
548
bw_params->clk_table.entries[i].dcfclk_mhz = def_max.dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
549
if (!bw_params->clk_table.entries[i].socclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
550
bw_params->clk_table.entries[i].socclk_mhz = def_max.socclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
551
if (!bw_params->clk_table.entries[i].dispclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
552
bw_params->clk_table.entries[i].dispclk_mhz = def_max.dispclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
553
if (!bw_params->clk_table.entries[i].dppclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
554
bw_params->clk_table.entries[i].dppclk_mhz = def_max.dppclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
555
if (!bw_params->clk_table.entries[i].phyclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
556
bw_params->clk_table.entries[i].phyclk_mhz = def_max.phyclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
557
if (!bw_params->clk_table.entries[i].phyclk_d18_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
558
bw_params->clk_table.entries[i].phyclk_d18_mhz = def_max.phyclk_d18_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
559
if (!bw_params->clk_table.entries[i].dtbclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
560
bw_params->clk_table.entries[i].dtbclk_mhz = def_max.dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
564
ASSERT(bw_params->clk_table.entries[i-1].phyclk_mhz == def_max.phyclk_mhz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
565
ASSERT(bw_params->clk_table.entries[i-1].phyclk_d18_mhz == def_max.phyclk_d18_mhz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
566
ASSERT(bw_params->clk_table.entries[i-1].dtbclk_mhz == def_max.dtbclk_mhz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
567
ASSERT(bw_params->clk_table.entries[i-1].dcfclk_mhz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
573
bw_params->wm_table.entries[i].wm_inst = i;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
576
bw_params->wm_table.entries[i].valid = false;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
580
bw_params->wm_table.entries[i].wm_type = WM_TYPE_PSTATE_CHG;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
581
bw_params->wm_table.entries[i].valid = true;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
271
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
308
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
356
if (!bw_params->wm_table.entries[i].valid)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
359
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmSetting = bw_params->wm_table.entries[i].wm_inst;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
360
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmType = bw_params->wm_table.entries[i].wm_type;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
371
bw_params->clk_table.entries[i - 1].dcfclk_mhz + 1;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
374
bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
525
bw_params->clk_table.entries[i].fclk_mhz = clock_table->DfPstateTable[j].FClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
526
bw_params->clk_table.entries[i].memclk_mhz = clock_table->DfPstateTable[j].MemClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
527
bw_params->clk_table.entries[i].voltage = clock_table->DfPstateTable[j].Voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
530
bw_params->clk_table.entries[i].wck_ratio = 2;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
533
bw_params->clk_table.entries[i].wck_ratio = 4;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
536
bw_params->clk_table.entries[i].wck_ratio = 1;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
540
bw_params->clk_table.entries[i].dcfclk_mhz = temp;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
543
bw_params->clk_table.entries[i].socclk_mhz = temp;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
544
bw_params->clk_table.entries[i].dispclk_mhz = max_dispclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
545
bw_params->clk_table.entries[i].dppclk_mhz = max_dppclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
553
bw_params->wm_table.entries[i].wm_inst = i;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
556
bw_params->wm_table.entries[i].valid = false;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
560
bw_params->wm_table.entries[i].wm_type = WM_TYPE_PSTATE_CHG;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
561
bw_params->wm_table.entries[i].valid = true;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
1010
clk_mgr_base->bw_params->clk_table.entries[0].memclk_mhz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
1037
&clk_mgr_base->bw_params->clk_table.entries[0].memclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
1046
&clk_mgr_base->bw_params->clk_table.entries[0].fclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
1053
clk_mgr_base->bw_params->clk_table.entries[num_entries_per_clk->num_memclk_levels - 1].memclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
151
entry_i += sizeof(clk_mgr->base.bw_params->clk_table.entries[0]);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
192
&clk_mgr_base->bw_params->clk_table.entries[0].dcfclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
198
&clk_mgr_base->bw_params->clk_table.entries[0].socclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
205
&clk_mgr_base->bw_params->clk_table.entries[0].dtbclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
213
&clk_mgr_base->bw_params->clk_table.entries[0].dispclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
223
&clk_mgr_base->bw_params->clk_table.entries[0].dppclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
238
if (clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
240
clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
244
if (clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz > 1950)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
245
clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz = 1950;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
249
if (clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
251
clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
256
if (clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz > 1950)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
257
clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz = 1950;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
547
fclk_khz_override = clk_mgr->base.bw_params->clk_table.entries[num_fclk_levels].fclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
798
new_clocks->ref_dtbclk_khz = clk_mgr_base->bw_params->clk_table.entries[0].dtbclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1023
struct clk_limit_table_entry def_max = bw_params->clk_table.entries[bw_params->clk_table.num_entries - 1];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1084
if (bw_params->clk_table.entries[j].dcfclk_mhz <= clock_table->DcfClocks[i])
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1087
bw_params->clk_table.entries[i].phyclk_mhz = bw_params->clk_table.entries[j].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1088
bw_params->clk_table.entries[i].phyclk_d18_mhz = bw_params->clk_table.entries[j].phyclk_d18_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1089
bw_params->clk_table.entries[i].dtbclk_mhz = bw_params->clk_table.entries[j].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1092
bw_params->clk_table.entries[i].memclk_mhz = clock_table->MemPstateTable[min_pstate].MemClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1093
bw_params->clk_table.entries[i].voltage = clock_table->MemPstateTable[min_pstate].Voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1094
bw_params->clk_table.entries[i].dcfclk_mhz = clock_table->DcfClocks[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1095
bw_params->clk_table.entries[i].socclk_mhz = clock_table->SocClocks[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1096
bw_params->clk_table.entries[i].dispclk_mhz = max_dispclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1097
bw_params->clk_table.entries[i].dppclk_mhz = max_dppclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1098
bw_params->clk_table.entries[i].wck_ratio =
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1102
bw_params->clk_table.entries[i].fclk_mhz = min(max_fclk, 2 * clock_table->DcfClocks[i]);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1110
bw_params->clk_table.entries[i].fclk_mhz = max_fclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1111
bw_params->clk_table.entries[i].memclk_mhz = clock_table->MemPstateTable[max_pstate].MemClk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1112
bw_params->clk_table.entries[i].voltage = clock_table->MemPstateTable[max_pstate].Voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1113
bw_params->clk_table.entries[i].dcfclk_mhz =
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1115
bw_params->clk_table.entries[i].socclk_mhz =
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1117
bw_params->clk_table.entries[i].dispclk_mhz = max_dispclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1118
bw_params->clk_table.entries[i].dppclk_mhz = max_dppclk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1119
bw_params->clk_table.entries[i].wck_ratio = convert_wck_ratio(
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1126
bw_params->clk_table.entries[i].socclk_mhz =
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1128
bw_params->clk_table.entries[i].dispclk_mhz =
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1130
bw_params->clk_table.entries[i].dppclk_mhz =
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1132
bw_params->clk_table.entries[i].fclk_mhz =
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1135
bw_params->clk_table.entries[i].phyclk_mhz = def_max.phyclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1136
bw_params->clk_table.entries[i].phyclk_d18_mhz = def_max.phyclk_d18_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1137
bw_params->clk_table.entries[i].dtbclk_mhz = def_max.dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1150
if (!bw_params->clk_table.entries[i].fclk_mhz) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1151
bw_params->clk_table.entries[i].fclk_mhz = def_max.fclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1152
bw_params->clk_table.entries[i].memclk_mhz = def_max.memclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1153
bw_params->clk_table.entries[i].voltage = def_max.voltage;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1155
if (!bw_params->clk_table.entries[i].dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1156
bw_params->clk_table.entries[i].dcfclk_mhz = def_max.dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1157
if (!bw_params->clk_table.entries[i].socclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1158
bw_params->clk_table.entries[i].socclk_mhz = def_max.socclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1159
if (!bw_params->clk_table.entries[i].dispclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1160
bw_params->clk_table.entries[i].dispclk_mhz = def_max.dispclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1161
if (!bw_params->clk_table.entries[i].dppclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1162
bw_params->clk_table.entries[i].dppclk_mhz = def_max.dppclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1163
if (!bw_params->clk_table.entries[i].fclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1164
bw_params->clk_table.entries[i].fclk_mhz = def_max.fclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1165
if (!bw_params->clk_table.entries[i].phyclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1166
bw_params->clk_table.entries[i].phyclk_mhz = def_max.phyclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1167
if (!bw_params->clk_table.entries[i].phyclk_d18_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1168
bw_params->clk_table.entries[i].phyclk_d18_mhz = def_max.phyclk_d18_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1169
if (!bw_params->clk_table.entries[i].dtbclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1170
bw_params->clk_table.entries[i].dtbclk_mhz = def_max.dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1172
ASSERT(bw_params->clk_table.entries[i-1].dcfclk_mhz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1178
bw_params->wm_table.entries[i].wm_inst = i;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1181
bw_params->wm_table.entries[i].valid = false;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1185
bw_params->wm_table.entries[i].wm_type = WM_TYPE_PSTATE_CHG;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1186
bw_params->wm_table.entries[i].valid = true;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1306
clk_mgr->base.bw_params->clk_table.entries[num_clk_levels - 1].dispclk_mhz * 1000 :
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1311
clk_mgr->base.bw_params->clk_table.entries[num_clk_levels - 1].dppclk_mhz * 1000 :
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1316
clk_mgr->base.bw_params->clk_table.entries[num_clk_levels - 1].dispclk_mhz * 1000 / 3 :
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
764
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
801
.entries = {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
872
if (!bw_params->wm_table.entries[i].valid)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
875
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmSetting = bw_params->wm_table.entries[i].wm_inst;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
876
table->WatermarkRow[WM_DCFCLK][num_valid_sets].WmType = bw_params->wm_table.entries[i].wm_type;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
887
bw_params->clk_table.entries[i - 1].dcfclk_mhz + 1;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
890
bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1114
new_clocks->ref_dtbclk_khz = clk_mgr_base->bw_params->clk_table.entries[0].dtbclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1346
new_clocks.dramclk_khz = clk_mgr_base->bw_params->clk_table.entries[0].memclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1386
&clk_mgr_base->bw_params->clk_table.entries[0].memclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1390
clk_mgr_base->bw_params->clk_table.entries[num_entries_per_clk->num_memclk_levels - 1].memclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1395
clk_mgr_base->bw_params->clk_table.entries[num_entries_per_clk->num_memclk_levels - 1].memclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1400
&clk_mgr_base->bw_params->clk_table.entries[0].fclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1404
clk_mgr_base->bw_params->clk_table.entries[num_entries_per_clk->num_fclk_levels - 1].fclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1509
clk_mgr->base.bw_params->clk_table.entries[num_clk_levels - 1].dispclk_mhz * 1000 :
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1514
clk_mgr->base.bw_params->clk_table.entries[num_clk_levels - 1].dppclk_mhz * 1000 :
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1519
clk_mgr->base.bw_params->clk_table.entries[num_clk_levels - 1].dispclk_mhz * 1000 / 3 :
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
176
for (i = 0; i < *num_levels && i < ARRAY_SIZE(clk_mgr->base.bw_params->clk_table.entries); i++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
178
entry_i += sizeof(clk_mgr->base.bw_params->clk_table.entries[0]);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
185
uint16_t min_uclk_mhz = clk_mgr->bw_params->clk_table.entries[0].memclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
186
uint16_t min_dcfclk_mhz = clk_mgr->bw_params->clk_table.entries[0].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
245
&clk_mgr_base->bw_params->clk_table.entries[0].dcfclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
249
clk_mgr_base->bw_params->clk_table.entries[num_entries_per_clk->num_dcfclk_levels - 1].dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
254
&clk_mgr_base->bw_params->clk_table.entries[0].socclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
258
clk_mgr_base->bw_params->clk_table.entries[num_entries_per_clk->num_socclk_levels - 1].socclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
264
&clk_mgr_base->bw_params->clk_table.entries[0].dtbclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
268
clk_mgr_base->bw_params->clk_table.entries[num_entries_per_clk->num_dtbclk_levels - 1].dtbclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
274
&clk_mgr_base->bw_params->clk_table.entries[0].dispclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
278
clk_mgr_base->bw_params->clk_table.entries[num_entries_per_clk->num_dispclk_levels - 1].dispclk_mhz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
283
&clk_mgr_base->bw_params->clk_table.entries[0].dppclk_mhz,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
293
if (clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
295
clk_mgr_base->bw_params->clk_table.entries[i].dispclk_mhz
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
301
if (clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
303
clk_mgr_base->bw_params->clk_table.entries[i].dppclk_mhz
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
449
fclk_khz_override = clk_mgr->base.bw_params->clk_table.entries[num_fclk_levels].fclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
836
clk_mgr_base->bw_params->clk_table.entries[clk_mgr_base->bw_params->clk_table.num_entries_per_clk.num_fclk_levels - 1].fclk_mhz;
drivers/gpu/drm/amd/display/dc/core/dc.c
3151
memcpy(&surface->gamma_correction.entries,
drivers/gpu/drm/amd/display/dc/core/dc.c
3152
&srf_update->gamma->entries,
drivers/gpu/drm/amd/display/dc/core/dc.c
5852
if (dc->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz > maxDPM)
drivers/gpu/drm/amd/display/dc/core/dc.c
5853
maxDPM = dc->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz;
drivers/gpu/drm/amd/display/dc/dc_hw_types.h
551
} entries;
drivers/gpu/drm/amd/display/dc/dce/dce_ipp.c
199
gamma->entries.red[i]));
drivers/gpu/drm/amd/display/dc/dce/dce_ipp.c
202
gamma->entries.green[i]));
drivers/gpu/drm/amd/display/dc/dce/dce_ipp.c
205
gamma->entries.blue[i]));
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2181
dc->clk_mgr->bw_params->wm_table.entries[i].sr_exit_time_us =
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2188
dc->clk_mgr->bw_params->wm_table.entries[i].sr_enter_plus_exit_time_us =
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2199
dc->clk_mgr->bw_params->wm_table.entries[i].pstate_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2294
table_entry = &bw_params->wm_table.entries[WM_D];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2302
table_entry = &bw_params->wm_table.entries[WM_C];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2307
table_entry = &bw_params->wm_table.entries[WM_B];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2313
table_entry = &bw_params->wm_table.entries[WM_A];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2380
low_pstate_lvl.dcfclk_mhz = clk_table->entries[0].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2381
low_pstate_lvl.fabricclk_mhz = clk_table->entries[0].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2382
low_pstate_lvl.socclk_mhz = clk_table->entries[0].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2383
low_pstate_lvl.dram_speed_mts = clk_table->entries[0].memclk_mhz * 2;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2394
clk_table->entries[i] = clk_table->entries[i-1];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2395
clk_table->entries[1] = clk_table->entries[0];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2422
if ((unsigned int) dcn2_1_soc.clock_limits[j].dcfclk_mhz <= clk_table->entries[i].dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2433
s[k].dcfclk_mhz = clk_table->entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2434
s[k].fabricclk_mhz = clk_table->entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2435
s[k].socclk_mhz = clk_table->entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2436
s[k].dram_speed_mts = clk_table->entries[i].memclk_mhz * 2;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2468
bw_params->wm_table.entries[WM_D].pstate_latency_us = LPDDR_MEM_RETRAIN_LATENCY;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2469
bw_params->wm_table.entries[WM_D].wm_inst = WM_D;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2470
bw_params->wm_table.entries[WM_D].wm_type = WM_TYPE_RETRAINING;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2471
bw_params->wm_table.entries[WM_D].valid = true;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
768
.entries = {
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
805
.entries = {
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
842
.entries = {
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
879
.entries = {
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
916
.entries = {
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
953
.entries = {
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
420
dc->clk_mgr->bw_params->clk_table.entries[min_dram_speed_mts_offset].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
664
uint16_t min_uclk_mhz = base->bw_params->clk_table.entries[0].memclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
219
.entries = {
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
256
.entries = {
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
344
if (clk_table->entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
345
max_dispclk_mhz = clk_table->entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
346
if (clk_table->entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
347
max_dppclk_mhz = clk_table->entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
353
if ((unsigned int) dcn3_01_soc.clock_limits[j].dcfclk_mhz <= clk_table->entries[i].dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
360
s[i].dcfclk_mhz = clk_table->entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
361
s[i].fabricclk_mhz = clk_table->entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
362
s[i].socclk_mhz = clk_table->entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
363
s[i].dram_speed_mts = clk_table->entries[i].memclk_mhz * 2;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
443
table_entry = &bw_params->wm_table.entries[WM_D];
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
451
table_entry = &bw_params->wm_table.entries[WM_C];
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
456
table_entry = &bw_params->wm_table.entries[WM_B];
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
462
table_entry = &bw_params->wm_table.entries[WM_A];
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
220
if (bw_params->clk_table.entries[0].memclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
224
if (bw_params->clk_table.entries[i].dcfclk_mhz > max_dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
225
max_dcfclk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
226
if (bw_params->clk_table.entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
227
max_dispclk_mhz = bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
228
if (bw_params->clk_table.entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
229
max_dppclk_mhz = bw_params->clk_table.entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
230
if (bw_params->clk_table.entries[i].phyclk_mhz > max_phyclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
231
max_phyclk_mhz = bw_params->clk_table.entries[i].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
262
dcn302_get_optimal_dcfclk_fclk_for_uclk(bw_params->clk_table.entries[i].memclk_mhz * 16,
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
264
if (optimal_dcfclk_for_uclk[i] < bw_params->clk_table.entries[0].dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
265
optimal_dcfclk_for_uclk[i] = bw_params->clk_table.entries[0].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
273
bw_params->clk_table.entries[j].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
289
dram_speed_mts[num_states++] = bw_params->clk_table.entries[j++].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
304
dram_speed_mts[num_states++] = bw_params->clk_table.entries[j++].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
329
if (!bw_params->clk_table.entries[i].dtbclk_mhz && i > 0)
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
332
dcn3_02_soc.clock_limits[i].dtbclk_mhz = bw_params->clk_table.entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
333
if (!bw_params->clk_table.entries[i].socclk_mhz && i > 0)
drivers/gpu/drm/amd/display/dc/dml/dcn302/dcn302_fpu.c
336
dcn3_02_soc.clock_limits[i].socclk_mhz = bw_params->clk_table.entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
216
if (bw_params->clk_table.entries[0].memclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
220
if (bw_params->clk_table.entries[i].dcfclk_mhz > max_dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
221
max_dcfclk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
222
if (bw_params->clk_table.entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
223
max_dispclk_mhz = bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
224
if (bw_params->clk_table.entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
225
max_dppclk_mhz = bw_params->clk_table.entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
226
if (bw_params->clk_table.entries[i].phyclk_mhz > max_phyclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
227
max_phyclk_mhz = bw_params->clk_table.entries[i].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
256
dcn303_get_optimal_dcfclk_fclk_for_uclk(bw_params->clk_table.entries[i].memclk_mhz * 16,
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
258
if (optimal_dcfclk_for_uclk[i] < bw_params->clk_table.entries[0].dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
259
optimal_dcfclk_for_uclk[i] = bw_params->clk_table.entries[0].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
267
bw_params->clk_table.entries[j].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
278
bw_params->clk_table.entries[j].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
295
bw_params->clk_table.entries[j++].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
310
dram_speed_mts[num_states++] = bw_params->clk_table.entries[j++].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
335
if (!bw_params->clk_table.entries[i].dtbclk_mhz && i > 0)
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
338
dcn3_03_soc.clock_limits[i].dtbclk_mhz = bw_params->clk_table.entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
339
if (!bw_params->clk_table.entries[i].socclk_mhz && i > 0)
drivers/gpu/drm/amd/display/dc/dml/dcn303/dcn303_fpu.c
342
dcn3_03_soc.clock_limits[i].socclk_mhz = bw_params->clk_table.entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
458
if (dc->clk_mgr->bw_params->wm_table.entries[WM_A].valid) {
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
459
context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.entries[WM_A].pstate_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
460
context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.entries[WM_A].sr_enter_plus_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
461
context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.entries[WM_A].sr_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
469
if (dc->clk_mgr->bw_params->wm_table.entries[WM_A].valid) {
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
474
context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.entries[WM_A].pstate_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
476
dc->clk_mgr->bw_params->wm_table.entries[WM_A].sr_enter_plus_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
478
dc->clk_mgr->bw_params->wm_table.entries[WM_A].sr_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
611
if (clk_table->entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
612
max_dispclk_mhz = clk_table->entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
613
if (clk_table->entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
614
max_dppclk_mhz = clk_table->entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
620
if ((unsigned int) dcn3_1_soc.clock_limits[j].dcfclk_mhz <= clk_table->entries[i].dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
629
s[i].dcfclk_mhz = clk_table->entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
630
s[i].fabricclk_mhz = clk_table->entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
631
s[i].socclk_mhz = clk_table->entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
632
s[i].dram_speed_mts = clk_table->entries[i].memclk_mhz *
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
633
2 * clk_table->entries[i].wck_ratio;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
687
if (clk_table->entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
688
max_dispclk_mhz = clk_table->entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
689
if (clk_table->entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
690
max_dppclk_mhz = clk_table->entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
697
dcn3_15_soc.clock_limits[i].dcfclk_mhz = clk_table->entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
698
dcn3_15_soc.clock_limits[i].fabricclk_mhz = clk_table->entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
699
dcn3_15_soc.clock_limits[i].socclk_mhz = clk_table->entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
700
dcn3_15_soc.clock_limits[i].dram_speed_mts = clk_table->entries[i].memclk_mhz * 2 * clk_table->entries[i].wck_ratio;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
703
dcn3_15_soc.clock_limits[i].dtbclk_mhz = clk_table->entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
704
dcn3_15_soc.clock_limits[i].phyclk_d18_mhz = clk_table->entries[i].phyclk_d18_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
705
dcn3_15_soc.clock_limits[i].phyclk_mhz = clk_table->entries[i].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
750
if (clk_table->entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
751
max_dispclk_mhz = clk_table->entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
752
if (clk_table->entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
753
max_dppclk_mhz = clk_table->entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
760
clk_table->entries[i].dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
769
s[i].dcfclk_mhz = clk_table->entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
770
s[i].fabricclk_mhz = clk_table->entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
771
s[i].socclk_mhz = clk_table->entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
772
s[i].dram_speed_mts = clk_table->entries[i].memclk_mhz *
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
773
2 * clk_table->entries[i].wck_ratio;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
209
if (clk_table->entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
210
max_dispclk_mhz = clk_table->entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
211
if (clk_table->entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
212
max_dppclk_mhz = clk_table->entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
218
if ((unsigned int) dcn3_14_soc.clock_limits[j].dcfclk_mhz <= clk_table->entries[i].dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
231
clock_limits[i].dcfclk_mhz = clk_table->entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
237
clock_limits[i].fabricclk_mhz = clk_table->entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
238
clock_limits[i].socclk_mhz = clk_table->entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
240
if (clk_table->entries[i].memclk_mhz && clk_table->entries[i].wck_ratio)
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
241
clock_limits[i].dram_speed_mts = clk_table->entries[i].memclk_mhz * 2 * clk_table->entries[i].wck_ratio;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
194
uint16_t min_uclk_mhz = clk_mgr->base.bw_params->clk_table.entries[0].memclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
195
uint16_t min_dcfclk_mhz = clk_mgr->base.bw_params->clk_table.entries[0].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
205
clk_mgr->base.bw_params->wm_table.nv_entries[WM_B].pmfw_breakdown.min_dcfclk = clk_mgr->base.bw_params->clk_table.entries[0].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
207
if (clk_mgr->base.bw_params->clk_table.entries[2].memclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
208
setb_min_uclk_mhz = clk_mgr->base.bw_params->clk_table.entries[2].memclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
246
clk_mgr->base.bw_params->dummy_pstate_table[0].dram_speed_mts = clk_mgr->base.bw_params->clk_table.entries[0].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
248
clk_mgr->base.bw_params->dummy_pstate_table[1].dram_speed_mts = clk_mgr->base.bw_params->clk_table.entries[1].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
250
clk_mgr->base.bw_params->dummy_pstate_table[2].dram_speed_mts = clk_mgr->base.bw_params->clk_table.entries[2].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
252
clk_mgr->base.bw_params->dummy_pstate_table[3].dram_speed_mts = clk_mgr->base.bw_params->clk_table.entries[3].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2520
dc->clk_mgr->bw_params->clk_table.entries[min_dram_speed_mts_offset].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2677
if (bw_params->clk_table.entries[i].dcfclk_mhz > max_dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2678
max_dcfclk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2679
if (bw_params->clk_table.entries[i].fclk_mhz > max_fclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2680
max_fclk_mhz = bw_params->clk_table.entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2681
if (bw_params->clk_table.entries[i].memclk_mhz > max_uclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2682
max_uclk_mhz = bw_params->clk_table.entries[i].memclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2683
if (bw_params->clk_table.entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2684
max_dispclk_mhz = bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2685
if (bw_params->clk_table.entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2686
max_dppclk_mhz = bw_params->clk_table.entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2687
if (bw_params->clk_table.entries[i].phyclk_mhz > max_phyclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2688
max_phyclk_mhz = bw_params->clk_table.entries[i].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2689
if (bw_params->clk_table.entries[i].dtbclk_mhz > max_dtbclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2690
max_dtbclk_mhz = bw_params->clk_table.entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2700
bw_params->clk_table.entries[0].dcfclk_mhz = dcn3_2_soc.clock_limits[0].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2702
bw_params->clk_table.entries[0].dispclk_mhz = dcn3_2_soc.clock_limits[0].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2704
bw_params->clk_table.entries[0].dtbclk_mhz = dcn3_2_soc.clock_limits[0].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2706
bw_params->clk_table.entries[0].memclk_mhz = dcn3_2_soc.clock_limits[0].dram_speed_mts / 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2823
if (bw_params->clk_table.entries[i].dcfclk_mhz > max_clk_data.dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2824
max_clk_data.dcfclk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2825
if (bw_params->clk_table.entries[i].fclk_mhz > max_clk_data.fclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2826
max_clk_data.fclk_mhz = bw_params->clk_table.entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2827
if (bw_params->clk_table.entries[i].memclk_mhz > max_clk_data.memclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2828
max_clk_data.memclk_mhz = bw_params->clk_table.entries[i].memclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2829
if (bw_params->clk_table.entries[i].dispclk_mhz > max_clk_data.dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2830
max_clk_data.dispclk_mhz = bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2831
if (bw_params->clk_table.entries[i].dppclk_mhz > max_clk_data.dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2832
max_clk_data.dppclk_mhz = bw_params->clk_table.entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2833
if (bw_params->clk_table.entries[i].phyclk_mhz > max_clk_data.phyclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2834
max_clk_data.phyclk_mhz = bw_params->clk_table.entries[i].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2835
if (bw_params->clk_table.entries[i].dtbclk_mhz > max_clk_data.dtbclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2836
max_clk_data.dtbclk_mhz = bw_params->clk_table.entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2838
if (bw_params->clk_table.entries[i].memclk_mhz > 0) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2840
if (bw_params->clk_table.entries[i].memclk_mhz <= bw_params->dc_mode_limit.memclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2843
if (bw_params->clk_table.entries[i].fclk_mhz > 0) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2845
if (bw_params->clk_table.entries[i].fclk_mhz <= bw_params->dc_mode_limit.fclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2848
if (bw_params->clk_table.entries[i].dcfclk_mhz > 0) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2850
if (bw_params->clk_table.entries[i].dcfclk_mhz <= bw_params->dc_mode_limit.dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2865
if (num_dcfclk_dpms > 0 && bw_params->clk_table.entries[0].fclk_mhz > min_fclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2866
min_fclk_mhz = bw_params->clk_table.entries[0].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2915
entry.dram_speed_mts = bw_params->clk_table.entries[i].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2926
entry.fabricclk_mhz = bw_params->clk_table.entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2980
if (bw_params->clk_table.entries[j].memclk_mhz * 16 >= table[i].dram_speed_mts) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2981
table[i].dram_speed_mts = bw_params->clk_table.entries[j].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2991
if (bw_params->clk_table.entries[j].fclk_mhz >= table[i].fabricclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2992
table[i].fabricclk_mhz = bw_params->clk_table.entries[j].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3151
if (bw_params->clk_table.entries[0].memclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3167
if (bw_params->clk_table.entries[i].dcfclk_mhz > max_dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3168
max_dcfclk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3169
if (bw_params->clk_table.entries[i].dcfclk_mhz != 0 &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3170
bw_params->clk_table.entries[i].dcfclk_mhz < min_dcfclk)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3171
min_dcfclk = bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3172
if (bw_params->clk_table.entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3173
max_dispclk_mhz = bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3174
if (bw_params->clk_table.entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3175
max_dppclk_mhz = bw_params->clk_table.entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3176
if (bw_params->clk_table.entries[i].phyclk_mhz > max_phyclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3177
max_phyclk_mhz = bw_params->clk_table.entries[i].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3210
dcn32_get_optimal_dcfclk_fclk_for_uclk(bw_params->clk_table.entries[i].memclk_mhz * 16,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3212
if (optimal_dcfclk_for_uclk[i] < bw_params->clk_table.entries[0].dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3213
optimal_dcfclk_for_uclk[i] = bw_params->clk_table.entries[0].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3222
bw_params->clk_table.entries[j].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3238
dram_speed_mts[num_states++] = bw_params->clk_table.entries[j++].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3253
dram_speed_mts[num_states++] = bw_params->clk_table.entries[j++].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3280
if (!bw_params->clk_table.entries[i].dtbclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3283
dcn3_2_soc.clock_limits[i].dtbclk_mhz = bw_params->clk_table.entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3285
} else if (bw_params->clk_table.entries[i].dtbclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3286
dcn3_2_soc.clock_limits[i].dtbclk_mhz = bw_params->clk_table.entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3289
if (!bw_params->clk_table.entries[i].socclk_mhz && i > 0)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3292
dcn3_2_soc.clock_limits[i].socclk_mhz = bw_params->clk_table.entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3342
if (dc->clk_mgr->bw_params->clk_table.entries[i].dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3344
dc->clk_mgr->bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3348
if (dc->clk_mgr->bw_params->clk_table.entries[i].fclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3350
dc->clk_mgr->bw_params->clk_table.entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3354
if (dc->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3356
dc->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3360
if (dc->clk_mgr->bw_params->clk_table.entries[i].socclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3362
dc->clk_mgr->bw_params->clk_table.entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3366
if (dc->clk_mgr->bw_params->clk_table.entries[i].dtbclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3368
dc->clk_mgr->bw_params->clk_table.entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3372
if (dc->clk_mgr->bw_params->clk_table.entries[i].dispclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3374
dc->clk_mgr->bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3376
dc->clk_mgr->bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3607
if (context->bw_ctx.dml.vba.DRAMSpeed <= dc->clk_mgr->bw_params->clk_table.entries[0].memclk_mhz * 16 &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3609
context->bw_ctx.dml.vba.DRAMSpeed = dc->clk_mgr->bw_params->clk_table.entries[1].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
365
if (bw_params->clk_table.entries[i].dcfclk_mhz > max_clk_data.dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
366
max_clk_data.dcfclk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
367
if (bw_params->clk_table.entries[i].fclk_mhz > max_clk_data.fclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
368
max_clk_data.fclk_mhz = bw_params->clk_table.entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
369
if (bw_params->clk_table.entries[i].memclk_mhz > max_clk_data.memclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
370
max_clk_data.memclk_mhz = bw_params->clk_table.entries[i].memclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
371
if (bw_params->clk_table.entries[i].dispclk_mhz > max_clk_data.dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
372
max_clk_data.dispclk_mhz = bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
373
if (bw_params->clk_table.entries[i].dppclk_mhz > max_clk_data.dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
374
max_clk_data.dppclk_mhz = bw_params->clk_table.entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
375
if (bw_params->clk_table.entries[i].phyclk_mhz > max_clk_data.phyclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
376
max_clk_data.phyclk_mhz = bw_params->clk_table.entries[i].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
377
if (bw_params->clk_table.entries[i].dtbclk_mhz > max_clk_data.dtbclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
378
max_clk_data.dtbclk_mhz = bw_params->clk_table.entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
380
if (bw_params->clk_table.entries[i].memclk_mhz > 0) {
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
382
if (bw_params->clk_table.entries[i].memclk_mhz <= bw_params->dc_mode_limit.memclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
385
if (bw_params->clk_table.entries[i].fclk_mhz > 0) {
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
387
if (bw_params->clk_table.entries[i].fclk_mhz <= bw_params->dc_mode_limit.fclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
390
if (bw_params->clk_table.entries[i].dcfclk_mhz > 0) {
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
392
if (bw_params->clk_table.entries[i].dcfclk_mhz <= bw_params->dc_mode_limit.dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
407
if (num_dcfclk_dpms > 0 && bw_params->clk_table.entries[0].fclk_mhz > min_fclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
408
min_fclk_mhz = bw_params->clk_table.entries[0].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
457
entry.dram_speed_mts = bw_params->clk_table.entries[i].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
468
entry.fabricclk_mhz = bw_params->clk_table.entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
524
if (bw_params->clk_table.entries[j].memclk_mhz * 16 >= table[i].dram_speed_mts) {
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
525
table[i].dram_speed_mts = bw_params->clk_table.entries[j].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
535
if (bw_params->clk_table.entries[j].fclk_mhz >= table[i].fabricclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
536
table[i].fabricclk_mhz = bw_params->clk_table.entries[j].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
722
if (bw_params->clk_table.entries[i].dcfclk_mhz > max_dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
723
max_dcfclk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
724
if (bw_params->clk_table.entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
725
max_dispclk_mhz = bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
726
if (bw_params->clk_table.entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
727
max_dppclk_mhz = bw_params->clk_table.entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
728
if (bw_params->clk_table.entries[i].phyclk_mhz > max_phyclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
729
max_phyclk_mhz = bw_params->clk_table.entries[i].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
760
dcn321_get_optimal_dcfclk_fclk_for_uclk(bw_params->clk_table.entries[i].memclk_mhz * 16,
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
762
if (optimal_dcfclk_for_uclk[i] < bw_params->clk_table.entries[0].dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
763
optimal_dcfclk_for_uclk[i] = bw_params->clk_table.entries[0].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
772
bw_params->clk_table.entries[j].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
788
dram_speed_mts[num_states++] = bw_params->clk_table.entries[j++].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
803
dram_speed_mts[num_states++] = bw_params->clk_table.entries[j++].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
830
if (!bw_params->clk_table.entries[i].dtbclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
833
dcn3_21_soc.clock_limits[i].dtbclk_mhz = bw_params->clk_table.entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
835
} else if (bw_params->clk_table.entries[i].dtbclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
836
dcn3_21_soc.clock_limits[i].dtbclk_mhz = bw_params->clk_table.entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
839
if (!bw_params->clk_table.entries[i].socclk_mhz && i > 0)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
842
dcn3_21_soc.clock_limits[i].socclk_mhz = bw_params->clk_table.entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
892
if (dc->clk_mgr->bw_params->clk_table.entries[i].dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
894
dc->clk_mgr->bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
898
if (dc->clk_mgr->bw_params->clk_table.entries[i].fclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
900
dc->clk_mgr->bw_params->clk_table.entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
904
if (dc->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
906
dc->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
910
if (dc->clk_mgr->bw_params->clk_table.entries[i].socclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
912
dc->clk_mgr->bw_params->clk_table.entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
916
if (dc->clk_mgr->bw_params->clk_table.entries[i].dtbclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
918
dc->clk_mgr->bw_params->clk_table.entries[i].dtbclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
922
if (dc->clk_mgr->bw_params->clk_table.entries[i].dispclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
924
dc->clk_mgr->bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn321/dcn321_fpu.c
926
dc->clk_mgr->bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
248
if (clk_table->entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
249
max_dispclk_mhz = clk_table->entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
250
if (clk_table->entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
251
max_dppclk_mhz = clk_table->entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
259
clk_table->entries[i].dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
272
clock_limits[i].dcfclk_mhz = clk_table->entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
282
clk_table->entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
284
clk_table->entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
286
if (clk_table->entries[i].memclk_mhz &&
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
287
clk_table->entries[i].wck_ratio)
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
289
clk_table->entries[i].memclk_mhz * 2 *
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
290
clk_table->entries[i].wck_ratio;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
369
clk_table->entries[i].memclk_mhz * clk_table->entries[i].wck_ratio;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
282
if (clk_table->entries[i].dispclk_mhz > max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
283
max_dispclk_mhz = clk_table->entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
284
if (clk_table->entries[i].dppclk_mhz > max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
285
max_dppclk_mhz = clk_table->entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
293
clk_table->entries[i].dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
306
clock_limits[i].dcfclk_mhz = clk_table->entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
316
clk_table->entries[i].fclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
318
clk_table->entries[i].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
320
if (clk_table->entries[i].memclk_mhz &&
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
321
clk_table->entries[i].wck_ratio)
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
323
clk_table->entries[i].memclk_mhz * 2 *
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
324
clk_table->entries[i].wck_ratio;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
403
clk_table->entries[i].memclk_mhz * clk_table->entries[i].wck_ratio;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
10406
double hard_minimum_dcfclk_mhz = (double)min_clk_table->dram_bw_table.entries[0].min_dcfclk_khz / 1000.0;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
10500
DML_LOG_VERBOSE("DML::%s: min_clk_table min_fclk_khz = %ld\n", __func__, min_clk_table->dram_bw_table.entries[in_out_params->min_clk_index].min_fclk_khz);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
10501
if (min_clk_table->dram_bw_table.entries[in_out_params->min_clk_index].min_uclk_khz)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
10502
DML_LOG_VERBOSE("DML::%s: min_clk_table uclk_mhz = %f\n", __func__, min_clk_table->dram_bw_table.entries[in_out_params->min_clk_index].min_uclk_khz / 1000.0);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
10504
DML_LOG_VERBOSE("DML::%s: min_clk_table uclk_mhz = %f\n", __func__, dram_bw_kbps_to_uclk_mhz(min_clk_table->dram_bw_table.entries[in_out_params->min_clk_index].pre_derate_dram_bw_kbps, &mode_lib->soc.clk_table.dram_config));
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7089
if (dram_bw_table->entries[i].min_uclk_khz >= uclk_khz) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7090
bw_mbps = (double)dram_bw_table->entries[i].pre_derate_dram_bw_kbps / 1000.0;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7201
} entries[DML_MAX_CLK_TABLE_SIZE];
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7205
.entries = {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7247
unsigned int blackout_us = core_dcn4_g6_temp_read_blackout_table.entries[0].blackout_us;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7255
blackout_us = core_dcn4_g6_temp_read_blackout_table.entries[0].blackout_us;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7258
if (uclk_freq_khz < core_dcn4_g6_temp_read_blackout_table.entries[i].uclk_khz ||
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7259
core_dcn4_g6_temp_read_blackout_table.entries[i].uclk_khz == 0) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7263
blackout_us = core_dcn4_g6_temp_read_blackout_table.entries[i].blackout_us;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7971
mode_lib->ms.DCFCLK = ((double)min_clk_table->dram_bw_table.entries[in_out_params->min_clk_index].min_dcfclk_khz / 1000);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7972
mode_lib->ms.FabricClock = ((double)min_clk_table->dram_bw_table.entries[in_out_params->min_clk_index].min_fclk_khz / 1000);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7978
mode_lib->ms.uclk_freq_mhz = (double)min_clk_table->dram_bw_table.entries[in_out_params->min_clk_index].min_uclk_khz / 1000.0;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7980
mode_lib->ms.uclk_freq_mhz = dram_bw_kbps_to_uclk_mhz(min_clk_table->dram_bw_table.entries[in_out_params->min_clk_index].pre_derate_dram_bw_kbps, &mode_lib->soc.clk_table.dram_config);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7981
mode_lib->ms.dram_bw_mbps = ((double)min_clk_table->dram_bw_table.entries[in_out_params->min_clk_index].pre_derate_dram_bw_kbps / 1000);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_core/dml2_core_dcn4_calcs.c
7982
mode_lib->ms.max_dram_bw_mbps = ((double)min_clk_table->dram_bw_table.entries[min_clk_table->dram_bw_table.num_entries - 1].pre_derate_dram_bw_kbps / 1000);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_dpmm/dml2_dpmm_dcn4.c
23
if (dram_bw_table->entries[i].pre_derate_dram_bw_kbps >= bandwidth_kbps) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_dpmm/dml2_dpmm_dcn4.c
24
uclk_khz = dram_bw_table->entries[i].min_uclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_dpmm/dml2_dpmm_dcn4.c
44
*dcfclk = in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].min_dcfclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_dpmm/dml2_dpmm_dcn4.c
45
*fclk = in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].min_fclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_dpmm/dml2_dpmm_dcn4.c
46
*uclk = dram_bw_kbps_to_uclk_khz(in_out->min_clk_table->dram_bw_table.entries[min_clock_index_for_latency].pre_derate_dram_bw_kbps,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
101
min_table->dram_bw_table.entries[i].min_dcfclk_khz = round_up_to_quantized_values(min_table->dram_bw_table.entries[i].min_dcfclk_khz, soc_bb->clk_table.dcfclk.clk_values_khz, soc_bb->clk_table.dcfclk.num_clk_values);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
107
if (min_table->dram_bw_table.entries[i].min_dcfclk_khz > min_table->max_clocks_khz.dcfclk ||
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
108
min_table->dram_bw_table.entries[i].min_fclk_khz > min_table->max_clocks_khz.fclk) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
116
if (min_table->dram_bw_table.entries[i].min_dcfclk_khz == min_table->dram_bw_table.entries[i + 1].min_dcfclk_khz &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
117
min_table->dram_bw_table.entries[i].min_fclk_khz == min_table->dram_bw_table.entries[i + 1].min_fclk_khz &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
118
min_table->dram_bw_table.entries[i].pre_derate_dram_bw_kbps == min_table->dram_bw_table.entries[i + 1].pre_derate_dram_bw_kbps) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
122
min_table->dram_bw_table.entries[j].min_dcfclk_khz = min_table->dram_bw_table.entries[j + 1].min_dcfclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
123
min_table->dram_bw_table.entries[j].min_fclk_khz = min_table->dram_bw_table.entries[j + 1].min_fclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
124
min_table->dram_bw_table.entries[j].pre_derate_dram_bw_kbps = min_table->dram_bw_table.entries[j + 1].pre_derate_dram_bw_kbps;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
138
min_table->dram_bw_table.entries[i].pre_derate_dram_bw_kbps = uclk_to_dram_bw_kbps(soc_bb->clk_table.uclk.clk_values_khz[i], &soc_bb->clk_table.dram_config);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
139
min_table->dram_bw_table.entries[i].min_dcfclk_khz = soc_bb->clk_table.dcfclk.clk_values_khz[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
140
min_table->dram_bw_table.entries[i].min_fclk_khz = soc_bb->clk_table.fclk.clk_values_khz[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
63
min_table->dram_bw_table.entries[i].pre_derate_dram_bw_kbps = uclk_to_dram_bw_kbps(soc_bb->clk_table.uclk.clk_values_khz[i], &soc_bb->clk_table.dram_config);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
65
min_table->dram_bw_table.entries[i].min_fclk_khz = (unsigned long)((((double)min_table->dram_bw_table.entries[i].pre_derate_dram_bw_kbps * soc_bb->qos_parameters.derate_table.system_active_urgent.dram_derate_percent_pixel / 100) / ((double)soc_bb->qos_parameters.derate_table.system_active_urgent.fclk_derate_percent / 100)) / soc_bb->fabric_datapath_to_dcn_data_return_bytes);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
71
prev_100 = min_table->dram_bw_table.entries[i - 1].min_fclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
72
cur_50 = min_table->dram_bw_table.entries[i].min_fclk_khz / 2;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
73
min_table->dram_bw_table.entries[i].min_fclk_khz = prev_100 > cur_50 ? prev_100 : cur_50;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
76
min_table->dram_bw_table.entries[i].min_fclk_khz = round_up_to_quantized_values(min_table->dram_bw_table.entries[i].min_fclk_khz, soc_bb->clk_table.fclk.clk_values_khz, soc_bb->clk_table.fclk.num_clk_values);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
79
min_table->dram_bw_table.entries[0].min_fclk_khz /= 2;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
83
if (min_table->dram_bw_table.entries[i].min_dcfclk_khz < min_dcfclk_khz)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
84
min_table->dram_bw_table.entries[i].min_dcfclk_khz = min_dcfclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
86
if (min_table->dram_bw_table.entries[i].min_fclk_khz < min_fclk_khz)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
87
min_table->dram_bw_table.entries[i].min_fclk_khz = min_fclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
90
min_table->dram_bw_table.entries[i].min_fclk_khz > soc_bb->max_fclk_for_uclk_dpm_khz)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
91
min_table->dram_bw_table.entries[i].min_fclk_khz = soc_bb->max_fclk_for_uclk_dpm_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
93
min_table->dram_bw_table.entries[i].min_dcfclk_khz =
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
94
min_table->dram_bw_table.entries[i].min_fclk_khz *
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
97
min_table->dram_bw_table.entries[i].min_dcfclk_khz =
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/dml2_mcg/dml2_mcg_dcn4.c
98
min_table->dram_bw_table.entries[i].min_dcfclk_khz * soc_bb->fabric_datapath_to_dcn_data_return_bytes / soc_bb->return_bus_width_bytes;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/src/inc/dml2_internal_shared_types.h
25
struct dram_bw_to_min_clk_table_entry entries[DML_MCG_MAX_CLK_TABLE_SIZE];
drivers/gpu/drm/amd/display/dc/dpp/dcn10/dcn10_dpp_cm.c
860
gamma->entries.red[i]));
drivers/gpu/drm/amd/display/dc/dpp/dcn10/dcn10_dpp_cm.c
863
gamma->entries.green[i]));
drivers/gpu/drm/amd/display/dc/dpp/dcn10/dcn10_dpp_cm.c
866
gamma->entries.blue[i]));
drivers/gpu/drm/amd/display/dc/dpp/dcn20/dcn20_dpp_cm.c
1052
uint32_t entries)
drivers/gpu/drm/amd/display/dc/dpp/dcn20/dcn20_dpp_cm.c
1057
for (i = 0 ; i < entries; i += 2) {
drivers/gpu/drm/amd/display/dc/dpp/dcn20/dcn20_dpp_cm.c
1086
uint32_t entries)
drivers/gpu/drm/amd/display/dc/dpp/dcn20/dcn20_dpp_cm.c
1091
for (i = 0; i < entries; i++) {
drivers/gpu/drm/amd/display/dc/dpp/dcn30/dcn30_dpp.c
1339
uint32_t entries)
drivers/gpu/drm/amd/display/dc/dpp/dcn30/dcn30_dpp.c
1344
for (i = 0 ; i < entries; i += 2) {
drivers/gpu/drm/amd/display/dc/dpp/dcn30/dcn30_dpp.c
1373
uint32_t entries)
drivers/gpu/drm/amd/display/dc/dpp/dcn30/dcn30_dpp.c
1378
for (i = 0; i < entries; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
1201
dc->clk_mgr->funcs->set_max_memclk(dc->clk_mgr, dc->clk_mgr->bw_params->clk_table.entries[dc->clk_mgr->bw_params->clk_table.num_entries - 1].memclk_mhz);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1811
dc->clk_mgr->funcs->set_max_memclk(dc->clk_mgr, dc->clk_mgr->bw_params->clk_table.entries[dc->clk_mgr->bw_params->clk_table.num_entries - 1].memclk_mhz);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
761
clocks->dcfclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].dcfclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
762
clocks->socclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].socclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
763
clocks->dramclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].memclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
764
clocks->dppclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].dppclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
765
clocks->ref_dtbclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].dtbclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
769
clocks->dispclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].dispclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1512
if (pix_clk_mhz > dc->clk_mgr->bw_params->clk_table.entries[0].dispclk_mhz)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1397
dc->clk_mgr->funcs->set_max_memclk(dc->clk_mgr, dc->clk_mgr->bw_params->clk_table.entries[dc->clk_mgr->bw_params->clk_table.num_entries - 1].memclk_mhz);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
63
clocks->dcfclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].dcfclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
64
clocks->socclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].socclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
65
clocks->dramclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].memclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
66
clocks->dppclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].dppclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
68
clocks->dispclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].dispclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
81
clocks->ref_dtbclk_khz = dc->clk_mgr->bw_params->clk_table.entries[0].dtbclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr.h
143
struct clk_limit_table_entry entries[MAX_NUM_DPM_LVL];
drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr.h
250
struct wm_range_table_entry entries[WM_SET_COUNT];
drivers/gpu/drm/amd/display/dc/mpc/dcn30/dcn30_mpc.c
1005
for (i = 0 ; i < entries; i += 2) {
drivers/gpu/drm/amd/display/dc/mpc/dcn30/dcn30_mpc.c
1030
uint32_t entries,
drivers/gpu/drm/amd/display/dc/mpc/dcn30/dcn30_mpc.c
1036
for (i = 0; i < entries; i++) {
drivers/gpu/drm/amd/display/dc/mpc/dcn30/dcn30_mpc.c
999
uint32_t entries,
drivers/gpu/drm/amd/display/dc/mpc/dcn32/dcn32_mpc.c
829
uint32_t entries,
drivers/gpu/drm/amd/display/dc/mpc/dcn32/dcn32_mpc.c
835
for (i = 0 ; i < entries; i += 2) {
drivers/gpu/drm/amd/display/dc/mpc/dcn32/dcn32_mpc.c
861
uint32_t entries,
drivers/gpu/drm/amd/display/dc/mpc/dcn32/dcn32_mpc.c
867
for (i = 0; i < entries; i++) {
drivers/gpu/drm/amd/display/dc/mpc/dcn32/dcn32_mpc.h
378
uint32_t entries,
drivers/gpu/drm/amd/display/dc/mpc/dcn32/dcn32_mpc.h
383
uint32_t entries,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2156
if (bw_params->clk_table.entries[0].memclk_mhz) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2159
if (bw_params->clk_table.entries[i].dcfclk_mhz > dcn30_bb_max_clk.max_dcfclk_mhz)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2160
dcn30_bb_max_clk.max_dcfclk_mhz = bw_params->clk_table.entries[i].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2161
if (bw_params->clk_table.entries[i].dispclk_mhz > dcn30_bb_max_clk.max_dispclk_mhz)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2162
dcn30_bb_max_clk.max_dispclk_mhz = bw_params->clk_table.entries[i].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2163
if (bw_params->clk_table.entries[i].dppclk_mhz > dcn30_bb_max_clk.max_dppclk_mhz)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2164
dcn30_bb_max_clk.max_dppclk_mhz = bw_params->clk_table.entries[i].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2165
if (bw_params->clk_table.entries[i].phyclk_mhz > dcn30_bb_max_clk.max_phyclk_mhz)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2166
dcn30_bb_max_clk.max_phyclk_mhz = bw_params->clk_table.entries[i].phyclk_mhz;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2194
dcn30_fpu_get_optimal_dcfclk_fclk_for_uclk(bw_params->clk_table.entries[i].memclk_mhz * 16,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2197
if (optimal_dcfclk_for_uclk[i] < bw_params->clk_table.entries[0].dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2198
optimal_dcfclk_for_uclk[i] = bw_params->clk_table.entries[0].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2207
bw_params->clk_table.entries[j].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2218
bw_params->clk_table.entries[j].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2234
dram_speed_mts[num_states++] = bw_params->clk_table.entries[j++].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2249
dram_speed_mts[num_states++] = bw_params->clk_table.entries[j++].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1799
if (context->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz == 0 ||
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1800
uclk_mhz < context->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1802
if (uclk_mhz > context->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz)
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
104
dml_clk_table->uclk.clk_values_khz[i] = dc_clk_table->entries[i].memclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
118
dc_clk_table->entries[i].dispclk_mhz > dc_bw_params->dc_mode_limit.dispclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
119
if (i == 0 || dc_clk_table->entries[i-1].dispclk_mhz < dc_bw_params->dc_mode_limit.dispclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
127
dml_clk_table->dispclk.clk_values_khz[i] = dc_clk_table->entries[i].dispclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
141
dc_clk_table->entries[i].dppclk_mhz > dc_bw_params->dc_mode_limit.dppclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
142
if (i == 0 || dc_clk_table->entries[i-1].dppclk_mhz < dc_bw_params->dc_mode_limit.dppclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
150
dml_clk_table->dppclk.clk_values_khz[i] = dc_clk_table->entries[i].dppclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
164
dc_clk_table->entries[i].dtbclk_mhz > dc_bw_params->dc_mode_limit.dtbclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
165
if (i == 0 || dc_clk_table->entries[i-1].dtbclk_mhz < dc_bw_params->dc_mode_limit.dtbclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
173
dml_clk_table->dtbclk.clk_values_khz[i] = dc_clk_table->entries[i].dtbclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
187
dc_clk_table->entries[i].socclk_mhz > dc_bw_params->dc_mode_limit.socclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
188
if (i == 0 || dc_clk_table->entries[i-1].socclk_mhz < dc_bw_params->dc_mode_limit.socclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
196
dml_clk_table->socclk.clk_values_khz[i] = dc_clk_table->entries[i].socclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
49
dc_clk_table->entries[i].dcfclk_mhz > dc_bw_params->dc_mode_limit.dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
50
if (i == 0 || dc_clk_table->entries[i-1].dcfclk_mhz < dc_bw_params->dc_mode_limit.dcfclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
58
dml_clk_table->dcfclk.clk_values_khz[i] = dc_clk_table->entries[i].dcfclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
72
dc_clk_table->entries[i].fclk_mhz > dc_bw_params->dc_mode_limit.fclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
73
if (i == 0 || dc_clk_table->entries[i-1].fclk_mhz < dc_bw_params->dc_mode_limit.fclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
81
dml_clk_table->fclk.clk_values_khz[i] = dc_clk_table->entries[i].fclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
95
dc_clk_table->entries[i].memclk_mhz > dc_bw_params->dc_mode_limit.memclk_mhz) {
drivers/gpu/drm/amd/display/dc/soc_and_ip_translator/dcn401/dcn401_soc_and_ip_translator.c
96
if (i == 0 || dc_clk_table->entries[i-1].memclk_mhz < dc_bw_params->dc_mode_limit.memclk_mhz) {
drivers/gpu/drm/amd/display/dmub/inc/dmub_trace_buffer.h
65
struct dmcub_trace_buf_entry entries[PERF_TRACE_MAX_ENTRY];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1289
if (dc_fixpt_lt(max_os, ramp->entries.red[i]) ||
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1290
dc_fixpt_lt(max_os, ramp->entries.green[i]) ||
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1291
dc_fixpt_lt(max_os, ramp->entries.blue[i])) {
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1302
ramp->entries.red[i], scaler);
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1304
ramp->entries.green[i], scaler);
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1306
ramp->entries.blue[i], scaler);
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1350
if (dc_fixpt_lt(ramp->entries.red[i], min))
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1351
min = ramp->entries.red[i];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1353
if (dc_fixpt_lt(ramp->entries.green[i], min))
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1354
min = ramp->entries.green[i];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1356
if (dc_fixpt_lt(ramp->entries.blue[i], min))
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1357
min = ramp->entries.blue[i];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1359
if (dc_fixpt_lt(max, ramp->entries.red[i]))
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1360
max = ramp->entries.red[i];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1362
if (dc_fixpt_lt(max, ramp->entries.green[i]))
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1363
max = ramp->entries.green[i];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1365
if (dc_fixpt_lt(max, ramp->entries.blue[i]))
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1366
max = ramp->entries.blue[i];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1377
ramp->entries.red[i], delta), offset);
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1380
ramp->entries.green[i], delta), offset);
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1383
ramp->entries.blue[i], delta), offset);
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1476
lut1 = ramp->entries.red[index];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1477
lut2 = ramp->entries.red[index_next];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1479
lut1 = ramp->entries.green[index];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1480
lut2 = ramp->entries.green[index_next];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1482
lut1 = ramp->entries.blue[index];
drivers/gpu/drm/amd/display/modules/color/color_gamma.c
1483
lut2 = ramp->entries.blue[index_next];
drivers/gpu/drm/amd/include/mes_v11_api_def.h
196
struct MES_LOG_ENTRY_DATA entries[1];
drivers/gpu/drm/amd/include/mes_v12_api_def.h
250
struct MES_LOG_ENTRY_DATA entries[];
drivers/gpu/drm/amd/include/pptable.h
518
ATOM_PPLIB_Clock_Voltage_Dependency_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/include/pptable.h
536
ATOM_PPLIB_Clock_Voltage_Limit_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/include/pptable.h
562
ATOM_PPLIB_CAC_Leakage_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/include/pptable.h
579
ATOM_PPLIB_PhaseSheddingLimits_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/include/pptable.h
591
VCEClockInfo entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/include/pptable.h
603
ATOM_PPLIB_VCE_Clock_Voltage_Limit_Record entries[] __counted_by(numEntries);
drivers/gpu/drm/amd/include/pptable.h
615
ATOM_PPLIB_VCE_State_Record entries[] __counted_by(numEntries);
drivers/gpu/drm/amd/include/pptable.h
637
UVDClockInfo entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/include/pptable.h
649
ATOM_PPLIB_UVD_Clock_Voltage_Limit_Record entries[] __counted_by(numEntries);
drivers/gpu/drm/amd/include/pptable.h
668
ATOM_PPLIB_SAMClk_Voltage_Limit_Record entries[] __counted_by(numEntries);
drivers/gpu/drm/amd/include/pptable.h
686
ATOM_PPLIB_ACPClk_Voltage_Limit_Record entries[] __counted_by(numEntries);
drivers/gpu/drm/amd/include/pptable.h
755
ATOM_PPLIB_VQ_Budgeting_Record entries[] __counted_by(numEntries);
drivers/gpu/drm/amd/include/umsch_mm_4_0_api_def.h
123
struct UMSCH_LOG_ENTRY_DATA entries[1];
drivers/gpu/drm/amd/pm/inc/amdgpu_dpm.h
123
struct amdgpu_clock_voltage_dependency_entry *entries;
drivers/gpu/drm/amd/pm/inc/amdgpu_dpm.h
140
union amdgpu_cac_leakage_entry *entries;
drivers/gpu/drm/amd/pm/inc/amdgpu_dpm.h
151
struct amdgpu_phase_shedding_limits_entry *entries;
drivers/gpu/drm/amd/pm/inc/amdgpu_dpm.h
162
struct amdgpu_uvd_clock_voltage_dependency_entry *entries;
drivers/gpu/drm/amd/pm/inc/amdgpu_dpm.h
173
struct amdgpu_vce_clock_voltage_dependency_entry *entries;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
104
if (vddc_sclk_table->entries[i].v == vid_7bit)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1040
pi->high_voltage_t < table->entries[i].v)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1043
pi->samu_level[i].Frequency = cpu_to_be32(table->entries[i].clk);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1044
pi->samu_level[i].MinVoltage = cpu_to_be16(table->entries[i].v);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1047
(u8)kv_get_clk_bypass(adev, table->entries[i].clk);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1050
table->entries[i].clk, false, &dividers);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
110
if (vid_mapping_table->entries[i].vid_7bit == vid_7bit)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1105
pi->acp_level[i].Frequency = cpu_to_be32(table->entries[i].clk);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1106
pi->acp_level[i].MinVoltage = cpu_to_be16(table->entries[i].v);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1109
table->entries[i].clk, false, &dividers);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
111
return vid_mapping_table->entries[i].vid_2bit;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
114
return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_2bit;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1159
if (kv_get_clock_difference(table->entries[i].clk, 40000) < 200)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1161
else if (kv_get_clock_difference(table->entries[i].clk, 30000) < 200)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1163
else if (kv_get_clock_difference(table->entries[i].clk, 26600) < 200)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1165
else if (kv_get_clock_difference(table->entries[i].clk, 20000) < 200)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1167
else if (kv_get_clock_difference(table->entries[i].clk, 10000) < 200)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1180
if (kv_get_clock_difference(table->entries[i].sclk_frequency, 40000) < 200)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1182
else if (kv_get_clock_difference(table->entries[i].sclk_frequency, 30000) < 200)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1184
else if (kv_get_clock_difference(table->entries[i].sclk_frequency, 26600) < 200)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1186
else if (kv_get_clock_difference(table->entries[i].sclk_frequency, 20000) < 200)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1188
else if (kv_get_clock_difference(table->entries[i].sclk_frequency, 10000) < 200)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
147
sclk_voltage_mapping_table->entries[n].sclk_frequency =
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
149
sclk_voltage_mapping_table->entries[n].vid_2bit =
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1542
if (table->entries[i].evclk >= evclk)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
169
vid_mapping_table->entries[table[i].usVoltageIndex].vid_7bit =
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
171
vid_mapping_table->entries[table[i].usVoltageIndex].vid_2bit =
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
177
if (vid_mapping_table->entries[i].vid_7bit == 0) {
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1774
if ((table->entries[i].clk >= new_ps->levels[0].sclk) ||
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1782
if (table->entries[i].clk <= new_ps->levels[new_ps->num_levels - 1].sclk)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1788
if ((new_ps->levels[0].sclk - table->entries[pi->highest_valid].clk) >
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1789
(table->entries[pi->lowest_valid].clk - new_ps->levels[new_ps->num_levels - 1].sclk))
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
179
if (vid_mapping_table->entries[j].vid_7bit != 0) {
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1799
if (table->entries[i].sclk_frequency >= new_ps->levels[0].sclk ||
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
180
vid_mapping_table->entries[i] =
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1807
if (table->entries[i].sclk_frequency <=
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
181
vid_mapping_table->entries[j];
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1815
table->entries[pi->highest_valid].sclk_frequency) >
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
1816
(table->entries[pi->lowest_valid].sclk_frequency -
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
182
vid_mapping_table->entries[j].vid_7bit = 0;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2034
pi->sys_info.sclk_voltage_mapping_table.entries[idx].sclk_frequency;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2037
pi->sys_info.sclk_voltage_mapping_table.entries[idx].vid_2bit);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2057
uvd_table->entries[i].v =
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2059
uvd_table->entries[i].v);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2064
vce_table->entries[i].v =
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2066
vce_table->entries[i].v);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2071
samu_table->entries[i].v =
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2073
samu_table->entries[i].v);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2078
acp_table->entries[i].v =
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2080
acp_table->entries[i].v);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2172
(kv_convert_8bit_index_to_voltage(adev, table->entries[i].v) <=
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2184
(kv_convert_2bit_index_to_voltage(adev, table->entries[i].vid_2bit) <=
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2227
if (stable_p_state_sclk >= table->entries[i].clk) {
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2228
stable_p_state_sclk = table->entries[i].clk;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2234
stable_p_state_sclk = table->entries[0].clk;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2257
ps->levels[i].sclk = table->entries[limit].clk;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2269
ps->levels[i].sclk = table->entries[limit].sclk_frequency;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2420
kv_convert_8bit_index_to_voltage(adev, table->entries[i].v)))
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2423
kv_set_divider_value(adev, i, table->entries[i].clk);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2426
table->entries[i].v);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2440
kv_convert_2bit_index_to_voltage(adev, table->entries[i].vid_2bit))
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2443
kv_set_divider_value(adev, i, table->entries[i].sclk_frequency);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
2444
kv_set_vid(adev, i, table->entries[i].vid_2bit);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
797
if (table->entries[i].clk == pi->boot_pl.sclk)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
811
if (table->entries[i].sclk_frequency == pi->boot_pl.sclk)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
82
return vddc_sclk_table->entries[vid_2bit].v;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
84
return vddc_sclk_table->entries[vddc_sclk_table->count - 1].v;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
87
if (vid_mapping_table->entries[i].vid_2bit == vid_2bit)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
88
return vid_mapping_table->entries[i].vid_7bit;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
90
return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_7bit;
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
906
(pi->high_voltage_t < table->entries[i].v))
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
909
pi->uvd_level[i].VclkFrequency = cpu_to_be32(table->entries[i].vclk);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
910
pi->uvd_level[i].DclkFrequency = cpu_to_be32(table->entries[i].dclk);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
911
pi->uvd_level[i].MinVddNb = cpu_to_be16(table->entries[i].v);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
914
(u8)kv_get_clk_bypass(adev, table->entries[i].vclk);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
916
(u8)kv_get_clk_bypass(adev, table->entries[i].dclk);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
919
table->entries[i].vclk, false, &dividers);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
925
table->entries[i].dclk, false, &dividers);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
977
pi->high_voltage_t < table->entries[i].v)
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
980
pi->vce_level[i].Frequency = cpu_to_be32(table->entries[i].evclk);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
981
pi->vce_level[i].MinVoltage = cpu_to_be16(table->entries[i].v);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
984
(u8)kv_get_clk_bypass(adev, table->entries[i].evclk);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.c
987
table->entries[i].evclk, false, &dividers);
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.h
44
struct sumo_vid_mapping_entry entries[SUMO_MAX_NUMBER_VOLTAGES];
drivers/gpu/drm/amd/pm/legacy-dpm/kv_dpm.h
55
struct sumo_sclk_voltage_mapping_entry entries[SUMO_MAX_HARDWARE_POWERLEVELS];
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
167
amdgpu_table->entries = kzalloc(size, GFP_KERNEL);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
168
if (!amdgpu_table->entries)
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
171
entry = &atom_table->entries[0];
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
173
amdgpu_table->entries[i].clk = le16_to_cpu(entry->usClockLow) |
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
175
amdgpu_table->entries[i].v = le16_to_cpu(entry->usVoltage);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
286
le16_to_cpu(clk_v->entries[0].usSclkLow) |
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
287
(clk_v->entries[0].ucSclkHigh << 16);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
289
le16_to_cpu(clk_v->entries[0].usMclkLow) |
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
290
(clk_v->entries[0].ucMclkHigh << 16);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
292
le16_to_cpu(clk_v->entries[0].usVddc);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
294
le16_to_cpu(clk_v->entries[0].usVddci);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
304
adev->pm.dpm.dyn_state.phase_shedding_limits_table.entries =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
307
if (!adev->pm.dpm.dyn_state.phase_shedding_limits_table.entries)
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
310
entry = &psl->entries[0];
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
312
adev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].sclk =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
314
adev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].mclk =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
316
adev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].voltage =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
348
adev->pm.dpm.dyn_state.cac_leakage_table.entries = kzalloc(size, GFP_KERNEL);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
349
if (!adev->pm.dpm.dyn_state.cac_leakage_table.entries)
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
351
entry = &cac_table->entries[0];
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
354
adev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc1 =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
356
adev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc2 =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
358
adev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc3 =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
361
adev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
363
adev->pm.dpm.dyn_state.cac_leakage_table.entries[i].leakage =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
400
adev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
402
if (!adev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries)
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
406
entry = &limits->entries[0];
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
407
state_entry = &states->entries[0];
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
410
((u8 *)&array->entries[0] +
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
412
adev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].evclk =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
414
adev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].ecclk =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
416
adev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].v =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
426
((u8 *)&array->entries[0] +
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
453
adev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
455
if (!adev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries)
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
459
entry = &limits->entries[0];
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
462
((u8 *)&array->entries[0] +
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
464
adev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].vclk =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
466
adev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].dclk =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
468
adev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].v =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
483
adev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
485
if (!adev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries)
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
489
entry = &limits->entries[0];
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
491
adev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].clk =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
493
adev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].v =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
537
adev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
539
if (!adev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries)
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
543
entry = &limits->entries[0];
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
545
adev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].clk =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
547
adev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].v =
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
609
kfree(dyn_state->vddc_dependency_on_sclk.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
610
kfree(dyn_state->vddci_dependency_on_mclk.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
611
kfree(dyn_state->vddc_dependency_on_mclk.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
612
kfree(dyn_state->mvdd_dependency_on_mclk.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
613
kfree(dyn_state->cac_leakage_table.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
614
kfree(dyn_state->phase_shedding_limits_table.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
617
kfree(dyn_state->vce_clock_voltage_dependency_table.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
618
kfree(dyn_state->uvd_clock_voltage_dependency_table.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
619
kfree(dyn_state->samu_clock_voltage_dependency_table.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
620
kfree(dyn_state->acp_clock_voltage_dependency_table.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/legacy_dpm.c
621
kfree(dyn_state->vddgfx_dependency_on_sclk.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
2653
if (table->entries[i].vddc > *max)
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
2654
*max = table->entries[i].vddc;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
2655
if (table->entries[i].vddc < *min)
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
2656
*min = table->entries[i].vddc;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3030
if (highest_leakage < si_pi->leakage_voltage.entries[i].voltage)
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3031
highest_leakage = si_pi->leakage_voltage.entries[i].voltage;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3055
if ((evclk <= table->entries[i].evclk) &&
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3056
(ecclk <= table->entries[i].ecclk)) {
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3057
*voltage = table->entries[i].v;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3065
*voltage = table->entries[table->count - 1].v;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3224
if (voltage <= table->entries[i].value)
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3225
return table->entries[i].value;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3227
return table->entries[table->count - 1].value;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3272
if (clock < table->entries[i].clk)
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3273
clock = table->entries[i].clk;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3287
if (clock <= table->entries[i].clk) {
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3288
if (*voltage < table->entries[i].v)
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3289
*voltage = (u16)((table->entries[i].v < max_voltage) ?
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3290
table->entries[i].v : max_voltage);
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3756
si_pi->leakage_voltage.entries[count].voltage = vddc;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3757
si_pi->leakage_voltage.entries[count].leakage_index =
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3784
if (si_pi->leakage_voltage.entries[i].leakage_index == index) {
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
3785
*leakage_voltage = si_pi->leakage_voltage.entries[i].voltage;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4439
voltage_table->entries[i] = voltage_table->entries[i + diff];
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4458
voltage_table->entries[i].value = voltage_dependency_table->entries[i].v;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4459
voltage_table->entries[i].smio_low = 0;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4552
table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4577
if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4625
if (value <= table->entries[i].value) {
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4627
voltage->value = cpu_to_be16(table->entries[i].value);
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4650
voltage->value = cpu_to_be16(si_pi->mvdd_voltage_table.entries[voltage->index].value);
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4663
if (adev->pm.dpm.dyn_state.cac_leakage_table.entries) {
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4665
if (adev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries == NULL)
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4670
(u16)adev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) {
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4674
adev->pm.dpm.dyn_state.cac_leakage_table.entries[v_index].vddc;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4677
adev->pm.dpm.dyn_state.cac_leakage_table.entries[adev->pm.dpm.dyn_state.cac_leakage_table.count-1].vddc;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4685
(u16)adev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) {
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4689
adev->pm.dpm.dyn_state.cac_leakage_table.entries[v_index].vddc;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4692
adev->pm.dpm.dyn_state.cac_leakage_table.entries[adev->pm.dpm.dyn_state.cac_leakage_table.count-1].vddc;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4699
*std_voltage = adev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4724
if ((voltage <= limits->entries[i].voltage) &&
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4725
(sclk <= limits->entries[i].sclk) &&
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
4726
(mclk <= limits->entries[i].mclk))
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
5693
adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[i].clk) {
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
5695
adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[i].v)
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
6396
table->entries[i].v,
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
6399
table->entries[i].v = leakage_voltage;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
6410
table->entries[j].v = (table->entries[j].v <= table->entries[j + 1].v) ?
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
6411
table->entries[j].v : table->entries[j + 1].v;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
7446
adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
7448
if (!adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries)
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
7452
adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
7453
adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
7454
adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
7455
adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
7456
adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
7457
adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
7458
adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
7459
adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.c
7568
kfree(adev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
drivers/gpu/drm/amd/pm/legacy-dpm/si_dpm.h
396
struct si_leakage_voltage_entry entries[SISLANDS_MAX_LEAKAGE_COUNT];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/hwmgr_ppt.h
106
phm_ppt_v1_pcie_record entries[]; /* Dynamically allocate count entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/hwmgr_ppt.h
51
phm_ppt_v1_clock_voltage_dependency_record entries[]; /* Dynamically allocate count entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/hwmgr_ppt.h
74
phm_ppt_v1_mm_clock_voltage_dependency_record entries[]; /* Dynamically allocate count entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/hwmgr_ppt.h
89
phm_ppt_v1_voltage_lookup_record entries[]; /* Dynamically allocate count entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/ppatomctrl.c
600
voltage_table->entries[i].value =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/ppatomctrl.c
602
voltage_table->entries[i].smio_low =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/ppatomctrl.c
744
if (hwmgr->dyn_state.vddc_dependency_on_sclk->entries[entry_id].v == virtual_voltage_id) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/ppatomctrl.c
759
cpu_to_le32(hwmgr->dyn_state.vddc_dependency_on_sclk->entries[entry_id].clk);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/ppatomctrl.h
202
pp_atomctrl_voltage_table_entry entries[PP_ATOMCTRL_MAX_VOLTAGE_ENTRIES];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/ppatomfwctrl.c
127
voltage_table->entries[i].value =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/ppatomfwctrl.c
130
voltage_table->entries[i].smio_low =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/ppatomfwctrl.h
52
struct pp_atomfwctrl_voltage_table_entry entries[PP_ATOMFWCTRL_MAX_VOLTAGE_ENTRIES];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/pptable_v1_0.h
167
ATOM_Tonga_State entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/pptable_v1_0.h
182
ATOM_Tonga_MCLK_Dependency_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/pptable_v1_0.h
197
ATOM_Tonga_SCLK_Dependency_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/pptable_v1_0.h
213
ATOM_Polaris_SCLK_Dependency_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/pptable_v1_0.h
225
ATOM_Tonga_PCIE_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/pptable_v1_0.h
238
ATOM_Polaris10_PCIE_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/pptable_v1_0.h
255
ATOM_Tonga_MM_Dependency_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/pptable_v1_0.h
268
ATOM_Tonga_Voltage_Lookup_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/pptable_v1_0.h
370
ATOM_Tonga_VCE_State_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/pptable_v1_0.h
484
ATOM_Tonga_Hard_Limit_Record entries[] __counted_by(ucNumEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
1324
entries, vce_state_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
1327
entries, sclk_dep_table,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
1331
entries, mm_dep_table,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
1342
entries, mclk_dep_table,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
1347
entries, mclk_dep_table,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
1391
ATOM_Tonga_State, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
168
table = kzalloc_flex(*table, entries, max_levels);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
177
entries, table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
180
entries, vddc_lookup_pp_tables, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
333
entries, clk_volt_pp_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
350
limits->sclk = le32_to_cpu(limitable->entries[0].ulSCLKLimit);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
351
limits->mclk = le32_to_cpu(limitable->entries[0].ulMCLKLimit);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
352
limits->vddc = le16_to_cpu(limitable->entries[0].usVddcLimit);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
353
limits->vddci = le16_to_cpu(limitable->entries[0].usVddciLimit);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
354
limits->vddgfx = le16_to_cpu(limitable->entries[0].usVddgfxLimit);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
373
mclk_table = kzalloc_flex(*mclk_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
383
entries, mclk_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
386
entries, mclk_dep_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
417
sclk_table = kzalloc_flex(*sclk_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
427
entries, tonga_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
430
entries, sclk_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
446
sclk_table = kzalloc_flex(*sclk_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
456
entries, polaris_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
459
entries, sclk_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
493
pcie_table = kzalloc_flex(*pcie_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
512
entries, pcie_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
515
entries, atom_pcie_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
529
pcie_table = kzalloc_flex(*pcie_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
549
entries, pcie_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
552
entries, atom_pcie_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
724
mm_table = kzalloc_flex(*mm_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
734
entries, mm_dependency_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/process_pptables_v1_0.c
737
entries, mm_table, i);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1211
uvd_table = kzalloc_flex(*uvd_table, entries, table->numEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1219
&array->entries[table->entries[i].ucUVDClockInfoIndex];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1220
uvd_table->entries[i].v = (unsigned long)le16_to_cpu(table->entries[i].usVoltage);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1221
uvd_table->entries[i].vclk = ((unsigned long)entry->ucVClkHigh << 16)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1223
uvd_table->entries[i].dclk = ((unsigned long)entry->ucDClkHigh << 16)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1240
vce_table = kzalloc_flex(*vce_table, entries, table->numEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1246
const VCEClockInfo *entry = &array->entries[table->entries[i].ucVCEClockInfoIndex];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1248
vce_table->entries[i].v = (unsigned long)le16_to_cpu(table->entries[i].usVoltage);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1249
vce_table->entries[i].evclk = ((unsigned long)entry->ucEVClkHigh << 16)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1251
vce_table->entries[i].ecclk = ((unsigned long)entry->ucECClkHigh << 16)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1267
samu_table = kzalloc_flex(*samu_table, entries, table->numEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1274
samu_table->entries[i].v = (unsigned long)le16_to_cpu(table->entries[i].usVoltage);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1275
samu_table->entries[i].samclk = ((unsigned long)table->entries[i].ucSAMClockHigh << 16)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1276
| le16_to_cpu(table->entries[i].usSAMClockLow);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1291
acp_table = kzalloc_flex(*acp_table, entries, table->numEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1298
acp_table->entries[i].v = (unsigned long)le16_to_cpu(table->entries[i].usVoltage);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1299
acp_table->entries[i].acpclk = ((unsigned long)table->entries[i].ucACPClockHigh << 16)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1300
| le16_to_cpu(table->entries[i].usACPClockLow);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1485
cac_leakage_table = kzalloc_flex(*cac_leakage_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1495
cac_leakage_table->entries[i].Vddc1 = le16_to_cpu(table->entries[i].usVddc1);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1496
cac_leakage_table->entries[i].Vddc2 = le16_to_cpu(table->entries[i].usVddc2);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1497
cac_leakage_table->entries[i].Vddc3 = le16_to_cpu(table->entries[i].usVddc3);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1499
cac_leakage_table->entries[i].Vddc = le16_to_cpu(table->entries[i].usVddc);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1500
cac_leakage_table->entries[i].Leakage = le32_to_cpu(table->entries[i].ulLeakageValue);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1623
table = kzalloc_flex(*table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1631
table->entries[i].Voltage = (unsigned long)le16_to_cpu(ptable->entries[i].usVoltage);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1632
table->entries[i].Sclk = ((unsigned long)ptable->entries[i].ucSclkHigh << 16)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1633
| le16_to_cpu(ptable->entries[i].usSclkLow);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1634
table->entries[i].Mclk = ((unsigned long)ptable->entries[i].ucMclkHigh << 16)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1635
| le16_to_cpu(ptable->entries[i].usMclkLow);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1675
const ATOM_PPLIB_VCE_State_Record *record = &vce_state_table->entries[i];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
1677
const VCEClockInfo *vce_clock_info = &vce_clock_info_array->entries[record->ucVCEClockInfoIndex];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
385
dep_table = kzalloc_flex(*dep_table, entries, table->ucNumEntries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
392
dep_table->entries[i].clk =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
393
((unsigned long)table->entries[i].ucClockHigh << 16) |
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
394
le16_to_cpu(table->entries[i].usClockLow);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
395
dep_table->entries[i].v =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
396
(unsigned long)le16_to_cpu(table->entries[i].usVoltage);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
418
clock_table->values[i] = (unsigned long)table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
429
limits->sclk = ((unsigned long)table->entries[0].ucSclkHigh << 16) |
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
430
le16_to_cpu(table->entries[0].usSclkLow);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
431
limits->mclk = ((unsigned long)table->entries[0].ucMclkHigh << 16) |
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
432
le16_to_cpu(table->entries[0].usMclkLow);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
433
limits->vddc = (unsigned long)le16_to_cpu(table->entries[0].usVddc);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/processpptables.c
434
limits->vddci = (unsigned long)le16_to_cpu(table->entries[0].usVddci);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1010
mclk_table->entries[low].clk/100,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1015
mclk_table->entries[high].clk/100,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1069
mclk_table->entries[i].clk / 100,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1070
((mclk_table->entries[i].clk / 100) == now) ?
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1132
level->memory_clock = data->clock_vol_info.vdd_dep_on_fclk->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1135
level->memory_clock = data->clock_vol_info.vdd_dep_on_fclk->entries[
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1219
if (pclk_vol_table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1221
pclk_vol_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1224
pclk_vol_table->entries[i].clk) :
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1273
if (pclk_vol_table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1274
clocks->data[clocks->num_levels].clocks_in_khz = pclk_vol_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
1275
clocks->data[clocks->num_levels].voltage_in_mv = pclk_vol_table->entries[i].vol;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
136
table_clk_vlt = kzalloc_flex(*table_clk_vlt, entries, count);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
144
table_clk_vlt->entries[0].clk = PP_DAL_POWERLEVEL_0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
145
table_clk_vlt->entries[0].v = 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
146
table_clk_vlt->entries[1].clk = PP_DAL_POWERLEVEL_1;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
147
table_clk_vlt->entries[1].v = 1;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
148
table_clk_vlt->entries[2].clk = PP_DAL_POWERLEVEL_2;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
149
table_clk_vlt->entries[2].v = 2;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
150
table_clk_vlt->entries[3].clk = PP_DAL_POWERLEVEL_3;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
151
table_clk_vlt->entries[3].v = 3;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
152
table_clk_vlt->entries[4].clk = PP_DAL_POWERLEVEL_4;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
153
table_clk_vlt->entries[4].v = 4;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
154
table_clk_vlt->entries[5].clk = PP_DAL_POWERLEVEL_5;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
155
table_clk_vlt->entries[5].v = 5;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
156
table_clk_vlt->entries[6].clk = PP_DAL_POWERLEVEL_6;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
157
table_clk_vlt->entries[6].v = 6;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
158
table_clk_vlt->entries[7].clk = PP_DAL_POWERLEVEL_7;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
159
table_clk_vlt->entries[7].v = 7;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
475
ptable = kzalloc_flex(*ptable, entries, num_entry);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
482
ptable->entries[i].clk = pclk_dependency_table->Freq * 100;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
483
ptable->entries[i].vol = pclk_dependency_table->Vol;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
785
(data->clock_vol_info.vdd_dep_on_fclk->entries[0].clk / 100) :
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
791
data->clock_vol_info.vdd_dep_on_socclk->entries[0].clk / 100,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
804
data->clock_vol_info.vdd_dep_on_fclk->entries[index_fclk].clk / 100,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
808
data->clock_vol_info.vdd_dep_on_socclk->entries[index_socclk].clk / 100,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
861
return data->clock_vol_info.vdd_dep_on_fclk->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.c
863
return data->clock_vol_info.vdd_dep_on_fclk->entries[
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.h
165
struct smu10_display_phy_info_entry entries[SMU10_MAX_DISPLAYPHY_IDS];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.h
184
struct smu10_mclk_latency_entries entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu10_hwmgr.h
195
struct smu10_clock_voltage_dependency_record entries[] __counted_by(count);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1020
if (odn_table->odn_core_clock_dpm_levels.entries[i].clock !=
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1028
if (odn_table->odn_memory_clock_dpm_levels.entries[i].clock !=
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1039
if (dep_table->entries[i].vddc != odn_dep_table->entries[i].vddc) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1048
if (dep_table->entries[i].vddc != odn_dep_table->entries[i].vddc) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1525
if (tmp_sclk >= vddc_dependency_on_sclk->entries[count].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1526
hwmgr->pstate_sclk = vddc_dependency_on_sclk->entries[count].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1531
hwmgr->pstate_sclk = vddc_dependency_on_sclk->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1534
vddc_dependency_on_sclk->entries[vddc_dependency_on_sclk->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1542
if (tmp_sclk >= vdd_dep_on_sclk->entries[count].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1543
hwmgr->pstate_sclk = vdd_dep_on_sclk->entries[count].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1548
hwmgr->pstate_sclk = vdd_dep_on_sclk->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
1551
vdd_dep_on_sclk->entries[vdd_dep_on_sclk->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2074
if (sclk_table->entries[j].clk == sclk &&
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2075
sclk_table->entries[j].cks_enable == 0) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2108
if (sclk_table->entries[j].clk == sclk &&
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2109
sclk_table->entries[j].cks_enable == 0) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2181
&lookup_table->entries[i].us_vdd, leakage_table);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2216
voltage_id = sclk_table->entries[entry_id].vddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2217
sclk_table->entries[entry_id].vddgfx =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2218
table_info->vddgfx_lookup_table->entries[voltage_id].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2222
voltage_id = sclk_table->entries[entry_id].vddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2223
sclk_table->entries[entry_id].vddc =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2224
table_info->vddc_lookup_table->entries[voltage_id].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2229
voltage_id = mclk_table->entries[entry_id].vddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2230
mclk_table->entries[entry_id].vddc =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2231
table_info->vddc_lookup_table->entries[voltage_id].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2235
voltage_id = mm_table->entries[entry_id].vddcInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2236
mm_table->entries[entry_id].vddc =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2237
table_info->vddc_lookup_table->entries[voltage_id].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2261
if (look_up_table->entries[i].us_vdd == record->us_vdd) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2262
if (look_up_table->entries[i].us_calculated == 1)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2268
look_up_table->entries[i].us_calculated = 1;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2269
look_up_table->entries[i].us_vdd = record->us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2270
look_up_table->entries[i].us_cac_low = record->us_cac_low;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2271
look_up_table->entries[i].us_cac_mid = record->us_cac_mid;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2272
look_up_table->entries[i].us_cac_high = record->us_cac_high;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2293
if (sclk_table->entries[entry_id].vdd_offset & (1 << 15))
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2294
v_record.us_vdd = sclk_table->entries[entry_id].vddgfx +
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2295
sclk_table->entries[entry_id].vdd_offset - 0xFFFF;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2297
v_record.us_vdd = sclk_table->entries[entry_id].vddgfx +
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2298
sclk_table->entries[entry_id].vdd_offset;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2300
sclk_table->entries[entry_id].vddc =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2308
if (mclk_table->entries[entry_id].vdd_offset & (1 << 15))
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2309
v_record.us_vdd = mclk_table->entries[entry_id].vddc +
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2310
mclk_table->entries[entry_id].vdd_offset - 0xFFFF;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2312
v_record.us_vdd = mclk_table->entries[entry_id].vddc +
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2313
mclk_table->entries[entry_id].vdd_offset;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2315
mclk_table->entries[entry_id].vddgfx = v_record.us_cac_low =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2333
if (mm_table->entries[entry_id].vddgfx_offset & (1 << 15))
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2334
v_record.us_vdd = mm_table->entries[entry_id].vddc +
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2335
mm_table->entries[entry_id].vddgfx_offset - 0xFFFF;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2337
v_record.us_vdd = mm_table->entries[entry_id].vddc +
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2338
mm_table->entries[entry_id].vddgfx_offset;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2341
mm_table->entries[entry_id].vddgfx = v_record.us_cac_low =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2361
if (lookup_table->entries[j].us_vdd <
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2362
lookup_table->entries[j - 1].us_vdd) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2363
swap(lookup_table->entries[j - 1],
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2364
lookup_table->entries[j]);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2435
highest_voltage = allowed_sclk_vdd_table->entries[allowed_sclk_vdd_table->count - 1].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2438
if (lookup_table->entries[i].us_vdd < ATOM_VIRTUAL_VOLTAGE_ID0 &&
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2439
lookup_table->entries[i].us_vdd > highest_voltage)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2440
highest_voltage = lookup_table->entries[i].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2471
allowed_sclk_vdd_table->entries[allowed_sclk_vdd_table->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2473
allowed_mclk_vdd_table->entries[allowed_mclk_vdd_table->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2479
allowed_sclk_vdd_table->entries[allowed_sclk_vdd_table->count - 1].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2481
allowed_mclk_vdd_table->entries[allowed_mclk_vdd_table->count - 1].vddci;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2522
if (lookup_table->entries[dep_mclk_table->entries[dep_mclk_table->count-1].vddInd].us_vdd >= 1000)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2526
if (lookup_table->entries[i].us_vdd < 0xff01 && lookup_table->entries[i].us_vdd >= 1000) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2527
dep_mclk_table->entries[dep_mclk_table->count-1].vddInd = (uint8_t) i;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2657
smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2671
smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2685
smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2700
smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2714
smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].Voltage,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2728
smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2742
smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2776
vddc = (uint32_t)(tab->entries[i].Vddc);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2778
tab->entries[i].Vddc = (uint16_t)vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2863
data->min_vddc_in_pptable = (uint16_t)allowed_sclk_vddc_table->entries[0].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2864
data->max_vddc_in_pptable = (uint16_t)allowed_sclk_vddc_table->entries[allowed_sclk_vddc_table->count - 1].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2867
allowed_sclk_vddc_table->entries[allowed_sclk_vddc_table->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2869
allowed_mclk_vddc_table->entries[allowed_mclk_vddc_table->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2871
allowed_sclk_vddc_table->entries[allowed_sclk_vddc_table->count - 1].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2874
data->min_vddci_in_pptable = (uint16_t)allowed_mclk_vddci_table->entries[0].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2875
data->max_vddci_in_pptable = (uint16_t)allowed_mclk_vddci_table->entries[allowed_mclk_vddci_table->count - 1].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
2879
hwmgr->dyn_state.max_clock_voltage_on_ac.vddci = hwmgr->dyn_state.vddci_dependency_on_mclk->entries[hwmgr->dyn_state.vddci_dependency_on_mclk->count - 1].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
301
voltage_table->entries[i].value =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
302
voltage_dependency_table->entries[i].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
303
voltage_table->entries[i].smio_low = 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3197
if (tmp_sclk >= hwmgr->dyn_state.vddc_dependency_on_sclk->entries[count].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3212
if (tmp_sclk >= table_info->vdd_dep_on_sclk->entries[count].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3367
table_info->vdd_dep_on_sclk->entries[count].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3369
table_info->vdd_dep_on_sclk->entries[count].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3375
stable_pstate_sclk = table_info->vdd_dep_on_sclk->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3439
if (data->mclk_latency_table.entries[i].latency <= latency) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3442
if ((data->mclk_latency_table.entries[i].frequency >=
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3444
(data->mclk_latency_table.entries[i].frequency <=
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3446
mclk = data->mclk_latency_table.entries[i].frequency;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3656
performance_level->memory_clock = mclk_dep_table->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3659
performance_level->engine_clock = ((ATOM_Tonga_SCLK_Dependency_Table *)sclk_dep_table)->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3662
performance_level->engine_clock = ((ATOM_Polaris_SCLK_Dependency_Table *)sclk_dep_table)->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3671
performance_level->memory_clock = mclk_dep_table->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3675
performance_level->engine_clock = ((ATOM_Tonga_SCLK_Dependency_Table *)sclk_dep_table)->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3678
performance_level->engine_clock = ((ATOM_Polaris_SCLK_Dependency_Table *)sclk_dep_table)->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3712
if (dep_mclk_table->entries[0].clk !=
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3716
if (dep_mclk_table->entries[0].vddci !=
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3860
if (dep_mclk_table->entries[0].clk !=
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
3864
if (dep_mclk_table->entries[0].v !=
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
4264
dpm_table->sclk_table.dpm_levels[count].enabled = odn_sclk_table->entries[count].enabled;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
4265
dpm_table->sclk_table.dpm_levels[count].value = odn_sclk_table->entries[count].clock;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
4271
dpm_table->mclk_table.dpm_levels[count].enabled = odn_mclk_table->entries[count].enabled;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
4272
dpm_table->mclk_table.dpm_levels[count].value = odn_mclk_table->entries[count].clock;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5040
odn_sclk_table->entries[i].clock / 100,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5041
odn_sclk_table->entries[i].vddc);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5050
odn_mclk_table->entries[i].clock / 100,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5051
odn_mclk_table->entries[i].vddc);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5210
clocks->clock[i] = dep_sclk_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5215
clocks->clock[i] = sclk_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5247
clocks->clock[i] = dep_mclk_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5249
dep_mclk_table->entries[i].clk);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5255
clocks->clock[i] = mclk_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5289
if (dep_sclk_table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5291
dep_sclk_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5312
if (dep_mclk_table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5314
dep_mclk_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5315
data->mclk_latency_table.entries[data->mclk_latency_table.count].frequency =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5316
dep_mclk_table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5318
data->mclk_latency_table.entries[data->mclk_latency_table.count].latency =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5319
smu7_get_mem_latency(hwmgr, dep_mclk_table->entries[i].clk);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5375
if (dep_sclk_table->entries[i].clk >= watermarks->wm_clk_ranges[k].wm_min_eng_clk_in_khz / 10 &&
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5376
dep_sclk_table->entries[i].clk < watermarks->wm_clk_ranges[k].wm_max_eng_clk_in_khz / 10 &&
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5377
dep_mclk_table->entries[i].clk >= watermarks->wm_clk_ranges[k].wm_min_mem_clk_in_khz / 10 &&
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5378
dep_mclk_table->entries[i].clk < watermarks->wm_clk_ranges[k].wm_max_mem_clk_in_khz / 10) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5568
podn_dpm_table_in_backend->entries[input_level].clock = input_clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5569
podn_vdd_dep_in_backend->entries[input_level].clk = input_clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5570
podn_dpm_table_in_backend->entries[input_level].vddc = input_vol;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5571
podn_vdd_dep_in_backend->entries[input_level].vddc = input_vol;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
5572
podn_vdd_dep_in_backend->entries[input_level].vddgfx = input_vol;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
675
pcie_table->entries[i].gen_speed),
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
677
pcie_table->entries[i].lane_width));
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
808
allowed_vdd_sclk_table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
810
allowed_vdd_sclk_table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
822
allowed_vdd_mclk_table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
824
allowed_vdd_mclk_table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
832
data->dpm_table.vddc_table.dpm_levels[i].value = allowed_vdd_mclk_table->entries[i].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
833
data->dpm_table.vddc_table.dpm_levels[i].param1 = std_voltage_table->entries[i].Leakage;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
844
data->dpm_table.vddci_table.dpm_levels[i].value = allowed_vdd_mclk_table->entries[i].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
858
data->dpm_table.mvdd_table.dpm_levels[i].value = allowed_vdd_mclk_table->entries[i].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
901
dep_sclk_table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
904
dep_sclk_table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
912
hwmgr->platform_descriptor.overdriveLimit.engineClock = dep_sclk_table->entries[i-1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
918
dep_mclk_table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
920
dep_mclk_table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
928
hwmgr->platform_descriptor.overdriveLimit.memoryClock = dep_mclk_table->entries[i-1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
942
struct phm_odn_performance_level *entries;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
952
entries = odn_table->odn_core_clock_dpm_levels.entries;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
954
entries[i].clock = data->golden_dpm_table.sclk_table.dpm_levels[i].value;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
955
entries[i].enabled = true;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
956
entries[i].vddc = dep_sclk_table->entries[i].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
964
entries = odn_table->odn_memory_clock_dpm_levels.entries;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
966
entries[i].clock = data->golden_dpm_table.mclk_table.dpm_levels[i].value;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
967
entries[i].enabled = true;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
968
entries[i].vddc = dep_mclk_table->entries[i].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
994
|| min_vddc > dep_sclk_table->entries[0].vddc)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
995
min_vddc = dep_sclk_table->entries[0].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
998
|| max_vddc < dep_sclk_table->entries[dep_sclk_table->count-1].vddc)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.c
999
max_vddc = dep_sclk_table->entries[dep_sclk_table->count-1].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.h
179
phm_ppt_v1_clock_voltage_dependency_record entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu7_hwmgr.h
210
struct smu7_mclk_latency_entries entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1032
hwmgr->pstate_sclk = table->entries[0].clk / 100;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1035
hwmgr->pstate_sclk_peak = table->entries[table->count - 1].clk / 100;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
110
if (clock <= table->entries[i].clk)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
118
if (clock >= table->entries[i].clk)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1192
data->sclk_dpm.soft_min_clk = table->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1193
data->sclk_dpm.hard_min_clk = table->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1198
clock = table->entries[level].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1200
clock = table->entries[table->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1300
ptable->entries[ptable->count - 1].ecclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
140
if (clock <= ptable->entries[i].vclk)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1401
smu8_ps->levels[index].engineClock = table->entries[clock_info_index].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1402
smu8_ps->levels[index].vddcIndex = (uint8_t)table->entries[clock_info_index].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
148
if (clock >= ptable->entries[i].vclk)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1537
if (limits->vddc >= table->entries[i].v) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1538
info->level = table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1586
sclk_table->entries[i].clk / 100,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1679
clocks->clock[i] = table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1707
clocks->engine_max_clock = table->entries[level].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1709
clocks->engine_max_clock = table->entries[table->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1764
sclk = table->entries[sclk_index].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1786
vclk = uvd_table->entries[uvd_index].vclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1798
dclk = uvd_table->entries[uvd_index].dclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1810
ecclk = vce_table->entries[vce_index].ecclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
1922
ptable->entries[ptable->count - 1].vclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
265
table->sclk = dep_table->entries[dep_table->count-1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
267
(uint16_t)dep_table->entries[dep_table->count-1].v);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
279
table_clk_vlt = kzalloc_flex(*table_clk_vlt, entries, 8);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
287
table_clk_vlt->entries[0].clk = PP_DAL_POWERLEVEL_0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
288
table_clk_vlt->entries[0].v = 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
289
table_clk_vlt->entries[1].clk = PP_DAL_POWERLEVEL_1;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
290
table_clk_vlt->entries[1].v = 1;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
291
table_clk_vlt->entries[2].clk = PP_DAL_POWERLEVEL_2;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
292
table_clk_vlt->entries[2].v = 2;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
293
table_clk_vlt->entries[3].clk = PP_DAL_POWERLEVEL_3;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
294
table_clk_vlt->entries[3].v = 3;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
295
table_clk_vlt->entries[4].clk = PP_DAL_POWERLEVEL_4;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
296
table_clk_vlt->entries[4].v = 4;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
297
table_clk_vlt->entries[5].clk = PP_DAL_POWERLEVEL_5;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
298
table_clk_vlt->entries[5].v = 5;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
299
table_clk_vlt->entries[6].clk = PP_DAL_POWERLEVEL_6;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
300
table_clk_vlt->entries[6].v = 6;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
301
table_clk_vlt->entries[7].clk = PP_DAL_POWERLEVEL_7;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
302
table_clk_vlt->entries[7].v = 7;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
479
(i < vddc_table->count) ? (uint8_t)vddc_table->entries[i].v : 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
481
(i < vddc_table->count) ? vddc_table->entries[i].clk : 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
492
(i < vdd_gfx_table->count) ? (uint8_t)vdd_gfx_table->entries[i].v : 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
496
(i < acp_table->count) ? (uint8_t)acp_table->entries[i].v : 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
498
(i < acp_table->count) ? acp_table->entries[i].acpclk : 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
510
(i < uvd_table->count) ? (uint8_t)uvd_table->entries[i].v : 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
512
(i < uvd_table->count) ? uvd_table->entries[i].vclk : 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
522
(i < uvd_table->count) ? (uint8_t)uvd_table->entries[i].v : 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
524
(i < uvd_table->count) ? uvd_table->entries[i].dclk : 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
535
(i < vce_table->count) ? (uint8_t)vce_table->entries[i].v : 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
537
(i < vce_table->count) ? vce_table->entries[i].ecclk : 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
563
data->sclk_dpm.soft_min_clk = table->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
564
data->sclk_dpm.hard_min_clk = table->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
569
clock = table->entries[level].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
571
clock = table->entries[table->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
599
clock = table->entries[level].vclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
601
clock = table->entries[table->count - 1].vclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
629
clock = table->entries[level].ecclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
631
clock = table->entries[table->count - 1].ecclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
659
clock = table->entries[level].acpclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
661
clock = table->entries[table->count - 1].acpclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
702
data->sclk_dpm.soft_min_clk = table->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
706
data->sclk_dpm.soft_max_clk = table->entries[level].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
708
data->sclk_dpm.soft_max_clk = table->entries[table->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
79
if (clock <= ptable->entries[i].ecclk)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.c
87
if (clock >= ptable->entries[i].ecclk)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu8_hwmgr.h
96
struct smu8_display_phy_info_entry entries[SMU8_MAX_DISPLAYPHY_IDS];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
223
vvalue = vol_table->entries[i].value;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
227
if (vvalue == table->entries[j].value) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
234
table->entries[table->count].value = vvalue;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
235
table->entries[table->count].smio_low =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
236
vol_table->entries[i].smio_low;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
264
vol_table->entries[i].value = dep_table->entries[i].mvdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
265
vol_table->entries[i].smio_low = 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
292
vol_table->entries[i].value = dep_table->entries[i].vddci;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
293
vol_table->entries[i].smio_low = 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
320
vol_table->entries[i].value = lookup_table->entries[i].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
321
vol_table->entries[i].smio_low = 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
338
vol_table->entries[i] = vol_table->entries[i + diff];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
401
if (lookup_table->entries[i].us_vdd >= voltage)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
421
if (voltage_table->entries[i].value >= voltage)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
434
if (vddci_table->entries[i].value >= vddci)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
435
return vddci_table->entries[i].value;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
439
return vddci_table->entries[i-1].value;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
472
voltage_id = table_info->vdd_dep_on_sclk->entries[entry_id].vddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
473
if (lookup_table->entries[voltage_id].us_vdd == virtual_voltage_id)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
482
*sclk = table_info->vdd_dep_on_sclk->entries[entry_id].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
498
table_clk_vlt = kzalloc_flex(*table_clk_vlt, entries, 4);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
505
table_clk_vlt->entries[0].clk = PP_DAL_POWERLEVEL_ULTRALOW;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
508
table_clk_vlt->entries[0].v = 700;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
510
table_clk_vlt->entries[0].v = 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
511
table_clk_vlt->entries[1].clk = PP_DAL_POWERLEVEL_LOW;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
514
table_clk_vlt->entries[1].v = 740;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
516
table_clk_vlt->entries[1].v = 720;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
517
table_clk_vlt->entries[2].clk = PP_DAL_POWERLEVEL_NOMINAL;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
520
table_clk_vlt->entries[2].v = 800;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
522
table_clk_vlt->entries[2].v = 810;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
523
table_clk_vlt->entries[3].clk = PP_DAL_POWERLEVEL_PERFORMANCE;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
524
table_clk_vlt->entries[3].v = 900;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
559
if (dal_power_level == table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
560
req_vddc = table->entries[i].v;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
567
if (req_vddc <= vddc_table->entries[i].vddc) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
568
req_volt = (((uint32_t)vddc_table->entries[i].vddc) * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
698
dep_table->entries[i].clk = allowed_dep_table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
699
dep_table->entries[i].vddInd = allowed_dep_table->entries[i].vddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
700
dep_table->entries[i].vdd_offset = allowed_dep_table->entries[i].vdd_offset;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
701
dep_table->entries[i].vddc = allowed_dep_table->entries[i].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
702
dep_table->entries[i].vddgfx = allowed_dep_table->entries[i].vddgfx;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
703
dep_table->entries[i].vddci = allowed_dep_table->entries[i].vddci;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
704
dep_table->entries[i].mvdd = allowed_dep_table->entries[i].mvdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
705
dep_table->entries[i].phases = allowed_dep_table->entries[i].phases;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
706
dep_table->entries[i].cks_enable = allowed_dep_table->entries[i].cks_enable;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/smu_helper.c
707
dep_table->entries[i].cks_voffset = allowed_dep_table->entries[i].cks_voffset;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1041
vvalue = vol_table->entries[i].value;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1045
if (vvalue == table->entries[j].value) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1052
table->entries[table->count].value = vvalue;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1053
table->entries[table->count].smio_low =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1054
vol_table->entries[i].smio_low;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1080
vol_table->entries[i].value = dep_table->entries[i].mvdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1081
vol_table->entries[i].smio_low = 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1107
vol_table->entries[i].value = dep_table->entries[i].vddci;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1108
vol_table->entries[i].smio_low = 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1133
vol_table->entries[i].value = dep_table->entries[i].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1134
vol_table->entries[i].smio_low = 0;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1158
vol_table->entries[i] = vol_table->entries[i + diff];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1249
dep_table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1251
dep_table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1277
bios_pcie_table->entries[i].gen_speed;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1284
bios_pcie_table->entries[i].lane_width);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1290
bios_pcie_table->entries[i].pcie_sclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1385
dep_mm_table->entries[i].eclk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1387
dep_mm_table->entries[i].eclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1400
dep_mm_table->entries[i].vclk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1402
dep_mm_table->entries[i].vclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1413
dep_mm_table->entries[i].dclk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1415
dep_mm_table->entries[i].dclk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1639
if (dep_on_sclk->entries[i].clk == gfx_clock)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1694
if (dep_on_soc->entries[i].clk >= soc_clock)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1700
if (dep_on_soc->entries[i].clk == soc_clock)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1716
*current_vol_index = (uint8_t)(dep_on_soc->entries[i].vddInd);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1800
soc_vid = (uint8_t)convert_to_vid(vddc_lookup_table->entries[i].us_vdd);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1843
if (dep_on_mclk->entries[i].clk == mem_clock)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1857
(uint8_t)(convert_to_vid(dep_on_mclk->entries[i].mvdd));
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1859
(uint8_t)(dep_on_mclk->entries[i].vddInd);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1954
clk = (uint16_t)(dep_table->entries[i].clk / 100);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
1956
entries[dep_table->entries[i].vddInd].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
2008
if (dep_table->entries[i].eclk == eclock)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
2009
*current_soc_vol = dep_table->entries[i].vddcInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
2130
if (dep_table->entries[i].vclk ==
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
2132
dep_table->entries[i].dclk ==
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
2135
dep_table->entries[i].vddcInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
2142
pp_table->UvdDpmVoltageIndex[i] = dep_table->entries[j].vddcInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
2160
pp_table->CksEnable[i] = dep_table->entries[i].cks_enable;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
2161
pp_table->CksVidOffset[i] = (uint8_t)(dep_table->entries[i].cks_voffset
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
2244
convert_to_vid((uint8_t)(dep_table->entries[i].sclk_offset));
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
2540
if (dep_table->entries[i].vddc != odn_dep_table->entries[i].vddc) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
2549
if (dep_table->entries[i].vddc != odn_dep_table->entries[i].vddc) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3032
hwmgr->pstate_sclk = table_info->vdd_dep_on_sclk->entries[VEGA10_UMD_PSTATE_GFXCLK_LEVEL].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3033
hwmgr->pstate_mclk = table_info->vdd_dep_on_mclk->entries[VEGA10_UMD_PSTATE_MCLK_LEVEL].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3035
hwmgr->pstate_sclk = table_info->vdd_dep_on_sclk->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3036
hwmgr->pstate_mclk = table_info->vdd_dep_on_mclk->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3039
hwmgr->pstate_sclk_peak = table_info->vdd_dep_on_sclk->entries[table_info->vdd_dep_on_sclk->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3040
hwmgr->pstate_mclk_peak = table_info->vdd_dep_on_mclk->entries[table_info->vdd_dep_on_mclk->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3205
performance_level->soc_clock = socclk_dep_table->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3207
performance_level->gfx_clock = gfxclk_dep_table->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3209
performance_level->mem_clock = mclk_dep_table->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3214
performance_level->soc_clock = socclk_dep_table->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3222
gfxclk_dep_table->entries[4].ulClk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3224
performance_level->gfx_clock = gfxclk_dep_table->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3227
patom_record_V2 = (ATOM_Vega10_GFXCLK_Dependency_Record_V2 *)gfxclk_dep_table->entries;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3235
performance_level->mem_clock = mclk_dep_table->entries
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
327
od_lookup_table->entries[i].us_vdd = vddc_lookup_table->entries[i].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3346
table_info->vdd_dep_on_sclk->entries[count].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3348
table_info->vdd_dep_on_sclk->entries[count].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3354
stable_pstate_sclk = table_info->vdd_dep_on_sclk->entries[0].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3406
if ((data->mclk_latency_table.entries[i].latency <= latency) &&
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3407
(data->mclk_latency_table.entries[i].frequency >=
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3409
(data->mclk_latency_table.entries[i].frequency <=
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3411
mclk = data->mclk_latency_table.entries[i].frequency;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
342
odn_table->max_vddc = dep_table[0]->entries[dep_table[0]->count - 1].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
344
odn_table->min_vddc = dep_table[0]->entries[0].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
347
od_table[2]->entries[i].clk = hwmgr->platform_descriptor.overdriveLimit.memoryClock > od_table[2]->entries[i].clk ?
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
349
od_table[2]->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3495
dpm_table->gfx_table.dpm_levels[count].value = odn_clk_table->entries[count].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
350
od_table[2]->entries[i].vddc = odn_table->max_vddc > od_table[2]->entries[i].vddc ?
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3501
dpm_table->mem_table.dpm_levels[count].value = odn_clk_table->entries[count].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
352
od_table[2]->entries[i].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
3636
return vdd_dep_table_on_mclk->entries[NUM_UCLK_DPM_LEVELS - 1].vddInd + 1;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4088
if(mclk_table->entries[i].clk >= frequency)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4414
if (dep_table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4416
dep_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4435
if (dep_table->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4438
dep_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4439
data->mclk_latency_table.entries[j].frequency =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4440
dep_table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4442
data->mclk_latency_table.entries[j].latency = 25;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4459
clocks->data[i].clocks_in_khz = dep_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4475
clocks->data[i].clocks_in_khz = dep_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4535
clocks->data[i].clocks_in_khz = dep_table->entries[i].clk * 10;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4537
entries[dep_table->entries[i].vddInd].us_vdd);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4791
i, podn_vdd_dep->entries[i].clk / 100,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4792
podn_vdd_dep->entries[i].vddc);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4802
i, podn_vdd_dep->entries[i].clk/100,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
4803
podn_vdd_dep->entries[i].vddc);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
539
voltage_id = table_info->vdd_dep_on_socclk->entries[entry_id].vddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5397
od_vddc_lookup_table->entries[i].us_vdd = podn_vdd_dep->entries[i].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
540
if (lookup_table->entries[voltage_id].us_vdd == virtual_voltage_id)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5402
if (od_vddc_lookup_table->entries[j].us_vdd >
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5403
podn_vdd_dep->entries[i].vddc)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5408
od_vddc_lookup_table->entries[j].us_vdd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5409
podn_vdd_dep->entries[i].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5412
podn_vdd_dep->entries[i].vddInd = j;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5416
if (dep_table->entries[i].vddInd == podn_vdd_dep->entries[podn_vdd_dep->count-1].vddInd &&
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5417
dep_table->entries[i].clk < podn_vdd_dep->entries[podn_vdd_dep->count-1].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5420
(dep_table->entries[i].clk < podn_vdd_dep->entries[podn_vdd_dep->count - 1].clk); i++) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5421
podn_vdd_dep_on_socclk->entries[i].clk = podn_vdd_dep->entries[podn_vdd_dep->count-1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5422
dpm_table->dpm_levels[i].value = podn_vdd_dep_on_socclk->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5426
dpm_table->dpm_levels[i].value = dep_table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5427
podn_vdd_dep_on_socclk->entries[i].vddc = dep_table->entries[i].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5428
podn_vdd_dep_on_socclk->entries[i].vddInd = dep_table->entries[i].vddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5429
podn_vdd_dep_on_socclk->entries[i].clk = dep_table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5432
if (podn_vdd_dep_on_socclk->entries[podn_vdd_dep_on_socclk->count - 1].clk <
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5433
podn_vdd_dep->entries[podn_vdd_dep->count - 1].clk) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5435
podn_vdd_dep_on_socclk->entries[podn_vdd_dep_on_socclk->count - 1].clk =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5436
podn_vdd_dep->entries[podn_vdd_dep->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5438
podn_vdd_dep->entries[podn_vdd_dep->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5440
if (podn_vdd_dep_on_socclk->entries[podn_vdd_dep_on_socclk->count - 1].vddInd <
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5441
podn_vdd_dep->entries[podn_vdd_dep->count - 1].vddInd) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5443
podn_vdd_dep_on_socclk->entries[podn_vdd_dep_on_socclk->count - 1].vddInd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5444
podn_vdd_dep->entries[podn_vdd_dep->count - 1].vddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
548
*socclk = table_info->vdd_dep_on_socclk->entries[entry_id].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5506
podn_vdd_dep_table->entries[input_level].clk = input_clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
5507
podn_vdd_dep_table->entries[input_level].vddc = input_vol;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
580
if (socclk_table->entries[j].clk == sclk &&
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
581
socclk_table->entries[j].cks_enable == 0) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
652
&lookup_table->entries[i].us_vdd, leakage_table);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
691
voltage_id = vdt->entries[entry_id].vddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
692
vdt->entries[entry_id].vddc =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
693
table_info->vddc_lookup_table->entries[voltage_id].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
698
voltage_id = mm_table->entries[entry_id].vddcInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
699
mm_table->entries[entry_id].vddc =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
700
table_info->vddc_lookup_table->entries[voltage_id].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
704
voltage_id = mclk_table->entries[entry_id].vddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
705
mclk_table->entries[entry_id].vddc =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
706
table_info->vddc_lookup_table->entries[voltage_id].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
707
voltage_id = mclk_table->entries[entry_id].vddciInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
708
mclk_table->entries[entry_id].vddci =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
709
table_info->vddci_lookup_table->entries[voltage_id].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
710
voltage_id = mclk_table->entries[entry_id].mvddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
711
mclk_table->entries[entry_id].mvdd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
712
table_info->vddmem_lookup_table->entries[voltage_id].us_vdd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
733
if (lookup_table->entries[j].us_vdd <
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
734
lookup_table->entries[j - 1].us_vdd) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
735
swap(lookup_table->entries[j - 1],
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
736
lookup_table->entries[j]);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
795
allowed_sclk_vdd_table->entries[allowed_sclk_vdd_table->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
797
allowed_mclk_vdd_table->entries[allowed_mclk_vdd_table->count - 1].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
799
allowed_sclk_vdd_table->entries[allowed_sclk_vdd_table->count - 1].vddc;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.c
801
allowed_mclk_vdd_table->entries[allowed_mclk_vdd_table->count - 1].vddci;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.h
221
struct vega10_mclk_latency_entries entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.h
286
struct phm_ppt_v1_clock_voltage_dependency_record entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_hwmgr.h
291
struct phm_ppt_v1_voltage_lookup_record entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
166
ATOM_Vega10_GFXCLK_Dependency_Record entries[]; /* Dynamically allocate entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
172
ATOM_Vega10_MCLK_Dependency_Record entries[]; /* Dynamically allocate entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
178
ATOM_Vega10_CLK_Dependency_Record entries[]; /* Dynamically allocate entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
184
ATOM_Vega10_CLK_Dependency_Record entries[]; /* Dynamically allocate entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
190
ATOM_Vega10_CLK_Dependency_Record entries[]; /* Dynamically allocate entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
196
ATOM_Vega10_CLK_Dependency_Record entries[]; /* Dynamically allocate entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
202
ATOM_Vega10_CLK_Dependency_Record entries[]; /* Dynamically allocate entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
216
ATOM_Vega10_MM_Dependency_Record entries[]; /* Dynamically allocate entries */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
228
ATOM_Vega10_PCIE_Record entries[]; /* Dynamically allocate entries. */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
238
ATOM_Vega10_Voltage_Lookup_Record entries[]; /* Dynamically allocate entries */
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
330
ATOM_Vega10_VCE_State_Record entries[];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_pptable.h
430
ATOM_Vega10_Hard_Limit_Record entries[];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
1042
table = kzalloc_flex(*table, entries, max_levels);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
1049
table->entries[i].us_vdd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
1050
le16_to_cpu(vddc_lookup_pp_tables->entries[i].usVdd);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
319
(ATOM_Vega10_GFXCLK_Dependency_Record_V2 *)gfxclk_dep_table->entries;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
353
mm_table = kzalloc_flex(*mm_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
361
mm_dependency_record = &mm_dependency_table->entries[i];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
362
mm_table->entries[i].vddcInd = mm_dependency_record->ucVddcInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
363
mm_table->entries[i].samclock =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
365
mm_table->entries[i].eclk = le32_to_cpu(mm_dependency_record->ulEClk);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
366
mm_table->entries[i].vclk = le32_to_cpu(mm_dependency_record->ulVClk);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
367
mm_table->entries[i].dclk = le32_to_cpu(mm_dependency_record->ulDClk);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
576
clk_table = kzalloc_flex(*clk_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
584
clk_table->entries[i].vddInd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
585
clk_dep_table->entries[i].ucVddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
586
clk_table->entries[i].clk =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
587
le32_to_cpu(clk_dep_table->entries[i].ulClk);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
606
mclk_table = kzalloc_flex(*mclk_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
614
mclk_table->entries[i].vddInd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
615
mclk_dep_table->entries[i].ucVddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
616
mclk_table->entries[i].vddciInd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
617
mclk_dep_table->entries[i].ucVddciInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
618
mclk_table->entries[i].mvddInd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
619
mclk_dep_table->entries[i].ucVddMemInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
620
mclk_table->entries[i].clk =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
621
le32_to_cpu(mclk_dep_table->entries[i].ulMemClk);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
643
clk_table = kzalloc_flex(*clk_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
652
clk_table->entries[i].vddInd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
653
clk_dep_table->entries[i].ucVddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
654
clk_table->entries[i].clk =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
655
le32_to_cpu(clk_dep_table->entries[i].ulClk);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
656
clk_table->entries[i].cks_enable =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
657
(((le16_to_cpu(clk_dep_table->entries[i].usCKSVOffsetandDisable) & 0x8000)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
659
clk_table->entries[i].cks_voffset =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
660
le16_to_cpu(clk_dep_table->entries[i].usCKSVOffsetandDisable) & 0x7F;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
661
clk_table->entries[i].sclk_offset =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
662
le16_to_cpu(clk_dep_table->entries[i].usAVFSOffset);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
665
patom_record_v2 = (ATOM_Vega10_GFXCLK_Dependency_Record_V2 *)clk_dep_table->entries;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
667
clk_table->entries[i].vddInd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
669
clk_table->entries[i].clk =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
671
clk_table->entries[i].cks_enable =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
674
clk_table->entries[i].cks_voffset =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
676
clk_table->entries[i].sclk_offset =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
705
clk_table = kzalloc_flex(*clk_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
713
clk_table->entries[i].vddInd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
714
clk_dep_table->entries[i].ucVddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
715
clk_table->entries[i].clk =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
716
le32_to_cpu(clk_dep_table->entries[i].ulClk);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
751
clk_dep_table->entries[clk_dep_table->ucNumEntries - 1].ulClk < 90000)
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
758
clk_table = kzalloc_flex(*clk_table, entries, num_entries);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
765
clk_table->entries[i].vddInd =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
766
clk_dep_table->entries[i].ucVddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
767
clk_table->entries[i].clk =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
768
le32_to_cpu(clk_dep_table->entries[i].ulClk);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
772
clk_table->entries[i].vddInd = clk_dep_table->entries[i-1].ucVddInd;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
773
clk_table->entries[i].clk = 90000;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
796
pcie_table = kzalloc_flex(*pcie_table, entries,
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
812
pcie_table->entries[i].gen_speed =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
813
atom_pcie_table->entries[i].ucPCIEGenSpeed;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
814
pcie_table->entries[i].lane_width =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
815
atom_pcie_table->entries[i].ucPCIELaneWidth;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
816
pcie_table->entries[i].pcie_sclk =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
817
atom_pcie_table->entries[i].ulLCLK;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
834
limits->sclk = le32_to_cpu(limit_table->entries[0].ulSOCCLKLimit);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
835
limits->mclk = le32_to_cpu(limit_table->entries[0].ulMCLKLimit);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
836
limits->gfxclk = le32_to_cpu(limit_table->entries[0].ulGFXCLKLimit);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
837
limits->vddc = le16_to_cpu(limit_table->entries[0].usVddcLimit);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
838
limits->vddci = le16_to_cpu(limit_table->entries[0].usVddciLimit);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
839
limits->vddmem = le16_to_cpu(limit_table->entries[0].usVddMemLimit);
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega10_processpptables.c
862
table->values[i] = (uint32_t)clk_volt_pp_table->entries[i].clk;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega12_hwmgr.c
1904
data->mclk_latency_table.entries[i].frequency = dpm_table->dpm_levels[i].value * 100;
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega12_hwmgr.c
1906
data->mclk_latency_table.entries[i].latency =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega12_hwmgr.c
2445
if (data->mclk_latency_table.entries[i].latency <= latency) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega12_hwmgr.h
114
uint32_t entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega12_hwmgr.h
211
struct vega12_mclk_latency_entries entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega12_hwmgr.h
284
entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega20_hwmgr.c
2858
data->mclk_latency_table.entries[i].frequency =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega20_hwmgr.c
2861
data->mclk_latency_table.entries[i].latency =
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega20_hwmgr.c
3862
if (data->mclk_latency_table.entries[i].latency <= latency) {
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega20_hwmgr.h
167
uint32_t entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega20_hwmgr.h
272
struct vega20_mclk_latency_entries entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/hwmgr/vega20_hwmgr.h
347
entries[MAX_REGULAR_DPM_NUMBER];
drivers/gpu/drm/amd/pm/powerplay/inc/hardwaremanager.h
395
struct phm_odn_performance_level entries[8];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
126
struct phm_clock_voltage_dependency_record entries[];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
143
struct phm_uvd_clock_voltage_dependency_record entries[];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
153
struct phm_acp_clock_voltage_dependency_record entries[];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
164
struct phm_phase_shedding_limits_record entries[];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
169
struct phm_vceclock_voltage_dependency_record entries[];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
174
struct phm_uvdclock_voltage_dependency_record entries[];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
179
struct phm_samuclock_voltage_dependency_record entries[];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
184
struct phm_acpclock_voltage_dependency_record entries[];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
189
struct phm_vce_clock_voltage_dependency_record entries[];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
395
union phm_cac_leakage_record entries[];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
406
struct phm_samu_clock_voltage_dependency_record entries[];
drivers/gpu/drm/amd/pm/powerplay/inc/hwmgr.h
517
struct phm_vq_budgeting_record entries[0];
drivers/gpu/drm/amd/pm/powerplay/inc/smu71_discrete.h
179
SMU71_Discrete_MCArbDramTimingTableEntry entries[SMU__NUM_SCLK_DPM_STATE][SMU__NUM_MCLK_DPM_LEVELS];
drivers/gpu/drm/amd/pm/powerplay/inc/smu72_discrete.h
166
SMU72_Discrete_MCArbDramTimingTableEntry entries[SMU__NUM_SCLK_DPM_STATE][SMU__NUM_MCLK_DPM_LEVELS];
drivers/gpu/drm/amd/pm/powerplay/inc/smu73_discrete.h
150
SMU73_Discrete_MCArbDramTimingTableEntry entries[SMU__NUM_SCLK_DPM_STATE][SMU__NUM_MCLK_DPM_LEVELS];
drivers/gpu/drm/amd/pm/powerplay/inc/smu74_discrete.h
179
SMU74_Discrete_MCArbDramTimingTableEntry entries[SMU__NUM_SCLK_DPM_STATE][SMU__NUM_MCLK_DPM_LEVELS];
drivers/gpu/drm/amd/pm/powerplay/inc/smu75_discrete.h
192
SMU75_Discrete_MCArbDramTimingTableEntry entries[SMU__NUM_SCLK_DPM_STATE][SMU__NUM_MCLK_DPM_LEVELS];
drivers/gpu/drm/amd/pm/powerplay/inc/smu7_discrete.h
235
SMU7_Discrete_MCArbDramTimingTableEntry entries[SMU__NUM_SCLK_DPM_STATE][SMU__NUM_MCLK_DPM_LEVELS];
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1165
if (memory_clock < pl->entries[i].Mclk) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1360
if (mclk <= hwmgr->dyn_state.mvdd_dependency_on_mclk->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1362
voltage->Voltage = data->mvdd_voltage_table.entries[i].value;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1531
uvd_table->entries[count].vclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1533
uvd_table->entries[count].dclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1535
uvd_table->entries[count].v * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1572
table->VceLevel[count].Frequency = vce_table->entries[count].evclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1574
vce_table->entries[count].v * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1604
table->AcpLevel[count].Frequency = acp_table->entries[count].acpclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1605
table->AcpLevel[count].MinVoltage = acp_table->entries[count].v;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1665
&arb_regs.entries[i][j]);
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1863
if (hwmgr->dyn_state.vddc_dependency_on_sclk->entries[level].clk
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
1873
if (hwmgr->dyn_state.vddc_dependency_on_mclk->entries[level].clk
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
287
if (allowed_clock_voltage_table->entries[i].clk >= clock) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
288
*vol = allowed_clock_voltage_table->entries[i].v;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
2882
if (uvd_table->entries[i].v <= max_vddc)
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
2914
if (vce_table->entries[i].v <= max_vddc)
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
293
*vol = allowed_clock_voltage_table->entries[i - 1].v;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
381
if (sclk < pl->entries[i].Sclk) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
594
lo_vid[i] = convert_to_vid(hwmgr->dyn_state.cac_leakage_table->entries[i].Vddc1);
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
595
hi_vid[i] = convert_to_vid(hwmgr->dyn_state.cac_leakage_table->entries[i].Vddc2);
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
596
hi2_vid[i] = convert_to_vid(hwmgr->dyn_state.cac_leakage_table->entries[i].Vddc3);
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
598
lo_vid[i] = convert_to_vid(hwmgr->dyn_state.cac_leakage_table->entries[i].Vddc);
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
599
hi_vid[i] = convert_to_vid(hwmgr->dyn_state.cac_leakage_table->entries[i].Leakage);
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
618
vid[i] = convert_to_vid(data->vddc_voltage_table.entries[i].value);
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
783
if (tab->value == hwmgr->dyn_state.vddc_dependency_on_sclk->entries[v_index].v) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
786
*lo = hwmgr->dyn_state.cac_leakage_table->entries[v_index].Vddc * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
787
*hi = (uint16_t)(hwmgr->dyn_state.cac_leakage_table->entries[v_index].Leakage * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
790
*lo = hwmgr->dyn_state.cac_leakage_table->entries[hwmgr->dyn_state.cac_leakage_table->count - 1].Vddc * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
791
*hi = (uint16_t)(hwmgr->dyn_state.cac_leakage_table->entries[hwmgr->dyn_state.cac_leakage_table->count - 1].Leakage * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
799
if (tab->value <= hwmgr->dyn_state.vddc_dependency_on_sclk->entries[v_index].v) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
802
*lo = hwmgr->dyn_state.cac_leakage_table->entries[v_index].Vddc * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
803
*hi = (uint16_t)(hwmgr->dyn_state.cac_leakage_table->entries[v_index].Leakage) * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
806
*lo = hwmgr->dyn_state.cac_leakage_table->entries[hwmgr->dyn_state.cac_leakage_table->count - 1].Vddc * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
807
*hi = (uint16_t)(hwmgr->dyn_state.cac_leakage_table->entries[hwmgr->dyn_state.cac_leakage_table->count - 1].Leakage * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
851
&(data->vddc_voltage_table.entries[count]),
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
858
table->Smio[count] |= data->vddc_voltage_table.entries[count].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
859
table->SmioMaskVddcVid |= data->vddc_voltage_table.entries[count].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
881
&(data->vddci_voltage_table.entries[count]),
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
886
table->Smio[count] |= data->vddci_voltage_table.entries[count].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
887
table->SmioMaskVddciVid |= data->vddci_voltage_table.entries[count].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
909
&(data->mvdd_voltage_table.entries[count]),
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
914
table->Smio[count] |= data->mvdd_voltage_table.entries[count].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
915
table->SmioMaskMvddVid |= data->mvdd_voltage_table.entries[count].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
967
if (ulv_voltage > hwmgr->dyn_state.vddc_dependency_on_sclk->entries[0].v)
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
971
state->VddcOffset = (uint16_t)(hwmgr->dyn_state.vddc_dependency_on_sclk->entries[0].v - ulv_voltage);
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
974
if (ulv_voltage > hwmgr->dyn_state.vddc_dependency_on_sclk->entries[0].v)
drivers/gpu/drm/amd/pm/powerplay/smumgr/ci_smumgr.c
978
(hwmgr->dyn_state.vddc_dependency_on_sclk->entries[0].v - ulv_voltage)
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1282
if (mclk <= table_info->vdd_dep_on_mclk->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1283
smio_pat->Voltage = data->mvdd_voltage_table.entries[i].value;
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1433
table->VceLevel[count].Frequency = mm_table->entries[count].eclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1436
(mm_table->entries[count].vddc * VOLTAGE_SCALE) << VDDC_SHIFT;
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1438
((mm_table->entries[count].vddc - VDDC_VDDCI_DELTA) *
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1472
table->AcpLevel[count].Frequency = mm_table->entries[count].aclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1473
table->AcpLevel[count].MinVoltage |= (mm_table->entries[count].vddc *
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1475
table->AcpLevel[count].MinVoltage |= ((mm_table->entries[count].vddc -
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1537
&arb_regs.entries[i][j]);
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1569
table->UvdLevel[count].VclkFrequency = mm_table->entries[count].vclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1570
table->UvdLevel[count].DclkFrequency = mm_table->entries[count].dclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1571
table->UvdLevel[count].MinVoltage |= (mm_table->entries[count].vddc *
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1573
table->UvdLevel[count].MinVoltage |= ((mm_table->entries[count].vddc -
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1641
if (table_info->vdd_dep_on_sclk->entries[level].clk >=
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1650
if (table_info->vdd_dep_on_mclk->entries[level].clk >=
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1702
sclk_table->entries[i].cks_enable << i;
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1704
(sclk_table->entries[i].clk/100) / 10000 + 3571 + 75 - ro) * 1000 /
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1705
(4026 - (13924 * (sclk_table->entries[i].clk/100) / 10000)));
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1707
(sclk_table->entries[i].clk/100) / 10000 + 3320 + 45 - ro) * 1000 /
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1708
(3664 - (11454 * (sclk_table->entries[i].clk/100) / 10000)));
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
1711
sclk_table->entries[i].cks_voffset) * 100 / 625) + 1);
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
368
if (dep_table->entries[i].clk >= clock) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
369
*voltage |= (dep_table->entries[i].vddc *
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
374
else if (dep_table->entries[i].vddci)
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
375
*voltage |= (dep_table->entries[i].vddci *
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
379
(dep_table->entries[i].vddc -
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
387
else if (dep_table->entries[i].mvdd)
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
388
*mvdd = (uint32_t) dep_table->entries[i].mvdd *
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
397
*voltage |= (dep_table->entries[i - 1].vddc * VOLTAGE_SCALE) << VDDC_SHIFT;
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
402
else if (dep_table->entries[i-1].vddci) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
404
(dep_table->entries[i].vddc -
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
411
else if (dep_table->entries[i].mvdd)
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
412
*mvdd = (uint32_t) dep_table->entries[i - 1].mvdd * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
772
data->vddc_voltage_table.entries[count].value);
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
774
convert_to_vid(lookup_table->entries[index].us_cac_low);
drivers/gpu/drm/amd/pm/powerplay/smumgr/fiji_smumgr.c
776
convert_to_vid(lookup_table->entries[index].us_cac_high);
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
1218
if (memory_clock < pl->entries[i].Mclk) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
1405
if (mclk <= hwmgr->dyn_state.mvdd_dependency_on_mclk->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
1407
voltage->Voltage = data->mvdd_voltage_table.entries[i].value;
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
1626
&arb_regs.entries[i][j]);
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
1829
if (hwmgr->dyn_state.vddc_dependency_on_sclk->entries[level].clk
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
1839
if (hwmgr->dyn_state.vddc_dependency_on_mclk->entries[level].clk
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
405
lo_vid[i] = convert_to_vid(hwmgr->dyn_state.cac_leakage_table->entries[i].Vddc1);
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
406
hi_vid[i] = convert_to_vid(hwmgr->dyn_state.cac_leakage_table->entries[i].Vddc2);
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
427
vid[i] = convert_to_vid(data->vddc_voltage_table.entries[i].value);
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
518
if (allowed_clock_voltage_table->entries[i].clk >= clock) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
519
*vol = allowed_clock_voltage_table->entries[i].v;
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
525
*vol = allowed_clock_voltage_table->entries[i - 1].v;
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
555
if (tab->value == hwmgr->dyn_state.vddc_dependency_on_sclk->entries[v_index].v) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
558
*lo = hwmgr->dyn_state.cac_leakage_table->entries[v_index].Vddc * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
559
*hi = (uint16_t)(hwmgr->dyn_state.cac_leakage_table->entries[v_index].Leakage * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
562
*lo = hwmgr->dyn_state.cac_leakage_table->entries[hwmgr->dyn_state.cac_leakage_table->count - 1].Vddc * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
563
*hi = (uint16_t)(hwmgr->dyn_state.cac_leakage_table->entries[hwmgr->dyn_state.cac_leakage_table->count - 1].Leakage * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
575
if (tab->value <= hwmgr->dyn_state.vddc_dependency_on_sclk->entries[v_index].v) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
578
*lo = hwmgr->dyn_state.cac_leakage_table->entries[v_index].Vddc * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
579
*hi = (uint16_t)(hwmgr->dyn_state.cac_leakage_table->entries[v_index].Leakage) * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
582
*lo = hwmgr->dyn_state.cac_leakage_table->entries[hwmgr->dyn_state.cac_leakage_table->count - 1].Vddc * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
583
*hi = (uint16_t)(hwmgr->dyn_state.cac_leakage_table->entries[hwmgr->dyn_state.cac_leakage_table->count - 1].Leakage * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
627
&(data->vddc_voltage_table.entries[count]),
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
633
table->VddcLevel[count].Smio |= data->vddc_voltage_table.entries[count].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
654
&(data->vddci_voltage_table.entries[count]),
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
658
table->VddciLevel[count].Smio |= data->vddci_voltage_table.entries[count].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
679
&(data->mvdd_voltage_table.entries[count]),
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
683
table->MvddLevel[count].Smio |= data->mvdd_voltage_table.entries[count].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
734
if (ulv_voltage > hwmgr->dyn_state.vddc_dependency_on_sclk->entries[0].v)
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
738
state->VddcOffset = (uint16_t)(hwmgr->dyn_state.vddc_dependency_on_sclk->entries[0].v - ulv_voltage);
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
741
if (ulv_voltage > hwmgr->dyn_state.vddc_dependency_on_sclk->entries[0].v)
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
745
(hwmgr->dyn_state.vddc_dependency_on_sclk->entries[0].v - ulv_voltage)
drivers/gpu/drm/amd/pm/powerplay/smumgr/iceland_smumgr.c
883
if (sclk < pl->entries[i].Sclk) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1263
if (mclk <= table_info->vdd_dep_on_mclk->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1264
smio_pat->Voltage = data->mvdd_voltage_table.entries[i].value;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1382
table->VceLevel[count].Frequency = mm_table->entries[count].eclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1385
(mm_table->entries[count].vddc * VOLTAGE_SCALE) << VDDC_SHIFT;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1389
mm_table->entries[count].vddc - VDDC_VDDCI_DELTA);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1391
vddci = mm_table->entries[count].vddc - VDDC_VDDCI_DELTA;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1432
table->SamuLevel[count].Frequency = mm_table->entries[count].samclock;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1434
(mm_table->entries[count].vddc * VOLTAGE_SCALE) << VDDC_SHIFT;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1438
mm_table->entries[count].vddc - VDDC_VDDCI_DELTA);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1440
vddci = mm_table->entries[count].vddc - VDDC_VDDCI_DELTA;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1503
&arb_regs.entries[i][j]);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1538
table->UvdLevel[count].VclkFrequency = mm_table->entries[count].vclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1539
table->UvdLevel[count].DclkFrequency = mm_table->entries[count].dclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1540
table->UvdLevel[count].MinVoltage |= (mm_table->entries[count].vddc *
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1545
mm_table->entries[count].vddc - VDDC_VDDCI_DELTA);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1547
vddci = mm_table->entries[count].vddc - VDDC_VDDCI_DELTA;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1628
if (table_info->vdd_dep_on_sclk->entries[level].clk >=
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1637
if (table_info->vdd_dep_on_mclk->entries[level].clk >=
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1673
sclk_table->entries[i].cks_enable << i;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1675
volt_without_cks = (uint32_t)((2753594000U + (sclk_table->entries[i].clk/100) * 136418 - (ro - 70) * 1000000) / \
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1676
(2424180 - (sclk_table->entries[i].clk/100) * 1132925/1000));
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1677
volt_with_cks = (uint32_t)((2797202000U + sclk_table->entries[i].clk/100 * 3232 - (ro - 65) * 1000000) / \
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1678
(2522480 - sclk_table->entries[i].clk/100 * 115764/100));
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1680
volt_without_cks = (uint32_t)((2416794800U + (sclk_table->entries[i].clk/100) * 1476925/10 - (ro - 50) * 1000000) / \
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1681
(2625416 - (sclk_table->entries[i].clk/100) * (12586807/10000)));
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1682
volt_with_cks = (uint32_t)((2999656000U - sclk_table->entries[i].clk/100 * 392803 - (ro - 44) * 1000000) / \
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1683
(3422454 - sclk_table->entries[i].clk/100 * (18886376/10000)));
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1688
sclk_table->entries[i].cks_voffset) * 100 + 624) / 625);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1864
AVFS_meanNsigma.Static_Voltage_Offset[i] = (uint8_t)(sclk_table->entries[i].cks_voffset * 100 / 625);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
1865
AVFS_SclkOffset.Sclk_Offset[i] = PP_HOST_TO_SMC_US((uint16_t)(sclk_table->entries[i].sclk_offset) / 100);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
2360
smu_data->bif_sclk_table[i] = pcie_table->entries[i].pcie_sclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
369
if (dep_table->entries[i].clk >= clock) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
370
*voltage |= (dep_table->entries[i].vddc *
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
375
else if (dep_table->entries[i].vddci)
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
376
*voltage |= (dep_table->entries[i].vddci *
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
380
(dep_table->entries[i].vddc -
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
388
else if (dep_table->entries[i].mvdd)
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
389
*mvdd = (uint32_t) dep_table->entries[i].mvdd *
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
398
*voltage |= (dep_table->entries[i - 1].vddc * VOLTAGE_SCALE) << VDDC_SHIFT;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
403
else if (dep_table->entries[i-1].vddci) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
404
*voltage |= (dep_table->entries[i - 1].vddci * VOLTAGE_SCALE) << VDDC_SHIFT;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
407
(dep_table->entries[i].vddc -
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
414
else if (dep_table->entries[i].mvdd)
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
415
*mvdd = (uint32_t) dep_table->entries[i - 1].mvdd * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
676
PP_HOST_TO_SMC_US(data->mvdd_voltage_table.entries[level].value * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
681
data->mvdd_voltage_table.entries[level].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
704
PP_HOST_TO_SMC_US(data->vddc_voltage_table.entries[level].value * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
707
table->Smio[level] |= data->vddc_voltage_table.entries[level].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
729
PP_HOST_TO_SMC_US(data->vddci_voltage_table.entries[level].value * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
732
table->Smio[level] |= data->vddci_voltage_table.entries[level].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
758
data->vddc_voltage_table.entries[count].value);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
759
table->BapmVddcVidLoSidd[count] = convert_to_vid(lookup_table->entries[index].us_cac_low);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
760
table->BapmVddcVidHiSidd[count] = convert_to_vid(lookup_table->entries[index].us_cac_mid);
drivers/gpu/drm/amd/pm/powerplay/smumgr/polaris10_smumgr.c
761
table->BapmVddcVidHiSidd2[count] = convert_to_vid(lookup_table->entries[index].us_cac_high);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1154
if (mclk <= table_info->vdd_dep_on_mclk->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1157
data->mvdd_voltage_table.entries[i].value;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1324
table->UvdLevel[count].VclkFrequency = mm_table->entries[count].vclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1325
table->UvdLevel[count].DclkFrequency = mm_table->entries[count].dclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1328
mm_table->entries[count].vddc);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1332
mm_table->entries[count].vddgfx) : 0;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1335
mm_table->entries[count].vddc - VDDC_VDDCI_DELTA);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1385
mm_table->entries[count].eclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1388
mm_table->entries[count].vddc);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1392
mm_table->entries[count].vddgfx) : 0;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1395
mm_table->entries[count].vddc - VDDC_VDDCI_DELTA);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1430
pptable_info->mm_dep_table->entries[count].aclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1433
mm_table->entries[count].vddc);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1437
mm_table->entries[count].vddgfx) : 0;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1440
mm_table->entries[count].vddc - VDDC_VDDCI_DELTA);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1502
&arb_regs.entries[i][j]);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1622
sclk_table->entries[i].cks_enable << i;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1625
(sclk_table->entries[i].clk/100) / 10000) * 1000 /
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1626
(8730 - (5301 * (sclk_table->entries[i].clk/100) / 1000)));
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1628
(sclk_table->entries[i].clk/100) / 100000) * 1000 /
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1629
(6146 - (3193 * (sclk_table->entries[i].clk/100) / 1000)));
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1632
(sclk_table->entries[i].clk/100) / 10000 + 3571 + 75 - ro) * 1000 /
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1633
(4026 - (13924 * (sclk_table->entries[i].clk/100) / 10000)));
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1635
(sclk_table->entries[i].clk/100) / 10000 + 3320 + 45 - ro) * 1000 /
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1636
(3664 - (11454 * (sclk_table->entries[i].clk/100) / 10000)));
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
1640
sclk_table->entries[i].cks_voffset) * 100 / 625) + 1);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
261
if (allowed_clock_voltage_table->entries[i].clk >= clock) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
264
allowed_clock_voltage_table->entries[i].vddgfx);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
267
allowed_clock_voltage_table->entries[i].vddc);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
269
if (allowed_clock_voltage_table->entries[i].vddci)
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
271
phm_get_voltage_id(&data->vddci_voltage_table, allowed_clock_voltage_table->entries[i].vddci);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
275
allowed_clock_voltage_table->entries[i].vddc - VDDC_VDDCI_DELTA);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
278
if (allowed_clock_voltage_table->entries[i].mvdd)
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
279
*mvdd = (uint32_t) allowed_clock_voltage_table->entries[i].mvdd;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
288
allowed_clock_voltage_table->entries[i-1].vddgfx);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
290
allowed_clock_voltage_table->entries[i-1].vddc);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
292
if (allowed_clock_voltage_table->entries[i-1].vddci)
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
294
allowed_clock_voltage_table->entries[i-1].vddci);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
296
if (allowed_clock_voltage_table->entries[i-1].mvdd)
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
297
*mvdd = (uint32_t) allowed_clock_voltage_table->entries[i-1].mvdd;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
312
PP_HOST_TO_SMC_US(data->vddc_voltage_table.entries[count].value * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
329
PP_HOST_TO_SMC_US(data->vddgfx_voltage_table.entries[count].value * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
346
PP_HOST_TO_SMC_US(data->vddci_voltage_table.entries[count].value * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
349
PP_HOST_TO_SMC_US(data->vddci_voltage_table.entries[count].value * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
354
data->vddci_voltage_table.entries[count].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
356
PP_HOST_TO_SMC_US(data->vddci_voltage_table.entries[count].value * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
376
PP_HOST_TO_SMC_US(data->mvdd_voltage_table.entries[count].value * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
381
data->mvdd_voltage_table.entries[count].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
413
data->vddc_voltage_table.entries[count].value);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
415
convert_to_vid(vddc_lookup_table->entries[index].us_cac_low);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
417
convert_to_vid(vddc_lookup_table->entries[index].us_cac_mid);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
419
convert_to_vid(vddc_lookup_table->entries[index].us_cac_high);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
426
convert_to_vid(vddgfx_lookup_table->entries[index].us_cac_mid));
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
428
convert_to_vid(vddgfx_lookup_table->entries[index].us_cac_high);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
433
data->vddc_voltage_table.entries[count].value);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
435
convert_to_vid(vddc_lookup_table->entries[index].us_cac_low);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
437
convert_to_vid(vddc_lookup_table->entries[index].us_cac_mid);
drivers/gpu/drm/amd/pm/powerplay/smumgr/tonga_smumgr.c
439
convert_to_vid(vddc_lookup_table->entries[index].us_cac_high);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1096
if (mclk <= table_info->vdd_dep_on_mclk->entries[i].clk) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1097
smio_pat->Voltage = data->mvdd_voltage_table.entries[i].value;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1211
table->VceLevel[count].Frequency = mm_table->entries[count].eclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1214
(mm_table->entries[count].vddc * VOLTAGE_SCALE) << VDDC_SHIFT;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1218
mm_table->entries[count].vddc - VDDC_VDDCI_DELTA);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1220
vddci = mm_table->entries[count].vddc - VDDC_VDDCI_DELTA;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1292
&arb_regs.entries[i][j]);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1325
table->UvdLevel[count].VclkFrequency = mm_table->entries[count].vclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1326
table->UvdLevel[count].DclkFrequency = mm_table->entries[count].dclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1328
(mm_table->entries[count].vddc * VOLTAGE_SCALE) << VDDC_SHIFT;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1332
mm_table->entries[count].vddc - VDDC_VDDCI_DELTA);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1334
vddci = mm_table->entries[count].vddc - VDDC_VDDCI_DELTA;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1412
if (table_info->vdd_dep_on_sclk->entries[level].clk >=
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1421
if (table_info->vdd_dep_on_mclk->entries[level].clk >=
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1511
sclk_table->entries[i].cks_enable << i;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1512
volt_without_cks = (uint32_t)((2753594000U + (sclk_table->entries[i].clk/100) *
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1514
(2424180 - (sclk_table->entries[i].clk/100) * 1132925/1000));
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1515
volt_with_cks = (uint32_t)((2797202000U + sclk_table->entries[i].clk/100 *
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1517
(2522480 - sclk_table->entries[i].clk/100 * 115764/100));
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1521
sclk_table->entries[i].cks_voffset) * 100 + 624) / 625);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1630
(uint8_t)(sclk_table->entries[i].cks_voffset * 100 / 625);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
1633
(sclk_table->entries[i].sclk_offset) / 100);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
409
smu_data->bif_sclk_table[i] = pcie_table->entries[i].pcie_sclk;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
460
data->mvdd_voltage_table.entries[level].value * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
465
data->mvdd_voltage_table.entries[level].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
488
data->vddci_voltage_table.entries[level].value * VOLTAGE_SCALE);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
491
table->Smio[level] |= data->vddci_voltage_table.entries[level].smio_low;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
517
data->vddc_voltage_table.entries[count].value);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
519
convert_to_vid(lookup_table->entries[index].us_cac_low);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
521
convert_to_vid(lookup_table->entries[index].us_cac_mid);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
523
convert_to_vid(lookup_table->entries[index].us_cac_high);
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
616
if (dep_table->entries[i].clk >= clock) {
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
617
*voltage |= (dep_table->entries[i].vddc *
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
622
else if (dep_table->entries[i].vddci)
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
623
*voltage |= (dep_table->entries[i].vddci *
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
627
(dep_table->entries[i].vddc -
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
635
else if (dep_table->entries[i].mvdd)
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
636
*mvdd = (uint32_t) dep_table->entries[i].mvdd *
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
645
*voltage |= (dep_table->entries[i - 1].vddc * VOLTAGE_SCALE) << VDDC_SHIFT;
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
650
else if (dep_table->entries[i - 1].vddci)
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
651
*voltage |= (dep_table->entries[i - 1].vddci *
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
655
(dep_table->entries[i - 1].vddc -
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
663
else if (dep_table->entries[i].mvdd)
drivers/gpu/drm/amd/pm/powerplay/smumgr/vegam_smumgr.c
664
*mvdd = (uint32_t) dep_table->entries[i - 1].mvdd * VOLTAGE_SCALE;
drivers/gpu/drm/amd/pm/swsmu/inc/amdgpu_smu.h
525
struct mclk_latency_entries entries[MAX_REGULAR_DPM_NUM];
drivers/gpu/drm/amd/pm/swsmu/smu11/smu_v11_0.c
290
struct smc_soft_pptable_entry *entries;
drivers/gpu/drm/amd/pm/swsmu/smu11/smu_v11_0.c
295
entries = (struct smc_soft_pptable_entry *)
drivers/gpu/drm/amd/pm/swsmu/smu11/smu_v11_0.c
299
if (le32_to_cpu(entries[i].id) == pptable_id) {
drivers/gpu/drm/amd/pm/swsmu/smu11/smu_v11_0.c
300
*table = ((uint8_t *)v2_1 + le32_to_cpu(entries[i].ppt_offset_bytes));
drivers/gpu/drm/amd/pm/swsmu/smu11/smu_v11_0.c
301
*size = le32_to_cpu(entries[i].ppt_size_bytes);
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0.c
323
struct smc_soft_pptable_entry *entries;
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0.c
328
entries = (struct smc_soft_pptable_entry *)
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0.c
332
if (le32_to_cpu(entries[i].id) == pptable_id) {
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0.c
333
*table = ((uint8_t *)v2_1 + le32_to_cpu(entries[i].ppt_offset_bytes));
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0.c
334
*size = le32_to_cpu(entries[i].ppt_size_bytes);
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0_6_ppt.c
494
struct smc_soft_pptable_entry *entries;
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0_6_ppt.c
526
entries = (struct smc_soft_pptable_entry
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0_6_ppt.c
531
if (le32_to_cpu(entries[i].id) == p2s_table_id) {
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0_6_ppt.c
534
le32_to_cpu(entries[i].ppt_offset_bytes));
drivers/gpu/drm/amd/pm/swsmu/smu13/smu_v13_0_6_ppt.c
536
le32_to_cpu(entries[i].ppt_size_bytes);
drivers/gpu/drm/amd/pm/swsmu/smu14/smu_v14_0.c
312
struct smc_soft_pptable_entry *entries;
drivers/gpu/drm/amd/pm/swsmu/smu14/smu_v14_0.c
317
entries = (struct smc_soft_pptable_entry *)
drivers/gpu/drm/amd/pm/swsmu/smu14/smu_v14_0.c
321
if (le32_to_cpu(entries[i].id) == pptable_id) {
drivers/gpu/drm/amd/pm/swsmu/smu14/smu_v14_0.c
322
*table = ((uint8_t *)v2_1 + le32_to_cpu(entries[i].ppt_offset_bytes));
drivers/gpu/drm/amd/pm/swsmu/smu14/smu_v14_0.c
323
*size = le32_to_cpu(entries[i].ppt_size_bytes);
drivers/gpu/drm/amd/pm/swsmu/smu15/smu_v15_0.c
282
struct smc_soft_pptable_entry *entries;
drivers/gpu/drm/amd/pm/swsmu/smu15/smu_v15_0.c
287
entries = (struct smc_soft_pptable_entry *)
drivers/gpu/drm/amd/pm/swsmu/smu15/smu_v15_0.c
291
if (le32_to_cpu(entries[i].id) == pptable_id) {
drivers/gpu/drm/amd/pm/swsmu/smu15/smu_v15_0.c
292
*table = ((uint8_t *)v2_1 + le32_to_cpu(entries[i].ppt_offset_bytes));
drivers/gpu/drm/amd/pm/swsmu/smu15/smu_v15_0.c
293
*size = le32_to_cpu(entries[i].ppt_size_bytes);
drivers/gpu/drm/amd/ras/rascore/ras_umc.c
281
struct eeprom_umc_record *entries[MAX_ECC_NUM_PER_RETIREMENT];
drivers/gpu/drm/amd/ras/rascore/ras_umc.c
287
new_detected = radix_tree_gang_lookup_tag(&ras_umc->root, (void **)entries,
drivers/gpu/drm/amd/ras/rascore/ras_umc.c
290
if (!entries[i])
drivers/gpu/drm/amd/ras/rascore/ras_umc.c
293
memcpy(&records[i], entries[i], sizeof(struct eeprom_umc_record));
drivers/gpu/drm/amd/ras/rascore/ras_umc.c
296
entries[i]->cur_nps_retired_row_pfn, UMC_ECC_NEW_DETECTED_TAG);
drivers/gpu/drm/display/drm_dp_mst_topology.c
1558
if (history->entries[i].backtrace == backtrace) {
drivers/gpu/drm/display/drm_dp_mst_topology.c
1559
entry = &history->entries[i];
drivers/gpu/drm/display/drm_dp_mst_topology.c
1569
new = krealloc(history->entries, sizeof(*new) * new_len,
drivers/gpu/drm/display/drm_dp_mst_topology.c
1576
history->entries = new;
drivers/gpu/drm/display/drm_dp_mst_topology.c
1626
sort(history->entries, history->len, sizeof(*history->entries),
drivers/gpu/drm/display/drm_dp_mst_topology.c
1634
&history->entries[i];
drivers/gpu/drm/display/drm_dp_mst_topology.c
1647
kfree(history->entries);
drivers/gpu/drm/drm_debugfs_crc.c
187
kfree(crc->entries);
drivers/gpu/drm/drm_debugfs_crc.c
189
crc->entries = NULL;
drivers/gpu/drm/drm_debugfs_crc.c
200
struct drm_crtc_crc_entry *entries = NULL;
drivers/gpu/drm/drm_debugfs_crc.c
227
entries = kzalloc_objs(*entries, DRM_CRC_ENTRIES_NR);
drivers/gpu/drm/drm_debugfs_crc.c
228
if (!entries)
drivers/gpu/drm/drm_debugfs_crc.c
234
crc->entries = entries;
drivers/gpu/drm/drm_debugfs_crc.c
242
kfree(entries);
drivers/gpu/drm/drm_debugfs_crc.c
318
entry = &crc->entries[crc->tail];
drivers/gpu/drm/drm_debugfs_crc.c
405
if (!crc->entries) {
drivers/gpu/drm/drm_debugfs_crc.c
425
entry = &crc->entries[head];
drivers/gpu/drm/drm_mm.c
110
unsigned long entries[STACKDEPTH];
drivers/gpu/drm/drm_mm.c
113
n = stack_trace_save(entries, ARRAY_SIZE(entries), 1);
drivers/gpu/drm/drm_mm.c
116
node->stack = stack_depot_save(entries, n, GFP_NOWAIT);
drivers/gpu/drm/drm_modeset_lock.c
105
nr_entries = stack_depot_fetch(stack_depot, &entries);
drivers/gpu/drm/drm_modeset_lock.c
106
stack_trace_snprint(buf, PAGE_SIZE, entries, nr_entries, 2);
drivers/gpu/drm/drm_modeset_lock.c
86
unsigned long entries[8];
drivers/gpu/drm/drm_modeset_lock.c
89
n = stack_trace_save(entries, ARRAY_SIZE(entries), 1);
drivers/gpu/drm/drm_modeset_lock.c
91
return stack_depot_save(entries, n, GFP_NOWAIT | __GFP_NOWARN);
drivers/gpu/drm/drm_modeset_lock.c
97
unsigned long *entries;
drivers/gpu/drm/drm_syncobj.c
1041
struct syncobj_wait_entry *entries;
drivers/gpu/drm/drm_syncobj.c
1065
entries = kzalloc_objs(*entries, count);
drivers/gpu/drm/drm_syncobj.c
1066
if (!entries) {
drivers/gpu/drm/drm_syncobj.c
1079
entries[i].task = current;
drivers/gpu/drm/drm_syncobj.c
1080
entries[i].point = points[i];
drivers/gpu/drm/drm_syncobj.c
1094
entries[i].fence = fence;
drivers/gpu/drm/drm_syncobj.c
1096
entries[i].fence = dma_fence_get_stub();
drivers/gpu/drm/drm_syncobj.c
1099
dma_fence_is_signaled(entries[i].fence)) {
drivers/gpu/drm/drm_syncobj.c
1121
drm_syncobj_fence_add_wait(syncobjs[i], &entries[i]);
drivers/gpu/drm/drm_syncobj.c
1126
fence = entries[i].fence;
drivers/gpu/drm/drm_syncobj.c
1138
fence = entries[i].fence;
drivers/gpu/drm/drm_syncobj.c
1144
(!entries[i].fence_cb.func &&
drivers/gpu/drm/drm_syncobj.c
1146
&entries[i].fence_cb,
drivers/gpu/drm/drm_syncobj.c
1180
drm_syncobj_remove_wait(syncobjs[i], &entries[i]);
drivers/gpu/drm/drm_syncobj.c
1181
if (entries[i].fence_cb.func)
drivers/gpu/drm/drm_syncobj.c
1182
dma_fence_remove_callback(entries[i].fence,
drivers/gpu/drm/drm_syncobj.c
1183
&entries[i].fence_cb);
drivers/gpu/drm/drm_syncobj.c
1184
dma_fence_put(entries[i].fence);
drivers/gpu/drm/drm_syncobj.c
1186
kfree(entries);
drivers/gpu/drm/i915/display/i9xx_wm.c
2153
int entries;
drivers/gpu/drm/i915/display/i9xx_wm.c
2155
entries = intel_wm_method2(pixel_rate, htotal,
drivers/gpu/drm/i915/display/i9xx_wm.c
2157
entries = DIV_ROUND_UP(entries, I915_FIFO_LINE_SIZE);
drivers/gpu/drm/i915/display/i9xx_wm.c
2158
srwm = I965_FIFO_SIZE - entries;
drivers/gpu/drm/i915/display/i9xx_wm.c
2164
entries, srwm);
drivers/gpu/drm/i915/display/i9xx_wm.c
2166
entries = intel_wm_method2(pixel_rate, htotal,
drivers/gpu/drm/i915/display/i9xx_wm.c
2169
entries = DIV_ROUND_UP(entries,
drivers/gpu/drm/i915/display/i9xx_wm.c
2173
cursor_sr = i965_cursor_wm_info.fifo_size - entries;
drivers/gpu/drm/i915/display/i9xx_wm.c
2326
int entries;
drivers/gpu/drm/i915/display/i9xx_wm.c
2333
entries = intel_wm_method2(pixel_rate, htotal, width, cpp,
drivers/gpu/drm/i915/display/i9xx_wm.c
2335
entries = DIV_ROUND_UP(entries, wm_info->cacheline_size);
drivers/gpu/drm/i915/display/i9xx_wm.c
2337
"self-refresh entries: %d\n", entries);
drivers/gpu/drm/i915/display/i9xx_wm.c
2338
srwm = wm_info->fifo_size - entries;
drivers/gpu/drm/i915/display/i9xx_wm.c
574
int entries, wm_size;
drivers/gpu/drm/i915/display/i9xx_wm.c
582
entries = intel_wm_method1(pixel_rate, cpp,
drivers/gpu/drm/i915/display/i9xx_wm.c
584
entries = DIV_ROUND_UP(entries, wm->cacheline_size) +
drivers/gpu/drm/i915/display/i9xx_wm.c
586
drm_dbg_kms(display->drm, "FIFO entries required for mode: %d\n", entries);
drivers/gpu/drm/i915/display/i9xx_wm.c
588
wm_size = fifo_size - entries;
drivers/gpu/drm/i915/display/intel_cx0_phy.c
520
C10_PHY_OVRD_LEVEL(trans->entries[level].snps.pre_cursor),
drivers/gpu/drm/i915/display/intel_cx0_phy.c
524
C10_PHY_OVRD_LEVEL(trans->entries[level].snps.vswing),
drivers/gpu/drm/i915/display/intel_cx0_phy.c
528
C10_PHY_OVRD_LEVEL(trans->entries[level].snps.post_cursor),
drivers/gpu/drm/i915/display/intel_ddi.c
1113
iboost = trans->entries[level].hsw.i_boost;
drivers/gpu/drm/i915/display/intel_ddi.c
1207
SWING_SEL_UPPER(trans->entries[level].icl.dw2_swing_sel) |
drivers/gpu/drm/i915/display/intel_ddi.c
1208
SWING_SEL_LOWER(trans->entries[level].icl.dw2_swing_sel) |
drivers/gpu/drm/i915/display/intel_ddi.c
1219
POST_CURSOR_1(trans->entries[level].icl.dw4_post_cursor_1) |
drivers/gpu/drm/i915/display/intel_ddi.c
1220
POST_CURSOR_2(trans->entries[level].icl.dw4_post_cursor_2) |
drivers/gpu/drm/i915/display/intel_ddi.c
1221
CURSOR_COEFF(trans->entries[level].icl.dw4_cursor_coeff));
drivers/gpu/drm/i915/display/intel_ddi.c
1230
N_SCALAR(trans->entries[level].icl.dw7_n_scalar));
drivers/gpu/drm/i915/display/intel_ddi.c
1315
CRI_TXDEEMPH_OVERRIDE_17_12(trans->entries[level].mg.cri_txdeemph_override_17_12));
drivers/gpu/drm/i915/display/intel_ddi.c
1321
CRI_TXDEEMPH_OVERRIDE_17_12(trans->entries[level].mg.cri_txdeemph_override_17_12));
drivers/gpu/drm/i915/display/intel_ddi.c
1333
CRI_TXDEEMPH_OVERRIDE_11_6(trans->entries[level].mg.cri_txdeemph_override_11_6) |
drivers/gpu/drm/i915/display/intel_ddi.c
1334
CRI_TXDEEMPH_OVERRIDE_5_0(trans->entries[level].mg.cri_txdeemph_override_5_0) |
drivers/gpu/drm/i915/display/intel_ddi.c
1342
CRI_TXDEEMPH_OVERRIDE_11_6(trans->entries[level].mg.cri_txdeemph_override_11_6) |
drivers/gpu/drm/i915/display/intel_ddi.c
1343
CRI_TXDEEMPH_OVERRIDE_5_0(trans->entries[level].mg.cri_txdeemph_override_5_0) |
drivers/gpu/drm/i915/display/intel_ddi.c
1427
DKL_TX_PRESHOOT_COEFF(trans->entries[level].dkl.preshoot) |
drivers/gpu/drm/i915/display/intel_ddi.c
1428
DKL_TX_DE_EMPHASIS_COEFF(trans->entries[level].dkl.de_emphasis) |
drivers/gpu/drm/i915/display/intel_ddi.c
1429
DKL_TX_VSWING_CONTROL(trans->entries[level].dkl.vswing));
drivers/gpu/drm/i915/display/intel_ddi.c
1437
DKL_TX_PRESHOOT_COEFF(trans->entries[level].dkl.preshoot) |
drivers/gpu/drm/i915/display/intel_ddi.c
1438
DKL_TX_DE_EMPHASIS_COEFF(trans->entries[level].dkl.de_emphasis) |
drivers/gpu/drm/i915/display/intel_ddi.c
1439
DKL_TX_VSWING_CONTROL(trans->entries[level].dkl.vswing));
drivers/gpu/drm/i915/display/intel_ddi.c
154
trans->entries[i].hsw.trans1 | iboost_bit);
drivers/gpu/drm/i915/display/intel_ddi.c
156
trans->entries[i].hsw.trans2);
drivers/gpu/drm/i915/display/intel_ddi.c
186
trans->entries[level].hsw.trans1 | iboost_bit);
drivers/gpu/drm/i915/display/intel_ddi.c
188
trans->entries[level].hsw.trans2);
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
1011
.entries = _dg2_snps_trans,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
1036
.entries = _dg2_snps_trans_uhbr,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
105
.entries = _bdw_trans_dp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
1054
.entries = _mtl_c10_trans_dp14,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
1103
.entries = _mtl_c20_trans_hdmi,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
1109
.entries = _mtl_c20_trans_dp14,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
1115
.entries = _mtl_c20_trans_uhbr,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
1168
.entries = _xe3plpd_lt_trans_dp14,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
1173
.entries = _xe3plpd_lt_trans_uhbr,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
1178
.entries = _xe3plpd_lt_trans_edp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
122
.entries = _bdw_trans_fdi,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
141
.entries = _bdw_trans_hdmi,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
160
.entries = _skl_trans_dp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
178
.entries = _skl_u_trans_dp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
196
.entries = _skl_y_trans_dp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
214
.entries = _kbl_trans_dp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
232
.entries = _kbl_u_trans_dp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
250
.entries = _kbl_y_trans_dp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
272
.entries = _skl_trans_edp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
294
.entries = _skl_u_trans_edp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
316
.entries = _skl_y_trans_edp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
32
.entries = _hsw_trans_dp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
336
.entries = _skl_trans_hdmi,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
357
.entries = _skl_y_trans_hdmi,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
377
.entries = _bxt_trans_dp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
396
.entries = _bxt_trans_edp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
418
.entries = _bxt_trans_hdmi,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
439
.entries = _icl_combo_phy_trans_dp_hbr2_edp_hbr3,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
458
.entries = _icl_combo_phy_trans_edp_hbr2,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
474
.entries = _icl_combo_phy_trans_hdmi,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
49
.entries = _hsw_trans_fdi,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
494
.entries = _ehl_combo_phy_trans_dp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
513
.entries = _ehl_combo_phy_trans_edp_hbr2,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
532
.entries = _jsl_combo_phy_trans_edp_hbr,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
551
.entries = _jsl_combo_phy_trans_edp_hbr2,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
570
.entries = _dg1_combo_phy_trans_dp_rbr_hbr,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
589
.entries = _dg1_combo_phy_trans_dp_hbr2_hbr3,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
608
.entries = _icl_mg_phy_trans_rbr_hbr,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
627
.entries = _icl_mg_phy_trans_hbr2_hbr3,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
646
.entries = _icl_mg_phy_trans_hdmi,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
666
.entries = _tgl_dkl_phy_trans_dp_hbr,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
685
.entries = _tgl_dkl_phy_trans_dp_hbr2,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
70
.entries = _hsw_trans_hdmi,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
704
.entries = _tgl_dkl_phy_trans_hdmi,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
724
.entries = _tgl_combo_phy_trans_dp_hbr,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
743
.entries = _tgl_combo_phy_trans_dp_hbr2,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
762
.entries = _tgl_uy_combo_phy_trans_dp_hbr2,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
784
.entries = _tgl_combo_phy_trans_edp_hbr2_hobl,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
803
.entries = _rkl_combo_phy_trans_dp_hbr,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
822
.entries = _rkl_combo_phy_trans_dp_hbr2_hbr3,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
841
.entries = _adls_combo_phy_trans_dp_hbr2_hbr3,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
860
.entries = _adls_combo_phy_trans_edp_hbr2,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
879
.entries = _adls_combo_phy_trans_edp_hbr3,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
88
.entries = _bdw_trans_edp,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
898
.entries = _adlp_combo_phy_trans_dp_hbr,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
945
.entries = _adlp_combo_phy_trans_dp_hbr2_hbr3,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
950
.entries = _adlp_combo_phy_trans_dp_hbr2_edp_hbr3,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
955
.entries = _adlp_combo_phy_trans_edp_hbr2,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
974
.entries = _adlp_dkl_phy_trans_dp_hbr,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.c
993
.entries = _adlp_dkl_phy_trans_dp_hbr2_hbr3,
drivers/gpu/drm/i915/display/intel_ddi_buf_trans.h
72
const union intel_ddi_buf_trans_entry *entries;
drivers/gpu/drm/i915/display/intel_display.c
7017
struct skl_ddb_entry entries[I915_MAX_PIPES] = {};
drivers/gpu/drm/i915/display/intel_display.c
7029
entries[pipe] = old_crtc_state->wm.skl.ddb;
drivers/gpu/drm/i915/display/intel_display.c
7069
entries, I915_MAX_PIPES, pipe))
drivers/gpu/drm/i915/display/intel_display.c
7072
entries[pipe] = new_crtc_state->wm.skl.ddb;
drivers/gpu/drm/i915/display/intel_display.c
7158
entries, I915_MAX_PIPES, pipe));
drivers/gpu/drm/i915/display/intel_display.c
7160
entries[pipe] = new_crtc_state->wm.skl.ddb;
drivers/gpu/drm/i915/display/intel_dpio_phy.c
323
MARGIN_000(trans->entries[level].bxt.margin) |
drivers/gpu/drm/i915/display/intel_dpio_phy.c
324
UNIQ_TRANS_SCALE(trans->entries[level].bxt.scale));
drivers/gpu/drm/i915/display/intel_dpio_phy.c
333
trans->entries[level].bxt.enable ?
drivers/gpu/drm/i915/display/intel_dpio_phy.c
347
DE_EMPHASIS(trans->entries[level].bxt.deemphasis));
drivers/gpu/drm/i915/display/intel_lt_phy.c
2173
LT_PHY_TX_SWING_LEVEL(trans->entries[level].lt.txswing_level) |
drivers/gpu/drm/i915/display/intel_lt_phy.c
2174
LT_PHY_TX_SWING(trans->entries[level].lt.txswing),
drivers/gpu/drm/i915/display/intel_lt_phy.c
2179
LT_PHY_TX_CURSOR(trans->entries[level].lt.pre_cursor),
drivers/gpu/drm/i915/display/intel_lt_phy.c
2183
LT_PHY_TX_CURSOR(trans->entries[level].lt.main_cursor),
drivers/gpu/drm/i915/display/intel_lt_phy.c
2187
LT_PHY_TX_CURSOR(trans->entries[level].lt.post_cursor),
drivers/gpu/drm/i915/display/intel_snps_phy.c
83
val |= REG_FIELD_PREP(SNPS_PHY_TX_EQ_MAIN, trans->entries[level].snps.vswing);
drivers/gpu/drm/i915/display/intel_snps_phy.c
84
val |= REG_FIELD_PREP(SNPS_PHY_TX_EQ_PRE, trans->entries[level].snps.pre_cursor);
drivers/gpu/drm/i915/display/intel_snps_phy.c
85
val |= REG_FIELD_PREP(SNPS_PHY_TX_EQ_POST, trans->entries[level].snps.post_cursor);
drivers/gpu/drm/i915/display/skl_watermark.c
2411
const struct skl_ddb_entry *entries,
drivers/gpu/drm/i915/display/skl_watermark.c
2418
skl_ddb_entries_overlap(ddb, &entries[i]))
drivers/gpu/drm/i915/display/skl_watermark.c
3760
struct skl_ddb_entry entries[I915_MAX_PIPES] = {};
drivers/gpu/drm/i915/display/skl_watermark.c
3767
entries[crtc->pipe] = crtc_state->wm.skl.ddb;
drivers/gpu/drm/i915/display/skl_watermark.c
3780
if (skl_ddb_allocation_overlaps(&crtc_state->wm.skl.ddb, entries,
drivers/gpu/drm/i915/display/skl_watermark.h
34
const struct skl_ddb_entry *entries,
drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c
409
unsigned long entries[SZ_32];
drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c
412
n = stack_trace_save(entries, ARRAY_SIZE(entries), 1);
drivers/gpu/drm/i915/gt/uc/intel_guc_ct.c
415
ct->requests.lost_and_found[lost].stack = stack_depot_save(entries, n, GFP_NOWAIT);
drivers/gpu/drm/imagination/pvr_mmu.c
1003
struct pvr_page_table_l0 *entries[ROGUE_MMUCTRL_ENTRIES_PD_VALUE];
drivers/gpu/drm/imagination/pvr_mmu.c
1148
return &pvr_page_table_l1_get_raw(table)->entries[idx];
drivers/gpu/drm/imagination/pvr_mmu.c
1326
return &pvr_page_table_l0_get_raw(table)->entries[idx];
drivers/gpu/drm/imagination/pvr_mmu.c
1493
l2_table->entries[op_ctx->curr_page.l2_idx] = child_table;
drivers/gpu/drm/imagination/pvr_mmu.c
1519
l2_table->entries[op_ctx->curr_page.l1_table->parent_idx] = NULL;
drivers/gpu/drm/imagination/pvr_mmu.c
1554
op_ctx->curr_page.l1_table->entries[op_ctx->curr_page.l1_idx] = child_table;
drivers/gpu/drm/imagination/pvr_mmu.c
1582
op_ctx->curr_page.l1_table->entries[op_ctx->curr_page.l0_table->parent_idx] = NULL;
drivers/gpu/drm/imagination/pvr_mmu.c
1747
l2_table->entries[op_ctx->curr_page.l2_idx];
drivers/gpu/drm/imagination/pvr_mmu.c
1796
op_ctx->curr_page.l1_table->entries[op_ctx->curr_page.l1_idx];
drivers/gpu/drm/imagination/pvr_mmu.c
792
entries[ROGUE_MMUCTRL_ENTRIES_PC_VALUE];
drivers/gpu/drm/imagination/pvr_mmu.c
805
entries[ROGUE_MMUCTRL_ENTRIES_PD_VALUE];
drivers/gpu/drm/imagination/pvr_mmu.c
827
entries[ROGUE_MMUCTRL_ENTRIES_PT_VALUE_X];
drivers/gpu/drm/imagination/pvr_mmu.c
859
struct pvr_page_table_l1 *entries[ROGUE_MMUCTRL_ENTRIES_PC_VALUE];
drivers/gpu/drm/imagination/pvr_mmu.c
967
return &pvr_page_table_l2_get_raw(table)->entries[idx];
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_10_0_sm8650.h
400
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_10_0_sm8650.h
403
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_10_0_sm8650.h
406
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_12_0_sm8750.h
441
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_12_0_sm8750.h
444
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_12_0_sm8750.h
447
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_12_2_glymur.h
488
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_12_2_glymur.h
491
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_12_2_glymur.h
494
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_13_0_kaanapali.h
439
.entries = kaanapali_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_13_0_kaanapali.h
442
.entries = kaanapali_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_13_0_kaanapali.h
445
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_14_msm8937.h
161
.entries = msm8998_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_14_msm8937.h
164
.entries = msm8998_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_14_msm8937.h
167
.entries = msm8998_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_15_msm8917.h
140
.entries = msm8998_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_15_msm8917.h
143
.entries = msm8998_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_15_msm8917.h
146
.entries = msm8998_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_16_msm8953.h
168
.entries = msm8998_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_16_msm8953.h
171
.entries = msm8998_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_16_msm8953.h
174
.entries = msm8998_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_7_msm8996.h
282
.entries = msm8998_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_7_msm8996.h
285
.entries = msm8998_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_1_7_msm8996.h
288
.entries = msm8998_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_3_0_msm8998.h
267
.entries = msm8998_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_3_0_msm8998.h
270
.entries = msm8998_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_3_0_msm8998.h
273
.entries = msm8998_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_3_2_sdm660.h
231
.entries = msm8998_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_3_2_sdm660.h
234
.entries = msm8998_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_3_2_sdm660.h
237
.entries = msm8998_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_3_3_sdm630.h
171
.entries = msm8998_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_3_3_sdm630.h
174
.entries = msm8998_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_3_3_sdm630.h
177
.entries = msm8998_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_4_0_sdm845.h
287
.entries = sdm845_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_4_0_sdm845.h
290
.entries = sdm845_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_4_0_sdm845.h
293
.entries = sdm845_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_0_sm8150.h
338
.entries = sm8150_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_0_sm8150.h
341
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_0_sm8150.h
344
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_1_sc8180x.h
362
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_1_sc8180x.h
365
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_1_sc8180x.h
368
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_2_sm7150.h
267
.entries = sm8150_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_2_sm7150.h
270
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_2_sm7150.h
273
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_3_sm6150.h
207
.entries = sm8150_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_3_sm6150.h
210
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_3_sm6150.h
213
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_4_sm6125.h
178
.entries = sm8150_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_4_sm6125.h
181
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_5_4_sm6125.h
184
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_0_sm8250.h
337
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_0_sm8250.h
340
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_0_sm8250.h
343
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_2_sc7180.h
173
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_2_sc7180.h
176
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_2_sc7180.h
179
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_3_sm6115.h
108
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_3_sm6115.h
111
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_3_sm6115.h
114
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_4_sm6350.h
189
.entries = sm6350_qos_linear_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_4_sm6350.h
192
.entries = sm6350_qos_linear_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_4_sm6350.h
195
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_5_qcm2290.h
108
.entries = qcm2290_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_9_sm6375.h
118
.entries = sm6350_qos_linear_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_9_sm6375.h
121
.entries = sm6350_qos_linear_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_6_9_sm6375.h
124
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_7_0_sm8350.h
349
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_7_0_sm8350.h
352
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_7_0_sm8350.h
355
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_7_2_sc7280.h
221
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_7_2_sc7280.h
224
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_7_2_sc7280.h
227
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_8_0_sc8280xp.h
380
.entries = sc8180x_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_8_0_sc8280xp.h
383
.entries = sc8180x_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_8_0_sc8280xp.h
386
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_8_1_sm8450.h
362
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_8_1_sm8450.h
365
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_8_1_sm8450.h
368
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_8_4_sa8775p.h
402
.entries = sm6350_qos_linear_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_8_4_sa8775p.h
405
.entries = sm6350_qos_linear_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_8_4_sa8775p.h
408
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_9_0_sm8550.h
357
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_9_0_sm8550.h
360
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_9_0_sm8550.h
363
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_9_1_sar2130p.h
357
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_9_1_sar2130p.h
360
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_9_1_sar2130p.h
363
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_9_2_x1e80100.h
398
.entries = sc7180_qos_linear
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_9_2_x1e80100.h
401
.entries = sc7180_qos_macrotile
drivers/gpu/drm/msm/disp/dpu1/catalog/dpu_9_2_x1e80100.h
404
.entries = sc7180_qos_nrt
drivers/gpu/drm/msm/disp/dpu1/dpu_hw_catalog.h
252
const struct dpu_qos_lut_entry *entries;
drivers/gpu/drm/msm/disp/dpu1/dpu_hw_util.c
453
if (!tbl || !tbl->nentry || !tbl->entries)
drivers/gpu/drm/msm/disp/dpu1/dpu_hw_util.c
457
if (total_fl <= tbl->entries[i].fl)
drivers/gpu/drm/msm/disp/dpu1/dpu_hw_util.c
458
return tbl->entries[i].lut;
drivers/gpu/drm/msm/disp/dpu1/dpu_hw_util.c
461
if (!tbl->entries[i-1].fl)
drivers/gpu/drm/msm/disp/dpu1/dpu_hw_util.c
462
return tbl->entries[i-1].lut;
drivers/gpu/drm/nouveau/dispnv04/disp.c
251
for (i = 0; i < dcb->entries; i++) {
drivers/gpu/drm/nouveau/dispnv50/crc907d.c
22
} entries[CRC907D_MAX_ENTRIES];
drivers/gpu/drm/nouveau/dispnv50/crc907d.c
94
return ioread32_native(&notifier->entries[idx].output_crc[0]);
drivers/gpu/drm/nouveau/dispnv50/crcc37d.c
71
struct crcc37d_entry __iomem *entry = &notifier->entries[idx];
drivers/gpu/drm/nouveau/dispnv50/crcc37d.h
32
} entries[CRCC37D_MAX_ENTRIES];
drivers/gpu/drm/nouveau/include/nvif/clb069.h
6
__u32 entries;
drivers/gpu/drm/nouveau/nouveau_bios.c
1374
struct dcb_output *entry = &dcb->entry[dcb->entries];
drivers/gpu/drm/nouveau/nouveau_bios.c
1377
entry->index = dcb->entries++;
drivers/gpu/drm/nouveau/nouveau_bios.c
1516
dcb->entries--;
drivers/gpu/drm/nouveau/nouveau_bios.c
1608
for (i = 0; i < dcb->entries; i++) {
drivers/gpu/drm/nouveau/nouveau_bios.c
1612
for (j = i + 1; j < dcb->entries; j++) {
drivers/gpu/drm/nouveau/nouveau_bios.c
1632
for (i = 0; i < dcb->entries; i++) {
drivers/gpu/drm/nouveau/nouveau_bios.c
1643
dcb->entries = newentries;
drivers/gpu/drm/nouveau/nouveau_bios.c
1832
for (i = 0; i < dcbt->entries; i++) {
drivers/gpu/drm/nouveau/nouveau_bios.c
1844
for (i = 0; i < dcbt->entries; i++) {
drivers/gpu/drm/nouveau/nouveau_bios.c
975
u8 entries, *entry;
drivers/gpu/drm/nouveau/nouveau_bios.c
980
entries = bios->data[bios->offset + 10];
drivers/gpu/drm/nouveau/nouveau_bios.c
982
while (entries--) {
drivers/gpu/drm/nouveau/nouveau_bios.h
57
int entries;
drivers/gpu/drm/nouveau/nouveau_connector.c
1362
for (i = 0; i < dcbt->entries; i++) {
drivers/gpu/drm/nouveau/nouveau_display.c
697
if (!ret && (disp->disp.outp_mask || drm->vbios.dcb.entries)) {
drivers/gpu/drm/nouveau/nouveau_svm.c
1003
buffer->entries = args.entries;
drivers/gpu/drm/nouveau/nouveau_svm.c
1013
buffer->fault = kvzalloc_objs(*buffer->fault, buffer->entries);
drivers/gpu/drm/nouveau/nouveau_svm.c
49
u32 entries;
drivers/gpu/drm/nouveau/nouveau_svm.c
744
if (++buffer->get == buffer->entries)
drivers/gpu/drm/nouveau/nouveau_svm.c
975
for (i = 0; buffer->fault[i] && i < buffer->entries; i++)
drivers/gpu/drm/nouveau/nvkm/subdev/bios/bit.c
31
u8 entries = nvbios_rd08(bios, bios->bit_offset + 10);
drivers/gpu/drm/nouveau/nvkm/subdev/bios/bit.c
33
while (entries--) {
drivers/gpu/drm/nouveau/nvkm/subdev/fault/base.c
86
nvkm_debug(subdev, "buffer %d: %d entries\n", id, buffer->entries);
drivers/gpu/drm/nouveau/nvkm/subdev/fault/base.c
88
ret = nvkm_memory_new(device, NVKM_MEM_TARGET_INST, buffer->entries *
drivers/gpu/drm/nouveau/nvkm/subdev/fault/gp100.c
60
buffer->entries = nvkm_rd32(buffer->fault->subdev.device, 0x002a78);
drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c
114
buffer->entries = nvkm_rd32(device, 0x100e34 + foff) & 0x000fffff;
drivers/gpu/drm/nouveau/nvkm/subdev/fault/gv100.c
55
if (++get == buffer->entries)
drivers/gpu/drm/nouveau/nvkm/subdev/fault/priv.h
14
int entries;
drivers/gpu/drm/nouveau/nvkm/subdev/fault/tu102.c
80
buffer->entries = nvkm_rd32(device, 0xb83010 + foff) & 0x000fffff;
drivers/gpu/drm/nouveau/nvkm/subdev/fault/user.c
101
args->v0.entries = buffer->entries;
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/fifo.c
481
const u32 addr = ctrl->entries[i].engineData[ENGINE_INFO_TYPE_RUNLIST_PRI_BASE];
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/fifo.c
482
const u32 id = ctrl->entries[i].engineData[ENGINE_INFO_TYPE_RUNLIST];
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/fifo.c
493
const u32 addr = ctrl->entries[i].engineData[ENGINE_INFO_TYPE_RUNLIST_PRI_BASE];
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/fifo.c
494
const u32 rmid = ctrl->entries[i].engineData[ENGINE_INFO_TYPE_RM_ENGINE_TYPE];
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/fifo.c
495
const u32 id = ctrl->entries[i].engineData[ENGINE_INFO_TYPE_RUNLIST];
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/fifo.c
559
engn->rm.desc = ctrl->entries[i].engineData[ENGINE_INFO_TYPE_ENG_DESC];
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/gsp.c
509
str_offset = struct_size(registry, entries, registry->numEntries);
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/gsp.c
512
registry->entries[i].type = reg->type;
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/gsp.c
513
registry->entries[i].length = reg->vlen;
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/gsp.c
516
registry->entries[i].nameOffset = str_offset;
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/gsp.c
522
registry->entries[i].data = reg->dword;
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/gsp.c
528
registry->entries[i].data = str_offset;
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/nvrm/fifo.h
33
NV2080_CTRL_FIFO_DEVICE_ENTRY entries[NV2080_CTRL_FIFO_GET_DEVICE_INFO_TABLE_MAX_ENTRIES];
drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/nvrm/gsp.h
239
PACKED_REGISTRY_ENTRY entries[] __counted_by(numEntries);
drivers/gpu/drm/nouveau/nvkm/subdev/mxm/mxms.c
107
u8 entries = 0;
drivers/gpu/drm/nouveau/nvkm/subdev/mxm/mxms.c
124
entries = (ROM32(desc[0]) & 0x01f00000) >> 20;
drivers/gpu/drm/nouveau/nvkm/subdev/mxm/mxms.c
133
entries = (desc[1] & 0xf0) >> 4;
drivers/gpu/drm/nouveau/nvkm/subdev/mxm/mxms.c
141
entries = desc[1] & 0x07;
drivers/gpu/drm/nouveau/nvkm/subdev/mxm/mxms.c
162
for (i = 0; i < entries; i++, dump += recordlen) {
drivers/gpu/drm/nouveau/nvkm/subdev/mxm/mxms.c
174
desc += headerlen + (entries * recordlen);
drivers/gpu/drm/radeon/btc_dpm.c
1155
if (clock < table->entries[i].clk)
drivers/gpu/drm/radeon/btc_dpm.c
1156
clock = table->entries[i].clk;
drivers/gpu/drm/radeon/btc_dpm.c
1170
if (clock <= table->entries[i].clk) {
drivers/gpu/drm/radeon/btc_dpm.c
1171
if (*voltage < table->entries[i].v)
drivers/gpu/drm/radeon/btc_dpm.c
1172
*voltage = (u16)((table->entries[i].v < max_voltage) ?
drivers/gpu/drm/radeon/btc_dpm.c
1173
table->entries[i].v : max_voltage);
drivers/gpu/drm/radeon/btc_dpm.c
1271
if (voltage <= table->entries[i].value)
drivers/gpu/drm/radeon/btc_dpm.c
1272
return table->entries[i].value;
drivers/gpu/drm/radeon/btc_dpm.c
1275
return table->entries[table->count - 1].value;
drivers/gpu/drm/radeon/btc_dpm.c
2554
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
drivers/gpu/drm/radeon/btc_dpm.c
2556
if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
drivers/gpu/drm/radeon/btc_dpm.c
2561
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
drivers/gpu/drm/radeon/btc_dpm.c
2562
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
drivers/gpu/drm/radeon/btc_dpm.c
2563
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
drivers/gpu/drm/radeon/btc_dpm.c
2564
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 800;
drivers/gpu/drm/radeon/btc_dpm.c
2565
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
drivers/gpu/drm/radeon/btc_dpm.c
2566
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 800;
drivers/gpu/drm/radeon/btc_dpm.c
2567
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
drivers/gpu/drm/radeon/btc_dpm.c
2568
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 800;
drivers/gpu/drm/radeon/btc_dpm.c
2699
kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
drivers/gpu/drm/radeon/ci_dpm.c
2083
voltage_table->entries[i].value = voltage_dependency_table->entries[i].v;
drivers/gpu/drm/radeon/ci_dpm.c
2084
voltage_table->entries[i].smio_low = 0;
drivers/gpu/drm/radeon/ci_dpm.c
2183
&pi->vddc_voltage_table.entries[count],
drivers/gpu/drm/radeon/ci_dpm.c
2188
pi->vddc_voltage_table.entries[count].smio_low;
drivers/gpu/drm/radeon/ci_dpm.c
2206
&pi->vddci_voltage_table.entries[count],
drivers/gpu/drm/radeon/ci_dpm.c
2211
pi->vddci_voltage_table.entries[count].smio_low;
drivers/gpu/drm/radeon/ci_dpm.c
2229
&pi->mvdd_voltage_table.entries[count],
drivers/gpu/drm/radeon/ci_dpm.c
2234
pi->mvdd_voltage_table.entries[count].smio_low;
drivers/gpu/drm/radeon/ci_dpm.c
2271
if (mclk <= rdev->pm.dpm.dyn_state.mvdd_dependency_on_mclk.entries[i].clk) {
drivers/gpu/drm/radeon/ci_dpm.c
2272
voltage->Voltage = pi->mvdd_voltage_table.entries[i].value;
drivers/gpu/drm/radeon/ci_dpm.c
2293
if (rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries == NULL)
drivers/gpu/drm/radeon/ci_dpm.c
2296
if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries) {
drivers/gpu/drm/radeon/ci_dpm.c
2299
rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) {
drivers/gpu/drm/radeon/ci_dpm.c
2306
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[idx].vddc * VOLTAGE_SCALE;
drivers/gpu/drm/radeon/ci_dpm.c
2308
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[idx].leakage * VOLTAGE_SCALE;
drivers/gpu/drm/radeon/ci_dpm.c
2316
rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) {
drivers/gpu/drm/radeon/ci_dpm.c
2323
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[idx].vddc * VOLTAGE_SCALE;
drivers/gpu/drm/radeon/ci_dpm.c
2325
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[idx].leakage * VOLTAGE_SCALE;
drivers/gpu/drm/radeon/ci_dpm.c
2345
if (sclk < limits->entries[i].sclk) {
drivers/gpu/drm/radeon/ci_dpm.c
2362
if (mclk < limits->entries[i].mclk) {
drivers/gpu/drm/radeon/ci_dpm.c
2397
if (allowed_clock_voltage_table->entries[i].clk >= clock) {
drivers/gpu/drm/radeon/ci_dpm.c
2398
*voltage = allowed_clock_voltage_table->entries[i].v;
drivers/gpu/drm/radeon/ci_dpm.c
2403
*voltage = allowed_clock_voltage_table->entries[i-1].v;
drivers/gpu/drm/radeon/ci_dpm.c
2516
&arb_regs.entries[i][j]);
drivers/gpu/drm/radeon/ci_dpm.c
254
if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries == NULL)
drivers/gpu/drm/radeon/ci_dpm.c
2550
if (rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[level].clk >=
drivers/gpu/drm/radeon/ci_dpm.c
2558
if (rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk.entries[level].clk >=
drivers/gpu/drm/radeon/ci_dpm.c
2616
rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[count].vclk;
drivers/gpu/drm/radeon/ci_dpm.c
2618
rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[count].dclk;
drivers/gpu/drm/radeon/ci_dpm.c
2620
rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[count].v * VOLTAGE_SCALE;
drivers/gpu/drm/radeon/ci_dpm.c
264
lo_vid[i] = ci_convert_to_vid(rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc1);
drivers/gpu/drm/radeon/ci_dpm.c
265
hi_vid[i] = ci_convert_to_vid(rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc2);
drivers/gpu/drm/radeon/ci_dpm.c
2659
rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[count].evclk;
drivers/gpu/drm/radeon/ci_dpm.c
266
hi2_vid[i] = ci_convert_to_vid(rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc3);
drivers/gpu/drm/radeon/ci_dpm.c
2661
(u16)rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[count].v * VOLTAGE_SCALE;
drivers/gpu/drm/radeon/ci_dpm.c
268
lo_vid[i] = ci_convert_to_vid(rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc);
drivers/gpu/drm/radeon/ci_dpm.c
269
hi_vid[i] = ci_convert_to_vid((u16)rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].leakage);
drivers/gpu/drm/radeon/ci_dpm.c
2692
rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[count].clk;
drivers/gpu/drm/radeon/ci_dpm.c
2694
rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[count].v;
drivers/gpu/drm/radeon/ci_dpm.c
2724
rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[count].clk;
drivers/gpu/drm/radeon/ci_dpm.c
2726
rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[count].v * VOLTAGE_SCALE;
drivers/gpu/drm/radeon/ci_dpm.c
2839
if (rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk.entries) {
drivers/gpu/drm/radeon/ci_dpm.c
2847
if (rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk.entries) {
drivers/gpu/drm/radeon/ci_dpm.c
285
vid[i] = ci_convert_to_vid(pi->vddc_voltage_table.entries[i].value);
drivers/gpu/drm/radeon/ci_dpm.c
2855
if (rdev->pm.dpm.dyn_state.mvdd_dependency_on_mclk.entries) {
drivers/gpu/drm/radeon/ci_dpm.c
3093
if (ulv_voltage > rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[0].v)
drivers/gpu/drm/radeon/ci_dpm.c
3097
rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[0].v - ulv_voltage;
drivers/gpu/drm/radeon/ci_dpm.c
3099
if (ulv_voltage > rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[0].v)
drivers/gpu/drm/radeon/ci_dpm.c
3103
((rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[0].v - ulv_voltage) *
drivers/gpu/drm/radeon/ci_dpm.c
3437
allowed_sclk_vddc_table->entries[i].clk)) {
drivers/gpu/drm/radeon/ci_dpm.c
3439
allowed_sclk_vddc_table->entries[i].clk;
drivers/gpu/drm/radeon/ci_dpm.c
3450
allowed_mclk_table->entries[i].clk)) {
drivers/gpu/drm/radeon/ci_dpm.c
3452
allowed_mclk_table->entries[i].clk;
drivers/gpu/drm/radeon/ci_dpm.c
3461
allowed_sclk_vddc_table->entries[i].v;
drivers/gpu/drm/radeon/ci_dpm.c
3463
std_voltage_table->entries[i].leakage;
drivers/gpu/drm/radeon/ci_dpm.c
3471
allowed_mclk_table->entries[i].v;
drivers/gpu/drm/radeon/ci_dpm.c
3479
allowed_mclk_table->entries[i].v;
drivers/gpu/drm/radeon/ci_dpm.c
3748
if (rdev->clock.current_dispclk == disp_voltage_table->entries[i].clk)
drivers/gpu/drm/radeon/ci_dpm.c
3749
requested_voltage = disp_voltage_table->entries[i].v;
drivers/gpu/drm/radeon/ci_dpm.c
3753
if (requested_voltage <= vddc_table->entries[i].v) {
drivers/gpu/drm/radeon/ci_dpm.c
3754
requested_voltage = vddc_table->entries[i].v;
drivers/gpu/drm/radeon/ci_dpm.c
3896
if (rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].v <= max_limits->vddc) {
drivers/gpu/drm/radeon/ci_dpm.c
3944
if (rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].v <= max_limits->vddc) {
drivers/gpu/drm/radeon/ci_dpm.c
3977
if (rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].v <= max_limits->vddc) {
drivers/gpu/drm/radeon/ci_dpm.c
4008
if (rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].v <= max_limits->vddc) {
drivers/gpu/drm/radeon/ci_dpm.c
4057
if (table->entries[i].evclk >= min_evclk)
drivers/gpu/drm/radeon/ci_dpm.c
4884
pi->min_vddc_in_pp_table = allowed_sclk_vddc_table->entries[0].v;
drivers/gpu/drm/radeon/ci_dpm.c
4886
allowed_sclk_vddc_table->entries[allowed_sclk_vddc_table->count - 1].v;
drivers/gpu/drm/radeon/ci_dpm.c
4888
pi->min_vddci_in_pp_table = allowed_mclk_vddci_table->entries[0].v;
drivers/gpu/drm/radeon/ci_dpm.c
4890
allowed_mclk_vddci_table->entries[allowed_mclk_vddci_table->count - 1].v;
drivers/gpu/drm/radeon/ci_dpm.c
4893
allowed_sclk_vddc_table->entries[allowed_sclk_vddc_table->count - 1].clk;
drivers/gpu/drm/radeon/ci_dpm.c
4895
allowed_mclk_vddc_table->entries[allowed_sclk_vddc_table->count - 1].clk;
drivers/gpu/drm/radeon/ci_dpm.c
4897
allowed_sclk_vddc_table->entries[allowed_sclk_vddc_table->count - 1].v;
drivers/gpu/drm/radeon/ci_dpm.c
4899
allowed_mclk_vddci_table->entries[allowed_mclk_vddci_table->count - 1].v;
drivers/gpu/drm/radeon/ci_dpm.c
4939
ci_patch_with_vddc_leakage(rdev, &table->entries[i].v);
drivers/gpu/drm/radeon/ci_dpm.c
4950
ci_patch_with_vddci_leakage(rdev, &table->entries[i].v);
drivers/gpu/drm/radeon/ci_dpm.c
4961
ci_patch_with_vddc_leakage(rdev, &table->entries[i].v);
drivers/gpu/drm/radeon/ci_dpm.c
4972
ci_patch_with_vddc_leakage(rdev, &table->entries[i].v);
drivers/gpu/drm/radeon/ci_dpm.c
4983
ci_patch_with_vddc_leakage(rdev, &table->entries[i].voltage);
drivers/gpu/drm/radeon/ci_dpm.c
5003
ci_patch_with_vddc_leakage(rdev, &table->entries[i].vddc);
drivers/gpu/drm/radeon/ci_dpm.c
5624
kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
drivers/gpu/drm/radeon/ci_dpm.c
5742
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
drivers/gpu/drm/radeon/ci_dpm.c
5744
if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
drivers/gpu/drm/radeon/ci_dpm.c
5749
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
drivers/gpu/drm/radeon/ci_dpm.c
5750
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
drivers/gpu/drm/radeon/ci_dpm.c
5751
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
drivers/gpu/drm/radeon/ci_dpm.c
5752
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
drivers/gpu/drm/radeon/ci_dpm.c
5753
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
drivers/gpu/drm/radeon/ci_dpm.c
5754
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
drivers/gpu/drm/radeon/ci_dpm.c
5755
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
drivers/gpu/drm/radeon/ci_dpm.c
5756
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
drivers/gpu/drm/radeon/cypress_dpm.c
1481
voltage_table->entries[i] = voltage_table->entries[i + diff];
drivers/gpu/drm/radeon/cypress_dpm.c
1522
table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
drivers/gpu/drm/radeon/cypress_dpm.c
1544
eg_pi->vddc_voltage_table.entries[i].value) {
drivers/gpu/drm/radeon/cypress_dpm.c
409
if (value <= table->entries[i].value) {
drivers/gpu/drm/radeon/cypress_dpm.c
411
voltage->value = cpu_to_be16(table->entries[i].value);
drivers/gpu/drm/radeon/kv_dpm.c
1286
if (table->entries[i].evclk >= evclk)
drivers/gpu/drm/radeon/kv_dpm.c
1374
if (table->entries[i].clk >= 0) /* XXX */
drivers/gpu/drm/radeon/kv_dpm.c
1536
if ((table->entries[i].clk >= new_ps->levels[0].sclk) ||
drivers/gpu/drm/radeon/kv_dpm.c
1544
if (table->entries[i].clk <= new_ps->levels[new_ps->num_levels - 1].sclk)
drivers/gpu/drm/radeon/kv_dpm.c
1550
if ((new_ps->levels[0].sclk - table->entries[pi->highest_valid].clk) >
drivers/gpu/drm/radeon/kv_dpm.c
1551
(table->entries[pi->lowest_valid].clk - new_ps->levels[new_ps->num_levels - 1].sclk))
drivers/gpu/drm/radeon/kv_dpm.c
1561
if (table->entries[i].sclk_frequency >= new_ps->levels[0].sclk ||
drivers/gpu/drm/radeon/kv_dpm.c
1569
if (table->entries[i].sclk_frequency <=
drivers/gpu/drm/radeon/kv_dpm.c
1577
table->entries[pi->highest_valid].sclk_frequency) >
drivers/gpu/drm/radeon/kv_dpm.c
1578
(table->entries[pi->lowest_valid].sclk_frequency -
drivers/gpu/drm/radeon/kv_dpm.c
1771
pi->sys_info.sclk_voltage_mapping_table.entries[idx].sclk_frequency;
drivers/gpu/drm/radeon/kv_dpm.c
1774
pi->sys_info.sclk_voltage_mapping_table.entries[idx].vid_2bit);
drivers/gpu/drm/radeon/kv_dpm.c
1794
uvd_table->entries[i].v =
drivers/gpu/drm/radeon/kv_dpm.c
1796
uvd_table->entries[i].v);
drivers/gpu/drm/radeon/kv_dpm.c
1801
vce_table->entries[i].v =
drivers/gpu/drm/radeon/kv_dpm.c
1803
vce_table->entries[i].v);
drivers/gpu/drm/radeon/kv_dpm.c
1808
samu_table->entries[i].v =
drivers/gpu/drm/radeon/kv_dpm.c
1810
samu_table->entries[i].v);
drivers/gpu/drm/radeon/kv_dpm.c
1815
acp_table->entries[i].v =
drivers/gpu/drm/radeon/kv_dpm.c
1817
acp_table->entries[i].v);
drivers/gpu/drm/radeon/kv_dpm.c
1910
(kv_convert_8bit_index_to_voltage(rdev, table->entries[i].v) <=
drivers/gpu/drm/radeon/kv_dpm.c
1922
(kv_convert_2bit_index_to_voltage(rdev, table->entries[i].vid_2bit) <=
drivers/gpu/drm/radeon/kv_dpm.c
1965
if (stable_p_state_sclk >= table->entries[i].clk) {
drivers/gpu/drm/radeon/kv_dpm.c
1966
stable_p_state_sclk = table->entries[i].clk;
drivers/gpu/drm/radeon/kv_dpm.c
1972
stable_p_state_sclk = table->entries[0].clk;
drivers/gpu/drm/radeon/kv_dpm.c
1995
ps->levels[i].sclk = table->entries[limit].clk;
drivers/gpu/drm/radeon/kv_dpm.c
2007
ps->levels[i].sclk = table->entries[limit].sclk_frequency;
drivers/gpu/drm/radeon/kv_dpm.c
2158
kv_convert_8bit_index_to_voltage(rdev, table->entries[i].v)))
drivers/gpu/drm/radeon/kv_dpm.c
2161
kv_set_divider_value(rdev, i, table->entries[i].clk);
drivers/gpu/drm/radeon/kv_dpm.c
2164
table->entries[i].v);
drivers/gpu/drm/radeon/kv_dpm.c
2178
kv_convert_2bit_index_to_voltage(rdev, table->entries[i].vid_2bit))
drivers/gpu/drm/radeon/kv_dpm.c
2181
kv_set_divider_value(rdev, i, table->entries[i].sclk_frequency);
drivers/gpu/drm/radeon/kv_dpm.c
2182
kv_set_vid(rdev, i, table->entries[i].vid_2bit);
drivers/gpu/drm/radeon/kv_dpm.c
403
return vddc_sclk_table->entries[vid_2bit].v;
drivers/gpu/drm/radeon/kv_dpm.c
405
return vddc_sclk_table->entries[vddc_sclk_table->count - 1].v;
drivers/gpu/drm/radeon/kv_dpm.c
408
if (vid_mapping_table->entries[i].vid_2bit == vid_2bit)
drivers/gpu/drm/radeon/kv_dpm.c
409
return vid_mapping_table->entries[i].vid_7bit;
drivers/gpu/drm/radeon/kv_dpm.c
411
return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_7bit;
drivers/gpu/drm/radeon/kv_dpm.c
425
if (vddc_sclk_table->entries[i].v == vid_7bit)
drivers/gpu/drm/radeon/kv_dpm.c
431
if (vid_mapping_table->entries[i].vid_7bit == vid_7bit)
drivers/gpu/drm/radeon/kv_dpm.c
432
return vid_mapping_table->entries[i].vid_2bit;
drivers/gpu/drm/radeon/kv_dpm.c
435
return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_2bit;
drivers/gpu/drm/radeon/kv_dpm.c
565
if (table->entries[i].clk == pi->boot_pl.sclk)
drivers/gpu/drm/radeon/kv_dpm.c
579
if (table->entries[i].sclk_frequency == pi->boot_pl.sclk)
drivers/gpu/drm/radeon/kv_dpm.c
674
(pi->high_voltage_t < table->entries[i].v))
drivers/gpu/drm/radeon/kv_dpm.c
677
pi->uvd_level[i].VclkFrequency = cpu_to_be32(table->entries[i].vclk);
drivers/gpu/drm/radeon/kv_dpm.c
678
pi->uvd_level[i].DclkFrequency = cpu_to_be32(table->entries[i].dclk);
drivers/gpu/drm/radeon/kv_dpm.c
679
pi->uvd_level[i].MinVddNb = cpu_to_be16(table->entries[i].v);
drivers/gpu/drm/radeon/kv_dpm.c
682
(u8)kv_get_clk_bypass(rdev, table->entries[i].vclk);
drivers/gpu/drm/radeon/kv_dpm.c
684
(u8)kv_get_clk_bypass(rdev, table->entries[i].dclk);
drivers/gpu/drm/radeon/kv_dpm.c
687
table->entries[i].vclk, false, ÷rs);
drivers/gpu/drm/radeon/kv_dpm.c
693
table->entries[i].dclk, false, ÷rs);
drivers/gpu/drm/radeon/kv_dpm.c
745
pi->high_voltage_t < table->entries[i].v)
drivers/gpu/drm/radeon/kv_dpm.c
748
pi->vce_level[i].Frequency = cpu_to_be32(table->entries[i].evclk);
drivers/gpu/drm/radeon/kv_dpm.c
749
pi->vce_level[i].MinVoltage = cpu_to_be16(table->entries[i].v);
drivers/gpu/drm/radeon/kv_dpm.c
752
(u8)kv_get_clk_bypass(rdev, table->entries[i].evclk);
drivers/gpu/drm/radeon/kv_dpm.c
755
table->entries[i].evclk, false, ÷rs);
drivers/gpu/drm/radeon/kv_dpm.c
808
pi->high_voltage_t < table->entries[i].v)
drivers/gpu/drm/radeon/kv_dpm.c
811
pi->samu_level[i].Frequency = cpu_to_be32(table->entries[i].clk);
drivers/gpu/drm/radeon/kv_dpm.c
812
pi->samu_level[i].MinVoltage = cpu_to_be16(table->entries[i].v);
drivers/gpu/drm/radeon/kv_dpm.c
815
(u8)kv_get_clk_bypass(rdev, table->entries[i].clk);
drivers/gpu/drm/radeon/kv_dpm.c
818
table->entries[i].clk, false, ÷rs);
drivers/gpu/drm/radeon/kv_dpm.c
873
pi->acp_level[i].Frequency = cpu_to_be32(table->entries[i].clk);
drivers/gpu/drm/radeon/kv_dpm.c
874
pi->acp_level[i].MinVoltage = cpu_to_be16(table->entries[i].v);
drivers/gpu/drm/radeon/kv_dpm.c
877
table->entries[i].clk, false, ÷rs);
drivers/gpu/drm/radeon/kv_dpm.c
927
if (kv_get_clock_difference(table->entries[i].clk, 40000) < 200)
drivers/gpu/drm/radeon/kv_dpm.c
929
else if (kv_get_clock_difference(table->entries[i].clk, 30000) < 200)
drivers/gpu/drm/radeon/kv_dpm.c
931
else if (kv_get_clock_difference(table->entries[i].clk, 26600) < 200)
drivers/gpu/drm/radeon/kv_dpm.c
933
else if (kv_get_clock_difference(table->entries[i].clk, 20000) < 200)
drivers/gpu/drm/radeon/kv_dpm.c
935
else if (kv_get_clock_difference(table->entries[i].clk, 10000) < 200)
drivers/gpu/drm/radeon/kv_dpm.c
948
if (kv_get_clock_difference(table->entries[i].sclk_frequency, 40000) < 200)
drivers/gpu/drm/radeon/kv_dpm.c
950
else if (kv_get_clock_difference(table->entries[i].sclk_frequency, 30000) < 200)
drivers/gpu/drm/radeon/kv_dpm.c
952
else if (kv_get_clock_difference(table->entries[i].sclk_frequency, 26600) < 200)
drivers/gpu/drm/radeon/kv_dpm.c
954
else if (kv_get_clock_difference(table->entries[i].sclk_frequency, 20000) < 200)
drivers/gpu/drm/radeon/kv_dpm.c
956
else if (kv_get_clock_difference(table->entries[i].sclk_frequency, 10000) < 200)
drivers/gpu/drm/radeon/ni_dpm.c
1000
table->entries[i].v = pi->max_vddc;
drivers/gpu/drm/radeon/ni_dpm.c
1265
table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
drivers/gpu/drm/radeon/ni_dpm.c
1283
if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
drivers/gpu/drm/radeon/ni_dpm.c
1307
if (value <= table->entries[i].value) {
drivers/gpu/drm/radeon/ni_dpm.c
1309
voltage->value = cpu_to_be16(table->entries[i].value);
drivers/gpu/drm/radeon/ni_dpm.c
1346
if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
drivers/gpu/drm/radeon/ni_dpm.c
1348
*std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
drivers/gpu/drm/radeon/ni_dpm.c
3076
eg_pi->vddc_voltage_table.entries[j].value,
drivers/gpu/drm/radeon/ni_dpm.c
3121
smc_leakage = leakage_table->entries[j].leakage;
drivers/gpu/drm/radeon/ni_dpm.c
4079
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
drivers/gpu/drm/radeon/ni_dpm.c
4081
if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
drivers/gpu/drm/radeon/ni_dpm.c
4086
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
drivers/gpu/drm/radeon/ni_dpm.c
4087
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
drivers/gpu/drm/radeon/ni_dpm.c
4088
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
drivers/gpu/drm/radeon/ni_dpm.c
4089
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
drivers/gpu/drm/radeon/ni_dpm.c
4090
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
drivers/gpu/drm/radeon/ni_dpm.c
4091
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
drivers/gpu/drm/radeon/ni_dpm.c
4092
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
drivers/gpu/drm/radeon/ni_dpm.c
4093
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
drivers/gpu/drm/radeon/ni_dpm.c
4275
kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
drivers/gpu/drm/radeon/ni_dpm.c
997
if (0xff01 == table->entries[i].v) {
drivers/gpu/drm/radeon/pptable.h
476
ATOM_PPLIB_Clock_Voltage_Dependency_Record entries[1]; // Dynamically allocate entries.
drivers/gpu/drm/radeon/pptable.h
492
ATOM_PPLIB_Clock_Voltage_Limit_Record entries[1]; // Dynamically allocate entries.
drivers/gpu/drm/radeon/pptable.h
516
ATOM_PPLIB_CAC_Leakage_Record entries[1]; // Dynamically allocate entries.
drivers/gpu/drm/radeon/pptable.h
531
ATOM_PPLIB_PhaseSheddingLimits_Record entries[1]; // Dynamically allocate entries.
drivers/gpu/drm/radeon/pptable.h
543
VCEClockInfo entries[1];
drivers/gpu/drm/radeon/pptable.h
555
ATOM_PPLIB_VCE_Clock_Voltage_Limit_Record entries[1];
drivers/gpu/drm/radeon/pptable.h
567
ATOM_PPLIB_VCE_State_Record entries[1];
drivers/gpu/drm/radeon/pptable.h
589
UVDClockInfo entries[1];
drivers/gpu/drm/radeon/pptable.h
601
ATOM_PPLIB_UVD_Clock_Voltage_Limit_Record entries[1];
drivers/gpu/drm/radeon/pptable.h
620
ATOM_PPLIB_SAMClk_Voltage_Limit_Record entries[1];
drivers/gpu/drm/radeon/pptable.h
638
ATOM_PPLIB_ACPClk_Voltage_Limit_Record entries[1];
drivers/gpu/drm/radeon/r600_dpm.c
1001
rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].mclk =
drivers/gpu/drm/radeon/r600_dpm.c
1003
rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].voltage =
drivers/gpu/drm/radeon/r600_dpm.c
1035
rdev->pm.dpm.dyn_state.cac_leakage_table.entries = kzalloc(size, GFP_KERNEL);
drivers/gpu/drm/radeon/r600_dpm.c
1036
if (!rdev->pm.dpm.dyn_state.cac_leakage_table.entries) {
drivers/gpu/drm/radeon/r600_dpm.c
1040
entry = &cac_table->entries[0];
drivers/gpu/drm/radeon/r600_dpm.c
1043
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc1 =
drivers/gpu/drm/radeon/r600_dpm.c
1045
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc2 =
drivers/gpu/drm/radeon/r600_dpm.c
1047
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc3 =
drivers/gpu/drm/radeon/r600_dpm.c
1050
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].vddc =
drivers/gpu/drm/radeon/r600_dpm.c
1052
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[i].leakage =
drivers/gpu/drm/radeon/r600_dpm.c
1089
rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries =
drivers/gpu/drm/radeon/r600_dpm.c
1091
if (!rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries) {
drivers/gpu/drm/radeon/r600_dpm.c
1097
entry = &limits->entries[0];
drivers/gpu/drm/radeon/r600_dpm.c
1098
state_entry = &states->entries[0];
drivers/gpu/drm/radeon/r600_dpm.c
1101
((u8 *)&array->entries[0] +
drivers/gpu/drm/radeon/r600_dpm.c
1103
rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].evclk =
drivers/gpu/drm/radeon/r600_dpm.c
1105
rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].ecclk =
drivers/gpu/drm/radeon/r600_dpm.c
1107
rdev->pm.dpm.dyn_state.vce_clock_voltage_dependency_table.entries[i].v =
drivers/gpu/drm/radeon/r600_dpm.c
1116
((u8 *)&array->entries[0] +
drivers/gpu/drm/radeon/r600_dpm.c
1143
rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries =
drivers/gpu/drm/radeon/r600_dpm.c
1145
if (!rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries) {
drivers/gpu/drm/radeon/r600_dpm.c
1151
entry = &limits->entries[0];
drivers/gpu/drm/radeon/r600_dpm.c
1154
((u8 *)&array->entries[0] +
drivers/gpu/drm/radeon/r600_dpm.c
1156
rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].vclk =
drivers/gpu/drm/radeon/r600_dpm.c
1158
rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].dclk =
drivers/gpu/drm/radeon/r600_dpm.c
1160
rdev->pm.dpm.dyn_state.uvd_clock_voltage_dependency_table.entries[i].v =
drivers/gpu/drm/radeon/r600_dpm.c
1175
rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries =
drivers/gpu/drm/radeon/r600_dpm.c
1177
if (!rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries) {
drivers/gpu/drm/radeon/r600_dpm.c
1183
entry = &limits->entries[0];
drivers/gpu/drm/radeon/r600_dpm.c
1185
rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].clk =
drivers/gpu/drm/radeon/r600_dpm.c
1187
rdev->pm.dpm.dyn_state.samu_clock_voltage_dependency_table.entries[i].v =
drivers/gpu/drm/radeon/r600_dpm.c
1233
rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries =
drivers/gpu/drm/radeon/r600_dpm.c
1235
if (!rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries) {
drivers/gpu/drm/radeon/r600_dpm.c
1241
entry = &limits->entries[0];
drivers/gpu/drm/radeon/r600_dpm.c
1243
rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].clk =
drivers/gpu/drm/radeon/r600_dpm.c
1245
rdev->pm.dpm.dyn_state.acp_clock_voltage_dependency_table.entries[i].v =
drivers/gpu/drm/radeon/r600_dpm.c
1298
kfree(dyn_state->vddc_dependency_on_sclk.entries);
drivers/gpu/drm/radeon/r600_dpm.c
1299
kfree(dyn_state->vddci_dependency_on_mclk.entries);
drivers/gpu/drm/radeon/r600_dpm.c
1300
kfree(dyn_state->vddc_dependency_on_mclk.entries);
drivers/gpu/drm/radeon/r600_dpm.c
1301
kfree(dyn_state->mvdd_dependency_on_mclk.entries);
drivers/gpu/drm/radeon/r600_dpm.c
1302
kfree(dyn_state->cac_leakage_table.entries);
drivers/gpu/drm/radeon/r600_dpm.c
1303
kfree(dyn_state->phase_shedding_limits_table.entries);
drivers/gpu/drm/radeon/r600_dpm.c
1306
kfree(dyn_state->vce_clock_voltage_dependency_table.entries);
drivers/gpu/drm/radeon/r600_dpm.c
1307
kfree(dyn_state->uvd_clock_voltage_dependency_table.entries);
drivers/gpu/drm/radeon/r600_dpm.c
1308
kfree(dyn_state->samu_clock_voltage_dependency_table.entries);
drivers/gpu/drm/radeon/r600_dpm.c
1309
kfree(dyn_state->acp_clock_voltage_dependency_table.entries);
drivers/gpu/drm/radeon/r600_dpm.c
824
radeon_table->entries = kzalloc_objs(struct radeon_clock_voltage_dependency_entry,
drivers/gpu/drm/radeon/r600_dpm.c
826
if (!radeon_table->entries)
drivers/gpu/drm/radeon/r600_dpm.c
829
entry = &atom_table->entries[0];
drivers/gpu/drm/radeon/r600_dpm.c
831
radeon_table->entries[i].clk = le16_to_cpu(entry->usClockLow) |
drivers/gpu/drm/radeon/r600_dpm.c
833
radeon_table->entries[i].v = le16_to_cpu(entry->usVoltage);
drivers/gpu/drm/radeon/r600_dpm.c
935
kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries);
drivers/gpu/drm/radeon/r600_dpm.c
946
kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries);
drivers/gpu/drm/radeon/r600_dpm.c
947
kfree(rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk.entries);
drivers/gpu/drm/radeon/r600_dpm.c
958
kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries);
drivers/gpu/drm/radeon/r600_dpm.c
959
kfree(rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk.entries);
drivers/gpu/drm/radeon/r600_dpm.c
960
kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk.entries);
drivers/gpu/drm/radeon/r600_dpm.c
971
le16_to_cpu(clk_v->entries[0].usSclkLow) |
drivers/gpu/drm/radeon/r600_dpm.c
972
(clk_v->entries[0].ucSclkHigh << 16);
drivers/gpu/drm/radeon/r600_dpm.c
974
le16_to_cpu(clk_v->entries[0].usMclkLow) |
drivers/gpu/drm/radeon/r600_dpm.c
975
(clk_v->entries[0].ucMclkHigh << 16);
drivers/gpu/drm/radeon/r600_dpm.c
977
le16_to_cpu(clk_v->entries[0].usVddc);
drivers/gpu/drm/radeon/r600_dpm.c
979
le16_to_cpu(clk_v->entries[0].usVddci);
drivers/gpu/drm/radeon/r600_dpm.c
989
rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries =
drivers/gpu/drm/radeon/r600_dpm.c
992
if (!rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries) {
drivers/gpu/drm/radeon/r600_dpm.c
997
entry = &psl->entries[0];
drivers/gpu/drm/radeon/r600_dpm.c
999
rdev->pm.dpm.dyn_state.phase_shedding_limits_table.entries[i].sclk =
drivers/gpu/drm/radeon/radeon.h
1386
struct radeon_clock_voltage_dependency_entry *entries;
drivers/gpu/drm/radeon/radeon.h
1403
union radeon_cac_leakage_entry *entries;
drivers/gpu/drm/radeon/radeon.h
1414
struct radeon_phase_shedding_limits_entry *entries;
drivers/gpu/drm/radeon/radeon.h
1425
struct radeon_uvd_clock_voltage_dependency_entry *entries;
drivers/gpu/drm/radeon/radeon.h
1436
struct radeon_vce_clock_voltage_dependency_entry *entries;
drivers/gpu/drm/radeon/radeon_atombios.c
3308
if (rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[entry_id].v ==
drivers/gpu/drm/radeon/radeon_atombios.c
3320
cpu_to_le32(rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[entry_id].clk);
drivers/gpu/drm/radeon/radeon_atombios.c
3744
voltage_table->entries[i].value =
drivers/gpu/drm/radeon/radeon_atombios.c
3747
voltage_table->entries[i].value,
drivers/gpu/drm/radeon/radeon_atombios.c
3749
&voltage_table->entries[i].smio_low,
drivers/gpu/drm/radeon/radeon_atombios.c
3779
voltage_table->entries[i].value =
drivers/gpu/drm/radeon/radeon_atombios.c
3781
voltage_table->entries[i].smio_low =
drivers/gpu/drm/radeon/radeon_combios.c
2744
u8 entries = RBIOS8(offset + 0x5 + 0xb);
drivers/gpu/drm/radeon/radeon_combios.c
2746
if (entries && voltage_table_offset) {
drivers/gpu/drm/radeon/radeon_mode.h
647
struct atom_voltage_table_entry entries[MAX_VOLTAGE_ENTRIES];
drivers/gpu/drm/radeon/radeon_vm.c
390
unsigned entries;
drivers/gpu/drm/radeon/radeon_vm.c
403
entries = radeon_bo_size(bo) / 8;
drivers/gpu/drm/radeon/radeon_vm.c
411
radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0);
drivers/gpu/drm/radeon/si_dpm.c
2485
if (table->entries[i].vddc > *max)
drivers/gpu/drm/radeon/si_dpm.c
2486
*max = table->entries[i].vddc;
drivers/gpu/drm/radeon/si_dpm.c
2487
if (table->entries[i].vddc < *min)
drivers/gpu/drm/radeon/si_dpm.c
2488
*min = table->entries[i].vddc;
drivers/gpu/drm/radeon/si_dpm.c
2860
if (highest_leakage < si_pi->leakage_voltage.entries[i].voltage)
drivers/gpu/drm/radeon/si_dpm.c
2861
highest_leakage = si_pi->leakage_voltage.entries[i].voltage;
drivers/gpu/drm/radeon/si_dpm.c
2885
if ((evclk <= table->entries[i].evclk) &&
drivers/gpu/drm/radeon/si_dpm.c
2886
(ecclk <= table->entries[i].ecclk)) {
drivers/gpu/drm/radeon/si_dpm.c
2887
*voltage = table->entries[i].v;
drivers/gpu/drm/radeon/si_dpm.c
2895
*voltage = table->entries[table->count - 1].v;
drivers/gpu/drm/radeon/si_dpm.c
3186
si_pi->leakage_voltage.entries[count].voltage = vddc;
drivers/gpu/drm/radeon/si_dpm.c
3187
si_pi->leakage_voltage.entries[count].leakage_index =
drivers/gpu/drm/radeon/si_dpm.c
3214
if (si_pi->leakage_voltage.entries[i].leakage_index == index) {
drivers/gpu/drm/radeon/si_dpm.c
3215
*leakage_voltage = si_pi->leakage_voltage.entries[i].voltage;
drivers/gpu/drm/radeon/si_dpm.c
3874
voltage_table->entries[i] = voltage_table->entries[i + diff];
drivers/gpu/drm/radeon/si_dpm.c
3893
voltage_table->entries[i].value = voltage_dependency_table->entries[i].v;
drivers/gpu/drm/radeon/si_dpm.c
3894
voltage_table->entries[i].smio_low = 0;
drivers/gpu/drm/radeon/si_dpm.c
3987
table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
drivers/gpu/drm/radeon/si_dpm.c
4012
if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
drivers/gpu/drm/radeon/si_dpm.c
4060
if (value <= table->entries[i].value) {
drivers/gpu/drm/radeon/si_dpm.c
4062
voltage->value = cpu_to_be16(table->entries[i].value);
drivers/gpu/drm/radeon/si_dpm.c
4085
voltage->value = cpu_to_be16(si_pi->mvdd_voltage_table.entries[voltage->index].value);
drivers/gpu/drm/radeon/si_dpm.c
4098
if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries) {
drivers/gpu/drm/radeon/si_dpm.c
4100
if (rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries == NULL)
drivers/gpu/drm/radeon/si_dpm.c
4105
(u16)rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) {
drivers/gpu/drm/radeon/si_dpm.c
4109
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[v_index].vddc;
drivers/gpu/drm/radeon/si_dpm.c
4112
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[rdev->pm.dpm.dyn_state.cac_leakage_table.count-1].vddc;
drivers/gpu/drm/radeon/si_dpm.c
4120
(u16)rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk.entries[v_index].v) {
drivers/gpu/drm/radeon/si_dpm.c
4124
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[v_index].vddc;
drivers/gpu/drm/radeon/si_dpm.c
4127
rdev->pm.dpm.dyn_state.cac_leakage_table.entries[rdev->pm.dpm.dyn_state.cac_leakage_table.count-1].vddc;
drivers/gpu/drm/radeon/si_dpm.c
4134
*std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
drivers/gpu/drm/radeon/si_dpm.c
4159
if ((voltage <= limits->entries[i].voltage) &&
drivers/gpu/drm/radeon/si_dpm.c
4160
(sclk <= limits->entries[i].sclk) &&
drivers/gpu/drm/radeon/si_dpm.c
4161
(mclk <= limits->entries[i].mclk))
drivers/gpu/drm/radeon/si_dpm.c
5106
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[i].clk) {
drivers/gpu/drm/radeon/si_dpm.c
5108
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[i].v)
drivers/gpu/drm/radeon/si_dpm.c
5823
table->entries[i].v,
drivers/gpu/drm/radeon/si_dpm.c
5826
table->entries[i].v = leakage_voltage;
drivers/gpu/drm/radeon/si_dpm.c
5837
table->entries[j].v = (table->entries[j].v <= table->entries[j + 1].v) ?
drivers/gpu/drm/radeon/si_dpm.c
5838
table->entries[j].v : table->entries[j + 1].v;
drivers/gpu/drm/radeon/si_dpm.c
6902
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
drivers/gpu/drm/radeon/si_dpm.c
6904
if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
drivers/gpu/drm/radeon/si_dpm.c
6909
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
drivers/gpu/drm/radeon/si_dpm.c
6910
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
drivers/gpu/drm/radeon/si_dpm.c
6911
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
drivers/gpu/drm/radeon/si_dpm.c
6912
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
drivers/gpu/drm/radeon/si_dpm.c
6913
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
drivers/gpu/drm/radeon/si_dpm.c
6914
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
drivers/gpu/drm/radeon/si_dpm.c
6915
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
drivers/gpu/drm/radeon/si_dpm.c
6916
rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
drivers/gpu/drm/radeon/si_dpm.c
7028
kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
drivers/gpu/drm/radeon/si_dpm.h
130
struct si_leakage_voltage_entry entries[SISLANDS_MAX_LEAKAGE_COUNT];
drivers/gpu/drm/radeon/smu7_discrete.h
226
SMU7_Discrete_MCArbDramTimingTableEntry entries[SMU__NUM_SCLK_DPM_STATE][SMU__NUM_MCLK_DPM_LEVELS];
drivers/gpu/drm/radeon/sumo_dpm.c
1033
if (pi->sys_info.sclk_voltage_mapping_table.entries[i].sclk_frequency >= lower_limit)
drivers/gpu/drm/radeon/sumo_dpm.c
1034
return pi->sys_info.sclk_voltage_mapping_table.entries[i].sclk_frequency;
drivers/gpu/drm/radeon/sumo_dpm.c
1037
return pi->sys_info.sclk_voltage_mapping_table.entries[pi->sys_info.sclk_voltage_mapping_table.num_max_dpm_entries - 1].sclk_frequency;
drivers/gpu/drm/radeon/sumo_dpm.c
1534
if (vid_mapping_table->entries[i].vid_2bit == vid_2bit)
drivers/gpu/drm/radeon/sumo_dpm.c
1535
return vid_mapping_table->entries[i].vid_7bit;
drivers/gpu/drm/radeon/sumo_dpm.c
1538
return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_7bit;
drivers/gpu/drm/radeon/sumo_dpm.c
1549
if (vid_mapping_table->entries[i].vid_7bit == vid_7bit)
drivers/gpu/drm/radeon/sumo_dpm.c
1550
return vid_mapping_table->entries[i].vid_2bit;
drivers/gpu/drm/radeon/sumo_dpm.c
1553
return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_2bit;
drivers/gpu/drm/radeon/sumo_dpm.c
1601
sclk_voltage_mapping_table->entries[n].sclk_frequency =
drivers/gpu/drm/radeon/sumo_dpm.c
1603
sclk_voltage_mapping_table->entries[n].vid_2bit =
drivers/gpu/drm/radeon/sumo_dpm.c
1623
vid_mapping_table->entries[table[i].usVoltageIndex].vid_7bit =
drivers/gpu/drm/radeon/sumo_dpm.c
1625
vid_mapping_table->entries[table[i].usVoltageIndex].vid_2bit =
drivers/gpu/drm/radeon/sumo_dpm.c
1631
if (vid_mapping_table->entries[i].vid_7bit == 0) {
drivers/gpu/drm/radeon/sumo_dpm.c
1633
if (vid_mapping_table->entries[j].vid_7bit != 0) {
drivers/gpu/drm/radeon/sumo_dpm.c
1634
vid_mapping_table->entries[i] =
drivers/gpu/drm/radeon/sumo_dpm.c
1635
vid_mapping_table->entries[j];
drivers/gpu/drm/radeon/sumo_dpm.c
1636
vid_mapping_table->entries[j].vid_7bit = 0;
drivers/gpu/drm/radeon/sumo_dpm.h
67
struct sumo_vid_mapping_entry entries[SUMO_MAX_NUMBER_VOLTAGES];
drivers/gpu/drm/radeon/sumo_dpm.h
78
struct sumo_sclk_voltage_mapping_entry entries[SUMO_MAX_HARDWARE_POWERLEVELS];
drivers/gpu/drm/radeon/trinity_dpm.c
1340
if (pi->sys_info.sclk_voltage_mapping_table.entries[i].sclk_frequency >= lower_limit)
drivers/gpu/drm/radeon/trinity_dpm.c
1341
return pi->sys_info.sclk_voltage_mapping_table.entries[i].sclk_frequency;
drivers/gpu/drm/radeon/trinity_dpm.c
1471
if ((evclk <= table->entries[i].evclk) &&
drivers/gpu/drm/radeon/trinity_dpm.c
1472
(ecclk <= table->entries[i].ecclk)) {
drivers/gpu/drm/radeon/trinity_dpm.c
1473
*voltage = table->entries[i].v;
drivers/gpu/drm/radeon/trinity_dpm.c
1481
*voltage = table->entries[table->count - 1].v;
drivers/gpu/drm/xe/tests/xe_rtp_test.c
111
.entries = (const struct xe_rtp_entry_sr[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
137
.entries = (const struct xe_rtp_entry_sr[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
166
.entries = (const struct xe_rtp_entry_sr[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
186
.entries = (const struct xe_rtp_entry_sr[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
206
.entries = (const struct xe_rtp_entry_sr[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
228
.entries = (const struct xe_rtp_entry_sr[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
247
.entries = (const struct xe_rtp_entry_sr[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
268
.entries = (const struct xe_rtp_entry_sr[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
289
.entries = (const struct xe_rtp_entry_sr[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
321
while (param->entries[count_rtp_entries].rules)
drivers/gpu/drm/xe/tests/xe_rtp_test.c
325
xe_rtp_process_to_sr(&ctx, param->entries, count_rtp_entries, reg_sr);
drivers/gpu/drm/xe/tests/xe_rtp_test.c
358
.entries = (const struct xe_rtp_entry[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
368
.entries = (const struct xe_rtp_entry[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
381
.entries = (const struct xe_rtp_entry[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
394
.entries = (const struct xe_rtp_entry[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
407
.entries = (const struct xe_rtp_entry[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
42
const struct xe_rtp_entry_sr *entries;
drivers/gpu/drm/xe/tests/xe_rtp_test.c
424
.entries = (const struct xe_rtp_entry[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
441
.entries = (const struct xe_rtp_entry[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
458
.entries = (const struct xe_rtp_entry[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
48
const struct xe_rtp_entry *entries;
drivers/gpu/drm/xe/tests/xe_rtp_test.c
482
while (param->entries[count_rtp_entries].rules)
drivers/gpu/drm/xe/tests/xe_rtp_test.c
486
xe_rtp_process(&ctx, param->entries);
drivers/gpu/drm/xe/tests/xe_rtp_test.c
72
.entries = (const struct xe_rtp_entry_sr[]) {
drivers/gpu/drm/xe/tests/xe_rtp_test.c
92
.entries = (const struct xe_rtp_entry_sr[]) {
drivers/gpu/drm/xe/xe_guc_ct.c
104
unsigned long entries[SZ_32];
drivers/gpu/drm/xe/xe_guc_ct.c
107
n = stack_trace_save(entries, ARRAY_SIZE(entries), 1);
drivers/gpu/drm/xe/xe_guc_ct.c
109
ct->fast_req[slot].stack = stack_depot_save(entries, n, GFP_NOWAIT);
drivers/gpu/drm/xe/xe_lmtt.c
131
lmtt_assert(lmtt, !pd->entries[n]);
drivers/gpu/drm/xe/xe_lmtt.c
331
pt = pd->entries[i];
drivers/gpu/drm/xe/xe_lmtt.c
332
pd->entries[i] = NULL;
drivers/gpu/drm/xe/xe_lmtt.c
347
pt = pd->entries[vfid];
drivers/gpu/drm/xe/xe_lmtt.c
348
pd->entries[vfid] = NULL;
drivers/gpu/drm/xe/xe_lmtt.c
386
pd->entries[idx] = pt;
drivers/gpu/drm/xe/xe_lmtt.c
415
if (pd->entries[vfid])
drivers/gpu/drm/xe/xe_lmtt.c
428
pd->entries[vfid] = pt;
drivers/gpu/drm/xe/xe_lmtt.c
449
pt = pd->entries[vfid];
drivers/gpu/drm/xe/xe_lmtt.c
455
pt = pt->entries[lmtt->ops->lmtt_pte_index(addr, pt->level)];
drivers/gpu/drm/xe/xe_lmtt.c
67
pt = kzalloc_flex(*pt, entries, num_entries);
drivers/gpu/drm/xe/xe_lmtt_types.h
42
struct xe_lmtt_pt *entries[];
drivers/gpu/drm/xe/xe_migrate.c
1738
&pt_op->entries[j];
drivers/gpu/drm/xe/xe_migrate.c
1780
struct xe_vm_pgtable_update *updates = pt_op->entries;
drivers/gpu/drm/xe/xe_migrate.c
1849
struct xe_vm_pgtable_update *updates = pt_op->entries;
drivers/gpu/drm/xe/xe_migrate.c
1886
struct xe_vm_pgtable_update *updates = pt_op->entries;
drivers/gpu/drm/xe/xe_migrate.c
1904
struct xe_vm_pgtable_update *updates = pt_op->entries;
drivers/gpu/drm/xe/xe_migrate.c
2003
u64 entries = DIV_U64_ROUND_UP(size, XE_PAGE_SIZE);
drivers/gpu/drm/xe/xe_migrate.c
2016
num_dword = (1 + 2) * DIV_U64_ROUND_UP(entries, MAX_PTE_PER_SDI);
drivers/gpu/drm/xe/xe_migrate.c
2017
num_dword += entries * 2;
drivers/gpu/drm/xe/xe_page_reclaim.c
108
prl->entries = NULL;
drivers/gpu/drm/xe/xe_page_reclaim.c
122
if (XE_WARN_ON(prl->entries))
drivers/gpu/drm/xe/xe_page_reclaim.c
127
prl->entries = page_address(page);
drivers/gpu/drm/xe/xe_page_reclaim.c
76
memcpy(xe_sa_bo_cpu_addr(prl_sa), prl->entries,
drivers/gpu/drm/xe/xe_page_reclaim.c
95
xe_page_reclaim_entries_put(prl->entries);
drivers/gpu/drm/xe/xe_page_reclaim.c
96
prl->entries = NULL;
drivers/gpu/drm/xe/xe_page_reclaim.h
106
static inline void xe_page_reclaim_entries_get(struct xe_guc_page_reclaim_entry *entries)
drivers/gpu/drm/xe/xe_page_reclaim.h
108
if (entries)
drivers/gpu/drm/xe/xe_page_reclaim.h
109
get_page(virt_to_page(entries));
drivers/gpu/drm/xe/xe_page_reclaim.h
119
static inline void xe_page_reclaim_entries_put(struct xe_guc_page_reclaim_entry *entries)
drivers/gpu/drm/xe/xe_page_reclaim.h
121
if (entries)
drivers/gpu/drm/xe/xe_page_reclaim.h
122
put_page(virt_to_page(entries));
drivers/gpu/drm/xe/xe_page_reclaim.h
44
struct xe_guc_page_reclaim_entry *entries;
drivers/gpu/drm/xe/xe_page_reclaim.h
58
return !prl->entries && prl->num_entries == 0;
drivers/gpu/drm/xe/xe_pt.c
1002
struct xe_vm_pgtable_update *entries,
drivers/gpu/drm/xe/xe_pt.c
1008
struct xe_pt *pt = entries[i].pt;
drivers/gpu/drm/xe/xe_pt.c
1014
for (j = 0; j < entries[i].qwords; j++)
drivers/gpu/drm/xe/xe_pt.c
1015
xe_pt_destroy(entries[i].pt_entries[j].pt,
drivers/gpu/drm/xe/xe_pt.c
1019
kfree(entries[i].pt_entries);
drivers/gpu/drm/xe/xe_pt.c
1020
entries[i].pt_entries = NULL;
drivers/gpu/drm/xe/xe_pt.c
1021
entries[i].qwords = 0;
drivers/gpu/drm/xe/xe_pt.c
1058
struct xe_vm_pgtable_update *entries,
drivers/gpu/drm/xe/xe_pt.c
1066
struct xe_pt *pt = entries[i].pt;
drivers/gpu/drm/xe/xe_pt.c
1073
for (j = 0; j < entries[i].qwords; j++) {
drivers/gpu/drm/xe/xe_pt.c
1074
struct xe_pt *oldpte = entries[i].pt_entries[j].pt;
drivers/gpu/drm/xe/xe_pt.c
1075
int j_ = j + entries[i].ofs;
drivers/gpu/drm/xe/xe_pt.c
1085
struct xe_vm_pgtable_update *entries,
drivers/gpu/drm/xe/xe_pt.c
1093
struct xe_pt *pt = entries[i].pt;
drivers/gpu/drm/xe/xe_pt.c
1097
pt->num_live -= entries[i].qwords;
drivers/gpu/drm/xe/xe_pt.c
1103
for (j = 0; j < entries[i].qwords; j++) {
drivers/gpu/drm/xe/xe_pt.c
1104
u32 j_ = j + entries[i].ofs;
drivers/gpu/drm/xe/xe_pt.c
1106
struct xe_pt *oldpte = entries[i].pt_entries[j].pt;
drivers/gpu/drm/xe/xe_pt.c
1115
struct xe_vm_pgtable_update *entries,
drivers/gpu/drm/xe/xe_pt.c
1123
struct xe_pt *pt = entries[i].pt;
drivers/gpu/drm/xe/xe_pt.c
1127
pt->num_live += entries[i].qwords;
drivers/gpu/drm/xe/xe_pt.c
1133
for (j = 0; j < entries[i].qwords; j++) {
drivers/gpu/drm/xe/xe_pt.c
1134
u32 j_ = j + entries[i].ofs;
drivers/gpu/drm/xe/xe_pt.c
1135
struct xe_pt *newpte = entries[i].pt_entries[j].pt;
drivers/gpu/drm/xe/xe_pt.c
1142
entries[i].pt_entries[j].pt = oldpte;
drivers/gpu/drm/xe/xe_pt.c
1147
static void xe_pt_free_bind(struct xe_vm_pgtable_update *entries,
drivers/gpu/drm/xe/xe_pt.c
1153
kfree(entries[i].pt_entries);
drivers/gpu/drm/xe/xe_pt.c
1159
struct xe_vm_pgtable_update *entries,
drivers/gpu/drm/xe/xe_pt.c
1165
err = xe_pt_stage_bind(tile, vma, range, entries, num_entries,
drivers/gpu/drm/xe/xe_pt.c
1174
const struct xe_vm_pgtable_update *entries,
drivers/gpu/drm/xe/xe_pt.c
1183
const struct xe_vm_pgtable_update *entry = &entries[i];
drivers/gpu/drm/xe/xe_pt.c
1590
struct xe_guc_page_reclaim_entry *reclaim_entries = prl->entries;
drivers/gpu/drm/xe/xe_pt.c
1806
struct xe_vm_pgtable_update *entries)
drivers/gpu/drm/xe/xe_pt.c
1811
container_of(entries, struct xe_vm_pgtable_update_op, entries[0]);
drivers/gpu/drm/xe/xe_pt.c
1822
.wupd.entries = entries,
drivers/gpu/drm/xe/xe_pt.c
1855
struct xe_vm_pgtable_update *entries,
drivers/gpu/drm/xe/xe_pt.c
1863
struct xe_vm_pgtable_update *entry = &entries[i];
drivers/gpu/drm/xe/xe_pt.c
1874
entries[i].pt_entries[j - entry->ofs].pt ?
drivers/gpu/drm/xe/xe_pt.c
1875
&entries[i].pt_entries[j - entry->ofs].pt->base : NULL;
drivers/gpu/drm/xe/xe_pt.c
1881
struct xe_vm_pgtable_update *entries,
drivers/gpu/drm/xe/xe_pt.c
1889
struct xe_vm_pgtable_update *entry = &entries[i];
drivers/gpu/drm/xe/xe_pt.c
1916
const struct xe_vm_pgtable_update *entry = &pt_op->entries[i];
drivers/gpu/drm/xe/xe_pt.c
1966
err = xe_pt_prepare_bind(tile, vma, NULL, pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
1970
ARRAY_SIZE(pt_op->entries));
drivers/gpu/drm/xe/xe_pt.c
1971
xe_vm_dbg_print_entries(tile_to_xe(tile), pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
2001
xe_pt_commit_prepare_bind(vma, pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
2004
xe_pt_cancel_bind(vma, pt_op->entries, pt_op->num_entries);
drivers/gpu/drm/xe/xe_pt.c
2028
err = xe_pt_prepare_bind(tile, vma, range, pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
2032
ARRAY_SIZE(pt_op->entries));
drivers/gpu/drm/xe/xe_pt.c
2033
xe_vm_dbg_print_entries(tile_to_xe(tile), pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
2043
xe_pt_commit_prepare_bind(vma, pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
2046
xe_pt_cancel_bind(vma, pt_op->entries, pt_op->num_entries);
drivers/gpu/drm/xe/xe_pt.c
2091
vma, NULL, pt_op->entries);
drivers/gpu/drm/xe/xe_pt.c
2093
xe_vm_dbg_print_entries(tile_to_xe(tile), pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
2101
xe_pt_commit_prepare_unbind(vma, pt_op->entries, pt_op->num_entries);
drivers/gpu/drm/xe/xe_pt.c
2110
struct xe_vm_pgtable_update *update = pt_op->entries;
drivers/gpu/drm/xe/xe_pt.c
2153
pt_op->entries);
drivers/gpu/drm/xe/xe_pt.c
2155
xe_vm_dbg_print_entries(tile_to_xe(tile), pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
2166
xe_pt_commit_prepare_unbind(XE_INVALID_VMA, pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
2611
xe_pt_commit(pt_op->vma, pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
2702
xe_page_reclaim_entries_put(pt_update_ops->prl.entries);
drivers/gpu/drm/xe/xe_pt.c
2710
xe_pt_free_bind(pt_op->entries, pt_op->num_entries);
drivers/gpu/drm/xe/xe_pt.c
2739
xe_pt_abort_bind(pt_op->vma, pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
2743
xe_pt_abort_unbind(pt_op->vma, pt_op->entries,
drivers/gpu/drm/xe/xe_pt.c
319
struct xe_vm_pgtable_update *entries;
drivers/gpu/drm/xe/xe_pt.c
361
entry = wupd->entries + wupd->num_used_entries++;
drivers/gpu/drm/xe/xe_pt.c
699
struct xe_vm_pgtable_update *entries,
drivers/gpu/drm/xe/xe_pt.c
719
.wupd.entries = entries,
drivers/gpu/drm/xe/xe_pt_types.h
81
struct xe_vm_pgtable_update entries[XE_VM_MAX_LEVEL * 2 + 1];
drivers/gpu/drm/xe/xe_pt_walk.c
77
struct xe_ptw **entries = walk->staging ? (parent->staging ?: NULL) :
drivers/gpu/drm/xe/xe_pt_walk.c
92
child = entries ? entries[offset] : NULL;
drivers/gpu/drm/xe/xe_rtp.c
279
const struct xe_rtp_entry_sr *entries,
drivers/gpu/drm/xe/xe_rtp.c
290
xe_assert(xe, entries);
drivers/gpu/drm/xe/xe_rtp.c
292
for (entry = entries; entry - entries < n_entries; entry++) {
drivers/gpu/drm/xe/xe_rtp.c
307
rtp_mark_active(xe, ctx, entry - entries);
drivers/gpu/drm/xe/xe_rtp.c
324
const struct xe_rtp_entry *entries)
drivers/gpu/drm/xe/xe_rtp.c
333
for (entry = entries; entry && entry->rules; entry++) {
drivers/gpu/drm/xe/xe_rtp.c
337
rtp_mark_active(xe, ctx, entry - entries);
drivers/gpu/drm/xe/xe_rtp.h
433
const struct xe_rtp_entry_sr *entries,
drivers/gpu/drm/xe/xe_rtp.h
437
const struct xe_rtp_entry *entries);
drivers/gpu/drm/xe/xe_tlb_inval_job.c
171
xe_page_reclaim_entries_get(job->prl.entries);
drivers/gpu/drm/xe/xe_tlb_inval_job.c
185
xe_page_reclaim_entries_put(job->prl.entries);
drivers/gpu/drm/xe/xe_uc_fw.c
108
const struct uc_fw_entry *entries;
drivers/gpu/drm/xe/xe_uc_fw.c
240
const struct uc_fw_entry *entries;
drivers/gpu/drm/xe/xe_uc_fw.c
247
entries = blobs_all[uc_fw->type].entries;
drivers/gpu/drm/xe/xe_uc_fw.c
250
for (i = 0; i < count && p <= entries[i].platform; i++) {
drivers/gpu/drm/xe/xe_uc_fw.c
251
if (p != entries[i].platform)
drivers/gpu/drm/xe/xe_uc_fw.c
254
if (entries[i].gt_type != XE_GT_TYPE_ANY &&
drivers/gpu/drm/xe/xe_uc_fw.c
255
entries[i].gt_type != gt->info.type)
drivers/gpu/drm/xe/xe_uc_fw.c
258
uc_fw->path = entries[i].path;
drivers/gpu/drm/xe/xe_uc_fw.c
259
uc_fw->versions.wanted.major = entries[i].major;
drivers/gpu/drm/xe/xe_uc_fw.c
260
uc_fw->versions.wanted.minor = entries[i].minor;
drivers/gpu/drm/xe/xe_uc_fw.c
261
uc_fw->versions.wanted.patch = entries[i].patch;
drivers/gpu/drm/xe/xe_uc_fw.c
262
uc_fw->full_ver_required = entries[i].full_ver_required;
drivers/hid/hid-core.c
1784
struct hid_field_entry *entries;
drivers/hid/hid-core.c
1799
entries = kzalloc_objs(*entries, count);
drivers/hid/hid-core.c
1800
if (!entries)
drivers/hid/hid-core.c
1803
report->field_entries = entries;
drivers/hid/hid-core.c
1819
&entries[usages],
drivers/hid/hid-core.c
1824
__hid_insert_field_entry(hid, report, &entries[usages],
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
197
prd_tbl->entries[j].dest_addr =
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
199
prd_tbl->entries[j].len = sg_dma_len(sg);
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
200
prd_tbl->entries[j].hw_status = 0;
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
201
prd_tbl->entries[j].end_of_prd = 0;
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
206
prd_tbl->entries[j - 1].end_of_prd = 1;
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
465
mes_len += prd_tbl->entries[j].len;
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
466
if (prd_tbl->entries[j].end_of_prd)
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
905
prd_tbl->entries[i].dest_addr =
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
909
prd_tbl->entries[i].len = len_left;
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
910
prd_tbl->entries[i].end_of_prd = 1;
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
914
prd_tbl->entries[i].len = sg_dma_len(sg);
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.c
915
prd_tbl->entries[i].end_of_prd = 0;
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dma.h
82
struct thc_prd_entry entries[PRD_ENTRIES_NUM];
drivers/hv/mshv_root.h
229
const struct mshv_user_irq_entry *entries,
drivers/hv/mshv_root_main.c
1532
struct mshv_user_irq_entry *entries = NULL;
drivers/hv/mshv_root_main.c
1546
entries = vmemdup_user(urouting->entries,
drivers/hv/mshv_root_main.c
1547
array_size(sizeof(*entries),
drivers/hv/mshv_root_main.c
1549
if (IS_ERR(entries))
drivers/hv/mshv_root_main.c
1550
return PTR_ERR(entries);
drivers/hv/mshv_root_main.c
1552
ret = mshv_update_routing_table(partition, entries, args.nr);
drivers/hv/mshv_root_main.c
1553
kvfree(entries);
drivers/hwmon/hp-wmi-sensors.c
1321
struct dentry *entries;
drivers/hwmon/hp-wmi-sensors.c
1338
entries = debugfs_create_dir("sensor", debugfs);
drivers/hwmon/hp-wmi-sensors.c
1344
dir = debugfs_create_dir(buf, entries);
drivers/hwmon/hp-wmi-sensors.c
1387
entries = debugfs_create_dir("platform_events", debugfs);
drivers/hwmon/hp-wmi-sensors.c
1391
dir = debugfs_create_dir(buf, entries);
drivers/hwmon/pmbus/pmbus_core.c
3650
struct pmbus_debugfs_entry *entries;
drivers/hwmon/pmbus/pmbus_core.c
3692
entries = devm_kcalloc(data->dev,
drivers/hwmon/pmbus/pmbus_core.c
3695
sizeof(*entries), GFP_KERNEL);
drivers/hwmon/pmbus/pmbus_core.c
3696
if (!entries)
drivers/hwmon/pmbus/pmbus_core.c
3709
entries[idx].client = client;
drivers/hwmon/pmbus/pmbus_core.c
3710
entries[idx].page = 0;
drivers/hwmon/pmbus/pmbus_core.c
3711
entries[idx].reg = PMBUS_CAPABILITY;
drivers/hwmon/pmbus/pmbus_core.c
3713
&entries[idx++],
drivers/hwmon/pmbus/pmbus_core.c
3717
entries[idx].client = client;
drivers/hwmon/pmbus/pmbus_core.c
3718
entries[idx].page = 0;
drivers/hwmon/pmbus/pmbus_core.c
3719
entries[idx].reg = PMBUS_REVISION;
drivers/hwmon/pmbus/pmbus_core.c
3721
&entries[idx++],
drivers/hwmon/pmbus/pmbus_core.c
3729
entries[idx].client = client;
drivers/hwmon/pmbus/pmbus_core.c
3730
entries[idx].page = 0;
drivers/hwmon/pmbus/pmbus_core.c
3731
entries[idx].reg = d->reg;
drivers/hwmon/pmbus/pmbus_core.c
3733
&entries[idx++],
drivers/hwmon/pmbus/pmbus_core.c
3743
entries[idx].client = client;
drivers/hwmon/pmbus/pmbus_core.c
3744
entries[idx].page = page;
drivers/hwmon/pmbus/pmbus_core.c
3747
&entries[idx++],
drivers/hwmon/pmbus/pmbus_core.c
3757
entries[idx].client = client;
drivers/hwmon/pmbus/pmbus_core.c
3758
entries[idx].page = page;
drivers/hwmon/pmbus/pmbus_core.c
3759
entries[idx].reg = d->reg;
drivers/hwmon/pmbus/pmbus_core.c
3762
&entries[idx++],
drivers/hwmon/pmbus/ucd9000.c
451
struct ucd9000_debugfs_entry *entries;
drivers/hwmon/pmbus/ucd9000.c
471
entries = devm_kcalloc(&client->dev,
drivers/hwmon/pmbus/ucd9000.c
472
gpi_count, sizeof(*entries),
drivers/hwmon/pmbus/ucd9000.c
474
if (!entries)
drivers/hwmon/pmbus/ucd9000.c
478
entries[i].client = client;
drivers/hwmon/pmbus/ucd9000.c
479
entries[i].index = i;
drivers/hwmon/pmbus/ucd9000.c
483
&entries[i],
drivers/i3c/master/mipi-i3c-hci/ext_caps.c
70
u32 entries = FIELD_GET(CAP_HEADER_LENGTH, header) - 1;
drivers/i3c/master/mipi-i3c-hci/ext_caps.c
73
dev_dbg(&hci->master.dev, "transfer mode table has %d entries\n", entries);
drivers/i3c/master/mipi-i3c-hci/ext_caps.c
75
for (index = 0; index < entries; index++) {
drivers/i3c/master/mipi-i3c-hci/ext_caps.c
90
u32 entries = FIELD_GET(CAP_HEADER_LENGTH, header) - 1;
drivers/i3c/master/mipi-i3c-hci/ext_caps.c
97
for (index = 0; index < entries; index++) {
drivers/infiniband/core/cache.c
1221
struct ib_uverbs_gid_entry *entries,
drivers/infiniband/core/cache.c
1244
memcpy(&entries->gid, &gid_attr->gid,
drivers/infiniband/core/cache.c
1246
entries->gid_index = gid_attr->index;
drivers/infiniband/core/cache.c
1247
entries->port_num = gid_attr->port_num;
drivers/infiniband/core/cache.c
1248
entries->gid_type = gid_attr->gid_type;
drivers/infiniband/core/cache.c
1253
entries->netdev_ifindex = ndev->ifindex;
drivers/infiniband/core/cache.c
1256
entries++;
drivers/infiniband/core/uverbs_std_types_device.c
306
struct ib_uverbs_gid_entry *entries,
drivers/infiniband/core/uverbs_std_types_device.c
315
if (user_entry_size == sizeof(*entries)) {
drivers/infiniband/core/uverbs_std_types_device.c
318
entries, sizeof(*entries) * num_entries);
drivers/infiniband/core/uverbs_std_types_device.c
322
copy_len = min_t(size_t, user_entry_size, sizeof(*entries));
drivers/infiniband/core/uverbs_std_types_device.c
329
if (copy_to_user(user_entries, entries, copy_len))
drivers/infiniband/core/uverbs_std_types_device.c
332
if (user_entry_size > sizeof(*entries)) {
drivers/infiniband/core/uverbs_std_types_device.c
333
if (clear_user(user_entries + sizeof(*entries),
drivers/infiniband/core/uverbs_std_types_device.c
334
user_entry_size - sizeof(*entries)))
drivers/infiniband/core/uverbs_std_types_device.c
338
entries++;
drivers/infiniband/core/uverbs_std_types_device.c
349
struct ib_uverbs_gid_entry *entries;
drivers/infiniband/core/uverbs_std_types_device.c
382
entries = uverbs_kcalloc(attrs, max_entries, sizeof(*entries));
drivers/infiniband/core/uverbs_std_types_device.c
383
if (IS_ERR(entries))
drivers/infiniband/core/uverbs_std_types_device.c
384
return PTR_ERR(entries);
drivers/infiniband/core/uverbs_std_types_device.c
386
num_entries = rdma_query_gid_table(ib_dev, entries, max_entries);
drivers/infiniband/core/uverbs_std_types_device.c
390
ret = copy_gid_entries_to_user(attrs, entries, num_entries,
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1308
int entries;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1331
entries = bnxt_re_init_depth(init_attr->cap.max_recv_wr + 1, uctx);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1332
rq->max_wqe = min_t(u32, entries, dev_attr->max_qp_wqes + 1);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1370
int entries;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1379
entries = init_attr->cap.max_send_wr;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1397
entries = bnxt_re_init_depth(entries + diff + 1, uctx);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1398
sq->max_wqe = min_t(u32, entries, dev_attr->max_qp_wqes + diff + 1);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1425
int entries;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1432
entries = bnxt_re_init_depth(init_attr->cap.max_send_wr + 1, uctx);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1433
qplqp->sq.max_wqe = min_t(u32, entries,
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1898
int rc, entries;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1924
entries = bnxt_re_init_depth(srq_init_attr->attr.max_wr + 1, uctx);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1925
if (entries > dev_attr->max_srq_wqes + 1)
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1926
entries = dev_attr->max_srq_wqes + 1;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
1927
srq->qplib_srq.max_wqe = entries;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
2104
int rc, entries;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
2314
entries = bnxt_re_init_depth(qp_attr->cap.max_send_wr, uctx);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
2315
qp->qplib_qp.sq.max_wqe = min_t(u32, entries,
drivers/infiniband/hw/bnxt_re/ib_verbs.c
2327
entries = bnxt_re_init_depth(qp_attr->cap.max_recv_wr, uctx);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
2329
min_t(u32, entries, dev_attr->max_qp_wqes + 1);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3156
int rc, entries;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3172
entries = bnxt_re_init_depth(cqe + 1, uctx);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3173
if (entries > dev_attr->max_cq_wqes + 1)
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3174
entries = dev_attr->max_cq_wqes + 1;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3186
entries * sizeof(struct cq_base),
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3195
cq->max_cql = min_t(u32, entries, MAX_CQL_PER_POLL);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3204
cq->qplib_cq.max_wqe = entries;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3215
cq->ib_cq.cqe = entries;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3283
int rc, entries;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3307
entries = bnxt_re_init_depth(cqe + 1, uctx);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3308
if (entries > dev_attr->max_cq_wqes + 1)
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3309
entries = dev_attr->max_cq_wqes + 1;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3318
entries * sizeof(struct cq_base),
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3327
cq->resize_cqe = entries;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
3336
rc = bnxt_qplib_resize_cq(&rdev->qplib_res, &cq->qplib_cq, entries);
drivers/infiniband/hw/cxgb4/cq.c
1002
int entries = attr->cqe;
drivers/infiniband/hw/cxgb4/cq.c
1014
pr_debug("ib_dev %p entries %d\n", ibdev, entries);
drivers/infiniband/hw/cxgb4/cq.c
1018
if (entries < 1 || entries > ibdev->attrs.max_cqe)
drivers/infiniband/hw/cxgb4/cq.c
1044
entries++;
drivers/infiniband/hw/cxgb4/cq.c
1047
entries++;
drivers/infiniband/hw/cxgb4/cq.c
1052
entries = roundup(entries, 16);
drivers/infiniband/hw/cxgb4/cq.c
1057
hwentries = min(entries * 2, rhp->rdev.hw_queue.t4_max_iq_size);
drivers/infiniband/hw/cxgb4/cq.c
1087
chp->ibcq.cqe = entries - 2;
drivers/infiniband/hw/efa/efa_com.c
132
u16 size = aq->depth * sizeof(*sq->entries);
drivers/infiniband/hw/efa/efa_com.c
137
sq->entries =
drivers/infiniband/hw/efa/efa_com.c
139
if (!sq->entries)
drivers/infiniband/hw/efa/efa_com.c
169
u16 size = aq->depth * sizeof(*cq->entries);
drivers/infiniband/hw/efa/efa_com.c
174
cq->entries =
drivers/infiniband/hw/efa/efa_com.c
176
if (!cq->entries)
drivers/infiniband/hw/efa/efa_com.c
214
size = EFA_ASYNC_QUEUE_DEPTH * sizeof(*aenq->entries);
drivers/infiniband/hw/efa/efa_com.c
215
aenq->entries = dma_alloc_coherent(edev->dmadev, size, &aenq->dma_addr,
drivers/infiniband/hw/efa/efa_com.c
217
if (!aenq->entries)
drivers/infiniband/hw/efa/efa_com.c
347
aqe = &aq->sq.entries[pi];
drivers/infiniband/hw/efa/efa_com.c
453
cqe = &aq->cq.entries[ci];
drivers/infiniband/hw/efa/efa_com.c
474
cqe = &aq->cq.entries[ci];
drivers/infiniband/hw/efa/efa_com.c
683
size = aq->depth * sizeof(*sq->entries);
drivers/infiniband/hw/efa/efa_com.c
684
dma_free_coherent(edev->dmadev, size, sq->entries, sq->dma_addr);
drivers/infiniband/hw/efa/efa_com.c
686
size = aq->depth * sizeof(*cq->entries);
drivers/infiniband/hw/efa/efa_com.c
687
dma_free_coherent(edev->dmadev, size, cq->entries, cq->dma_addr);
drivers/infiniband/hw/efa/efa_com.c
689
size = aenq->depth * sizeof(*aenq->entries);
drivers/infiniband/hw/efa/efa_com.c
690
dma_free_coherent(edev->dmadev, size, aenq->entries, aenq->dma_addr);
drivers/infiniband/hw/efa/efa_com.c
792
dma_free_coherent(edev->dmadev, aq->depth * sizeof(*aq->cq.entries),
drivers/infiniband/hw/efa/efa_com.c
793
aq->cq.entries, aq->cq.dma_addr);
drivers/infiniband/hw/efa/efa_com.c
795
dma_free_coherent(edev->dmadev, aq->depth * sizeof(*aq->sq.entries),
drivers/infiniband/hw/efa/efa_com.c
796
aq->sq.entries, aq->sq.dma_addr);
drivers/infiniband/hw/efa/efa_com.c
855
aenq_e = &aenq->entries[ci]; /* Get first entry */
drivers/infiniband/hw/efa/efa_com.c
880
aenq_e = &aenq->entries[ci];
drivers/infiniband/hw/efa/efa_com.h
25
struct efa_admin_acq_entry *entries;
drivers/infiniband/hw/efa/efa_com.h
34
struct efa_admin_aq_entry *entries;
drivers/infiniband/hw/efa/efa_com.h
88
struct efa_admin_aenq_entry *entries;
drivers/infiniband/hw/efa/efa_verbs.c
1147
int entries = attr->cqe;
drivers/infiniband/hw/efa/efa_verbs.c
1151
ibdev_dbg(ibdev, "create_cq entries %d\n", entries);
drivers/infiniband/hw/efa/efa_verbs.c
1156
if (entries < 1 || entries > dev->dev_attr.max_cq_depth) {
drivers/infiniband/hw/efa/efa_verbs.c
1159
entries, dev->dev_attr.max_cq_depth);
drivers/infiniband/hw/efa/efa_verbs.c
1213
cq->size = PAGE_ALIGN(cmd.cq_entry_size * entries * cmd.num_sub_cqs);
drivers/infiniband/hw/efa/efa_verbs.c
1241
params.sub_cq_depth = entries;
drivers/infiniband/hw/efa/efa_verbs.c
1260
WARN_ON_ONCE(entries != result.actual_depth);
drivers/infiniband/hw/hfi1/init.c
519
cce = cc_state->cct.entries[max_ccti].entry;
drivers/infiniband/hw/hfi1/init.c
564
ccti_min = cc_state->cong_setting.entries[sl].ccti_min;
drivers/infiniband/hw/hfi1/init.c
565
ccti_timer = cc_state->cong_setting.entries[sl].ccti_timer;
drivers/infiniband/hw/hfi1/mad.c
3695
struct opa_congestion_setting_entry_shadow *entries;
drivers/infiniband/hw/hfi1/mad.c
3712
entries = cc_state->cong_setting.entries;
drivers/infiniband/hw/hfi1/mad.c
3716
p->entries[i].ccti_increase = entries[i].ccti_increase;
drivers/infiniband/hw/hfi1/mad.c
3717
p->entries[i].ccti_timer = cpu_to_be16(entries[i].ccti_timer);
drivers/infiniband/hw/hfi1/mad.c
3718
p->entries[i].trigger_threshold =
drivers/infiniband/hw/hfi1/mad.c
3719
entries[i].trigger_threshold;
drivers/infiniband/hw/hfi1/mad.c
3720
p->entries[i].ccti_min = entries[i].ccti_min;
drivers/infiniband/hw/hfi1/mad.c
3764
memcpy(new_cc_state->cct.entries, ppd->ccti_entries,
drivers/infiniband/hw/hfi1/mad.c
3769
memcpy(new_cc_state->cong_setting.entries, ppd->congestion_entries,
drivers/infiniband/hw/hfi1/mad.c
3787
struct opa_congestion_setting_entry_shadow *entries;
drivers/infiniband/hw/hfi1/mad.c
3802
entries = ppd->congestion_entries;
drivers/infiniband/hw/hfi1/mad.c
3804
entries[i].ccti_increase = p->entries[i].ccti_increase;
drivers/infiniband/hw/hfi1/mad.c
3805
entries[i].ccti_timer = be16_to_cpu(p->entries[i].ccti_timer);
drivers/infiniband/hw/hfi1/mad.c
3806
entries[i].trigger_threshold =
drivers/infiniband/hw/hfi1/mad.c
3807
p->entries[i].trigger_threshold;
drivers/infiniband/hw/hfi1/mad.c
3808
entries[i].ccti_min = p->entries[i].ccti_min;
drivers/infiniband/hw/hfi1/mad.c
3895
struct ib_cc_table_entry_shadow *entries;
drivers/infiniband/hw/hfi1/mad.c
3922
entries = cc_state->cct.entries;
drivers/infiniband/hw/hfi1/mad.c
3927
cpu_to_be16(entries[i].entry);
drivers/infiniband/hw/hfi1/mad.c
3946
struct ib_cc_table_entry_shadow *entries;
drivers/infiniband/hw/hfi1/mad.c
3976
entries = ppd->ccti_entries;
drivers/infiniband/hw/hfi1/mad.c
3978
entries[i].entry = be16_to_cpu(p->ccti_entries[j].entry);
drivers/infiniband/hw/hfi1/mad.h
256
struct opa_congestion_setting_entry entries[OPA_MAX_SLS];
drivers/infiniband/hw/hfi1/mad.h
262
struct opa_congestion_setting_entry_shadow entries[OPA_MAX_SLS];
drivers/infiniband/hw/hfi1/mad.h
295
struct ib_cc_table_entry_shadow entries[CC_TABLE_SHADOW_MAX];
drivers/infiniband/hw/hfi1/msix.c
20
struct hfi1_msix_entry *entries;
drivers/infiniband/hw/hfi1/msix.c
41
entries = kzalloc_objs(*dd->msix_info.msix_entries, total);
drivers/infiniband/hw/hfi1/msix.c
42
if (!entries) {
drivers/infiniband/hw/hfi1/msix.c
47
dd->msix_info.msix_entries = entries;
drivers/infiniband/hw/hfi1/rc.c
2723
ccti_incr = cc_state->cong_setting.entries[sl].ccti_increase;
drivers/infiniband/hw/hfi1/rc.c
2724
ccti_timer = cc_state->cong_setting.entries[sl].ccti_timer;
drivers/infiniband/hw/hfi1/rc.c
2726
cc_state->cong_setting.entries[sl].trigger_threshold;
drivers/infiniband/hw/hns/hns_roce_device.h
708
u32 entries;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6494
(eq->cons_index & (eq->entries - 1)) *
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6498
!!(eq->cons_index & eq->entries)) ? aeqe : NULL;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6563
(eq->cons_index & (eq->entries - 1)) *
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6567
!!(eq->cons_index & eq->entries)) ? ceqe : NULL;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6832
eq->shift = ilog2((unsigned int)eq->entries);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6909
buf_attr.region[0].size = eq->entries * eq->eqe_size;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
7123
eq->entries = hr_dev->caps.ceqe_depth;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
7132
eq->entries = hr_dev->caps.aeqe_depth;
drivers/infiniband/hw/ionic/ionic_queue.h
208
u16 entries)
drivers/infiniband/hw/ionic/ionic_queue.h
210
q->cons = (q->cons + entries) & q->mask;
drivers/infiniband/hw/irdma/verbs.c
2016
static int irdma_resize_cq(struct ib_cq *ibcq, int entries,
drivers/infiniband/hw/irdma/verbs.c
2046
if (entries > rf->max_cqe)
drivers/infiniband/hw/irdma/verbs.c
2050
entries += 2;
drivers/infiniband/hw/irdma/verbs.c
2054
entries *= 2;
drivers/infiniband/hw/irdma/verbs.c
2056
if (entries & 1)
drivers/infiniband/hw/irdma/verbs.c
2057
entries += 1; /* cq size must be an even number */
drivers/infiniband/hw/irdma/verbs.c
2060
if (entries * cqe_size == IRDMA_HW_PAGE_SIZE)
drivers/infiniband/hw/irdma/verbs.c
2061
entries += 2;
drivers/infiniband/hw/irdma/verbs.c
2064
info.cq_size = max(entries, 4);
drivers/infiniband/hw/irdma/verbs.c
2491
int entries = attr->cqe;
drivers/infiniband/hw/irdma/verbs.c
2516
ukinfo->cq_size = max(entries, 4);
drivers/infiniband/hw/irdma/verbs.c
2588
if (entries < 1 || entries > rf->max_cqe) {
drivers/infiniband/hw/irdma/verbs.c
2593
entries += 2;
drivers/infiniband/hw/irdma/verbs.c
2595
entries *= 2;
drivers/infiniband/hw/irdma/verbs.c
2597
if (entries & 1)
drivers/infiniband/hw/irdma/verbs.c
2598
entries += 1; /* cq size must be an even number */
drivers/infiniband/hw/irdma/verbs.c
2600
if (entries * cqe_size == IRDMA_HW_PAGE_SIZE)
drivers/infiniband/hw/irdma/verbs.c
2601
entries += 2;
drivers/infiniband/hw/irdma/verbs.c
2603
ukinfo->cq_size = entries;
drivers/infiniband/hw/mana/qp.c
213
resp.entries[i].cqid = cq->queue.id;
drivers/infiniband/hw/mana/qp.c
214
resp.entries[i].wqid = wq->queue.id;
drivers/infiniband/hw/mlx4/cq.c
183
int entries = attr->cqe;
drivers/infiniband/hw/mlx4/cq.c
193
if (entries < 1 || entries > dev->dev->caps.max_cqes)
drivers/infiniband/hw/mlx4/cq.c
199
entries = roundup_pow_of_two(entries + 1);
drivers/infiniband/hw/mlx4/cq.c
200
cq->ibcq.cqe = entries - 1;
drivers/infiniband/hw/mlx4/cq.c
219
ucmd.buf_addr, entries);
drivers/infiniband/hw/mlx4/cq.c
239
err = mlx4_ib_alloc_cq_buf(dev, &cq->buf, entries);
drivers/infiniband/hw/mlx4/cq.c
252
err = mlx4_cq_alloc(dev->dev, entries, &cq->buf.mtt, uar, cq->db.dma,
drivers/infiniband/hw/mlx4/cq.c
296
int entries)
drivers/infiniband/hw/mlx4/cq.c
307
err = mlx4_ib_alloc_cq_buf(dev, &cq->resize_buf->buf, entries);
drivers/infiniband/hw/mlx4/cq.c
314
cq->resize_buf->cqe = entries - 1;
drivers/infiniband/hw/mlx4/cq.c
320
int entries, struct ib_udata *udata)
drivers/infiniband/hw/mlx4/cq.c
336
ucmd.buf_addr, entries);
drivers/infiniband/hw/mlx4/cq.c
343
cq->resize_buf->cqe = entries - 1;
drivers/infiniband/hw/mlx4/cq.c
384
int mlx4_ib_resize_cq(struct ib_cq *ibcq, int entries, struct ib_udata *udata)
drivers/infiniband/hw/mlx4/cq.c
393
if (entries < 1 || entries > dev->dev->caps.max_cqes) {
drivers/infiniband/hw/mlx4/cq.c
398
entries = roundup_pow_of_two(entries + 1);
drivers/infiniband/hw/mlx4/cq.c
399
if (entries == ibcq->cqe + 1) {
drivers/infiniband/hw/mlx4/cq.c
404
if (entries > dev->dev->caps.max_cqes + 1) {
drivers/infiniband/hw/mlx4/cq.c
410
err = mlx4_alloc_resize_umem(dev, cq, entries, udata);
drivers/infiniband/hw/mlx4/cq.c
416
if (entries < outst_cqe + 1) {
drivers/infiniband/hw/mlx4/cq.c
421
err = mlx4_alloc_resize_buf(dev, cq, entries);
drivers/infiniband/hw/mlx4/cq.c
428
err = mlx4_cq_resize(dev->dev, &cq->mcq, entries, &cq->resize_buf->buf.mtt);
drivers/infiniband/hw/mlx4/mlx4_ib.h
772
int mlx4_ib_resize_cq(struct ib_cq *ibcq, int entries, struct ib_udata *udata);
drivers/infiniband/hw/mlx5/cq.c
1016
MLX5_SET(cqc, cqc, log_cq_size, ilog2(entries));
drivers/infiniband/hw/mlx5/cq.c
1176
int entries, struct ib_udata *udata,
drivers/infiniband/hw/mlx5/cq.c
1191
if (ucmd.cqe_size && SIZE_MAX / ucmd.cqe_size <= entries - 1)
drivers/infiniband/hw/mlx5/cq.c
1195
(size_t)ucmd.cqe_size * entries,
drivers/infiniband/hw/mlx5/cq.c
1209
int entries, int cqe_size)
drivers/infiniband/hw/mlx5/cq.c
1217
err = alloc_cq_frag_buf(dev, cq->resize_buf, entries, cqe_size);
drivers/infiniband/hw/mlx5/cq.c
1285
int mlx5_ib_resize_cq(struct ib_cq *ibcq, int entries, struct ib_udata *udata)
drivers/infiniband/hw/mlx5/cq.c
1305
if (entries < 1 ||
drivers/infiniband/hw/mlx5/cq.c
1306
entries > (1 << MLX5_CAP_GEN(dev->mdev, log_max_cq_sz))) {
drivers/infiniband/hw/mlx5/cq.c
1308
entries,
drivers/infiniband/hw/mlx5/cq.c
1313
entries = roundup_pow_of_two(entries + 1);
drivers/infiniband/hw/mlx5/cq.c
1314
if (entries > (1 << MLX5_CAP_GEN(dev->mdev, log_max_cq_sz)) + 1)
drivers/infiniband/hw/mlx5/cq.c
1317
if (entries == ibcq->cqe + 1)
drivers/infiniband/hw/mlx5/cq.c
1324
err = resize_user(dev, cq, entries, udata, &cqe_size);
drivers/infiniband/hw/mlx5/cq.c
1342
err = resize_kernel(dev, cq, entries, cqe_size);
drivers/infiniband/hw/mlx5/cq.c
1381
MLX5_SET(cqc, cqc, log_cq_size, ilog2(entries));
drivers/infiniband/hw/mlx5/cq.c
1391
cq->ibcq.cqe = entries - 1;
drivers/infiniband/hw/mlx5/cq.c
1410
cq->ibcq.cqe = entries - 1;
drivers/infiniband/hw/mlx5/cq.c
719
struct mlx5_ib_cq *cq, int entries, u32 **cqb,
drivers/infiniband/hw/mlx5/cq.c
754
entries * ucmd.cqe_size, IB_ACCESS_LOCAL_WRITE);
drivers/infiniband/hw/mlx5/cq.c
776
ucmd.buf_addr, entries * ucmd.cqe_size,
drivers/infiniband/hw/mlx5/cq.c
888
int entries, int cqe_size,
drivers/infiniband/hw/mlx5/cq.c
903
err = alloc_cq_frag_buf(dev, &cq->buf, entries, cqe_size);
drivers/infiniband/hw/mlx5/cq.c
957
int entries = attr->cqe;
drivers/infiniband/hw/mlx5/cq.c
970
if (entries < 0 ||
drivers/infiniband/hw/mlx5/cq.c
971
(entries > (1 << MLX5_CAP_GEN(dev->mdev, log_max_cq_sz))))
drivers/infiniband/hw/mlx5/cq.c
977
entries = roundup_pow_of_two(entries + 1);
drivers/infiniband/hw/mlx5/cq.c
978
if (entries > (1 << MLX5_CAP_GEN(dev->mdev, log_max_cq_sz)))
drivers/infiniband/hw/mlx5/cq.c
981
cq->ibcq.cqe = entries - 1;
drivers/infiniband/hw/mlx5/cq.c
991
err = create_cq_user(dev, udata, cq, entries, &cqb, &cqe_size,
drivers/infiniband/hw/mlx5/cq.c
997
err = create_cq_kernel(dev, cq, entries, cqe_size, &cqb,
drivers/infiniband/hw/mlx5/mlx5_ib.h
1380
int mlx5_ib_resize_cq(struct ib_cq *ibcq, int entries, struct ib_udata *udata);
drivers/infiniband/hw/mthca/mthca_provider.c
582
int entries = attr->cqe;
drivers/infiniband/hw/mthca/mthca_provider.c
593
if (entries < 1 || entries > to_mdev(ibdev)->limits.max_cqes)
drivers/infiniband/hw/mthca/mthca_provider.c
621
for (nent = 1; nent <= entries; nent <<= 1)
drivers/infiniband/hw/mthca/mthca_provider.c
654
int entries)
drivers/infiniband/hw/mthca/mthca_provider.c
680
ret = mthca_alloc_cq_buf(dev, &cq->resize_buf->buf, entries);
drivers/infiniband/hw/mthca/mthca_provider.c
689
cq->resize_buf->cqe = entries - 1;
drivers/infiniband/hw/mthca/mthca_provider.c
698
static int mthca_resize_cq(struct ib_cq *ibcq, int entries, struct ib_udata *udata)
drivers/infiniband/hw/mthca/mthca_provider.c
706
if (entries < 1 || entries > dev->limits.max_cqes)
drivers/infiniband/hw/mthca/mthca_provider.c
711
entries = roundup_pow_of_two(entries + 1);
drivers/infiniband/hw/mthca/mthca_provider.c
712
if (entries == ibcq->cqe + 1) {
drivers/infiniband/hw/mthca/mthca_provider.c
718
ret = mthca_alloc_resize_buf(dev, cq, entries);
drivers/infiniband/hw/mthca/mthca_provider.c
730
ret = mthca_RESIZE_CQ(dev, cq->cqn, lkey, ilog2(entries));
drivers/infiniband/hw/mthca/mthca_provider.c
766
ibcq->cqe = entries - 1;
drivers/infiniband/hw/ocrdma/ocrdma_hw.c
1779
int entries, int dpp_cq, u16 pd_id)
drivers/infiniband/hw/ocrdma/ocrdma_hw.c
1787
if (entries > dev->attr.max_cqe) {
drivers/infiniband/hw/ocrdma/ocrdma_hw.c
1789
__func__, dev->id, dev->attr.max_cqe, entries);
drivers/infiniband/hw/ocrdma/ocrdma_hw.h
124
int entries, int dpp_cq, u16 pd_id);
drivers/infiniband/hw/ocrdma/ocrdma_verbs.c
973
int entries = attr->cqe;
drivers/infiniband/hw/ocrdma/ocrdma_verbs.c
999
status = ocrdma_mbx_create_cq(dev, cq, entries, ureq.dpp_cq, pd_id);
drivers/infiniband/hw/qedr/verbs.c
732
static inline int qedr_align_cq_entries(int entries)
drivers/infiniband/hw/qedr/verbs.c
737
size = (entries + 1) * QEDR_CQE_SIZE;
drivers/infiniband/hw/qedr/verbs.c
921
int entries = attr->cqe;
drivers/infiniband/hw/qedr/verbs.c
932
udata ? "User Lib" : "Kernel", entries, vector);
drivers/infiniband/hw/qedr/verbs.c
937
if (entries > QEDR_MAX_CQES) {
drivers/infiniband/hw/qedr/verbs.c
940
entries, QEDR_MAX_CQES);
drivers/infiniband/hw/qedr/verbs.c
944
chain_entries = qedr_align_cq_entries(entries);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c
106
int entries = attr->cqe;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c
126
entries = roundup_pow_of_two(entries);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c
127
if (entries < 1 || entries > dev->dsr->caps.max_cqe)
drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c
133
cq->ibcq.cqe = entries;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c
152
npages = 1 + (entries * sizeof(struct pvrdma_cqe) +
drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c
187
cmd->cqe = entries;
drivers/infiniband/sw/rdmavt/cq.c
169
unsigned int entries = attr->cqe;
drivers/infiniband/sw/rdmavt/cq.c
176
if (entries < 1 || entries > rdi->dparms.props.max_cqe)
drivers/infiniband/sw/rdmavt/cq.c
192
sz = sizeof(struct ib_uverbs_wc) * (entries + 1);
drivers/infiniband/sw/rdmavt/cq.c
198
sz = sizeof(struct ib_wc) * (entries + 1);
drivers/infiniband/sw/rdmavt/cq.c
251
cq->ibcq.cqe = entries;
drivers/infiniband/sw/rdmavt/qp.c
111
kfree(wss->entries);
drivers/infiniband/sw/rdmavt/qp.c
112
wss->entries = NULL;
drivers/infiniband/sw/rdmavt/qp.c
176
wss->entries = kcalloc_node(wss->num_entries, sizeof(*wss->entries),
drivers/infiniband/sw/rdmavt/qp.c
178
if (!wss->entries) {
drivers/infiniband/sw/rdmavt/qp.c
234
bits = xchg(&wss->entries[entry], 0);
drivers/infiniband/sw/rdmavt/qp.c
251
if (!test_and_set_bit(nr, &wss->entries[entry]))
drivers/infiniband/ulp/ipoib/ipoib.h
306
atomic_t entries;
drivers/infiniband/ulp/ipoib/ipoib_main.c
1510
atomic_inc(&ntbl->entries);
drivers/infiniband/ulp/ipoib/ipoib_main.c
1536
if (atomic_dec_and_test(&priv->ntbl.entries)) {
drivers/infiniband/ulp/ipoib/ipoib_main.c
1610
atomic_set(&ntbl->entries, 0);
drivers/infiniband/ulp/ipoib/ipoib_main.c
1690
wait_flushed = atomic_read(&priv->ntbl.entries);
drivers/iommu/dma-iommu.c
118
struct iova_fq_entry entries[];
drivers/iommu/dma-iommu.c
150
if (fq->entries[idx].counter >= counter)
drivers/iommu/dma-iommu.c
153
iommu_put_pages_list(&fq->entries[idx].freelist);
drivers/iommu/dma-iommu.c
155
fq->entries[idx].iova_pfn,
drivers/iommu/dma-iommu.c
156
fq->entries[idx].pages);
drivers/iommu/dma-iommu.c
158
fq->entries[idx].freelist =
drivers/iommu/dma-iommu.c
159
IOMMU_PAGES_LIST_INIT(fq->entries[idx].freelist);
drivers/iommu/dma-iommu.c
235
fq->entries[idx].iova_pfn = pfn;
drivers/iommu/dma-iommu.c
236
fq->entries[idx].pages = pages;
drivers/iommu/dma-iommu.c
237
fq->entries[idx].counter = atomic64_read(&cookie->fq_flush_start_cnt);
drivers/iommu/dma-iommu.c
238
iommu_pages_list_splice(freelist, &fq->entries[idx].freelist);
drivers/iommu/dma-iommu.c
254
iommu_put_pages_list(&fq->entries[idx].freelist);
drivers/iommu/dma-iommu.c
267
iommu_put_pages_list(&fq->entries[idx].freelist);
drivers/iommu/dma-iommu.c
296
fq->entries[i].freelist =
drivers/iommu/dma-iommu.c
297
IOMMU_PAGES_LIST_INIT(fq->entries[i].freelist);
drivers/iommu/dma-iommu.c
305
queue = vmalloc(struct_size(queue, entries, fq_size));
drivers/iommu/dma-iommu.c
320
queue = __alloc_percpu(struct_size(queue, entries, fq_size),
drivers/iommu/intel/iommu.c
582
struct pasid_entry *entries, *pte;
drivers/iommu/intel/iommu.c
643
entries = get_pasid_table_from_pde(pde);
drivers/iommu/intel/iommu.c
644
if (!entries) {
drivers/iommu/intel/iommu.c
649
pte = &entries[index];
drivers/iommu/intel/pasid.c
134
struct pasid_entry *entries;
drivers/iommu/intel/pasid.c
147
entries = get_pasid_table_from_pde(&dir[dir_index]);
drivers/iommu/intel/pasid.c
148
if (!entries) {
drivers/iommu/intel/pasid.c
151
entries = iommu_alloc_pages_node_sz(info->iommu->node,
drivers/iommu/intel/pasid.c
153
if (!entries)
drivers/iommu/intel/pasid.c
157
clflush_cache_range(entries, VTD_PAGE_SIZE);
drivers/iommu/intel/pasid.c
167
(u64)virt_to_phys(entries) | PASID_PTE_PRESENT)) {
drivers/iommu/intel/pasid.c
168
iommu_free_pages(entries);
drivers/iommu/intel/pasid.c
175
return &entries[index];
drivers/irqchip/irq-gic-v3-its.c
5164
int entries;
drivers/irqchip/irq-gic-v3-its.c
5174
entries = roundup_pow_of_two(nr_cpu_ids);
drivers/irqchip/irq-gic-v3-its.c
5175
vpe_proxy.vpes = kzalloc_objs(*vpe_proxy.vpes, entries);
drivers/irqchip/irq-gic-v3-its.c
5181
vpe_proxy.dev = its_create_device(its, devid, entries, false);
drivers/irqchip/irq-gic-v3-its.c
5188
BUG_ON(entries > vpe_proxy.dev->nr_ites);
drivers/md/bcache/journal.c
340
int ret = 0, keys = 0, entries = 0;
drivers/md/bcache/journal.c
378
entries++;
drivers/md/bcache/journal.c
382
keys, entries, end);
drivers/md/dm-bio-prison-v1.c
296
struct dm_deferred_entry entries[DEFERRED_SET_SIZE];
drivers/md/dm-bio-prison-v1.c
312
ds->entries[i].ds = ds;
drivers/md/dm-bio-prison-v1.c
313
ds->entries[i].count = 0;
drivers/md/dm-bio-prison-v1.c
314
INIT_LIST_HEAD(&ds->entries[i].work_items);
drivers/md/dm-bio-prison-v1.c
333
entry = ds->entries + ds->current_entry;
drivers/md/dm-bio-prison-v1.c
349
!ds->entries[ds->sweeper].count) {
drivers/md/dm-bio-prison-v1.c
350
list_splice_init(&ds->entries[ds->sweeper].work_items, head);
drivers/md/dm-bio-prison-v1.c
354
if ((ds->sweeper == ds->current_entry) && !ds->entries[ds->sweeper].count)
drivers/md/dm-bio-prison-v1.c
355
list_splice_init(&ds->entries[ds->sweeper].work_items, head);
drivers/md/dm-bio-prison-v1.c
380
!ds->entries[ds->current_entry].count)
drivers/md/dm-bio-prison-v1.c
383
list_add(work, &ds->entries[ds->current_entry].work_items);
drivers/md/dm-bio-prison-v1.c
385
if (!ds->entries[next_entry].count)
drivers/md/dm-integrity.c
131
__u8 entries[JOURNAL_SECTOR_DATA - JOURNAL_MAC_PER_SECTOR];
drivers/md/dm-vdo/block-map.c
1803
vdo_unpack_block_map_entry(&page->entries[slot.block_map_slot.slot]);
drivers/md/dm-vdo/block-map.c
2282
mapping = vdo_unpack_block_map_entry(&page->entries[tree_slot.block_map_slot.slot]);
drivers/md/dm-vdo/block-map.c
2331
mapping = vdo_unpack_block_map_entry(&page->entries[slot]);
drivers/md/dm-vdo/block-map.c
2426
page->entries[0] =
drivers/md/dm-vdo/block-map.c
2464
static int make_forest(struct block_map *map, block_count_t entries)
drivers/md/dm-vdo/block-map.c
2475
entries, &new_boundary);
drivers/md/dm-vdo/block-map.c
2477
map->next_entry_count = entries;
drivers/md/dm-vdo/block-map.c
2495
map->next_entry_count = entries;
drivers/md/dm-vdo/block-map.c
2597
vdo_unpack_block_map_entry(&page->entries[level->slot]);
drivers/md/dm-vdo/block-map.c
2601
page->entries[level->slot] = UNMAPPED_BLOCK_MAP_ENTRY;
drivers/md/dm-vdo/block-map.c
2611
page->entries[level->slot] = UNMAPPED_BLOCK_MAP_ENTRY;
drivers/md/dm-vdo/block-map.c
2620
page->entries[level->slot] = UNMAPPED_BLOCK_MAP_ENTRY;
drivers/md/dm-vdo/block-map.c
3217
entry = &page->entries[tree_slot->block_map_slot.slot];
drivers/md/dm-vdo/block-map.c
3235
page->entries[tree_lock->tree_slots[tree_lock->height].block_map_slot.slot] =
drivers/md/dm-vdo/encodings.c
445
block_count_t entries,
drivers/md/dm-vdo/encodings.c
448
page_count_t leaf_pages = max(vdo_compute_block_map_page_count(entries), 1U);
drivers/md/dm-vdo/encodings.c
776
vdo_unpack_slab_journal_entry(&block->payload.entries[entry_count]);
drivers/md/dm-vdo/encodings.h
174
struct block_map_entry entries[];
drivers/md/dm-vdo/encodings.h
368
struct packed_recovery_journal_entry entries[];
drivers/md/dm-vdo/encodings.h
521
packed_slab_journal_entry entries[VDO_SLAB_JOURNAL_FULL_ENTRIES_PER_BLOCK];
drivers/md/dm-vdo/encodings.h
530
packed_slab_journal_entry entries[VDO_SLAB_JOURNAL_ENTRIES_PER_BLOCK];
drivers/md/dm-vdo/encodings.h
862
static inline page_count_t vdo_compute_block_map_page_count(block_count_t entries)
drivers/md/dm-vdo/encodings.h
864
return DIV_ROUND_UP(entries, VDO_BLOCK_MAP_ENTRIES_PER_PAGE);
drivers/md/dm-vdo/encodings.h
869
block_count_t entries,
drivers/md/dm-vdo/indexer/index-page-map.c
102
0 : map->entries[slot + index_page_number - 1] + 1);
drivers/md/dm-vdo/indexer/index-page-map.c
104
map->entries[slot + index_page_number] :
drivers/md/dm-vdo/indexer/index-page-map.c
129
encode_u16_le(buffer, &offset, map->entries[i]);
drivers/md/dm-vdo/indexer/index-page-map.c
167
decode_u16_le(buffer, &offset, &map->entries[i]);
drivers/md/dm-vdo/indexer/index-page-map.c
48
&map->entries);
drivers/md/dm-vdo/indexer/index-page-map.c
61
vdo_free(map->entries);
drivers/md/dm-vdo/indexer/index-page-map.c
77
map->entries[slot] = delta_list_number;
drivers/md/dm-vdo/indexer/index-page-map.c
88
if (delta_list_number <= map->entries[slot + page])
drivers/md/dm-vdo/indexer/index-page-map.h
22
u16 *entries;
drivers/md/dm-vdo/indexer/sparse-cache.c
140
struct cached_chapter_index *entries[];
drivers/md/dm-vdo/indexer/sparse-cache.c
252
list->entries[i] = &cache->chapters[i];
drivers/md/dm-vdo/indexer/sparse-cache.c
375
newest = search_list->entries[index];
drivers/md/dm-vdo/indexer/sparse-cache.c
376
memmove(&search_list->entries[1], &search_list->entries[0],
drivers/md/dm-vdo/indexer/sparse-cache.c
378
search_list->entries[0] = newest;
drivers/md/dm-vdo/indexer/sparse-cache.c
405
chapter = search_list->entries[i];
drivers/md/dm-vdo/indexer/sparse-cache.c
427
struct cached_chapter_index **entries;
drivers/md/dm-vdo/indexer/sparse-cache.c
436
entries = &search_list->entries[0];
drivers/md/dm-vdo/indexer/sparse-cache.c
441
chapter = search_list->entries[i];
drivers/md/dm-vdo/indexer/sparse-cache.c
448
entries[next_alive++] = chapter;
drivers/md/dm-vdo/indexer/sparse-cache.c
451
memcpy(&entries[next_alive], skipped,
drivers/md/dm-vdo/indexer/sparse-cache.c
453
memcpy(&entries[next_alive + next_skipped], dead,
drivers/md/dm-vdo/indexer/sparse-cache.c
483
memcpy(target->entries, source->entries,
drivers/md/dm-vdo/indexer/sparse-cache.c
522
result = cache_chapter_index(list->entries[0], virtual_chapter,
drivers/md/dm-vdo/indexer/sparse-cache.c
590
chapter = search_list->entries[i];
drivers/md/dm-vdo/message-stats.c
121
write_commit_statistics("entries : ", &stats->entries, ", ", buf, maxlen);
drivers/md/dm-vdo/recovery-journal.c
1109
journal->events.entries.started++;
drivers/md/dm-vdo/recovery-journal.c
1280
journal->events.entries.committed += block->entries_in_commit;
drivers/md/dm-vdo/recovery-journal.c
1348
packed_entry = &block->sector->entries[block->sector->entry_count++];
drivers/md/dm-vdo/recovery-journal.c
1392
journal->events.entries.written += block->entries_in_commit;
drivers/md/dm-vdo/recovery-journal.c
1755
(unsigned long long) stats.entries.started,
drivers/md/dm-vdo/recovery-journal.c
1756
(unsigned long long) stats.entries.written,
drivers/md/dm-vdo/recovery-journal.c
1757
(unsigned long long) stats.entries.committed);
drivers/md/dm-vdo/repair.c
1000
page->entries[current_entry->block_map_slot.slot] = current_entry->block_map_entry;
drivers/md/dm-vdo/repair.c
1030
if (repair->current_unfetched_entry < repair->entries)
drivers/md/dm-vdo/repair.c
1121
.data = repair->entries,
drivers/md/dm-vdo/repair.c
1130
repair->current_entry = &repair->entries[repair->block_map_entry_count - 1];
drivers/md/dm-vdo/repair.c
1140
if (repair->current_unfetched_entry < repair->entries)
drivers/md/dm-vdo/repair.c
1321
static void append_sector_entries(struct repair_completion *repair, char *entries,
drivers/md/dm-vdo/repair.c
1331
for (i = 0; i < entry_count; i++, entries += increment) {
drivers/md/dm-vdo/repair.c
1334
if (!unpack_entry(vdo, entries, format, &entry))
drivers/md/dm-vdo/repair.c
1338
repair->entries[repair->block_map_entry_count] =
drivers/md/dm-vdo/repair.c
1364
journal_entry_count_t entries)
drivers/md/dm-vdo/repair.c
1377
entries = min(entries, header.entry_count);
drivers/md/dm-vdo/repair.c
1382
min(entries, entries_per_sector(format, i));
drivers/md/dm-vdo/repair.c
1386
append_sector_entries(repair, (char *) sector->entries, format,
drivers/md/dm-vdo/repair.c
1396
entries -= sector_entries;
drivers/md/dm-vdo/repair.c
1421
&repair->entries);
drivers/md/dm-vdo/repair.c
1468
__func__, &repair->entries);
drivers/md/dm-vdo/repair.c
1482
repair->entries[repair->block_map_entry_count] =
drivers/md/dm-vdo/repair.c
184
last = &repair->entries[--heap->nr];
drivers/md/dm-vdo/repair.c
246
vdo_free(vdo_forget(repair->entries));
drivers/md/dm-vdo/repair.c
388
page->entries[slot] = UNMAPPED_BLOCK_MAP_ENTRY;
drivers/md/dm-vdo/repair.c
405
struct data_location mapping = vdo_unpack_block_map_entry(&page->entries[slot]);
drivers/md/dm-vdo/repair.c
425
struct data_location mapping = vdo_unpack_block_map_entry(&page->entries[slot]);
drivers/md/dm-vdo/repair.c
741
return vdo_unpack_recovery_journal_entry(§or->entries[point->entry_count]);
drivers/md/dm-vdo/repair.c
89
struct numbered_block_mapping *entries;
drivers/md/dm-vdo/repair.c
938
if (repair->current_entry >= repair->entries)
drivers/md/dm-vdo/repair.c
968
if (current_entry < repair->entries)
drivers/md/dm-vdo/repair.c
974
while ((current_entry >= repair->entries) &&
drivers/md/dm-vdo/slab-depot.c
335
memcpy(block->outgoing_entries, block->entries, VDO_BLOCK_SIZE);
drivers/md/dm-vdo/slab-depot.c
4059
block->entries = &allocator->summary_entries[VDO_SLAB_SUMMARY_ENTRIES_PER_BLOCK * index];
drivers/md/dm-vdo/slab-depot.c
4573
struct slab_summary_entry *entries = depot->summary_entries;
drivers/md/dm-vdo/slab-depot.c
4580
memcpy(entries + entry_number,
drivers/md/dm-vdo/slab-depot.c
4581
entries + (zone * MAX_VDO_SLABS) + entry_number,
drivers/md/dm-vdo/slab-depot.c
4593
memcpy(entries + (zone * MAX_VDO_SLABS), entries,
drivers/md/dm-vdo/slab-depot.c
865
vdo_pack_slab_journal_entry(&payload->entries[entry_number], sbn, increment);
drivers/md/dm-vdo/slab-depot.h
361
struct slab_summary_entry *entries;
drivers/md/dm-vdo/statistics.h
47
struct commit_statistics entries;
drivers/md/dm-writecache.c
1015
sb_entries_offset = offsetof(struct wc_memory_superblock, entries);
drivers/md/dm-writecache.c
1041
struct wc_entry *e = &wc->entries[b];
drivers/md/dm-writecache.c
1064
struct wc_entry *e = &wc->entries[b];
drivers/md/dm-writecache.c
153
struct wc_entry *entries;
drivers/md/dm-writecache.c
2128
offset = offsetof(struct wc_memory_superblock, entries[n_blocks]);
drivers/md/dm-writecache.c
2168
write_original_sector_seq_count(wc, &wc->entries[b], -1, -1);
drivers/md/dm-writecache.c
2207
vfree(wc->entries);
drivers/md/dm-writecache.c
394
return &sb(wc)->entries[e->index];
drivers/md/dm-writecache.c
78
struct wc_memory_entry entries[];
drivers/md/dm-writecache.c
962
if (wc->entries)
drivers/md/dm-writecache.c
964
wc->entries = vmalloc_array(wc->n_blocks, sizeof(struct wc_entry));
drivers/md/dm-writecache.c
965
if (!wc->entries)
drivers/md/dm-writecache.c
968
struct wc_entry *e = &wc->entries[b];
drivers/md/persistent-data/dm-block-manager.c
109
stack_trace_print(lock->traces[i].entries,
drivers/md/persistent-data/dm-block-manager.c
41
unsigned long entries[MAX_STACK];
drivers/md/persistent-data/dm-block-manager.c
87
t->nr_entries = stack_trace_save(t->entries, MAX_STACK, 2);
drivers/md/raid5-ppl.c
1153
le32_to_cpu(pplhdr->entries[i].pp_size) >> 9;
drivers/md/raid5-ppl.c
317
&pplhdr->entries[io->entries_count - 1];
drivers/md/raid5-ppl.c
337
e = &pplhdr->entries[io->entries_count++];
drivers/md/raid5-ppl.c
445
struct ppl_header_entry *e = &pplhdr->entries[i];
drivers/md/raid5-ppl.c
977
struct ppl_header_entry *e = &pplhdr->entries[i];
drivers/media/dvb-frontends/dvb-pll.c
102
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
125
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
143
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
158
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
180
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
194
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
220
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
253
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
277
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
294
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
311
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
330
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
399
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
444
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
463
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
477
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
498
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
519
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
537
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
552
.entries = {
drivers/media/dvb-frontends/dvb-pll.c
605
if (frequency > desc->entries[i].limit)
drivers/media/dvb-frontends/dvb-pll.c
617
desc->entries[i].stepsize/2) / desc->entries[i].stepsize;
drivers/media/dvb-frontends/dvb-pll.c
620
buf[2] = desc->entries[i].config;
drivers/media/dvb-frontends/dvb-pll.c
621
buf[3] = desc->entries[i].cb;
drivers/media/dvb-frontends/dvb-pll.c
631
return (div * desc->entries[i].stepsize) - desc->iffreq;
drivers/media/dvb-frontends/dvb-pll.c
66
} entries[];
drivers/media/dvb-frontends/dvb-pll.c
79
.entries = {
drivers/media/mc/mc-entity.c
470
struct media_pipeline_walk_entry *entries;
drivers/media/mc/mc-entity.c
479
return &walk->stack.entries[walk->stack.top];
drivers/media/mc/mc-entity.c
490
struct media_pipeline_walk_entry *entries;
drivers/media/mc/mc-entity.c
499
entries = krealloc(walk->stack.entries,
drivers/media/mc/mc-entity.c
500
new_size * sizeof(*walk->stack.entries),
drivers/media/mc/mc-entity.c
502
if (!entries)
drivers/media/mc/mc-entity.c
505
walk->stack.entries = entries;
drivers/media/mc/mc-entity.c
571
kfree(walk->stack.entries);
drivers/media/pci/cx18/cx18-ioctl.c
730
e_idx = &idx->entry[idx->entries];
drivers/media/pci/cx18/cx18-ioctl.c
734
idx->entries < V4L2_ENC_IDX_ENTRIES) {
drivers/media/pci/cx18/cx18-ioctl.c
749
idx->entries++;
drivers/media/pci/cx18/cx18-ioctl.c
750
e_idx = &idx->entry[idx->entries];
drivers/media/pci/cx18/cx18-ioctl.c
794
if (idx->entries >= V4L2_ENC_IDX_ENTRIES ||
drivers/media/pci/cx18/cx18-ioctl.c
821
idx->entries = 0;
drivers/media/pci/cx18/cx18-ioctl.c
842
} while (idx->entries < V4L2_ENC_IDX_ENTRIES);
drivers/media/pci/ivtv/ivtv-ioctl.c
1266
int entries;
drivers/media/pci/ivtv/ivtv-ioctl.c
1269
entries = (itv->pgm_info_write_idx + IVTV_MAX_PGM_INDEX - itv->pgm_info_read_idx) %
drivers/media/pci/ivtv/ivtv-ioctl.c
1271
if (entries > V4L2_ENC_IDX_ENTRIES)
drivers/media/pci/ivtv/ivtv-ioctl.c
1272
entries = V4L2_ENC_IDX_ENTRIES;
drivers/media/pci/ivtv/ivtv-ioctl.c
1273
idx->entries = 0;
drivers/media/pci/ivtv/ivtv-ioctl.c
1277
for (i = 0; i < entries; i++) {
drivers/media/pci/ivtv/ivtv-ioctl.c
1280
idx->entries++;
drivers/media/pci/ivtv/ivtv-ioctl.c
1284
itv->pgm_info_read_idx = (itv->pgm_info_read_idx + idx->entries) % IVTV_MAX_PGM_INDEX;
drivers/media/pci/saa7164/saa7164-core.c
227
u32 entries = 0;
drivers/media/pci/saa7164/saa7164-core.c
240
entries++;
drivers/media/pci/saa7164/saa7164-core.c
242
printk(KERN_ERR "Total: %d\n", entries);
drivers/media/platform/qcom/venus/hfi_parser.c
171
u32 entries = fmt->format_entries;
drivers/media/platform/qcom/venus/hfi_parser.c
176
while (entries) {
drivers/media/platform/qcom/venus/hfi_parser.c
191
entries--;
drivers/media/platform/qcom/venus/hfi_parser.c
270
unsigned int entries;
drivers/media/platform/qcom/venus/hfi_parser.c
285
caps = plat->capabilities(core, &entries);
drivers/media/platform/qcom/venus/hfi_parser.c
287
if (!caps || !entries || !count)
drivers/media/platform/qcom/venus/hfi_parser.c
295
memcpy(core->caps, caps, sizeof(*caps) * entries);
drivers/media/platform/qcom/venus/hfi_platform.h
59
unsigned int *entries);
drivers/media/platform/qcom/venus/hfi_platform_v4.c
366
unsigned int *entries)
drivers/media/platform/qcom/venus/hfi_platform_v4.c
368
*entries = is_lite(core) ? ARRAY_SIZE(caps_lite) : ARRAY_SIZE(caps);
drivers/media/platform/qcom/venus/hfi_platform_v6.c
250
unsigned int *entries)
drivers/media/platform/qcom/venus/hfi_platform_v6.c
255
*entries = ARRAY_SIZE(caps);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
113
struct vsp1_dl_entry *entries;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
301
dlb->entries = pool->mem + i * dlb_size;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
393
dlb->entries[dlb->num_entries].addr = reg;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
394
dlb->entries[dlb->num_entries].data = data;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
574
header_offset = dl->body0->max_entries * sizeof(*dl->body0->entries);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
576
dl->header = ((void *)dl->body0->entries) + header_offset;
drivers/media/platform/rockchip/rkvdec/rkvdec-h264-common.c
141
set_dpb_info(hw_rps->entries, j, i, ref->index | (dpb_valid << 4), bottom);
drivers/media/platform/rockchip/rkvdec/rkvdec-h264-common.c
24
static void set_dpb_info(struct rkvdec_rps_entry *entries,
drivers/media/platform/rockchip/rkvdec/rkvdec-h264-common.c
30
struct rkvdec_rps_entry *entry = &entries[(reflist * 4) + refnum / 8];
drivers/media/platform/rockchip/rkvdec/rkvdec-h264-common.h
71
struct rkvdec_rps_entry entries[12];
drivers/media/v4l2-core/v4l2-ioctl.c
635
p->entries, p->entries_cap);
drivers/memstick/core/mspro_block.c
65
struct mspro_attr_entry entries[];
drivers/memstick/core/mspro_block.c
964
addr = be32_to_cpu(attr->entries[cnt].address);
drivers/memstick/core/mspro_block.c
965
s_attr->size = be32_to_cpu(attr->entries[cnt].size);
drivers/memstick/core/mspro_block.c
967
"size %zx\n", cnt, attr->entries[cnt].id, addr,
drivers/memstick/core/mspro_block.c
969
s_attr->id = attr->entries[cnt].id;
drivers/memstick/core/mspro_block.c
972
mspro_block_attr_name(attr->entries[cnt].id));
drivers/memstick/core/mspro_block.c
975
"attr_x%02x", attr->entries[cnt].id);
drivers/misc/bcm-vk/bcm_vk.h
338
struct bcm_vk_proc_mon_entry_t entries[BCM_VK_PROC_MON_MAX];
drivers/misc/bcm-vk/bcm_vk_dev.c
467
dst = (u8 *)&mon->entries[0];
drivers/misc/genwqe/card_base.c
404
cd->ffdc[type].entries = e;
drivers/misc/genwqe/card_base.c
489
cd->ffdc[GENWQE_DBG_REGS].entries, 0);
drivers/misc/genwqe/card_base.c
493
cd->ffdc[GENWQE_DBG_UNIT0].entries);
drivers/misc/genwqe/card_base.c
497
cd->ffdc[GENWQE_DBG_UNIT1].entries);
drivers/misc/genwqe/card_base.c
501
cd->ffdc[GENWQE_DBG_UNIT2].entries);
drivers/misc/genwqe/card_base.h
244
unsigned int entries;
drivers/misc/genwqe/card_debugfs.c
29
int entries)
drivers/misc/genwqe/card_debugfs.c
34
for (i = 0; i < entries; i++) {
drivers/misc/genwqe/card_debugfs.c
46
int entries;
drivers/misc/genwqe/card_debugfs.c
49
entries = genwqe_ffdc_buff_size(cd, uid);
drivers/misc/genwqe/card_debugfs.c
50
if (entries < 0)
drivers/misc/genwqe/card_debugfs.c
53
if (entries == 0)
drivers/misc/genwqe/card_debugfs.c
56
regs = kzalloc_objs(*regs, entries);
drivers/misc/genwqe/card_debugfs.c
61
genwqe_ffdc_buff_read(cd, uid, regs, entries);
drivers/misc/genwqe/card_debugfs.c
64
dbg_uidn_show(s, regs, entries);
drivers/misc/genwqe/card_debugfs.c
94
dbg_uidn_show(s, cd->ffdc[uid].regs, cd->ffdc[uid].entries);
drivers/misc/genwqe/card_utils.c
832
int entries = 0, ring, traps, traces, trace_entries;
drivers/misc/genwqe/card_utils.c
855
entries += d_len;
drivers/misc/genwqe/card_utils.c
857
entries += d_len >> 3;
drivers/misc/genwqe/card_utils.c
875
entries += traps + (traces * trace_entries);
drivers/misc/genwqe/card_utils.c
877
return entries;
drivers/misc/ntsync.c
1001
struct ntsync_q_entry *entry = &q->entries[i];
drivers/misc/ntsync.c
1048
struct ntsync_q_entry *entry = &q->entries[i];
drivers/misc/ntsync.c
1061
struct ntsync_q_entry *entry = &q->entries[args.count];
drivers/misc/ntsync.c
1081
struct ntsync_obj *obj = q->entries[args.count].obj;
drivers/misc/ntsync.c
1099
struct ntsync_q_entry *entry = &q->entries[i];
drivers/misc/ntsync.c
1116
struct ntsync_q_entry *entry = &q->entries[args.count];
drivers/misc/ntsync.c
118
struct ntsync_q_entry entries[];
drivers/misc/ntsync.c
267
if (q->entries[i].obj != locked_obj)
drivers/misc/ntsync.c
268
dev_lock_obj(dev, q->entries[i].obj);
drivers/misc/ntsync.c
272
if (!is_signaled(q->entries[i].obj, q->owner)) {
drivers/misc/ntsync.c
280
struct ntsync_obj *obj = q->entries[i].obj;
drivers/misc/ntsync.c
303
if (q->entries[i].obj != locked_obj)
drivers/misc/ntsync.c
304
dev_unlock_obj(dev, q->entries[i].obj);
drivers/misc/ntsync.c
887
q = kmalloc_flex(*q, entries, total_count);
drivers/misc/ntsync.c
898
struct ntsync_q_entry *entry = &q->entries[i];
drivers/misc/ntsync.c
907
if (obj == q->entries[j].obj) {
drivers/misc/ntsync.c
924
put_obj(q->entries[j].obj);
drivers/misc/ntsync.c
967
struct ntsync_q_entry *entry = &q->entries[i];
drivers/misc/ntsync.c
984
struct ntsync_obj *obj = q->entries[i].obj;
drivers/misc/vmw_vmci/vmci_doorbell.c
123
hlist_for_each_entry(dbell, &vmci_doorbell_it.entries[bucket],
drivers/misc/vmw_vmci/vmci_doorbell.c
188
hlist_add_head(&entry->node, &vmci_doorbell_it.entries[bucket]);
drivers/misc/vmw_vmci/vmci_doorbell.c
343
hlist_for_each_entry(dbell, &vmci_doorbell_it.entries[bucket], node) {
drivers/misc/vmw_vmci/vmci_doorbell.c
48
struct hlist_head entries[VMCI_DOORBELL_INDEX_TABLE_SIZE];
drivers/misc/vmw_vmci/vmci_handle_array.c
100
array->entries[array->size] = VMCI_INVALID_HANDLE;
drivers/misc/vmw_vmci/vmci_handle_array.c
115
return array->entries[index];
drivers/misc/vmw_vmci/vmci_handle_array.c
124
if (vmci_handle_is_equal(array->entries[i], entry_handle))
drivers/misc/vmw_vmci/vmci_handle_array.c
137
return array->entries;
drivers/misc/vmw_vmci/vmci_handle_array.c
22
array = kmalloc_flex(*array, entries, capacity, GFP_ATOMIC);
drivers/misc/vmw_vmci/vmci_handle_array.c
48
size_t new_size = struct_size(array, entries,
drivers/misc/vmw_vmci/vmci_handle_array.c
62
array->entries[array->size] = handle;
drivers/misc/vmw_vmci/vmci_handle_array.c
78
if (vmci_handle_is_equal(array->entries[i], entry_handle)) {
drivers/misc/vmw_vmci/vmci_handle_array.c
79
handle = array->entries[i];
drivers/misc/vmw_vmci/vmci_handle_array.c
81
array->entries[i] = array->entries[array->size];
drivers/misc/vmw_vmci/vmci_handle_array.c
82
array->entries[array->size] = VMCI_INVALID_HANDLE;
drivers/misc/vmw_vmci/vmci_handle_array.c
99
handle = array->entries[array->size];
drivers/misc/vmw_vmci/vmci_handle_array.h
20
struct vmci_handle entries[] __counted_by(capacity);
drivers/misc/vmw_vmci/vmci_resource.c
128
hlist_add_head_rcu(&resource->node, &vmci_resource_table.entries[idx]);
drivers/misc/vmw_vmci/vmci_resource.c
146
hlist_for_each_entry(r, &vmci_resource_table.entries[idx], node) {
drivers/misc/vmw_vmci/vmci_resource.c
23
struct hlist_head entries[VMCI_RESOURCE_HASH_BUCKETS];
drivers/misc/vmw_vmci/vmci_resource.c
46
&vmci_resource_table.entries[idx], node) {
drivers/mmc/host/dw_mmc-pci.c
35
const struct pci_device_id *entries)
drivers/mtd/ubi/eba.c
1026
pnum = vol->eba_tbl->entries[lnum].pnum;
drivers/mtd/ubi/eba.c
108
ldesc->pnum = vol->eba_tbl->entries[lnum].pnum;
drivers/mtd/ubi/eba.c
1154
ubi_assert(vol->eba_tbl->entries[lnum].pnum < 0);
drivers/mtd/ubi/eba.c
131
tbl->entries = kmalloc_objs(*tbl->entries, nentries);
drivers/mtd/ubi/eba.c
132
if (!tbl->entries)
drivers/mtd/ubi/eba.c
136
tbl->entries[i].pnum = UBI_LEB_UNMAPPED;
drivers/mtd/ubi/eba.c
1370
if (vol->eba_tbl->entries[lnum].pnum != from) {
drivers/mtd/ubi/eba.c
1372
vol_id, lnum, from, vol->eba_tbl->entries[lnum].pnum);
drivers/mtd/ubi/eba.c
1457
ubi_assert(vol->eba_tbl->entries[lnum].pnum == from);
drivers/mtd/ubi/eba.c
1464
vol->eba_tbl->entries[lnum].pnum = to;
drivers/mtd/ubi/eba.c
157
kfree(tbl->entries);
drivers/mtd/ubi/eba.c
1667
entry = &vol->eba_tbl->entries[aeb->lnum];
drivers/mtd/ubi/eba.c
180
dst->entries[i].pnum = src->entries[i].pnum;
drivers/mtd/ubi/eba.c
433
return vol->eba_tbl->entries[lnum].pnum >= 0;
drivers/mtd/ubi/eba.c
458
pnum = vol->eba_tbl->entries[lnum].pnum;
drivers/mtd/ubi/eba.c
466
vol->eba_tbl->entries[lnum].pnum = UBI_LEB_UNMAPPED;
drivers/mtd/ubi/eba.c
528
vol->eba_tbl->entries[lnum].pnum = UBI_LEB_UNMAPPED;
drivers/mtd/ubi/eba.c
57
struct ubi_eba_entry *entries;
drivers/mtd/ubi/eba.c
603
pnum = vol->eba_tbl->entries[lnum].pnum;
drivers/mtd/ubi/eba.c
862
vol->eba_tbl->entries[lnum].pnum = new_pnum;
drivers/mtd/ubi/eba.c
953
opnum = vol->eba_tbl->entries[lnum].pnum;
drivers/mtd/ubi/eba.c
975
vol->eba_tbl->entries[lnum].pnum = pnum;
drivers/net/dsa/hirschmann/hellcreek.c
1585
cur = initial = &schedule->entries[0];
drivers/net/dsa/hirschmann/hellcreek.c
1851
if (schedule->entries[i].command != TC_TAPRIO_CMD_SET_GATES)
drivers/net/dsa/hirschmann/hellcreek.c
926
u16 entries;
drivers/net/dsa/hirschmann/hellcreek.c
936
entries = hellcreek_read(hellcreek, HR_FDBMAX);
drivers/net/dsa/hirschmann/hellcreek.c
939
dev_dbg(hellcreek->dev, "FDB dump for port %d, entries=%d!\n", port, entries);
drivers/net/dsa/microchip/ksz8.c
1334
u16 entries = 0;
drivers/net/dsa/microchip/ksz8.c
1339
&entries);
drivers/net/dsa/microchip/ksz8.c
1343
if (i >= entries)
drivers/net/dsa/microchip/ksz8.c
536
u8 *fid, u8 *src_port, u16 *entries)
drivers/net/dsa/microchip/ksz8.c
564
*entries = 0;
drivers/net/dsa/microchip/ksz8.c
580
*entries = cnt + 1;
drivers/net/dsa/microchip/ksz9477.h
81
struct ksz9477_acl_entry entries[KSZ9477_ACL_MAX_ENTRIES];
drivers/net/dsa/microchip/ksz9477_acl.c
1002
if (acles->entries[i].cookie == cookie) {
drivers/net/dsa/microchip/ksz9477_acl.c
1031
struct ksz9477_acl_entry *entry = &acles->entries[i];
drivers/net/dsa/microchip/ksz9477_acl.c
1071
for (i = 0; i < ARRAY_SIZE(acles->entries); i++) {
drivers/net/dsa/microchip/ksz9477_acl.c
1072
u8 *entry = acles->entries[i].entry;
drivers/net/dsa/microchip/ksz9477_acl.c
1375
entry = &acles->entries[acles->entries_count];
drivers/net/dsa/microchip/ksz9477_acl.c
246
entry = &acles->entries[index].entry[0];
drivers/net/dsa/microchip/ksz9477_acl.c
285
u8 *current_entry = &acles->entries[i].entry[0];
drivers/net/dsa/microchip/ksz9477_acl.c
478
e = &acles->entries[i];
drivers/net/dsa/microchip/ksz9477_acl.c
479
*e = acles->entries[i + num_entries_to_move];
drivers/net/dsa/microchip/ksz9477_acl.c
518
e = &acles->entries[b];
drivers/net/dsa/microchip/ksz9477_acl.c
519
*e = acles->entries[i - 1];
drivers/net/dsa/microchip/ksz9477_acl.c
573
buffer[i] = acles->entries[src_idx + i];
drivers/net/dsa/microchip/ksz9477_acl.c
593
acles->entries[dst_idx + i] = buffer[i];
drivers/net/dsa/microchip/ksz9477_acl.c
706
curr = &acles->entries[i];
drivers/net/dsa/microchip/ksz9477_acl.c
713
next = &acles->entries[j];
drivers/net/dsa/microchip/ksz9477_acl.c
757
memcpy(backup, acles->entries, sizeof(backup));
drivers/net/dsa/microchip/ksz9477_acl.c
766
ksz9477_dump_acl(dev, acles->entries);
drivers/net/dsa/microchip/ksz9477_acl.c
768
memcpy(acles->entries, backup, sizeof(backup));
drivers/net/dsa/microchip/ksz9477_acl.c
943
for (i = 0; i < ARRAY_SIZE(acles->entries); i++) {
drivers/net/dsa/microchip/ksz9477_acl.c
944
u8 *entry = acles->entries[i].entry;
drivers/net/dsa/microchip/ksz9477_tc_flower.c
192
entry = &acl->acles.entries[entry_idx];
drivers/net/dsa/ocelot/felix_vsc9959.c
1116
entry = &taprio->entries[i % n];
drivers/net/dsa/ocelot/felix_vsc9959.c
1514
vsc9959_tas_gcl_set(ocelot, i, &taprio->entries[i]);
drivers/net/dsa/ocelot/felix_vsc9959.c
1580
vsc9959_tas_gcl_set(ocelot, i, &taprio->entries[i]);
drivers/net/dsa/ocelot/felix_vsc9959.c
1748
struct action_gate_entry entries[] __counted_by(num_entries);
drivers/net/dsa/ocelot/felix_vsc9959.c
2110
memcpy(sgi->entries, entry->gate.entries,
drivers/net/dsa/ocelot/felix_vsc9959.c
2167
e = sgi->entries;
drivers/net/dsa/ocelot/felix_vsc9959.c
2274
size = struct_size(sgi, entries, a->gate.num_entries);
drivers/net/dsa/sja1105/sja1105.h
236
struct action_gate_entry *entries;
drivers/net/dsa/sja1105/sja1105_clocking.c
594
mac = priv->static_config.tables[BLK_IDX_MAC_CONFIG].entries;
drivers/net/dsa/sja1105/sja1105_clocking.c
774
mii = priv->static_config.tables[BLK_IDX_XMII_PARAMS].entries;
drivers/net/dsa/sja1105/sja1105_dynamic_config.c
1397
priv->static_config.tables[BLK_IDX_L2_LOOKUP_PARAMS].entries;
drivers/net/dsa/sja1105/sja1105_flower.c
133
policing = priv->static_config.tables[BLK_IDX_L2_POLICING].entries;
drivers/net/dsa/sja1105/sja1105_flower.c
428
act->gate.entries);
drivers/net/dsa/sja1105/sja1105_flower.c
476
policing = priv->static_config.tables[BLK_IDX_L2_POLICING].entries;
drivers/net/dsa/sja1105/sja1105_flower.c
61
policing = priv->static_config.tables[BLK_IDX_L2_POLICING].entries;
drivers/net/dsa/sja1105/sja1105_main.c
1018
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_main.c
1022
table->entries = kcalloc(table->ops->max_entry_count,
drivers/net/dsa/sja1105/sja1105_main.c
1024
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_main.c
1029
policing = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
107
mac = priv->static_config.tables[BLK_IDX_MAC_CONFIG].entries;
drivers/net/dsa/sja1105/sja1105_main.c
1273
mac = priv->static_config.tables[BLK_IDX_MAC_CONFIG].entries;
drivers/net/dsa/sja1105/sja1105_main.c
1403
mii = priv->static_config.tables[BLK_IDX_XMII_PARAMS].entries;
drivers/net/dsa/sja1105/sja1105_main.c
1421
l2_lookup = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
145
vlan = priv->static_config.tables[BLK_IDX_VLAN_LOOKUP].entries;
drivers/net/dsa/sja1105/sja1105_main.c
1463
l2_lookup = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
1964
l2_fwd = priv->static_config.tables[BLK_IDX_L2_FORWARDING].entries;
drivers/net/dsa/sja1105/sja1105_main.c
2003
l2_fwd = priv->static_config.tables[BLK_IDX_L2_FORWARDING].entries;
drivers/net/dsa/sja1105/sja1105_main.c
2052
mac = priv->static_config.tables[BLK_IDX_MAC_CONFIG].entries;
drivers/net/dsa/sja1105/sja1105_main.c
209
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_main.c
213
table->entries = kcalloc(table->ops->max_entry_count,
drivers/net/dsa/sja1105/sja1105_main.c
215
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_main.c
220
mac = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
2273
mac = priv->static_config.tables[BLK_IDX_MAC_CONFIG].entries;
drivers/net/dsa/sja1105/sja1105_main.c
2397
general_params = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
2437
vlan = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
2472
vlan = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
260
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_main.c
264
table->entries = kcalloc(table->ops->max_entry_count,
drivers/net/dsa/sja1105/sja1105_main.c
266
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_main.c
2719
l2_lookup_params = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
272
mii = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
2741
policing = priv->static_config.tables[BLK_IDX_L2_POLICING].entries;
drivers/net/dsa/sja1105/sja1105_main.c
2788
general_params = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
2790
mac = priv->static_config.tables[BLK_IDX_MAC_CONFIG].entries;
drivers/net/dsa/sja1105/sja1105_main.c
2855
policing = priv->static_config.tables[BLK_IDX_L2_POLICING].entries;
drivers/net/dsa/sja1105/sja1105_main.c
2873
policing = priv->static_config.tables[BLK_IDX_L2_POLICING].entries;
drivers/net/dsa/sja1105/sja1105_main.c
2886
mac = priv->static_config.tables[BLK_IDX_MAC_CONFIG].entries;
drivers/net/dsa/sja1105/sja1105_main.c
2925
l2_lookup = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
356
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_main.c
363
table->entries = kcalloc(1, table->ops->unpacked_entry_size,
drivers/net/dsa/sja1105/sja1105_main.c
365
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_main.c
369
l2_lookup = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
436
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_main.c
440
table->entries = kcalloc(table->ops->max_entry_count,
drivers/net/dsa/sja1105/sja1105_main.c
442
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_main.c
448
((struct sja1105_l2_lookup_params_entry *)table->entries)[0] =
drivers/net/dsa/sja1105/sja1105_main.c
479
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_main.c
483
table->entries = kzalloc(table->ops->unpacked_entry_size,
drivers/net/dsa/sja1105/sja1105_main.c
485
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_main.c
504
((struct sja1105_vlan_lookup_entry *)table->entries)[0] = pvid;
drivers/net/dsa/sja1105/sja1105_main.c
521
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_main.c
525
table->entries = kcalloc(table->ops->max_entry_count,
drivers/net/dsa/sja1105/sja1105_main.c
527
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_main.c
532
l2fwd = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
656
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_main.c
660
table->entries = kcalloc(table->ops->max_entry_count,
drivers/net/dsa/sja1105/sja1105_main.c
662
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_main.c
667
pcp_remap = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
689
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_main.c
693
table->entries = kcalloc(table->ops->max_entry_count,
drivers/net/dsa/sja1105/sja1105_main.c
695
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_main.c
701
l2fwd_params = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
718
l2_fwd_params = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
731
vl_fwd_params = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
76
vlan = priv->static_config.tables[BLK_IDX_VLAN_LOOKUP].entries;
drivers/net/dsa/sja1105/sja1105_main.c
763
general_params = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
904
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_main.c
908
table->entries = kcalloc(table->ops->max_entry_count,
drivers/net/dsa/sja1105/sja1105_main.c
910
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_main.c
915
general_params = table->entries;
drivers/net/dsa/sja1105/sja1105_main.c
92
mac = priv->static_config.tables[BLK_IDX_MAC_CONFIG].entries;
drivers/net/dsa/sja1105/sja1105_main.c
934
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_main.c
938
table->entries = kcalloc(table->ops->max_entry_count,
drivers/net/dsa/sja1105/sja1105_main.c
940
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_main.c
945
avb = table->entries;
drivers/net/dsa/sja1105/sja1105_ptp.c
696
avb = priv->static_config.tables[BLK_IDX_AVB_PARAMS].entries;
drivers/net/dsa/sja1105/sja1105_static_config.c
1022
l2_fwd_params = tables[BLK_IDX_L2_FORWARDING_PARAMS].entries;
drivers/net/dsa/sja1105/sja1105_static_config.c
1028
vl_fwd_params = tables[BLK_IDX_VL_FORWARDING_PARAMS].entries;
drivers/net/dsa/sja1105/sja1105_static_config.c
1068
vl_lookup = tables[BLK_IDX_VL_LOOKUP].entries;
drivers/net/dsa/sja1105/sja1105_static_config.c
1141
u8 *entry_ptr = table->entries;
drivers/net/dsa/sja1105/sja1105_static_config.c
1906
kfree(config->tables[i].entries);
drivers/net/dsa/sja1105/sja1105_static_config.c
1915
u8 *entries = table->entries;
drivers/net/dsa/sja1105/sja1105_static_config.c
1921
memmove(entries + i * entry_size, entries + (i + 1) * entry_size,
drivers/net/dsa/sja1105/sja1105_static_config.c
1934
void *new_entries, *old_entries = table->entries;
drivers/net/dsa/sja1105/sja1105_static_config.c
1946
table->entries = new_entries;
drivers/net/dsa/sja1105/sja1105_static_config.h
443
void *entries;
drivers/net/dsa/sja1105/sja1105_tas.c
185
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_tas.c
192
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_tas.c
199
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_tas.c
206
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_tas.c
218
if (!list_empty(&gating_cfg->entries)) {
drivers/net/dsa/sja1105/sja1105_tas.c
231
table->entries = kcalloc(num_entries, table->ops->unpacked_entry_size,
drivers/net/dsa/sja1105/sja1105_tas.c
233
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_tas.c
236
schedule = table->entries;
drivers/net/dsa/sja1105/sja1105_tas.c
240
table->entries = kcalloc(SJA1105_MAX_SCHEDULE_ENTRY_POINTS_PARAMS_COUNT,
drivers/net/dsa/sja1105/sja1105_tas.c
242
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_tas.c
249
schedule_entry_points_params = table->entries;
drivers/net/dsa/sja1105/sja1105_tas.c
253
table->entries = kcalloc(SJA1105_MAX_SCHEDULE_PARAMS_COUNT,
drivers/net/dsa/sja1105/sja1105_tas.c
255
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_tas.c
258
schedule_params = table->entries;
drivers/net/dsa/sja1105/sja1105_tas.c
262
table->entries = kcalloc(num_cycles, table->ops->unpacked_entry_size,
drivers/net/dsa/sja1105/sja1105_tas.c
264
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_tas.c
267
schedule_entry_points = table->entries;
drivers/net/dsa/sja1105/sja1105_tas.c
311
s64 delta_ns = offload->entries[i].interval;
drivers/net/dsa/sja1105/sja1105_tas.c
317
~offload->entries[i].gate_mask;
drivers/net/dsa/sja1105/sja1105_tas.c
322
if (!list_empty(&gating_cfg->entries)) {
drivers/net/dsa/sja1105/sja1105_tas.c
343
list_for_each_entry(e, &gating_cfg->entries, list) {
drivers/net/dsa/sja1105/sja1105_tas.c
426
delta1 += offload->entries[i].interval, i++) {
drivers/net/dsa/sja1105/sja1105_tas.c
432
delta2 += admin->entries[j].interval, j++) {
drivers/net/dsa/sja1105/sja1105_tas.c
477
if (list_empty(&gating_cfg->entries))
drivers/net/dsa/sja1105/sja1105_tas.c
480
dummy = kzalloc_flex(*dummy, entries, num_entries);
drivers/net/dsa/sja1105/sja1105_tas.c
49
if (!list_empty(&gating_cfg->entries)) {
drivers/net/dsa/sja1105/sja1105_tas.c
490
list_for_each_entry(e, &gating_cfg->entries, list)
drivers/net/dsa/sja1105/sja1105_tas.c
491
dummy->entries[i++].interval = e->interval;
drivers/net/dsa/sja1105/sja1105_tas.c
548
s64 delta_ns = admin->entries[i].interval;
drivers/net/dsa/sja1105/sja1105_tas.c
880
INIT_LIST_HEAD(&tas_data->gating_cfg.entries);
drivers/net/dsa/sja1105/sja1105_tas.h
38
struct list_head entries;
drivers/net/dsa/sja1105/sja1105_vl.c
144
u8 gate_state = rule->vl.entries[i].gate_state;
drivers/net/dsa/sja1105/sja1105_vl.c
157
time += rule->vl.entries[i].interval;
drivers/net/dsa/sja1105/sja1105_vl.c
27
if (list_empty(&gating_cfg->entries)) {
drivers/net/dsa/sja1105/sja1105_vl.c
28
list_add(&e->list, &gating_cfg->entries);
drivers/net/dsa/sja1105/sja1105_vl.c
32
list_for_each_entry(p, &gating_cfg->entries, list) {
drivers/net/dsa/sja1105/sja1105_vl.c
353
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_vl.c
360
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_vl.c
367
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_vl.c
374
kfree(table->entries);
drivers/net/dsa/sja1105/sja1105_vl.c
386
table->entries = kcalloc(num_virtual_links,
drivers/net/dsa/sja1105/sja1105_vl.c
389
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_vl.c
392
vl_lookup = table->entries;
drivers/net/dsa/sja1105/sja1105_vl.c
455
table->entries = kcalloc(max_sharindx, table->ops->unpacked_entry_size,
drivers/net/dsa/sja1105/sja1105_vl.c
457
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_vl.c
460
vl_policing = table->entries;
drivers/net/dsa/sja1105/sja1105_vl.c
464
table->entries = kcalloc(max_sharindx, table->ops->unpacked_entry_size,
drivers/net/dsa/sja1105/sja1105_vl.c
466
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_vl.c
469
vl_fwd = table->entries;
drivers/net/dsa/sja1105/sja1105_vl.c
473
table->entries = kcalloc(1, table->ops->unpacked_entry_size,
drivers/net/dsa/sja1105/sja1105_vl.c
475
if (!table->entries)
drivers/net/dsa/sja1105/sja1105_vl.c
585
u32 num_entries, struct action_gate_entry *entries)
drivers/net/dsa/sja1105/sja1105_vl.c
638
rule->vl.entries = kzalloc_objs(struct action_gate_entry,
drivers/net/dsa/sja1105/sja1105_vl.c
640
if (!rule->vl.entries) {
drivers/net/dsa/sja1105/sja1105_vl.c
646
div_s64_rem(entries[i].interval,
drivers/net/dsa/sja1105/sja1105_vl.c
65
list_for_each_entry(e, &gating_cfg->entries, list) {
drivers/net/dsa/sja1105/sja1105_vl.c
655
if (!entries[i].interval) {
drivers/net/dsa/sja1105/sja1105_vl.c
662
if (ns_to_sja1105_delta(entries[i].interval) >
drivers/net/dsa/sja1105/sja1105_vl.c
670
if (entries[i].maxoctets != -1) {
drivers/net/dsa/sja1105/sja1105_vl.c
678
ipv = entries[i].ipv;
drivers/net/dsa/sja1105/sja1105_vl.c
679
} else if (ipv != entries[i].ipv) {
drivers/net/dsa/sja1105/sja1105_vl.c
686
rule->vl.entries[i] = entries[i];
drivers/net/dsa/sja1105/sja1105_vl.c
70
if (prev == &gating_cfg->entries)
drivers/net/dsa/sja1105/sja1105_vl.c
722
kfree(rule->vl.entries);
drivers/net/dsa/sja1105/sja1105_vl.c
742
vl_lookup = table->entries;
drivers/net/dsa/sja1105/sja1105_vl.c
76
last_e = list_last_entry(&gating_cfg->entries,
drivers/net/dsa/sja1105/sja1105_vl.c
85
list_for_each_entry_safe(e, n, &gating_cfg->entries, list) {
drivers/net/dsa/sja1105/sja1105_vl.h
24
u32 num_entries, struct action_gate_entry *entries);
drivers/net/dsa/sja1105/sja1105_vl.h
57
struct action_gate_entry *entries)
drivers/net/ethernet/airoha/airoha_eth.c
2585
act = &actions->entries[0];
drivers/net/ethernet/airoha/airoha_eth.c
2636
act = &f->rule->action.entries[0];
drivers/net/ethernet/amazon/ena/ena_com.c
101
if (!sq->entries) {
drivers/net/ethernet/amazon/ena/ena_com.c
121
cq->entries = dma_alloc_coherent(admin_queue->q_dmadev, size, &cq->dma_addr, GFP_KERNEL);
drivers/net/ethernet/amazon/ena/ena_com.c
123
if (!cq->entries) {
drivers/net/ethernet/amazon/ena/ena_com.c
143
aenq->entries = dma_alloc_coherent(ena_dev->dmadev, size, &aenq->dma_addr, GFP_KERNEL);
drivers/net/ethernet/amazon/ena/ena_com.c
145
if (!aenq->entries) {
drivers/net/ethernet/amazon/ena/ena_com.c
1624
if (sq->entries)
drivers/net/ethernet/amazon/ena/ena_com.c
1625
dma_free_coherent(ena_dev->dmadev, size, sq->entries, sq->dma_addr);
drivers/net/ethernet/amazon/ena/ena_com.c
1626
sq->entries = NULL;
drivers/net/ethernet/amazon/ena/ena_com.c
1629
if (cq->entries)
drivers/net/ethernet/amazon/ena/ena_com.c
1630
dma_free_coherent(ena_dev->dmadev, size, cq->entries, cq->dma_addr);
drivers/net/ethernet/amazon/ena/ena_com.c
1631
cq->entries = NULL;
drivers/net/ethernet/amazon/ena/ena_com.c
1634
if (ena_dev->aenq.entries)
drivers/net/ethernet/amazon/ena/ena_com.c
1635
dma_free_coherent(ena_dev->dmadev, size, aenq->entries, aenq->dma_addr);
drivers/net/ethernet/amazon/ena/ena_com.c
1636
aenq->entries = NULL;
drivers/net/ethernet/amazon/ena/ena_com.c
2305
aenq_e = &aenq->entries[masked_head]; /* Get first entry */
drivers/net/ethernet/amazon/ena/ena_com.c
2334
aenq_e = &aenq->entries[masked_head];
drivers/net/ethernet/amazon/ena/ena_com.c
253
memcpy(&admin_queue->sq.entries[tail_masked], cmd, cmd_size_in_bytes);
drivers/net/ethernet/amazon/ena/ena_com.c
466
cqe = &admin_queue->cq.entries[head_masked];
drivers/net/ethernet/amazon/ena/ena_com.c
484
cqe = &admin_queue->cq.entries[head_masked];
drivers/net/ethernet/amazon/ena/ena_com.c
99
sq->entries = dma_alloc_coherent(admin_queue->q_dmadev, size, &sq->dma_addr, GFP_KERNEL);
drivers/net/ethernet/amazon/ena/ena_com.h
186
struct ena_admin_acq_entry *entries;
drivers/net/ethernet/amazon/ena/ena_com.h
194
struct ena_admin_aq_entry *entries;
drivers/net/ethernet/amazon/ena/ena_com.h
253
struct ena_admin_aenq_entry *entries;
drivers/net/ethernet/amd/pcnet32.c
483
unsigned int entries = BIT(size);
drivers/net/ethernet/amd/pcnet32.c
489
sizeof(struct pcnet32_tx_head) * entries,
drivers/net/ethernet/amd/pcnet32.c
494
new_dma_addr_list = kzalloc_objs(dma_addr_t, entries, GFP_ATOMIC);
drivers/net/ethernet/amd/pcnet32.c
498
new_skb_list = kzalloc_objs(struct sk_buff *, entries, GFP_ATOMIC);
drivers/net/ethernet/amd/pcnet32.c
508
lp->tx_ring_size = entries;
drivers/net/ethernet/amd/pcnet32.c
521
sizeof(struct pcnet32_tx_head) * entries,
drivers/net/ethernet/amd/pcnet32.c
544
unsigned int entries = BIT(size);
drivers/net/ethernet/amd/pcnet32.c
548
sizeof(struct pcnet32_rx_head) * entries,
drivers/net/ethernet/amd/pcnet32.c
553
new_dma_addr_list = kzalloc_objs(dma_addr_t, entries, GFP_ATOMIC);
drivers/net/ethernet/amd/pcnet32.c
557
new_skb_list = kzalloc_objs(struct sk_buff *, entries, GFP_ATOMIC);
drivers/net/ethernet/amd/pcnet32.c
562
overlap = min(entries, lp->rx_ring_size);
drivers/net/ethernet/amd/pcnet32.c
569
for (; new < entries; new++) {
drivers/net/ethernet/amd/pcnet32.c
612
lp->rx_ring_size = entries;
drivers/net/ethernet/amd/pcnet32.c
637
sizeof(struct pcnet32_rx_head) * entries,
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
370
if (!ctx_pg->entries)
drivers/net/ethernet/broadcom/bnge/bnge_hwrm_lib.c
372
req->num_entries = cpu_to_le32(ctx_pg->entries);
drivers/net/ethernet/broadcom/bnge/bnge_resc.c
569
u16 entries;
drivers/net/ethernet/broadcom/bnge/bnge_resc.c
571
entries = BNGE_MAX_RSS_TABLE_ENTRIES;
drivers/net/ethernet/broadcom/bnge/bnge_resc.c
573
bd->rss_indir_tbl_entries = entries;
drivers/net/ethernet/broadcom/bnge/bnge_resc.c
575
kmalloc_array(entries, sizeof(*bd->rss_indir_tbl), GFP_KERNEL);
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
233
struct bnge_ctx_mem_type *ctxm, u32 entries,
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
245
entries = roundup(entries, ctxm->entry_multiple);
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
246
entries = clamp_t(u32, entries, ctxm->min_entries, ctxm->max_entries);
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
247
mem_size = entries * ctxm->entry_size;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
249
ctx_pg[i].entries = entries;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
325
u32 ena, entries_sp, entries;
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
424
entries = l2_qps + 2 * (extra_qps + qp1_qps);
drivers/net/ethernet/broadcom/bnge/bnge_rmem.c
425
rc = bnge_setup_ctxm_pg_tbls(bd, ctxm, entries, 2);
drivers/net/ethernet/broadcom/bnge/bnge_rmem.h
61
u32 entries;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
14834
u32 offset, entries;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
14865
entries = tbl->fc_npiv_cfg.num_of_npiv;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
14866
entries = (__force u32)be32_to_cpu((__force __be32)entries);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
14867
tbl->fc_npiv_cfg.num_of_npiv = entries;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6533
int entries;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6536
entries = BNXT_MAX_RSS_TABLE_ENTRIES_P5;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6538
entries = HW_HASH_INDEX_SIZE;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6540
bp->rss_indir_tbl_entries = entries;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
6542
kmalloc_array(entries, sizeof(*bp->rss_indir_tbl), GFP_KERNEL);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
8916
req->qp_num_entries = cpu_to_le32(ctx_pg->entries);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
8930
req->srq_num_entries = cpu_to_le32(ctx_pg->entries);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
8940
req->cq_num_entries = cpu_to_le32(ctx_pg->entries);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
8972
req->mrav_num_entries = cpu_to_le32(ctx_pg->entries);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
8976
u32 entries;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
8978
num_mr = ctx_pg->entries - num_ah;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
8979
entries = ((num_mr / units) << 16) | (num_ah / units);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
8980
req->mrav_num_entries = cpu_to_le32(entries);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
8991
req->tim_num_entries = cpu_to_le32(ctx_pg->entries);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
9010
*num_entries = cpu_to_le32(ctx_pg->entries);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
9151
struct bnxt_ctx_mem_type *ctxm, u32 entries,
drivers/net/ethernet/broadcom/bnxt/bnxt.c
9163
entries = roundup(entries, ctxm->entry_multiple);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
9164
entries = clamp_t(u32, entries, ctxm->min_entries, ctxm->max_entries);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
9165
mem_size = entries * ctxm->entry_size;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
9167
ctx_pg[i].entries = entries;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
9219
if (!ctx_pg->entries)
drivers/net/ethernet/broadcom/bnxt/bnxt.c
9221
req->num_entries = cpu_to_le32(ctx_pg->entries);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
9374
u32 ena, entries_sp, entries;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
9490
entries = l2_qps + 2 * (extra_qps + qp1_qps);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
9491
rc = bnxt_setup_ctxm_pg_tbls(bp, ctxm, entries, 2);
drivers/net/ethernet/broadcom/bnxt/bnxt.h
1884
u32 entries;
drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c
4131
static int nvm_get_dir_info(struct net_device *dev, u32 *entries, u32 *length)
drivers/net/ethernet/broadcom/bnxt/bnxt_ethtool.c
4145
*entries = le32_to_cpu(output->entries);
drivers/net/ethernet/cadence/macb_main.c
4219
entry = &conf->entries[i];
drivers/net/ethernet/chelsio/cxgb/sge.c
1005
struct freelQ_e *from = &fl->entries[idx];
drivers/net/ethernet/chelsio/cxgb/sge.c
1006
struct freelQ_e *to = &fl->entries[fl->pidx];
drivers/net/ethernet/chelsio/cxgb/sge.c
1187
e1 = q->entries;
drivers/net/ethernet/chelsio/cxgb/sge.c
1211
e = e1 = &q->entries[pidx];
drivers/net/ethernet/chelsio/cxgb/sge.c
1240
e1 = q->entries;
drivers/net/ethernet/chelsio/cxgb/sge.c
1263
e1 = q->entries;
drivers/net/ethernet/chelsio/cxgb/sge.c
1473
struct respQ_e *e = &q->entries[q->cidx];
drivers/net/ethernet/chelsio/cxgb/sge.c
1527
e = q->entries;
drivers/net/ethernet/chelsio/cxgb/sge.c
1546
const struct respQ_e *e = &Q->entries[Q->cidx];
drivers/net/ethernet/chelsio/cxgb/sge.c
1563
struct respQ_e *e = &q->entries[q->cidx];
drivers/net/ethernet/chelsio/cxgb/sge.c
1582
e = q->entries;
drivers/net/ethernet/chelsio/cxgb/sge.c
179
struct cmdQ_e *entries; /* HW command descriptor Q */
drivers/net/ethernet/chelsio/cxgb/sge.c
194
struct freelQ_e *entries; /* HW freelist descriptor Q */
drivers/net/ethernet/chelsio/cxgb/sge.c
204
struct respQ_e *entries; /* HW response descriptor Q */
drivers/net/ethernet/chelsio/cxgb/sge.c
523
if (sge->respQ.entries) {
drivers/net/ethernet/chelsio/cxgb/sge.c
525
dma_free_coherent(&pdev->dev, size, sge->respQ.entries,
drivers/net/ethernet/chelsio/cxgb/sge.c
536
if (q->entries) {
drivers/net/ethernet/chelsio/cxgb/sge.c
538
dma_free_coherent(&pdev->dev, size, q->entries,
drivers/net/ethernet/chelsio/cxgb/sge.c
560
q->entries = dma_alloc_coherent(&pdev->dev, size,
drivers/net/ethernet/chelsio/cxgb/sge.c
562
if (!q->entries)
drivers/net/ethernet/chelsio/cxgb/sge.c
597
sge->respQ.entries =
drivers/net/ethernet/chelsio/cxgb/sge.c
600
if (!sge->respQ.entries)
drivers/net/ethernet/chelsio/cxgb/sge.c
660
if (q->entries) {
drivers/net/ethernet/chelsio/cxgb/sge.c
662
dma_free_coherent(&pdev->dev, size, q->entries,
drivers/net/ethernet/chelsio/cxgb/sge.c
688
q->entries = dma_alloc_coherent(&pdev->dev, size,
drivers/net/ethernet/chelsio/cxgb/sge.c
690
if (!q->entries)
drivers/net/ethernet/chelsio/cxgb/sge.c
825
struct freelQ_e *e = &q->entries[q->pidx];
drivers/net/ethernet/chelsio/cxgb/sge.c
856
e = q->entries;
drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c
3116
struct msix_entry entries[SGE_QSETS + 1];
drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c
3120
vectors = ARRAY_SIZE(entries);
drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c
3122
entries[i].entry = i;
drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c
3124
vectors = pci_enable_msix_range(adap->pdev, entries,
drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c
3130
adap->msix_info[i].vec = entries[i].vector;
drivers/net/ethernet/chelsio/cxgb4/cxgb4.h
1901
int t4_read_rss(struct adapter *adapter, u16 *entries);
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
2858
int entries;
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
2862
entries = 0;
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
2864
entries += port_mqprio->mqprio.qopt.count[tc];
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
2866
if (!entries)
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
2869
eosw_entries = DIV_ROUND_UP(entries, 4);
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
2873
n = min(4, entries - 4 * r);
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3167
u32 entries = 0;
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3171
entries += port_mqprio->mqprio.qopt.count[tc];
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3173
if (entries)
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3174
eosw_entries += DIV_ROUND_UP(entries, 4);
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3209
int entries = sge_queue_entries(seq->private);
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3211
return *pos < entries ? (void *)((uintptr_t)*pos + 1) : NULL;
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3220
int entries = sge_queue_entries(seq->private);
drivers/net/ethernet/chelsio/cxgb4/cxgb4_debugfs.c
3223
return *pos < entries ? (void *)((uintptr_t)*pos + 1) : NULL;
drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c
5900
struct msix_entry *entries;
drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c
5936
entries = kmalloc_objs(*entries, want);
drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c
5937
if (!entries)
drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c
5941
entries[i].entry = i;
drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c
5943
allocated = pci_enable_msix_range(adap->pdev, entries, need, want);
drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c
5950
allocated = pci_enable_msix_range(adap->pdev, entries,
drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c
6065
adap->msix_info[i].vec = entries[i].vector;
drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c
6074
kfree(entries);
drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c
6081
kfree(entries);
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2146
int entries = sge_queue_entries(seq->private);
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2148
return *pos < entries ? (void *)((uintptr_t)*pos + 1) : NULL;
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2157
int entries = sge_queue_entries(seq->private);
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2160
return *pos < entries ? (void *)((uintptr_t)*pos + 1) : NULL;
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2275
int entries = sge_qstats_entries(seq->private);
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2277
return *pos < entries ? (void *)((uintptr_t)*pos + 1) : NULL;
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2286
int entries = sge_qstats_entries(seq->private);
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2289
return *pos < entries ? (void *)((uintptr_t)*pos + 1) : NULL;
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2806
struct msix_entry entries[MSIX_ENTRIES];
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2810
entries[i].entry = i;
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2822
want = pci_enable_msix_range(adapter->pdev, entries, need, want);
drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c
2835
adapter->msix_info[i].vec = entries[i].vector;
drivers/net/ethernet/cisco/enic/vnic_rq.h
46
#define VNIC_RQ_BUF_BLK_ENTRIES(entries) \
drivers/net/ethernet/cisco/enic/vnic_rq.h
47
((unsigned int)((entries < VNIC_RQ_BUF_DFLT_BLK_ENTRIES) ? \
drivers/net/ethernet/cisco/enic/vnic_rq.h
49
#define VNIC_RQ_BUF_BLK_SZ(entries) \
drivers/net/ethernet/cisco/enic/vnic_rq.h
50
(VNIC_RQ_BUF_BLK_ENTRIES(entries) * sizeof(struct vnic_rq_buf))
drivers/net/ethernet/cisco/enic/vnic_rq.h
51
#define VNIC_RQ_BUF_BLKS_NEEDED(entries) \
drivers/net/ethernet/cisco/enic/vnic_rq.h
52
DIV_ROUND_UP(entries, VNIC_RQ_BUF_BLK_ENTRIES(entries))
drivers/net/ethernet/cisco/enic/vnic_wq.h
58
#define VNIC_WQ_BUF_BLK_ENTRIES(entries) \
drivers/net/ethernet/cisco/enic/vnic_wq.h
59
((unsigned int)((entries < VNIC_WQ_BUF_DFLT_BLK_ENTRIES) ? \
drivers/net/ethernet/cisco/enic/vnic_wq.h
61
#define VNIC_WQ_BUF_BLK_SZ(entries) \
drivers/net/ethernet/cisco/enic/vnic_wq.h
62
(VNIC_WQ_BUF_BLK_ENTRIES(entries) * sizeof(struct vnic_wq_buf))
drivers/net/ethernet/cisco/enic/vnic_wq.h
63
#define VNIC_WQ_BUF_BLKS_NEEDED(entries) \
drivers/net/ethernet/cisco/enic/vnic_wq.h
64
DIV_ROUND_UP(entries, VNIC_WQ_BUF_BLK_ENTRIES(entries))
drivers/net/ethernet/cortina/gemini.c
546
size_t entries = 1 << port->txq_order;
drivers/net/ethernet/cortina/gemini.c
549
size_t len = n_txq * entries;
drivers/net/ethernet/cortina/gemini.c
592
desc_ring += entries;
drivers/net/ethernet/cortina/gemini.c
593
skb_tab += entries;
drivers/net/ethernet/engleder/tsnep_selftests.c
357
qopt = kzalloc_flex(*qopt, entries, 255);
drivers/net/ethernet/engleder/tsnep_selftests.c
361
qopt->entries[i].command = TC_TAPRIO_CMD_SET_GATES;
drivers/net/ethernet/engleder/tsnep_selftests.c
367
qopt->entries[0].gate_mask = 0x02;
drivers/net/ethernet/engleder/tsnep_selftests.c
368
qopt->entries[0].interval = 200000;
drivers/net/ethernet/engleder/tsnep_selftests.c
369
qopt->entries[1].gate_mask = 0x03;
drivers/net/ethernet/engleder/tsnep_selftests.c
370
qopt->entries[1].interval = 800000;
drivers/net/ethernet/engleder/tsnep_selftests.c
371
qopt->entries[2].gate_mask = 0x07;
drivers/net/ethernet/engleder/tsnep_selftests.c
372
qopt->entries[2].interval = 240000;
drivers/net/ethernet/engleder/tsnep_selftests.c
373
qopt->entries[3].gate_mask = 0x01;
drivers/net/ethernet/engleder/tsnep_selftests.c
374
qopt->entries[3].interval = 80000;
drivers/net/ethernet/engleder/tsnep_selftests.c
375
qopt->entries[4].gate_mask = 0x04;
drivers/net/ethernet/engleder/tsnep_selftests.c
376
qopt->entries[4].interval = 70000;
drivers/net/ethernet/engleder/tsnep_selftests.c
377
qopt->entries[5].gate_mask = 0x06;
drivers/net/ethernet/engleder/tsnep_selftests.c
378
qopt->entries[5].interval = 60000;
drivers/net/ethernet/engleder/tsnep_selftests.c
379
qopt->entries[6].gate_mask = 0x0F;
drivers/net/ethernet/engleder/tsnep_selftests.c
380
qopt->entries[6].interval = 50000;
drivers/net/ethernet/engleder/tsnep_selftests.c
389
qopt->entries[0].gate_mask = 0x17;
drivers/net/ethernet/engleder/tsnep_selftests.c
390
qopt->entries[0].interval = 23842;
drivers/net/ethernet/engleder/tsnep_selftests.c
391
qopt->entries[1].gate_mask = 0x16;
drivers/net/ethernet/engleder/tsnep_selftests.c
392
qopt->entries[1].interval = 13482;
drivers/net/ethernet/engleder/tsnep_selftests.c
393
qopt->entries[2].gate_mask = 0x15;
drivers/net/ethernet/engleder/tsnep_selftests.c
394
qopt->entries[2].interval = 49428;
drivers/net/ethernet/engleder/tsnep_selftests.c
395
qopt->entries[3].gate_mask = 0x14;
drivers/net/ethernet/engleder/tsnep_selftests.c
396
qopt->entries[3].interval = 38189;
drivers/net/ethernet/engleder/tsnep_selftests.c
397
qopt->entries[4].gate_mask = 0x13;
drivers/net/ethernet/engleder/tsnep_selftests.c
398
qopt->entries[4].interval = 92321;
drivers/net/ethernet/engleder/tsnep_selftests.c
399
qopt->entries[5].gate_mask = 0x12;
drivers/net/ethernet/engleder/tsnep_selftests.c
400
qopt->entries[5].interval = 71239;
drivers/net/ethernet/engleder/tsnep_selftests.c
401
qopt->entries[6].gate_mask = 0x11;
drivers/net/ethernet/engleder/tsnep_selftests.c
402
qopt->entries[6].interval = 69932;
drivers/net/ethernet/engleder/tsnep_selftests.c
403
qopt->entries[7].gate_mask = 0x10;
drivers/net/ethernet/engleder/tsnep_selftests.c
404
qopt->entries[7].interval = 53421;
drivers/net/ethernet/engleder/tsnep_selftests.c
414
qopt->entries[0].gate_mask = 0x27;
drivers/net/ethernet/engleder/tsnep_selftests.c
415
qopt->entries[0].interval = 15000;
drivers/net/ethernet/engleder/tsnep_selftests.c
416
qopt->entries[1].gate_mask = 0x26;
drivers/net/ethernet/engleder/tsnep_selftests.c
417
qopt->entries[1].interval = 15000;
drivers/net/ethernet/engleder/tsnep_selftests.c
418
qopt->entries[2].gate_mask = 0x25;
drivers/net/ethernet/engleder/tsnep_selftests.c
419
qopt->entries[2].interval = 12500;
drivers/net/ethernet/engleder/tsnep_selftests.c
420
qopt->entries[3].gate_mask = 0x24;
drivers/net/ethernet/engleder/tsnep_selftests.c
421
qopt->entries[3].interval = 17500;
drivers/net/ethernet/engleder/tsnep_selftests.c
422
qopt->entries[4].gate_mask = 0x23;
drivers/net/ethernet/engleder/tsnep_selftests.c
423
qopt->entries[4].interval = 10000;
drivers/net/ethernet/engleder/tsnep_selftests.c
424
qopt->entries[5].gate_mask = 0x22;
drivers/net/ethernet/engleder/tsnep_selftests.c
425
qopt->entries[5].interval = 11000;
drivers/net/ethernet/engleder/tsnep_selftests.c
426
qopt->entries[6].gate_mask = 0x21;
drivers/net/ethernet/engleder/tsnep_selftests.c
427
qopt->entries[6].interval = 9000;
drivers/net/ethernet/engleder/tsnep_selftests.c
428
qopt->entries[7].gate_mask = 0x20;
drivers/net/ethernet/engleder/tsnep_selftests.c
429
qopt->entries[7].interval = 10000;
drivers/net/ethernet/engleder/tsnep_selftests.c
430
qopt->entries[8].gate_mask = 0x20;
drivers/net/ethernet/engleder/tsnep_selftests.c
431
qopt->entries[8].interval = 12500;
drivers/net/ethernet/engleder/tsnep_selftests.c
432
qopt->entries[9].gate_mask = 0x20;
drivers/net/ethernet/engleder/tsnep_selftests.c
433
qopt->entries[9].interval = 12500;
drivers/net/ethernet/engleder/tsnep_selftests.c
454
qopt = kzalloc_flex(*qopt, entries, 255);
drivers/net/ethernet/engleder/tsnep_selftests.c
458
qopt->entries[i].command = TC_TAPRIO_CMD_SET_GATES;
drivers/net/ethernet/engleder/tsnep_selftests.c
464
qopt->entries[0].gate_mask = 0x30;
drivers/net/ethernet/engleder/tsnep_selftests.c
465
qopt->entries[0].interval = 20000;
drivers/net/ethernet/engleder/tsnep_selftests.c
466
qopt->entries[1].gate_mask = 0x31;
drivers/net/ethernet/engleder/tsnep_selftests.c
467
qopt->entries[1].interval = 80000;
drivers/net/ethernet/engleder/tsnep_selftests.c
481
qopt->entries[0].gate_mask = 0x42;
drivers/net/ethernet/engleder/tsnep_selftests.c
482
qopt->entries[1].gate_mask = 0x43;
drivers/net/ethernet/engleder/tsnep_selftests.c
487
qopt->entries[0].gate_mask = 0x54;
drivers/net/ethernet/engleder/tsnep_selftests.c
488
qopt->entries[0].interval = 33333;
drivers/net/ethernet/engleder/tsnep_selftests.c
489
qopt->entries[1].gate_mask = 0x55;
drivers/net/ethernet/engleder/tsnep_selftests.c
490
qopt->entries[1].interval = 66667;
drivers/net/ethernet/engleder/tsnep_selftests.c
495
qopt->entries[0].gate_mask = 0x66;
drivers/net/ethernet/engleder/tsnep_selftests.c
496
qopt->entries[0].interval = 50000;
drivers/net/ethernet/engleder/tsnep_selftests.c
497
qopt->entries[1].gate_mask = 0x67;
drivers/net/ethernet/engleder/tsnep_selftests.c
498
qopt->entries[1].interval = 25000;
drivers/net/ethernet/engleder/tsnep_selftests.c
499
qopt->entries[2].gate_mask = 0x68;
drivers/net/ethernet/engleder/tsnep_selftests.c
500
qopt->entries[2].interval = 25000;
drivers/net/ethernet/engleder/tsnep_selftests.c
509
qopt->entries[0].gate_mask = 0x79;
drivers/net/ethernet/engleder/tsnep_selftests.c
510
qopt->entries[0].interval = 50000;
drivers/net/ethernet/engleder/tsnep_selftests.c
511
qopt->entries[1].gate_mask = 0x7A;
drivers/net/ethernet/engleder/tsnep_selftests.c
512
qopt->entries[1].interval = 150000;
drivers/net/ethernet/engleder/tsnep_selftests.c
519
qopt->entries[0].gate_mask = 0x7B;
drivers/net/ethernet/engleder/tsnep_selftests.c
520
qopt->entries[0].interval = 125000;
drivers/net/ethernet/engleder/tsnep_selftests.c
521
qopt->entries[1].gate_mask = 0x7C;
drivers/net/ethernet/engleder/tsnep_selftests.c
522
qopt->entries[1].interval = 250000;
drivers/net/ethernet/engleder/tsnep_selftests.c
523
qopt->entries[2].gate_mask = 0x7D;
drivers/net/ethernet/engleder/tsnep_selftests.c
524
qopt->entries[2].interval = 375000;
drivers/net/ethernet/engleder/tsnep_selftests.c
525
qopt->entries[3].gate_mask = 0x7E;
drivers/net/ethernet/engleder/tsnep_selftests.c
526
qopt->entries[3].interval = 250000;
drivers/net/ethernet/engleder/tsnep_selftests.c
535
qopt->entries[0].gate_mask = 0x8F;
drivers/net/ethernet/engleder/tsnep_selftests.c
536
qopt->entries[0].interval = 166666;
drivers/net/ethernet/engleder/tsnep_selftests.c
537
qopt->entries[1].gate_mask = 0x80;
drivers/net/ethernet/engleder/tsnep_selftests.c
538
qopt->entries[1].interval = 166667;
drivers/net/ethernet/engleder/tsnep_selftests.c
545
qopt->entries[0].gate_mask = 0x81;
drivers/net/ethernet/engleder/tsnep_selftests.c
546
qopt->entries[0].interval = 31250;
drivers/net/ethernet/engleder/tsnep_selftests.c
547
qopt->entries[1].gate_mask = 0x82;
drivers/net/ethernet/engleder/tsnep_selftests.c
548
qopt->entries[1].interval = 15625;
drivers/net/ethernet/engleder/tsnep_selftests.c
549
qopt->entries[2].gate_mask = 0x83;
drivers/net/ethernet/engleder/tsnep_selftests.c
550
qopt->entries[2].interval = 15625;
drivers/net/ethernet/engleder/tsnep_selftests.c
559
qopt->entries[0].gate_mask = 0x84;
drivers/net/ethernet/engleder/tsnep_selftests.c
560
qopt->entries[0].interval = 100000;
drivers/net/ethernet/engleder/tsnep_selftests.c
561
qopt->entries[1].gate_mask = 0x85;
drivers/net/ethernet/engleder/tsnep_selftests.c
562
qopt->entries[1].interval = 100000;
drivers/net/ethernet/engleder/tsnep_selftests.c
563
qopt->entries[2].gate_mask = 0x86;
drivers/net/ethernet/engleder/tsnep_selftests.c
564
qopt->entries[2].interval = 100000;
drivers/net/ethernet/engleder/tsnep_selftests.c
565
qopt->entries[3].gate_mask = 0x87;
drivers/net/ethernet/engleder/tsnep_selftests.c
566
qopt->entries[3].interval = 100000;
drivers/net/ethernet/engleder/tsnep_selftests.c
573
qopt->entries[0].gate_mask = 0x88;
drivers/net/ethernet/engleder/tsnep_selftests.c
574
qopt->entries[0].interval = 200000;
drivers/net/ethernet/engleder/tsnep_selftests.c
575
qopt->entries[1].gate_mask = 0x89;
drivers/net/ethernet/engleder/tsnep_selftests.c
576
qopt->entries[1].interval = 300000;
drivers/net/ethernet/engleder/tsnep_selftests.c
577
qopt->entries[2].gate_mask = 0x8A;
drivers/net/ethernet/engleder/tsnep_selftests.c
578
qopt->entries[2].interval = 600000;
drivers/net/ethernet/engleder/tsnep_selftests.c
579
qopt->entries[3].gate_mask = 0x8B;
drivers/net/ethernet/engleder/tsnep_selftests.c
580
qopt->entries[3].interval = 100000;
drivers/net/ethernet/engleder/tsnep_selftests.c
581
qopt->entries[4].gate_mask = 0x8C;
drivers/net/ethernet/engleder/tsnep_selftests.c
582
qopt->entries[4].interval = 500000;
drivers/net/ethernet/engleder/tsnep_selftests.c
607
qopt = kzalloc_flex(*qopt, entries, 255);
drivers/net/ethernet/engleder/tsnep_selftests.c
611
qopt->entries[i].command = TC_TAPRIO_CMD_SET_GATES;
drivers/net/ethernet/engleder/tsnep_selftests.c
617
qopt->entries[0].gate_mask = 0x90;
drivers/net/ethernet/engleder/tsnep_selftests.c
618
qopt->entries[0].interval = 20000;
drivers/net/ethernet/engleder/tsnep_selftests.c
619
qopt->entries[1].gate_mask = 0x91;
drivers/net/ethernet/engleder/tsnep_selftests.c
620
qopt->entries[1].interval = 80000;
drivers/net/ethernet/engleder/tsnep_selftests.c
627
qopt->entries[0].gate_mask = 0x92;
drivers/net/ethernet/engleder/tsnep_selftests.c
628
qopt->entries[0].interval = 33000;
drivers/net/ethernet/engleder/tsnep_selftests.c
629
qopt->entries[1].gate_mask = 0x93;
drivers/net/ethernet/engleder/tsnep_selftests.c
630
qopt->entries[1].interval = 67000;
drivers/net/ethernet/engleder/tsnep_selftests.c
640
qopt->entries[0].gate_mask = 0x94;
drivers/net/ethernet/engleder/tsnep_selftests.c
641
qopt->entries[0].interval = 400000;
drivers/net/ethernet/engleder/tsnep_selftests.c
642
qopt->entries[1].gate_mask = 0x95;
drivers/net/ethernet/engleder/tsnep_selftests.c
643
qopt->entries[1].interval = 600000;
drivers/net/ethernet/engleder/tsnep_selftests.c
651
qopt->entries[0].gate_mask = 0x96;
drivers/net/ethernet/engleder/tsnep_selftests.c
652
qopt->entries[0].interval = 400000;
drivers/net/ethernet/engleder/tsnep_selftests.c
653
qopt->entries[1].gate_mask = 0x97;
drivers/net/ethernet/engleder/tsnep_selftests.c
654
qopt->entries[1].interval = 1600000;
drivers/net/ethernet/engleder/tsnep_selftests.c
664
qopt->entries[0].gate_mask = 0x98;
drivers/net/ethernet/engleder/tsnep_selftests.c
665
qopt->entries[0].interval = 400000;
drivers/net/ethernet/engleder/tsnep_selftests.c
666
qopt->entries[1].gate_mask = 0x99;
drivers/net/ethernet/engleder/tsnep_selftests.c
667
qopt->entries[1].interval = 600000;
drivers/net/ethernet/engleder/tsnep_selftests.c
668
qopt->entries[2].gate_mask = 0x9A;
drivers/net/ethernet/engleder/tsnep_selftests.c
669
qopt->entries[2].interval = 500000;
drivers/net/ethernet/engleder/tsnep_selftests.c
677
qopt->entries[0].gate_mask = 0x9B;
drivers/net/ethernet/engleder/tsnep_selftests.c
678
qopt->entries[0].interval = 150000;
drivers/net/ethernet/engleder/tsnep_selftests.c
679
qopt->entries[1].gate_mask = 0x9C;
drivers/net/ethernet/engleder/tsnep_selftests.c
680
qopt->entries[1].interval = 350000;
drivers/net/ethernet/engleder/tsnep_selftests.c
690
qopt->entries[0].gate_mask = 0xAD;
drivers/net/ethernet/engleder/tsnep_selftests.c
691
qopt->entries[0].interval = 400000;
drivers/net/ethernet/engleder/tsnep_selftests.c
692
qopt->entries[1].gate_mask = 0xAE;
drivers/net/ethernet/engleder/tsnep_selftests.c
693
qopt->entries[1].interval = 300000;
drivers/net/ethernet/engleder/tsnep_selftests.c
694
qopt->entries[2].gate_mask = 0xAF;
drivers/net/ethernet/engleder/tsnep_selftests.c
695
qopt->entries[2].interval = 300000;
drivers/net/ethernet/engleder/tsnep_selftests.c
702
qopt->entries[0].gate_mask = 0xA0;
drivers/net/ethernet/engleder/tsnep_selftests.c
703
qopt->entries[0].interval = 200000;
drivers/net/ethernet/engleder/tsnep_selftests.c
704
qopt->entries[1].gate_mask = 0xA1;
drivers/net/ethernet/engleder/tsnep_selftests.c
705
qopt->entries[1].interval = 200000;
drivers/net/ethernet/engleder/tsnep_selftests.c
713
qopt->entries[0].gate_mask = 0xB2;
drivers/net/ethernet/engleder/tsnep_selftests.c
714
qopt->entries[0].interval = 100000;
drivers/net/ethernet/engleder/tsnep_selftests.c
715
qopt->entries[1].gate_mask = 0xB3;
drivers/net/ethernet/engleder/tsnep_selftests.c
716
qopt->entries[1].interval = 100000;
drivers/net/ethernet/engleder/tsnep_selftests.c
717
qopt->entries[2].gate_mask = 0xB4;
drivers/net/ethernet/engleder/tsnep_selftests.c
718
qopt->entries[2].interval = 100000;
drivers/net/ethernet/engleder/tsnep_selftests.c
719
qopt->entries[3].gate_mask = 0xB5;
drivers/net/ethernet/engleder/tsnep_selftests.c
720
qopt->entries[3].interval = 200000;
drivers/net/ethernet/engleder/tsnep_selftests.c
728
qopt->entries[0].gate_mask = 0xC6;
drivers/net/ethernet/engleder/tsnep_selftests.c
729
qopt->entries[0].interval = 1000000;
drivers/net/ethernet/engleder/tsnep_selftests.c
730
qopt->entries[1].gate_mask = 0xC7;
drivers/net/ethernet/engleder/tsnep_selftests.c
731
qopt->entries[1].interval = 1000000;
drivers/net/ethernet/engleder/tsnep_selftests.c
732
qopt->entries[2].gate_mask = 0xC8;
drivers/net/ethernet/engleder/tsnep_selftests.c
733
qopt->entries[2].interval = 1000000;
drivers/net/ethernet/engleder/tsnep_selftests.c
734
qopt->entries[3].gate_mask = 0xC9;
drivers/net/ethernet/engleder/tsnep_selftests.c
735
qopt->entries[3].interval = 1500000;
drivers/net/ethernet/engleder/tsnep_selftests.c
736
qopt->entries[4].gate_mask = 0xCA;
drivers/net/ethernet/engleder/tsnep_selftests.c
737
qopt->entries[4].interval = 1500000;
drivers/net/ethernet/engleder/tsnep_tc.c
100
properties = qopt->entries[i].gate_mask;
drivers/net/ethernet/engleder/tsnep_tc.c
105
qopt->entries[i].interval, true);
drivers/net/ethernet/engleder/tsnep_tc.c
22
if (qopt->entries[i].command != TC_TAPRIO_CMD_SET_GATES)
drivers/net/ethernet/engleder/tsnep_tc.c
24
if (qopt->entries[i].gate_mask & ~TSNEP_GCL_MASK)
drivers/net/ethernet/engleder/tsnep_tc.c
26
if (qopt->entries[i].interval < TSNEP_GCL_MIN_INTERVAL)
drivers/net/ethernet/engleder/tsnep_tc.c
28
cycle_time += qopt->entries[i].interval;
drivers/net/ethernet/freescale/dpaa2/dpaa2-switch-flower.c
516
act = &rule->action.entries[0];
drivers/net/ethernet/freescale/dpaa2/dpaa2-switch-flower.c
597
cls_act = &cls->rule->action.entries[0];
drivers/net/ethernet/freescale/dpaa2/dpaa2-switch-flower.c
660
act = &rule->action.entries[0];
drivers/net/ethernet/freescale/dpaa2/dpaa2-switch-flower.c
715
act = &cls->rule->action.entries[0];
drivers/net/ethernet/freescale/dpaa2/dpaa2-switch-flower.c
750
cls_act = &cls->rule->action.entries[0];
drivers/net/ethernet/freescale/dpaa2/dpaa2-switch-flower.c
805
act = &cls->rule->action.entries[0];
drivers/net/ethernet/freescale/enetc/enetc_qos.c
101
temp_entry = &admin_conf->entries[i];
drivers/net/ethernet/freescale/enetc/enetc_qos.c
1245
entries_size = struct_size(sgi, entries, entryg->gate.num_entries);
drivers/net/ethernet/freescale/enetc/enetc_qos.c
1259
e = sgi->entries;
drivers/net/ethernet/freescale/enetc/enetc_qos.c
1261
e[i].gate_state = entryg->gate.entries[i].gate_state;
drivers/net/ethernet/freescale/enetc/enetc_qos.c
1262
e[i].interval = entryg->gate.entries[i].interval;
drivers/net/ethernet/freescale/enetc/enetc_qos.c
1263
e[i].ipv = entryg->gate.entries[i].ipv;
drivers/net/ethernet/freescale/enetc/enetc_qos.c
1264
e[i].maxoctets = entryg->gate.entries[i].maxoctets;
drivers/net/ethernet/freescale/enetc/enetc_qos.c
446
struct action_gate_entry entries[] __counted_by(num_entries);
drivers/net/ethernet/freescale/enetc/enetc_qos.c
814
struct action_gate_entry *from = &sgi->entries[i];
drivers/net/ethernet/freescale/fec_main.c
339
int entries;
drivers/net/ethernet/freescale/fec_main.c
341
entries = (((const char *)txq->dirty_tx -
drivers/net/ethernet/freescale/fec_main.c
344
return entries >= 0 ? entries : entries + txq->bd.ring_size;
drivers/net/ethernet/ibm/ehea/ehea.h
135
u8 entries[PAGE_SIZE];
drivers/net/ethernet/ibm/ehea/ehea_qmr.h
203
return ¤t_page->entries[q_offset & (EHEA_PAGESIZE - 1)];
drivers/net/ethernet/ibm/ibmvnic.c
2299
int entries;
drivers/net/ethernet/ibm/ibmvnic.c
2304
entries = (u64)ind_bufp->index;
drivers/net/ethernet/ibm/ibmvnic.c
2307
for (i = entries - 1; i >= 0; --i) {
drivers/net/ethernet/ibm/ibmvnic.c
2333
if (atomic_sub_return(entries, &tx_scrq->used) <=
drivers/net/ethernet/ibm/ibmvnic.c
2374
u64 entries;
drivers/net/ethernet/ibm/ibmvnic.c
2380
entries = (u64)ind_bufp->index;
drivers/net/ethernet/ibm/ibmvnic.c
2383
if (!entries)
drivers/net/ethernet/ibm/ibmvnic.c
2387
rc = send_subcrq_indirect(adapter, handle, dma_addr, entries);
drivers/net/ethernet/ibm/ibmvnic.c
2395
rc, entries, &dma_addr, handle);
drivers/net/ethernet/intel/ice/ice_flex_pipe.c
2058
list_for_each_entry_safe(e, t, &p->entries, l_entry)
drivers/net/ethernet/intel/ice/ice_flow.c
1515
INIT_LIST_HEAD(¶ms->prof->entries);
drivers/net/ethernet/intel/ice/ice_flow.c
1544
if (!list_empty(&prof->entries)) {
drivers/net/ethernet/intel/ice/ice_flow.c
1549
list_for_each_entry_safe(e, t, &prof->entries, l_entry) {
drivers/net/ethernet/intel/ice/ice_flow.c
1848
list_add(&e->l_entry, &prof->entries);
drivers/net/ethernet/intel/ice/ice_flow.c
2023
if (!list_empty(&prof->entries)) {
drivers/net/ethernet/intel/ice/ice_flow.c
2027
list_for_each_entry_safe(e, t, &prof->entries, l_entry) {
drivers/net/ethernet/intel/ice/ice_flow.h
477
struct list_head entries;
drivers/net/ethernet/intel/ice/ice_irq.c
20
xa_init_flags(&pf->irq_tracker.entries, XA_FLAGS_ALLOC);
drivers/net/ethernet/intel/ice/ice_irq.c
223
entry = xa_load(&pf->irq_tracker.entries, map.index);
drivers/net/ethernet/intel/ice/ice_irq.c
42
xa_destroy(&pf->irq_tracker.entries);
drivers/net/ethernet/intel/ice/ice_irq.c
59
entry = xa_erase(&pf->irq_tracker.entries, index);
drivers/net/ethernet/intel/ice/ice_irq.c
92
ret = xa_alloc(&pf->irq_tracker.entries, &index, entry, limit,
drivers/net/ethernet/intel/ice/ice_irq.h
13
struct xarray entries;
drivers/net/ethernet/intel/ice/ice_main.c
3974
if (!xa_empty(&pf->irq_tracker.entries))
drivers/net/ethernet/intel/ice/ice_parser.c
1764
ice_xlt_kb_entry_dump(hw, &kb->entries[i], i);
drivers/net/ethernet/intel/ice/ice_parser.c
1880
ice_kb_entry_init(&kb->entries[i],
drivers/net/ethernet/intel/ice/ice_parser.c
1970
struct ice_xlt_kb_entry *entry = &kb->entries[0];
drivers/net/ethernet/intel/ice/ice_parser.h
381
struct ice_xlt_kb_entry entries[ICE_XLT_KB_TBL_CNT];
drivers/net/ethernet/intel/ice/ice_ptp.c
2382
static int ice_ptp_parse_sdp_entries(struct ice_pf *pf, __le16 *entries,
drivers/net/ethernet/intel/ice/ice_ptp.c
2397
u16 entry = le16_to_cpu(entries[i]);
drivers/net/ethernet/intel/ice/ice_ptp.c
2469
__le16 entries[ICE_AQC_NVM_SDP_AC_MAX_SIZE];
drivers/net/ethernet/intel/ice/ice_ptp.c
2475
err = ice_ptp_read_sdp_ac(&pf->hw, entries, &num_entries);
drivers/net/ethernet/intel/ice/ice_ptp.c
2493
err = ice_ptp_parse_sdp_entries(pf, entries, num_entries, desc);
drivers/net/ethernet/intel/ice/ice_ptp_hw.c
4837
int ice_ptp_read_sdp_ac(struct ice_hw *hw, __le16 *entries, uint *num_entries)
drivers/net/ethernet/intel/ice/ice_ptp_hw.c
4879
entries, false, true, NULL);
drivers/net/ethernet/intel/ice/ice_ptp_hw.h
357
int ice_ptp_read_sdp_ac(struct ice_hw *hw, __le16 *entries, uint *num_entries);
drivers/net/ethernet/intel/igc/igc_main.c
6380
prev = n ? &qopt->entries[n - 1] : NULL;
drivers/net/ethernet/intel/igc/igc_main.c
6381
e = &qopt->entries[n];
drivers/net/ethernet/intel/igc/igc_main.c
6523
struct tc_taprio_sched_entry *e = &qopt->entries[n];
drivers/net/ethernet/intel/ixgbe/ixgbe_sriov.c
369
int entries = FIELD_GET(IXGBE_VT_MSGINFO_MASK, msgbuf[0]);
drivers/net/ethernet/intel/ixgbe/ixgbe_sriov.c
380
entries = min(entries, IXGBE_MAX_VF_MC_ENTRIES);
drivers/net/ethernet/intel/ixgbe/ixgbe_sriov.c
387
vfinfo->num_vf_mc_hashes = entries;
drivers/net/ethernet/intel/ixgbe/ixgbe_sriov.c
393
for (i = 0; i < entries; i++) {
drivers/net/ethernet/intel/libie/fwlog.c
202
struct libie_fwlog_module_entry *entries, u16 num_entries,
drivers/net/ethernet/intel/libie/fwlog.c
217
cpu_to_le16(entries[i].module_id);
drivers/net/ethernet/intel/libie/fwlog.c
218
fw_modules[i].log_level = entries[i].log_level;
drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c
1101
act = &rule->flow->action.entries[0];
drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c
1320
act = &flow->action.entries[0];
drivers/net/ethernet/marvell/octeontx2/af/npc.h
527
int entries;
drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c
1553
int entries;
drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c
1601
if (fw_kpu->entries > KPU_MAX_CST_ENT)
drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c
1604
kpu, fw_kpu->entries, KPU_MAX_CST_ENT);
drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c
1605
entries = min(fw_kpu->entries, KPU_MAX_CST_ENT);
drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c
1607
offset += sizeof(*fw_kpu) + fw_kpu->entries * sizeof(*cam);
drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c
1609
offset += fw_kpu->entries * sizeof(*action);
drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c
1616
for (entry = 0; entry < entries; entry++) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_tc.c
1445
entry = &cls->rule->action.entries[0];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_tc.c
251
entry = &cls->rule->action.entries[0];
drivers/net/ethernet/marvell/prestera/prestera_matchall.c
77
act = &f->rule->action.entries[0];
drivers/net/ethernet/marvell/prestera/prestera_span.c
177
INIT_LIST_HEAD(&span->entries);
drivers/net/ethernet/marvell/prestera/prestera_span.c
189
WARN_ON(!list_empty(&span->entries));
drivers/net/ethernet/marvell/prestera/prestera_span.c
22
struct list_head entries;
drivers/net/ethernet/marvell/prestera/prestera_span.c
37
list_add_tail(&entry->list, &port->sw->span->entries);
drivers/net/ethernet/marvell/prestera/prestera_span.c
53
list_for_each_entry(entry, &span->entries, list) {
drivers/net/ethernet/marvell/prestera/prestera_span.c
67
list_for_each_entry(entry, &span->entries, list) {
drivers/net/ethernet/mellanox/mlx4/alloc.c
203
struct list_head entries;
drivers/net/ethernet/mellanox/mlx4/alloc.c
231
INIT_LIST_HEAD(&zones->entries);
drivers/net/ethernet/mellanox/mlx4/alloc.c
291
if (!list_is_last(&entry->list, &zone_alloc->entries)) {
drivers/net/ethernet/mellanox/mlx4/alloc.c
325
list_for_each_entry_safe(zone, tmp, &zone_alloc->entries, list) {
drivers/net/ethernet/mellanox/mlx4/alloc.c
361
list_for_each_entry_continue_reverse(it, &zone_alloc->entries, list) {
drivers/net/ethernet/mellanox/mlx4/alloc.c
375
list_for_each_entry_from(it, &zone_alloc->entries, list) {
drivers/net/ethernet/mellanox/mlx4/alloc.c
400
list_for_each_entry_from(curr_node, &zone_alloc->entries, list) {
drivers/net/ethernet/mellanox/mlx4/alloc.c
430
list_for_each_entry(zone, &zones->entries, list) {
drivers/net/ethernet/mellanox/mlx4/alloc.c
490
list_for_each_entry(zone, &zones->entries, list) {
drivers/net/ethernet/mellanox/mlx4/cq.c
192
int entries, struct mlx4_mtt *mtt)
drivers/net/ethernet/mellanox/mlx4/cq.c
204
cq_context->logsize_usrpage = cpu_to_be32(ilog2(entries) << 24);
drivers/net/ethernet/mellanox/mlx4/cq.c
290
static int mlx4_init_user_cqes(void *buf, int entries, int cqe_size)
drivers/net/ethernet/mellanox/mlx4/cq.c
307
if (entries_per_copy < entries) {
drivers/net/ethernet/mellanox/mlx4/cq.c
308
for (i = 0; i < entries / entries_per_copy; i++) {
drivers/net/ethernet/mellanox/mlx4/cq.c
318
array_size(entries, cqe_size)) ?
drivers/net/ethernet/mellanox/mlx4/cq.c
329
int entries,
drivers/net/ethernet/mellanox/mlx4/cq.c
335
memset(buf->direct.buf, 0xcc, entries * cqe_size);
drivers/net/ethernet/mellanox/mlx4/en_cq.c
48
int entries, int ring, enum cq_type mode,
drivers/net/ethernet/mellanox/mlx4/en_cq.c
61
cq->size = entries;
drivers/net/ethernet/mellanox/mlx4/main.c
2955
struct msix_entry *entries;
drivers/net/ethernet/mellanox/mlx4/main.c
2968
entries = kzalloc_objs(*entries, nreq);
drivers/net/ethernet/mellanox/mlx4/main.c
2969
if (!entries)
drivers/net/ethernet/mellanox/mlx4/main.c
2973
entries[i].entry = i;
drivers/net/ethernet/mellanox/mlx4/main.c
2975
nreq = pci_enable_msix_range(dev->persist->pdev, entries, 2,
drivers/net/ethernet/mellanox/mlx4/main.c
2979
kfree(entries);
drivers/net/ethernet/mellanox/mlx4/main.c
2985
priv->eq_table.eq[MLX4_EQ_ASYNC].irq = entries[0].vector;
drivers/net/ethernet/mellanox/mlx4/main.c
2994
entries[i + 1 - !!(i > MLX4_EQ_ASYNC)].vector;
drivers/net/ethernet/mellanox/mlx4/main.c
3030
kfree(entries);
drivers/net/ethernet/mellanox/mlx4/mlx4.h
748
__be64 entries[MLX4_MAX_MAC_NUM];
drivers/net/ethernet/mellanox/mlx4/mlx4.h
771
__be32 entries[MLX4_MAX_VLAN_NUM];
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
684
int entries, int ring, enum cq_type mode, int node);
drivers/net/ethernet/mellanox/mlx4/port.c
1004
if (!t1->entries[i])
drivers/net/ethernet/mellanox/mlx4/port.c
1008
t1->entries[i] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
1013
t2->entries[i] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
1019
ret = mlx4_set_port_vlan_table(dev, 1, t1->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
1024
ret1 = mlx4_set_port_vlan_table(dev, 2, t2->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
105
if (index < 0 || index >= table->max || !table->entries[index]) {
drivers/net/ethernet/mellanox/mlx4/port.c
120
(MLX4_MAC_MASK & be64_to_cpu(table->entries[i])))
drivers/net/ethernet/mellanox/mlx4/port.c
128
__be64 *entries)
drivers/net/ethernet/mellanox/mlx4/port.c
138
memcpy(mailbox->buf, entries, MLX4_MAC_TABLE_SIZE);
drivers/net/ethernet/mellanox/mlx4/port.c
196
if (((MLX4_MAC_MASK & mac) == (MLX4_MAC_MASK & be64_to_cpu(table->entries[i]))))
drivers/net/ethernet/mellanox/mlx4/port.c
198
if (((MLX4_MAC_MASK & mac) == (MLX4_MAC_MASK & be64_to_cpu(dup_table->entries[i]))))
drivers/net/ethernet/mellanox/mlx4/port.c
223
((MLX4_MAC_MASK & mac) == (MLX4_MAC_MASK & be64_to_cpu(table->entries[index_at_dup_port]))))
drivers/net/ethernet/mellanox/mlx4/port.c
242
(MLX4_MAC_MASK & be64_to_cpu(table->entries[i]))) {
drivers/net/ethernet/mellanox/mlx4/port.c
247
u64 dup_mac = MLX4_MAC_MASK & be64_to_cpu(dup_table->entries[i]);
drivers/net/ethernet/mellanox/mlx4/port.c
279
table->entries[free] = cpu_to_be64(mac | MLX4_MAC_VALID);
drivers/net/ethernet/mellanox/mlx4/port.c
281
err = mlx4_set_port_mac_table(dev, port, table->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
285
table->entries[free] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
294
dup_table->entries[free] = cpu_to_be64(mac | MLX4_MAC_VALID);
drivers/net/ethernet/mellanox/mlx4/port.c
296
err = mlx4_set_port_mac_table(dev, dup_port, dup_table->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
300
dup_table->entries[free] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
400
table->entries[index] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
401
if (mlx4_set_port_mac_table(dev, port, table->entries))
drivers/net/ethernet/mellanox/mlx4/port.c
409
dup_table->entries[index] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
410
if (mlx4_set_port_mac_table(dev, dup_port, dup_table->entries))
drivers/net/ethernet/mellanox/mlx4/port.c
481
table->entries[index] = cpu_to_be64(new_mac | MLX4_MAC_VALID);
drivers/net/ethernet/mellanox/mlx4/port.c
483
err = mlx4_set_port_mac_table(dev, port, table->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
487
table->entries[index] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
490
dup_table->entries[index] = cpu_to_be64(new_mac | MLX4_MAC_VALID);
drivers/net/ethernet/mellanox/mlx4/port.c
492
err = mlx4_set_port_mac_table(dev, dup_port, dup_table->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
496
dup_table->entries[index] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
517
__be32 *entries)
drivers/net/ethernet/mellanox/mlx4/port.c
527
memcpy(mailbox->buf, entries, MLX4_VLAN_TABLE_SIZE);
drivers/net/ethernet/mellanox/mlx4/port.c
546
be32_to_cpu(table->entries[i])))) {
drivers/net/ethernet/mellanox/mlx4/port.c
597
if (vlan == (MLX4_VLAN_MASK & be32_to_cpu(table->entries[i])))
drivers/net/ethernet/mellanox/mlx4/port.c
599
if (vlan == (MLX4_VLAN_MASK & be32_to_cpu(dup_table->entries[i])))
drivers/net/ethernet/mellanox/mlx4/port.c
623
(vlan == (MLX4_VLAN_MASK & be32_to_cpu(dup_table->entries[index_at_dup_port]))))
drivers/net/ethernet/mellanox/mlx4/port.c
642
be32_to_cpu(table->entries[i])))) {
drivers/net/ethernet/mellanox/mlx4/port.c
648
u16 dup_vlan = MLX4_VLAN_MASK & be32_to_cpu(dup_table->entries[i]);
drivers/net/ethernet/mellanox/mlx4/port.c
679
table->entries[free] = cpu_to_be32(vlan | MLX4_VLAN_VALID);
drivers/net/ethernet/mellanox/mlx4/port.c
68
table->entries[i] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
681
err = mlx4_set_port_vlan_table(dev, port, table->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
685
table->entries[free] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
692
dup_table->entries[free] = cpu_to_be32(vlan | MLX4_VLAN_VALID);
drivers/net/ethernet/mellanox/mlx4/port.c
694
err = mlx4_set_port_vlan_table(dev, dup_port, dup_table->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
698
dup_table->entries[free] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
779
table->entries[index] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
780
if (mlx4_set_port_vlan_table(dev, port, table->entries))
drivers/net/ethernet/mellanox/mlx4/port.c
787
dup_table->entries[index] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
788
if (mlx4_set_port_vlan_table(dev, dup_port, dup_table->entries))
drivers/net/ethernet/mellanox/mlx4/port.c
82
table->entries[i] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
834
if ((t1->entries[i] != t2->entries[i]) &&
drivers/net/ethernet/mellanox/mlx4/port.c
835
t1->entries[i] && t2->entries[i]) {
drivers/net/ethernet/mellanox/mlx4/port.c
843
if (t1->entries[i] && !t2->entries[i]) {
drivers/net/ethernet/mellanox/mlx4/port.c
844
t2->entries[i] = t1->entries[i];
drivers/net/ethernet/mellanox/mlx4/port.c
847
} else if (!t1->entries[i] && t2->entries[i]) {
drivers/net/ethernet/mellanox/mlx4/port.c
848
t1->entries[i] = t2->entries[i];
drivers/net/ethernet/mellanox/mlx4/port.c
851
} else if (t1->entries[i] && t2->entries[i]) {
drivers/net/ethernet/mellanox/mlx4/port.c
858
ret = mlx4_set_port_mac_table(dev, 1, t1->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
863
ret = mlx4_set_port_mac_table(dev, 2, t2->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
889
if (t1->entries[i] != t2->entries[i]) {
drivers/net/ethernet/mellanox/mlx4/port.c
897
if (!t1->entries[i])
drivers/net/ethernet/mellanox/mlx4/port.c
901
t1->entries[i] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
906
t2->entries[i] = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
912
ret = mlx4_set_port_mac_table(dev, 1, t1->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
917
ret1 = mlx4_set_port_mac_table(dev, 2, t2->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
941
if ((t1->entries[i] != t2->entries[i]) &&
drivers/net/ethernet/mellanox/mlx4/port.c
942
t1->entries[i] && t2->entries[i]) {
drivers/net/ethernet/mellanox/mlx4/port.c
950
if (t1->entries[i] && !t2->entries[i]) {
drivers/net/ethernet/mellanox/mlx4/port.c
951
t2->entries[i] = t1->entries[i];
drivers/net/ethernet/mellanox/mlx4/port.c
954
} else if (!t1->entries[i] && t2->entries[i]) {
drivers/net/ethernet/mellanox/mlx4/port.c
955
t1->entries[i] = t2->entries[i];
drivers/net/ethernet/mellanox/mlx4/port.c
958
} else if (t1->entries[i] && t2->entries[i]) {
drivers/net/ethernet/mellanox/mlx4/port.c
965
ret = mlx4_set_port_vlan_table(dev, 1, t1->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
970
ret = mlx4_set_port_vlan_table(dev, 2, t2->entries);
drivers/net/ethernet/mellanox/mlx4/port.c
996
if (t1->entries[i] != t2->entries[i]) {
drivers/net/ethernet/mellanox/mlx5/core/diag/en_tc_tracepoint.c
14
ids[i] = entries[i].id;
drivers/net/ethernet/mellanox/mlx5/core/diag/en_tc_tracepoint.c
8
const struct flow_action_entry *entries,
drivers/net/ethernet/mellanox/mlx5/core/diag/en_tc_tracepoint.h
18
const struct flow_action_entry *entries,
drivers/net/ethernet/mellanox/mlx5/core/diag/en_tc_tracepoint.h
38
f->rule->action.entries,
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
1441
int entries;
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
1445
entries = roundup_pow_of_two(wq_sz * MLX5_SEND_WQEBB_NUM_DS *
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
1448
size = array_size(sizeof(*xdpi_fifo->xi), entries);
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
1455
xdpi_fifo->mask = entries - 1;
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
310
static u16 mlx5e_mpwrq_umr_octowords(u32 entries, enum mlx5e_mpwrq_umr_mode umr_mode)
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
315
sz = ALIGN(entries * umr_entry_size, MLX5_UMR_FLEX_ALIGNMENT);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.c
1022
kfree(queue->completed.entries);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.c
1036
queue->completed.entries = kzalloc_objs(queue->completed.entries[0],
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.c
1038
if (!queue->completed.entries)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.c
1051
kfree(queue->completed.entries);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.c
645
comp->entries[comp->ci].status;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.c
647
comp->entries[comp->ci].user_data;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.h
130
struct mlx5hws_completed_poll_entry *entries;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.h
254
comp->entries[comp->pi].status = comp_status;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/send.h
255
comp->entries[comp->pi].user_data = user_data;
drivers/net/ethernet/mellanox/mlxsw/spectrum_matchall.c
266
act = &f->rule->action.entries[0];
drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c
280
mc_record = kzalloc_flex(*mc_record, entries, num_max_entries);
drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c
351
if (mc_record->entries[i].valid)
drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c
353
return &mc_record->entries[i];
drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c
388
mc_entry = &mc_record->entries[i];
drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c
424
mc_entry = &mc_record->entries[i];
drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c
690
struct mlxsw_sp_nve_mc_entry *mc_entry = &mc_record->entries[i];
drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c
70
struct mlxsw_sp_nve_mc_entry entries[];
drivers/net/ethernet/mellanox/mlxsw/spectrum_qdisc.c
2017
act = &f->rule->action.entries[0];
drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c
103
mlxsw_sp->span->entries[i].id = i;
drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c
1040
struct mlxsw_sp_span_entry *curr = &mlxsw_sp->span->entries[i];
drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c
34
struct mlxsw_sp_span_entry entries[] __counted_by(entries_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c
861
if (!refcount_read(&mlxsw_sp->span->entries[i].ref_count)) {
drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c
862
span_entry = &mlxsw_sp->span->entries[i];
drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c
90
span = kzalloc_flex(*span, entries, entries_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c
903
struct mlxsw_sp_span_entry *curr = &mlxsw_sp->span->entries[i];
drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c
924
struct mlxsw_sp_span_entry *curr = &mlxsw_sp->span->entries[i];
drivers/net/ethernet/mellanox/mlxsw/spectrum_span.c
940
struct mlxsw_sp_span_entry *curr = &mlxsw_sp->span->entries[i];
drivers/net/ethernet/meta/fbnic/fbnic_debugfs.c
589
list_for_each_entry_reverse(entry, &fbd->fw_log.entries, list) {
drivers/net/ethernet/meta/fbnic/fbnic_fw_log.c
104
list_for_each_entry_safe_reverse(tail, next, &log->entries, list) {
drivers/net/ethernet/meta/fbnic/fbnic_fw_log.c
115
list_add(&entry->list, &log->entries);
drivers/net/ethernet/meta/fbnic/fbnic_fw_log.c
49
INIT_LIST_HEAD(&log->entries);
drivers/net/ethernet/meta/fbnic/fbnic_fw_log.c
64
INIT_LIST_HEAD(&log->entries);
drivers/net/ethernet/meta/fbnic/fbnic_fw_log.c
87
if (list_empty(&log->entries)) {
drivers/net/ethernet/meta/fbnic/fbnic_fw_log.c
90
head = list_first_entry(&log->entries, typeof(*head), list);
drivers/net/ethernet/meta/fbnic/fbnic_fw_log.h
32
struct list_head entries;
drivers/net/ethernet/meta/fbnic/fbnic_pci.c
493
fbnic_fw_log_enable(fbd, list_empty(&fbd->fw_log.entries));
drivers/net/ethernet/microchip/lan966x/lan966x_taprio.c
240
struct tc_taprio_sched_entry *entry = &qopt->entries[i];
drivers/net/ethernet/microchip/lan966x/lan966x_taprio.c
249
if (qopt->entries[i].command != TC_TAPRIO_CMD_SET_GATES)
drivers/net/ethernet/microchip/lan966x/lan966x_taprio.c
252
total_time += qopt->entries[i].interval;
drivers/net/ethernet/microchip/lan966x/lan966x_taprio.c
363
lan966x_taprio_gcl_setup_entry(port, &qopt->entries[i], next);
drivers/net/ethernet/microchip/lan966x/lan966x_tc_matchall.c
17
act = &f->rule->action.entries[0];
drivers/net/ethernet/microchip/sparx5/sparx5_tc_flower.c
736
sg->gce[i].gate_state = !!act->gate.entries[i].gate_state;
drivers/net/ethernet/microchip/sparx5/sparx5_tc_flower.c
737
sg->gce[i].interval = act->gate.entries[i].interval;
drivers/net/ethernet/microchip/sparx5/sparx5_tc_flower.c
738
sg->gce[i].ipv = act->gate.entries[i].ipv;
drivers/net/ethernet/microchip/sparx5/sparx5_tc_flower.c
739
sg->gce[i].maxoctets = act->gate.entries[i].maxoctets;
drivers/net/ethernet/microchip/sparx5/sparx5_tc_matchall.c
15
sparx5_tc_matchall_entry_find(struct list_head *entries, unsigned long cookie)
drivers/net/ethernet/microchip/sparx5/sparx5_tc_matchall.c
19
list_for_each_entry(entry, entries, list) {
drivers/net/ethernet/microchip/sparx5/sparx5_tc_matchall.c
61
action = &tmo->rule->action.entries[0];
drivers/net/ethernet/mscc/ocelot_net.c
224
struct flow_action_entry *action = &f->rule->action.entries[0];
drivers/net/ethernet/mscc/ocelot_net.c
277
a = &action->entries[0];
drivers/net/ethernet/mscc/ocelot_net.c
362
action = &f->rule->action.entries[0];
drivers/net/ethernet/mscc/ocelot_net.c
381
action = &f->rule->action.entries[0];
drivers/net/ethernet/netronome/nfp/flower/action.c
1242
current_act = flow_act->entries[current_act_idx];
drivers/net/ethernet/netronome/nfp/flower/action.c
1249
prev_act = flow_act->entries[current_act_idx - 1];
drivers/net/ethernet/netronome/nfp/flower/action.c
1260
current_act = flow_act->entries[current_act_idx];
drivers/net/ethernet/netronome/nfp/flower/action.c
1267
next_act = flow_act->entries[current_act_idx + 1];
drivers/net/ethernet/netronome/nfp/flower/action.c
268
struct flow_action_entry *act = rule->action.entries;
drivers/net/ethernet/netronome/nfp/flower/conntrack.c
1542
new_act = &entry->rule->action.entries[i];
drivers/net/ethernet/netronome/nfp/flower/conntrack.c
1598
kfree(entry->rule->action.entries[entry->tun_offset].tunnel);
drivers/net/ethernet/netronome/nfp/flower/conntrack.c
1721
kfree(entry->rule->action.entries[entry->tun_offset].tunnel);
drivers/net/ethernet/netronome/nfp/flower/conntrack.c
733
tmp_stats = rules[num_rules - 1]->action.entries[0].hw_stats;
drivers/net/ethernet/netronome/nfp/flower/conntrack.c
761
a_in = &rules[j]->action.entries[i];
drivers/net/ethernet/netronome/nfp/flower/conntrack.c
784
memcpy(&a_rule->action.entries[offset++],
drivers/net/ethernet/netronome/nfp/flower/conntrack.c
796
csum_action = &a_rule->action.entries[offset++];
drivers/net/ethernet/netronome/nfp/flower/qos_conf.c
169
struct flow_action_entry *paction = &flow->rule->action.entries[0];
drivers/net/ethernet/netronome/nfp/flower/qos_conf.c
697
struct flow_action_entry *paction = &fl_act->action.entries[0];
drivers/net/ethernet/netronome/nfp/nfpcore/nfp.h
34
void nfp_nsp_config_set_state(struct nfp_nsp *state, void *entries,
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c
138
void *entries;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c
195
return state->entries;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c
204
nfp_nsp_config_set_state(struct nfp_nsp *state, void *entries, unsigned int idx)
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c
206
state->entries = entries;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c
212
state->entries = NULL;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
294
union eth_table_entry *entries;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
298
entries = kzalloc(NSP_ETH_TABLE_SIZE, GFP_KERNEL);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
299
if (!entries)
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
302
ret = nfp_nsp_read_eth_table(nsp, entries, NSP_ETH_TABLE_SIZE);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
309
if (entries[i].port & NSP_ETH_PORT_LANES_MASK)
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
328
if (entries[i].port & NSP_ETH_PORT_LANES_MASK)
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
329
nfp_eth_port_translate(nsp, &entries[i], i,
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
338
kfree(entries);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
343
kfree(entries);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
349
union eth_table_entry *entries;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
353
entries = kzalloc(NSP_ETH_TABLE_SIZE, GFP_KERNEL);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
354
if (!entries)
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
359
kfree(entries);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
363
ret = nfp_nsp_read_eth_table(nsp, entries, NSP_ETH_TABLE_SIZE);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
369
if (!(entries[idx].port & NSP_ETH_PORT_LANES_MASK)) {
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
375
nfp_nsp_config_set_state(nsp, entries, idx);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
380
kfree(entries);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
386
union eth_table_entry *entries = nfp_nsp_config_entries(nsp);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
391
kfree(entries);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
410
union eth_table_entry *entries = nfp_nsp_config_entries(nsp);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
414
ret = nfp_nsp_write_eth_table(nsp, entries, NSP_ETH_TABLE_SIZE);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
439
union eth_table_entry *entries;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
447
entries = nfp_nsp_config_entries(nsp);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
450
reg = le64_to_cpu(entries[idx].state);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
452
reg = le64_to_cpu(entries[idx].control);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
455
entries[idx].control = cpu_to_le64(reg);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
478
union eth_table_entry *entries;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
494
entries = nfp_nsp_config_entries(nsp);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
497
reg = le64_to_cpu(entries[idx].state);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
499
reg = le64_to_cpu(entries[idx].control);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
502
entries[idx].control = cpu_to_le64(reg);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
515
union eth_table_entry *entries = nfp_nsp_config_entries(nsp);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
529
reg = le64_to_cpu(entries[idx].raw[raw_idx]);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
535
entries[idx].raw[raw_idx] = cpu_to_le64(reg);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
537
entries[idx].control |= cpu_to_le64(ctrl_bit);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
546
union eth_table_entry *entries;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
562
entries = nfp_nsp_config_entries(nsp);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
564
reg = le64_to_cpu(entries[idx].control);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp_eth.c
567
entries[idx].control = cpu_to_le64(reg);
drivers/net/ethernet/nvidia/forcedeth.c
2201
u32 entries = (size >> NV_TX2_TSO_MAX_SHIFT) + ((size & (NV_TX2_TSO_MAX_SIZE-1)) ? 1 : 0);
drivers/net/ethernet/nvidia/forcedeth.c
2215
entries += (frag_size >> NV_TX2_TSO_MAX_SHIFT) +
drivers/net/ethernet/nvidia/forcedeth.c
2221
if (unlikely(empty_slots <= entries)) {
drivers/net/ethernet/nvidia/forcedeth.c
2375
u32 entries = (size >> NV_TX2_TSO_MAX_SHIFT) + ((size & (NV_TX2_TSO_MAX_SIZE-1)) ? 1 : 0);
drivers/net/ethernet/nvidia/forcedeth.c
2390
entries += (frag_size >> NV_TX2_TSO_MAX_SHIFT) +
drivers/net/ethernet/nvidia/forcedeth.c
2396
if (unlikely(empty_slots <= entries)) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
568
__le32 entries = cpu_to_le32(directory->num_entries);
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
570
for (i = 0; i < entries; i++) {
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
592
__le32 entries;
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
598
entries = cpu_to_le32(directory->num_entries);
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
600
tab_size = cpu_to_le32(directory->findex) + (entries * entry_size);
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
685
__le32 entries;
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
694
entries = cpu_to_le32(ptab_descr->num_entries);
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
696
tab_size = cpu_to_le32(ptab_descr->findex) + (entries * entry_size);
drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c
702
for (i = 0; i < entries; i++) {
drivers/net/ethernet/qlogic/qed/qed_cxt.c
874
struct src_ent *entries = p_t2->dma_mem[i].virt_addr;
drivers/net/ethernet/qlogic/qed/qed_cxt.c
880
entries[j].next = cpu_to_be64(val);
drivers/net/ethernet/qlogic/qed/qed_cxt.c
887
entries[j].next = cpu_to_be64(val);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_init.c
1977
int index, entries;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_init.c
1983
entries = p_dev->ahw->reset.hdr->entries;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_init.c
1986
for (; (!p_dev->ahw->reset.seq_end) && (index < entries); index++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_init.c
51
u16 entries;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_83xx_init.c
59
u16 entries;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
746
u32 i, entries;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
748
entries = le32_to_cpu(directory->num_entries);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
750
for (i = 0; i < entries; i++) {
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
770
u32 entries, entry_size, tab_size, fw_file_size;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
777
entries = le32_to_cpu(directory->num_entries);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
779
tab_size = le32_to_cpu(directory->findex) + (entries * entry_size);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
862
u32 entries, entry_size, tab_size, i;
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
870
entries = le32_to_cpu(ptab_descr->num_entries);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
872
tab_size = le32_to_cpu(ptab_descr->findex) + (entries * entry_size);
drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c
878
for (i = 0; i < entries; i++) {
drivers/net/ethernet/sfc/efx_channels.c
427
unsigned long entries;
drivers/net/ethernet/sfc/efx_channels.c
435
entries = roundup_pow_of_two(efx->rxq_entries + efx->txq_entries + 128);
drivers/net/ethernet/sfc/efx_channels.c
436
EFX_WARN_ON_PARANOID(entries > EFX_MAX_EVQ_SIZE);
drivers/net/ethernet/sfc/efx_channels.c
437
channel->eventq_mask = max(entries, EFX_MIN_EVQ_SIZE) - 1;
drivers/net/ethernet/sfc/falcon/efx.c
333
unsigned long entries;
drivers/net/ethernet/sfc/falcon/efx.c
340
entries = roundup_pow_of_two(efx->rxq_entries + efx->txq_entries + 128);
drivers/net/ethernet/sfc/falcon/efx.c
341
EF4_BUG_ON_PARANOID(entries > EF4_MAX_EVQ_SIZE);
drivers/net/ethernet/sfc/falcon/efx.c
342
channel->eventq_mask = max(entries, EF4_MIN_EVQ_SIZE) - 1;
drivers/net/ethernet/sfc/falcon/efx.c
780
channel->eventq.entries);
drivers/net/ethernet/sfc/falcon/efx.c
784
rx_queue->rxd.entries);
drivers/net/ethernet/sfc/falcon/efx.c
788
tx_queue->txd.entries);
drivers/net/ethernet/sfc/falcon/farch.c
1330
unsigned entries;
drivers/net/ethernet/sfc/falcon/farch.c
1332
entries = channel->eventq_mask + 1;
drivers/net/ethernet/sfc/falcon/farch.c
1334
entries * sizeof(ef4_qword_t));
drivers/net/ethernet/sfc/falcon/farch.c
1345
channel->eventq.index + channel->eventq.entries - 1);
drivers/net/ethernet/sfc/falcon/farch.c
1356
FRF_AZ_EVQ_SIZE, __ffs(channel->eventq.entries),
drivers/net/ethernet/sfc/falcon/farch.c
178
for (i = 0; i < buffer->entries; i++) {
drivers/net/ethernet/sfc/falcon/farch.c
198
unsigned int end = (buffer->index + buffer->entries - 1);
drivers/net/ethernet/sfc/falcon/farch.c
200
if (!buffer->entries)
drivers/net/ethernet/sfc/falcon/farch.c
204
buffer->index, buffer->index + buffer->entries - 1);
drivers/net/ethernet/sfc/falcon/farch.c
231
buffer->entries = len / EF4_BUF_SIZE;
drivers/net/ethernet/sfc/falcon/farch.c
236
efx->next_buffer_table += buffer->entries;
drivers/net/ethernet/sfc/falcon/farch.c
241
buffer->index + buffer->entries - 1,
drivers/net/ethernet/sfc/falcon/farch.c
257
buffer->index + buffer->entries - 1,
drivers/net/ethernet/sfc/falcon/farch.c
262
buffer->entries = 0;
drivers/net/ethernet/sfc/falcon/farch.c
366
unsigned entries;
drivers/net/ethernet/sfc/falcon/farch.c
368
entries = tx_queue->ptr_mask + 1;
drivers/net/ethernet/sfc/falcon/farch.c
370
entries * sizeof(ef4_qword_t));
drivers/net/ethernet/sfc/falcon/farch.c
392
__ffs(tx_queue->txd.entries),
drivers/net/ethernet/sfc/falcon/farch.c
512
unsigned entries;
drivers/net/ethernet/sfc/falcon/farch.c
514
entries = rx_queue->ptr_mask + 1;
drivers/net/ethernet/sfc/falcon/farch.c
516
entries * sizeof(ef4_qword_t));
drivers/net/ethernet/sfc/falcon/farch.c
537
rx_queue->rxd.index + rx_queue->rxd.entries - 1);
drivers/net/ethernet/sfc/falcon/farch.c
555
__ffs(rx_queue->rxd.entries),
drivers/net/ethernet/sfc/falcon/net_driver.h
128
unsigned int entries;
drivers/net/ethernet/sfc/falcon/rx.c
690
unsigned int entries;
drivers/net/ethernet/sfc/falcon/rx.c
694
entries = max(roundup_pow_of_two(efx->rxq_entries), EF4_MIN_DMAQ_SIZE);
drivers/net/ethernet/sfc/falcon/rx.c
695
EF4_BUG_ON_PARANOID(entries > EF4_MAX_DMAQ_SIZE);
drivers/net/ethernet/sfc/falcon/rx.c
696
rx_queue->ptr_mask = entries - 1;
drivers/net/ethernet/sfc/falcon/rx.c
704
rx_queue->buffer = kzalloc_objs(*rx_queue->buffer, entries);
drivers/net/ethernet/sfc/falcon/tx.c
534
unsigned int entries;
drivers/net/ethernet/sfc/falcon/tx.c
538
entries = max(roundup_pow_of_two(efx->txq_entries), EF4_MIN_DMAQ_SIZE);
drivers/net/ethernet/sfc/falcon/tx.c
539
EF4_BUG_ON_PARANOID(entries > EF4_MAX_DMAQ_SIZE);
drivers/net/ethernet/sfc/falcon/tx.c
540
tx_queue->ptr_mask = entries - 1;
drivers/net/ethernet/sfc/falcon/tx.c
547
tx_queue->buffer = kzalloc_objs(*tx_queue->buffer, entries);
drivers/net/ethernet/sfc/mcdi_functions.c
116
for (i = 0; i < entries; ++i) {
drivers/net/ethernet/sfc/mcdi_functions.c
121
inlen = MC_CMD_INIT_EVQ_IN_LEN(entries);
drivers/net/ethernet/sfc/mcdi_functions.c
169
size_t entries = tx_queue->txd.len / EFX_BUF_SIZE;
drivers/net/ethernet/sfc/mcdi_functions.c
188
tx_queue->queue, entries, (u64)dma_addr);
drivers/net/ethernet/sfc/mcdi_functions.c
190
for (i = 0; i < entries; ++i) {
drivers/net/ethernet/sfc/mcdi_functions.c
195
inlen = MC_CMD_INIT_TXQ_IN_LEN(entries);
drivers/net/ethernet/sfc/mcdi_functions.c
281
size_t entries = rx_queue->rxd.len / EFX_BUF_SIZE;
drivers/net/ethernet/sfc/mcdi_functions.c
312
efx_rx_queue_index(rx_queue), entries, (u64)dma_addr);
drivers/net/ethernet/sfc/mcdi_functions.c
314
for (i = 0; i < entries; ++i) {
drivers/net/ethernet/sfc/mcdi_functions.c
77
size_t entries = channel->eventq.len / EFX_BUF_SIZE;
drivers/net/ethernet/sfc/rx_common.c
192
unsigned int entries;
drivers/net/ethernet/sfc/rx_common.c
196
entries = max(roundup_pow_of_two(efx->rxq_entries), EFX_MIN_DMAQ_SIZE);
drivers/net/ethernet/sfc/rx_common.c
197
EFX_WARN_ON_PARANOID(entries > EFX_MAX_DMAQ_SIZE);
drivers/net/ethernet/sfc/rx_common.c
198
rx_queue->ptr_mask = entries - 1;
drivers/net/ethernet/sfc/rx_common.c
206
rx_queue->buffer = kzalloc_objs(*rx_queue->buffer, entries);
drivers/net/ethernet/sfc/siena/efx_channels.c
428
unsigned long entries;
drivers/net/ethernet/sfc/siena/efx_channels.c
436
entries = roundup_pow_of_two(efx->rxq_entries + efx->txq_entries + 128);
drivers/net/ethernet/sfc/siena/efx_channels.c
437
EFX_WARN_ON_PARANOID(entries > EFX_MAX_EVQ_SIZE);
drivers/net/ethernet/sfc/siena/efx_channels.c
438
channel->eventq_mask = max(entries, EFX_MIN_EVQ_SIZE) - 1;
drivers/net/ethernet/sfc/siena/efx_channels.c
878
channel->eventq.entries);
drivers/net/ethernet/sfc/siena/efx_channels.c
882
rx_queue->rxd.entries);
drivers/net/ethernet/sfc/siena/efx_channels.c
886
tx_queue->txd.entries);
drivers/net/ethernet/sfc/siena/farch.c
1340
unsigned entries;
drivers/net/ethernet/sfc/siena/farch.c
1342
entries = channel->eventq_mask + 1;
drivers/net/ethernet/sfc/siena/farch.c
1344
entries * sizeof(efx_qword_t));
drivers/net/ethernet/sfc/siena/farch.c
1355
channel->eventq.index + channel->eventq.entries - 1);
drivers/net/ethernet/sfc/siena/farch.c
1372
FRF_AZ_EVQ_SIZE, __ffs(channel->eventq.entries),
drivers/net/ethernet/sfc/siena/farch.c
182
for (i = 0; i < buffer->entries; i++) {
drivers/net/ethernet/sfc/siena/farch.c
202
unsigned int end = (buffer->index + buffer->entries - 1);
drivers/net/ethernet/sfc/siena/farch.c
204
if (!buffer->entries)
drivers/net/ethernet/sfc/siena/farch.c
208
buffer->index, buffer->index + buffer->entries - 1);
drivers/net/ethernet/sfc/siena/farch.c
238
buffer->entries = len / EFX_BUF_SIZE;
drivers/net/ethernet/sfc/siena/farch.c
243
efx->next_buffer_table += buffer->entries;
drivers/net/ethernet/sfc/siena/farch.c
252
buffer->index + buffer->entries - 1,
drivers/net/ethernet/sfc/siena/farch.c
268
buffer->index + buffer->entries - 1,
drivers/net/ethernet/sfc/siena/farch.c
273
buffer->entries = 0;
drivers/net/ethernet/sfc/siena/farch.c
374
unsigned entries;
drivers/net/ethernet/sfc/siena/farch.c
378
entries = tx_queue->ptr_mask + 1;
drivers/net/ethernet/sfc/siena/farch.c
380
entries * sizeof(efx_qword_t));
drivers/net/ethernet/sfc/siena/farch.c
403
__ffs(tx_queue->txd.entries),
drivers/net/ethernet/sfc/siena/farch.c
506
unsigned entries;
drivers/net/ethernet/sfc/siena/farch.c
508
entries = rx_queue->ptr_mask + 1;
drivers/net/ethernet/sfc/siena/farch.c
510
entries * sizeof(efx_qword_t));
drivers/net/ethernet/sfc/siena/farch.c
525
rx_queue->rxd.index + rx_queue->rxd.entries - 1);
drivers/net/ethernet/sfc/siena/farch.c
543
__ffs(rx_queue->rxd.entries),
drivers/net/ethernet/sfc/siena/net_driver.h
142
unsigned int entries;
drivers/net/ethernet/sfc/siena/rx_common.c
195
unsigned int entries;
drivers/net/ethernet/sfc/siena/rx_common.c
199
entries = max(roundup_pow_of_two(efx->rxq_entries), EFX_MIN_DMAQ_SIZE);
drivers/net/ethernet/sfc/siena/rx_common.c
200
EFX_WARN_ON_PARANOID(entries > EFX_MAX_DMAQ_SIZE);
drivers/net/ethernet/sfc/siena/rx_common.c
201
rx_queue->ptr_mask = entries - 1;
drivers/net/ethernet/sfc/siena/rx_common.c
209
rx_queue->buffer = kzalloc_objs(*rx_queue->buffer, entries);
drivers/net/ethernet/sfc/siena/tx_common.c
26
unsigned int entries;
drivers/net/ethernet/sfc/siena/tx_common.c
30
entries = max(roundup_pow_of_two(efx->txq_entries), EFX_MIN_DMAQ_SIZE);
drivers/net/ethernet/sfc/siena/tx_common.c
31
EFX_WARN_ON_PARANOID(entries > EFX_MAX_DMAQ_SIZE);
drivers/net/ethernet/sfc/siena/tx_common.c
32
tx_queue->ptr_mask = entries - 1;
drivers/net/ethernet/sfc/siena/tx_common.c
39
tx_queue->buffer = kzalloc_objs(*tx_queue->buffer, entries);
drivers/net/ethernet/sfc/tx_common.c
26
unsigned int entries;
drivers/net/ethernet/sfc/tx_common.c
30
entries = max(roundup_pow_of_two(efx->txq_entries), EFX_MIN_DMAQ_SIZE);
drivers/net/ethernet/sfc/tx_common.c
31
EFX_WARN_ON_PARANOID(entries > EFX_MAX_DMAQ_SIZE);
drivers/net/ethernet/sfc/tx_common.c
32
tx_queue->ptr_mask = entries - 1;
drivers/net/ethernet/sfc/tx_common.c
39
tx_queue->buffer = kzalloc_objs(*tx_queue->buffer, entries);
drivers/net/ethernet/stmicro/stmmac/dwmac5.c
394
dwmac5_rxp_get_next_entry(struct stmmac_tc_entry *entries, unsigned int count,
drivers/net/ethernet/stmicro/stmmac/dwmac5.c
403
entry = &entries[i];
drivers/net/ethernet/stmicro/stmmac/dwmac5.c
429
return &entries[min_prio_idx];
drivers/net/ethernet/stmicro/stmmac/dwmac5.c
433
int dwmac5_rxp_config(void __iomem *ioaddr, struct stmmac_tc_entry *entries,
drivers/net/ethernet/stmicro/stmmac/dwmac5.c
453
entry = &entries[i];
drivers/net/ethernet/stmicro/stmmac/dwmac5.c
459
entry = dwmac5_rxp_get_next_entry(entries, count, curr_prio);
drivers/net/ethernet/stmicro/stmmac/dwmac5.c
495
entry = &entries[i];
drivers/net/ethernet/stmicro/stmmac/dwmac5.h
91
int dwmac5_rxp_config(void __iomem *ioaddr, struct stmmac_tc_entry *entries,
drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c
1016
dwxgmac3_rxp_get_next_entry(struct stmmac_tc_entry *entries,
drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c
1025
entry = &entries[i];
drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c
1051
return &entries[min_prio_idx];
drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c
1056
struct stmmac_tc_entry *entries,
drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c
1076
entry = &entries[i];
drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c
1082
entry = dwxgmac3_rxp_get_next_entry(entries, count, curr_prio);
drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c
1118
entry = &entries[i];
drivers/net/ethernet/stmicro/stmmac/hwif.h
395
int (*rxp_config)(void __iomem *ioaddr, struct stmmac_tc_entry *entries,
drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c
1371
rule = kzalloc_flex(*rule, action.entries, 1);
drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c
1388
rule->action.entries[0].id = FLOW_ACTION_DROP;
drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c
1389
rule->action.entries[0].hw_stats = FLOW_ACTION_HW_STATS_ANY;
drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c
1499
rule = kzalloc_flex(*rule, action.entries, 1);
drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c
1517
rule->action.entries[0].id = FLOW_ACTION_DROP;
drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c
1518
rule->action.entries[0].hw_stats = FLOW_ACTION_HW_STATS_ANY;
drivers/net/ethernet/stmicro/stmmac/stmmac_tc.c
1012
s64 delta_ns = qopt->entries[i].interval;
drivers/net/ethernet/stmicro/stmmac/stmmac_tc.c
1013
u32 gates = qopt->entries[i].gate_mask;
drivers/net/ethernet/stmicro/stmmac/stmmac_tc.c
1020
switch (qopt->entries[i].command) {
drivers/net/ethernet/ti/am65-cpsw-qos.c
680
if (taprio->entries[i].command != TC_TAPRIO_CMD_SET_GATES) {
drivers/net/ethernet/ti/am65-cpsw-qos.c
685
fetch_cnt = am65_est_cmd_ns_to_cnt(taprio->entries[i].interval,
drivers/net/ethernet/ti/am65-cpsw-qos.c
735
entry = &est_new->taprio.entries[i];
drivers/net/ethernet/ti/am65-cpsw-qos.c
853
to->entries[i] = from->entries[i];
drivers/net/ethernet/ti/am65-cpsw-qos.c
888
struct_size(est_new, taprio.entries, taprio->num_entries),
drivers/net/ethernet/ti/icssm/icssm_prueth_switch.c
634
unsigned int idx, entries;
drivers/net/ethernet/ti/icssm/icssm_prueth_switch.c
676
entries = readw(&bucket_info->bucket_entries);
drivers/net/ethernet/ti/icssm/icssm_prueth_switch.c
677
right = idx + entries - 1;
drivers/net/ethernet/wangxun/libwx/wx_sriov.c
468
u16 entries = (msgbuf[0] & WX_VT_MSGINFO_MASK)
drivers/net/ethernet/wangxun/libwx/wx_sriov.c
475
entries = min_t(u16, entries, WX_MAX_VF_MC_ENTRIES);
drivers/net/ethernet/wangxun/libwx/wx_sriov.c
476
vfinfo->num_vf_mc_hashes = entries;
drivers/net/ethernet/wangxun/libwx/wx_sriov.c
478
for (i = 0; i < entries; i++)
drivers/net/virtio_net.c
3835
(2 * sizeof(mac_data->entries)), GFP_ATOMIC);
drivers/net/virtio_net.c
3846
mac_data->entries = cpu_to_virtio32(vi->vdev, uc_count);
drivers/net/virtio_net.c
3852
sizeof(mac_data->entries) + (uc_count * ETH_ALEN));
drivers/net/virtio_net.c
3857
mac_data->entries = cpu_to_virtio32(vi->vdev, mc_count);
drivers/net/virtio_net.c
3865
sizeof(mac_data->entries) + (mc_count * ETH_ALEN));
drivers/net/vmxnet3/vmxnet3_drv.c
401
int entries = 0;
drivers/net/vmxnet3/vmxnet3_drv.c
423
entries++;
drivers/net/vmxnet3/vmxnet3_drv.c
434
return entries;
drivers/net/wireless/ath/ath10k/core.h
662
struct ath10k_ce_crash_data entries[];
drivers/net/wireless/ath/ath10k/coredump.c
1511
CE_COUNT * sizeof(ce_hdr->entries[0]);
drivers/net/wireless/ath/ath10k/coredump.c
1571
dump_tlv->tlv_len = cpu_to_le32(struct_size(ce_hdr, entries,
drivers/net/wireless/ath/ath10k/coredump.c
1576
memcpy(ce_hdr->entries, crash_data->ce_crash_data,
drivers/net/wireless/ath/ath10k/coredump.c
1577
CE_COUNT * sizeof(ce_hdr->entries[0]));
drivers/net/wireless/ath/ath10k/coredump.c
1579
CE_COUNT * sizeof(ce_hdr->entries[0]);
drivers/net/wireless/ath/ath11k/debugfs.c
1149
dbr_dbg_data->entries[i].hp,
drivers/net/wireless/ath/ath11k/debugfs.c
1150
dbr_dbg_data->entries[i].tp,
drivers/net/wireless/ath/ath11k/debugfs.c
1151
dbr_dbg_data->entries[i].timestamp,
drivers/net/wireless/ath/ath11k/debugfs.c
1152
event_id_to_string[dbr_dbg_data->entries[i].event]);
drivers/net/wireless/ath/ath11k/debugfs.c
1182
kfree(dbr_dbg_data->entries);
drivers/net/wireless/ath/ath11k/debugfs.c
1218
dbr_dbg_data->entries = kzalloc_objs(struct ath11k_dbg_dbr_entry,
drivers/net/wireless/ath/ath11k/debugfs.c
1220
if (!dbr_dbg_data->entries)
drivers/net/wireless/ath/ath11k/debugfs.c
1545
kfree(dbr_dbg_data->entries);
drivers/net/wireless/ath/ath11k/debugfs.c
82
if (dbr_data->entries) {
drivers/net/wireless/ath/ath11k/debugfs.c
83
entry = &dbr_data->entries[dbr_data->dbr_debug_idx];
drivers/net/wireless/ath/ath11k/debugfs.h
70
struct ath11k_dbg_dbr_entry *entries;
drivers/net/wireless/ath/ath11k/pci.c
653
for (i = 0; i < fw_img->entries ; i++) {
drivers/net/wireless/ath/ath11k/pci.c
661
for (i = 0; i < rddm_img->entries; i++) {
drivers/net/wireless/ath/ath11k/pci.c
753
for (i = 0; i < fw_img->entries ; i++) {
drivers/net/wireless/ath/ath11k/pci.c
770
for (i = 0; i < rddm_img->entries; i++) {
drivers/net/wireless/ath/ath12k/pci.c
1264
for (i = 0; i < fw_img->entries ; i++) {
drivers/net/wireless/ath/ath12k/pci.c
1272
for (i = 0; i < rddm_img->entries; i++) {
drivers/net/wireless/ath/ath12k/pci.c
1364
for (i = 0; i < fw_img->entries ; i++) {
drivers/net/wireless/ath/ath12k/pci.c
1381
for (i = 0; i < rddm_img->entries; i++) {
drivers/net/wireless/ath/ath6kl/trace.h
122
unsigned int entries, struct hif_scatter_item *list),
drivers/net/wireless/ath/ath6kl/trace.h
124
TP_ARGS(addr, flags, total_len, entries, list),
drivers/net/wireless/ath/ath6kl/trace.h
130
__field(unsigned int, entries)
drivers/net/wireless/ath/ath6kl/trace.h
132
__dynamic_array(unsigned int, len_array, entries)
drivers/net/wireless/ath/ath6kl/trace.h
143
__entry->entries = entries;
drivers/net/wireless/ath/ath6kl/trace.h
153
for (i = 0; i < entries; i++) {
drivers/net/wireless/ath/ath6kl/trace.h
169
__entry->entries,
drivers/net/wireless/intel/ipw2x00/ipw2100.c
2659
if (r >= rxq->entries) {
drivers/net/wireless/intel/ipw2x00/ipw2100.c
2664
i = (rxq->next + 1) % rxq->entries;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
2738
i = (i + 1) % rxq->entries;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
2743
rxq->next = (i ? i : rxq->entries) - 1;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
2820
e %= txq->entries;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
2880
i = (i + 1) % txq->entries;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
2902
tbd = &txq->drv[(packet->index + 1 + i) % txq->entries];
drivers/net/wireless/intel/ipw2x00/ipw2100.c
2905
(packet->index + 1 + i) % txq->entries,
drivers/net/wireless/intel/ipw2x00/ipw2100.c
2953
txq->oldest = (e + 1) % txq->entries;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
3025
txq->next %= txq->entries;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
3125
txq->next %= txq->entries;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
3170
txq->next %= txq->entries;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
4294
static int status_queue_allocate(struct ipw2100_priv *priv, int entries)
drivers/net/wireless/intel/ipw2x00/ipw2100.c
4300
q->size = entries * sizeof(struct ipw2100_status);
drivers/net/wireless/intel/ipw2x00/ipw2100.c
4329
struct ipw2100_bd_queue *q, int entries)
drivers/net/wireless/intel/ipw2x00/ipw2100.c
4335
q->entries = entries;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
4336
q->size = entries * sizeof(struct ipw2100_bd);
drivers/net/wireless/intel/ipw2x00/ipw2100.c
4376
write_register(priv->net_dev, size, q->entries);
drivers/net/wireless/intel/ipw2x00/ipw2100.c
4487
priv->tx_queue.available = priv->tx_queue.entries;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
4607
priv->rx_queue.available = priv->rx_queue.entries - 1;
drivers/net/wireless/intel/ipw2x00/ipw2100.c
4608
priv->rx_queue.next = priv->rx_queue.entries - 1;
drivers/net/wireless/intel/ipw2x00/ipw2100.h
176
u32 entries;
drivers/net/wireless/intel/ipw2x00/ipw2200.c
4039
avg->sum -= avg->entries[avg->pos];
drivers/net/wireless/intel/ipw2x00/ipw2200.c
4041
avg->entries[avg->pos++] = val;
drivers/net/wireless/intel/ipw2x00/ipw2200.h
1068
s16 entries[AVG_ENTRIES];
drivers/net/wireless/intel/iwlwifi/dvm/eeprom.c
363
int idx, entries;
drivers/net/wireless/intel/iwlwifi/dvm/eeprom.c
371
entries = le16_to_cpup(txp_len) * 2 / EEPROM_TXP_ENTRY_LEN;
drivers/net/wireless/intel/iwlwifi/dvm/eeprom.c
375
for (idx = 0; idx < entries; idx++) {
drivers/net/wireless/intel/iwlwifi/iwl-trans.h
776
struct iwl_pcie_txq_entry *entries;
drivers/net/wireless/intel/iwlwifi/mld/agg.c
16
struct iwl_mld_reorder_buf_entry *entries =
drivers/net/wireless/intel/iwlwifi/mld/agg.c
17
&baid_data->entries[reorder_buf->queue *
drivers/net/wireless/intel/iwlwifi/mld/agg.c
196
struct iwl_mld_reorder_buf_entry *entries;
drivers/net/wireless/intel/iwlwifi/mld/agg.c
23
struct sk_buff_head *skb_list = &entries[index].frames;
drivers/net/wireless/intel/iwlwifi/mld/agg.c
253
entries = &baid_data->entries[queue * baid_data->entries_per_queue];
drivers/net/wireless/intel/iwlwifi/mld/agg.c
295
__skb_queue_tail(&entries[index].frames, skb);
drivers/net/wireless/intel/iwlwifi/mld/agg.c
457
struct iwl_mld_reorder_buf_entry *entries =
drivers/net/wireless/intel/iwlwifi/mld/agg.c
458
&data->entries[i * data->entries_per_queue];
drivers/net/wireless/intel/iwlwifi/mld/agg.c
464
__skb_queue_head_init(&entries[j].frames);
drivers/net/wireless/intel/iwlwifi/mld/agg.c
481
struct iwl_mld_reorder_buf_entry *entries =
drivers/net/wireless/intel/iwlwifi/mld/agg.c
482
&data->entries[i * data->entries_per_queue];
drivers/net/wireless/intel/iwlwifi/mld/agg.c
494
__skb_queue_purge(&entries[j].frames);
drivers/net/wireless/intel/iwlwifi/mld/agg.c
503
u32 reorder_buf_size = buf_size * sizeof(baid_data->entries[0]);
drivers/net/wireless/intel/iwlwifi/mld/agg.c
526
BUILD_BUG_ON(SMP_CACHE_BYTES % sizeof(baid_data->entries[0]) &&
drivers/net/wireless/intel/iwlwifi/mld/agg.c
527
sizeof(baid_data->entries[0]) % SMP_CACHE_BYTES);
drivers/net/wireless/intel/iwlwifi/mld/agg.c
549
reorder_buf_size / sizeof(baid_data->entries[0]);
drivers/net/wireless/intel/iwlwifi/mld/agg.h
76
struct iwl_mld_reorder_buf_entry entries[] ____cacheline_aligned_in_smp;
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
244
.entries[0].sn = 101,
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
270
.entries[0].sn = IEEE80211_MAX_SN,
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
297
.entries[0].sn = 102,
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
323
.entries[0].sn = 101,
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
324
.entries[1].sn = 104,
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
325
.entries[2].sn = 105,
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
351
.entries[0].sn = 101,
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
352
.entries[1].sn = 103,
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
353
.entries[2].sn = 104,
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
381
.entries[0] = {
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
412
.entries[0] = {
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
53
} entries[BA_WINDOW_SIZE];
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
537
struct iwl_mld_reorder_buf_entry *entries = baid_data->entries;
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
545
__skb_queue_head_init(&entries[i].frames);
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
548
u16 sn = param->reorder_buf_state.entries[i].sn;
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
551
param->reorder_buf_state.entries[i].add_subframes;
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
560
__skb_queue_tail(&entries[index].frames, fake_skb);
drivers/net/wireless/intel/iwlwifi/mld/tests/agg.c
576
u32 reorder_buf_size = BA_WINDOW_SIZE * sizeof(baid_data->entries[0]);
drivers/net/wireless/intel/iwlwifi/mvm/mvm.h
749
struct iwl_mvm_reorder_buf_entry entries[] ____cacheline_aligned_in_smp;
drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c
617
struct iwl_mvm_reorder_buf_entry *entries =
drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c
618
&baid_data->entries[reorder_buf->queue *
drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c
626
struct sk_buff_head *skb_list = &entries[index].frames;
drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c
794
struct iwl_mvm_reorder_buf_entry *entries;
drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c
853
entries = &baid_data->entries[queue * baid_data->entries_per_queue];
drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c
900
__skb_queue_tail(&entries[index].frames, skb);
drivers/net/wireless/intel/iwlwifi/mvm/sta.c
2711
struct iwl_mvm_reorder_buf_entry *entries =
drivers/net/wireless/intel/iwlwifi/mvm/sta.c
2712
&data->entries[i * data->entries_per_queue];
drivers/net/wireless/intel/iwlwifi/mvm/sta.c
2728
__skb_queue_purge(&entries[j].frames);
drivers/net/wireless/intel/iwlwifi/mvm/sta.c
2743
struct iwl_mvm_reorder_buf_entry *entries =
drivers/net/wireless/intel/iwlwifi/mvm/sta.c
2744
&data->entries[i * data->entries_per_queue];
drivers/net/wireless/intel/iwlwifi/mvm/sta.c
2753
__skb_queue_head_init(&entries[j].frames);
drivers/net/wireless/intel/iwlwifi/mvm/sta.c
2889
u32 reorder_buf_size = buf_size * sizeof(baid_data->entries[0]);
drivers/net/wireless/intel/iwlwifi/mvm/sta.c
2899
BUILD_BUG_ON(SMP_CACHE_BYTES % sizeof(baid_data->entries[0]) &&
drivers/net/wireless/intel/iwlwifi/mvm/sta.c
2900
sizeof(baid_data->entries[0]) % SMP_CACHE_BYTES);
drivers/net/wireless/intel/iwlwifi/mvm/sta.c
2926
reorder_buf_size / sizeof(baid_data->entries[0]);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/rx.c
1394
kfree_sensitive(txq->entries[cmd_index].free_buf);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/rx.c
1395
txq->entries[cmd_index].free_buf = NULL;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/rx.c
1630
struct msix_entry *entries = entry - queue;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/rx.c
1632
return container_of(entries, struct iwl_trans_pcie, msix_entries[0]);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/trans.c
3632
memcpy(txcmd->data, cmdq->entries[idx].cmd,
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
1302
out_cmd = txq->entries[idx].cmd;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
1303
out_meta = &txq->entries[idx].meta;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
1412
if (WARN_ON_ONCE(txq->entries[idx].free_buf))
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
1413
kfree_sensitive(txq->entries[idx].free_buf);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
1414
txq->entries[idx].free_buf = dup_buf;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
683
if (!txq->entries)
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
686
iwl_txq_gen2_tfd_unmap(trans, &txq->entries[idx].meta,
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
689
skb = txq->entries[idx].skb;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
697
txq->entries[idx].skb = NULL;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
763
txq->entries[idx].skb = skb;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
764
txq->entries[idx].cmd = dev_cmd;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
771
out_meta = &txq->entries[idx].meta;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
829
struct iwl_cmd_meta *cmd_meta = &txq->entries[idx].meta;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
830
struct sk_buff *skb = txq->entries[idx].skb;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
868
kfree(txq->entries);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
903
kfree_sensitive(txq->entries[i].cmd);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx-gen2.c
904
kfree_sensitive(txq->entries[i].free_buf);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
1458
out_cmd = txq->entries[idx].cmd;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
1459
out_meta = &txq->entries[idx].meta;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
1586
if (WARN_ON_ONCE(txq->entries[idx].free_buf))
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
1587
kfree_sensitive(txq->entries[idx].free_buf);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
1588
txq->entries[idx].free_buf = dup_buf;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
1648
cmd = txq->entries[cmd_index].cmd;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
1649
meta = &txq->entries[cmd_index].meta;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
2072
struct iwl_device_tx_cmd *dev_cmd = txq->entries[txq->write_ptr].cmd;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
2177
txq->entries[txq->write_ptr].skb = skb;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
2178
txq->entries[txq->write_ptr].cmd = dev_cmd;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
2192
out_meta = &txq->entries[txq->write_ptr].meta;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
2318
struct iwl_device_tx_cmd *dev_cmd = txq->entries[read_ptr].cmd;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
2403
struct iwl_cmd_meta *cmd_meta = &txq->entries[read_ptr].meta;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
2404
struct sk_buff *skb = txq->entries[read_ptr].skb;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
2414
txq->entries[read_ptr].skb = NULL;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
2635
txq->entries[cmd_idx].meta.flags &= ~CMD_WANT_SKB;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
354
if (!txq->entries)
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
361
iwl_txq_gen2_tfd_unmap(trans, &txq->entries[idx].meta,
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
364
iwl_txq_gen1_tfd_unmap(trans, &txq->entries[idx].meta,
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
368
skb = txq->entries[idx].skb;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
376
txq->entries[idx].skb = NULL;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
400
struct sk_buff *skb = txq->entries[txq->read_ptr].skb;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
402
&txq->entries[txq->read_ptr].meta;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
453
kfree_sensitive(txq->entries[i].cmd);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
454
kfree_sensitive(txq->entries[i].free_buf);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
471
kfree(txq->entries);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
472
txq->entries = NULL;
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
734
if (WARN_ON(txq->entries || txq->tfds))
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
744
txq->entries = kzalloc_objs(struct iwl_pcie_txq_entry, slots_num);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
746
if (!txq->entries)
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
751
txq->entries[i].cmd =
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
753
if (!txq->entries[i].cmd)
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
789
if (txq->entries && cmd_queue)
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
791
kfree(txq->entries[i].cmd);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
792
kfree(txq->entries);
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/tx.c
793
txq->entries = NULL;
drivers/net/wireless/intersil/p54/eeprom.c
154
if ((!list->entries) || (!list->band_channel_num[band]))
drivers/net/wireless/intersil/p54/eeprom.c
171
(i < list->entries); i++) {
drivers/net/wireless/intersil/p54/eeprom.c
242
for (i = list->entries; i >= 0; i--) {
drivers/net/wireless/intersil/p54/eeprom.c
249
if ((i < 0) && (list->entries < list->max_entries)) {
drivers/net/wireless/intersil/p54/eeprom.c
258
i = list->entries++;
drivers/net/wireless/intersil/p54/eeprom.c
327
if ((priv->iq_autocal_len != priv->curve_data->entries) ||
drivers/net/wireless/intersil/p54/eeprom.c
328
(priv->iq_autocal_len != priv->output_limit->entries))
drivers/net/wireless/intersil/p54/eeprom.c
333
max_channel_num = max_t(unsigned int, priv->output_limit->entries,
drivers/net/wireless/intersil/p54/eeprom.c
336
priv->curve_data->entries);
drivers/net/wireless/intersil/p54/eeprom.c
363
if (i < priv->output_limit->entries) {
drivers/net/wireless/intersil/p54/eeprom.c
379
if (i < priv->curve_data->entries) {
drivers/net/wireless/intersil/p54/eeprom.c
390
sort(list->channels, list->entries, sizeof(struct p54_channel_entry),
drivers/net/wireless/intersil/p54/eeprom.c
433
priv->curve_data->entries = curve_data->channels;
drivers/net/wireless/intersil/p54/eeprom.c
485
priv->curve_data->entries = curve_data->channels;
drivers/net/wireless/intersil/p54/eeprom.c
518
size_t db_len, entries;
drivers/net/wireless/intersil/p54/eeprom.c
522
entries = (type == PDR_RSSI_LINEAR_APPROXIMATION) ? 1 : 2;
drivers/net/wireless/intersil/p54/eeprom.c
523
if (len != sizeof(struct pda_rssi_cal_entry) * entries) {
drivers/net/wireless/intersil/p54/eeprom.c
535
entries = (len - offset) /
drivers/net/wireless/intersil/p54/eeprom.c
540
entries == 0) {
drivers/net/wireless/intersil/p54/eeprom.c
546
db_len = sizeof(*entry) * entries;
drivers/net/wireless/intersil/p54/eeprom.c
552
priv->rssi_db->entries = entries;
drivers/net/wireless/intersil/p54/eeprom.c
560
for (i = 0; i < entries; i++) {
drivers/net/wireless/intersil/p54/eeprom.c
568
for (i = 0; i < entries; i++) {
drivers/net/wireless/intersil/p54/eeprom.c
586
sort(entry, entries, sizeof(*entry), p54_compare_rssichan, NULL);
drivers/net/wireless/intersil/p54/eeprom.c
609
for (i = 0; i < priv->rssi_db->entries; i++) {
drivers/net/wireless/intersil/p54/eeprom.c
685
priv->output_limit->entries = data[1];
drivers/net/wireless/intersil/p54/eeprom.c
689
priv->output_limit->entries +
drivers/net/wireless/intersil/p54/eeprom.c
702
size_t payload_len, entries, entry_size, offset;
drivers/net/wireless/intersil/p54/eeprom.c
705
entries = le16_to_cpu(src->entries);
drivers/net/wireless/intersil/p54/eeprom.c
708
if (((entries * entry_size + offset) != payload_len) ||
drivers/net/wireless/intersil/p54/eeprom.c
716
dst->entries = entries;
drivers/net/wireless/intersil/p54/eeprom.c
81
size_t entries;
drivers/net/wireless/intersil/p54/eeprom.c
840
for (i = 0; i < priv->rssi_db->entries; i++)
drivers/net/wireless/intersil/p54/eeprom.h
118
__le16 entries;
drivers/net/wireless/intersil/p54/fwio.c
440
for (i = 0; i < priv->output_limit->entries; i++) {
drivers/net/wireless/intersil/p54/fwio.c
467
if (i == priv->output_limit->entries)
drivers/net/wireless/intersil/p54/fwio.c
471
for (i = 0; i < priv->curve_data->entries; i++) {
drivers/net/wireless/intersil/p54/fwio.c
495
if (i == priv->curve_data->entries)
drivers/net/wireless/intersil/p54/p54.h
125
size_t entries;
drivers/net/wireless/mediatek/mt7601u/dma.c
184
q->start = (q->start + 1) % q->entries;
drivers/net/wireless/mediatek/mt7601u/dma.c
218
q->end = (q->end + 1) % q->entries;
drivers/net/wireless/mediatek/mt7601u/dma.c
271
if (q->used == q->entries - q->entries / 8)
drivers/net/wireless/mediatek/mt7601u/dma.c
274
q->start = (q->start + 1) % q->entries;
drivers/net/wireless/mediatek/mt7601u/dma.c
318
if (WARN_ON(q->entries <= q->used)) {
drivers/net/wireless/mediatek/mt7601u/dma.c
339
q->end = (q->end + 1) % q->entries;
drivers/net/wireless/mediatek/mt7601u/dma.c
343
if (q->used >= q->entries)
drivers/net/wireless/mediatek/mt7601u/dma.c
394
for (i = 0; i < dev->rx_q.entries; i++)
drivers/net/wireless/mediatek/mt7601u/dma.c
423
for (i = 0; i < dev->rx_q.entries; i++) {
drivers/net/wireless/mediatek/mt7601u/dma.c
436
for (i = 0; i < dev->rx_q.entries; i++) {
drivers/net/wireless/mediatek/mt7601u/dma.c
448
dev->rx_q.entries = N_RX_ENTRIES;
drivers/net/wireless/mediatek/mt7601u/dma.c
465
for (i = 0; i < q->entries; i++) {
drivers/net/wireless/mediatek/mt7601u/dma.c
490
q->entries = N_TX_ENTRIES;
drivers/net/wireless/mediatek/mt7601u/mt7601u.h
77
unsigned int entries;
drivers/net/wireless/mediatek/mt7601u/mt7601u.h
93
unsigned int entries;
drivers/net/wireless/microchip/wilc1000/wlan.c
1010
if (entries == 0) {
drivers/net/wireless/microchip/wilc1000/wlan.c
1079
} while (--entries);
drivers/net/wireless/microchip/wilc1000/wlan.c
823
int i, entries = 0;
drivers/net/wireless/microchip/wilc1000/wlan.c
961
entries = FIELD_GET(WILC_VMM_ENTRY_COUNT, reg);
drivers/net/wireless/microchip/wilc1000/wlan.c
985
entries = FIELD_GET(WILC_VMM_ENTRY_COUNT, reg);
drivers/net/wireless/microchip/wilc1000/wlan.c
998
if (entries == 0) {
drivers/net/wireless/ralink/rt2x00/rt2400pci.c
772
entry_priv = rt2x00dev->tx[1].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2400pci.c
778
entry_priv = rt2x00dev->tx[0].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2400pci.c
784
entry_priv = rt2x00dev->atim->entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2400pci.c
790
entry_priv = rt2x00dev->bcn->entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2400pci.c
801
entry_priv = rt2x00dev->rx->entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2500pci.c
857
entry_priv = rt2x00dev->tx[1].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2500pci.c
863
entry_priv = rt2x00dev->tx[0].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2500pci.c
869
entry_priv = rt2x00dev->atim->entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2500pci.c
875
entry_priv = rt2x00dev->bcn->entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2500pci.c
886
entry_priv = rt2x00dev->rx->entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2800lib.c
1392
entry = &queue->entries[i];
drivers/net/wireless/ralink/rt2x00/rt2800lib.c
1961
entry = &queue->entries[i];
drivers/net/wireless/ralink/rt2x00/rt2800mmio.c
668
entry_priv = rt2x00dev->tx[0].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2800mmio.c
676
entry_priv = rt2x00dev->tx[1].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2800mmio.c
684
entry_priv = rt2x00dev->tx[2].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2800mmio.c
692
entry_priv = rt2x00dev->tx[3].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2800mmio.c
710
entry_priv = rt2x00dev->rx->entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2x00mac.c
227
entry = &queue->entries[i];
drivers/net/wireless/ralink/rt2x00/rt2x00mmio.c
121
entry_priv = queue->entries[i].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2x00mmio.c
133
queue->entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1093
rt2x00dev->ops->lib->clear_entry(&queue->entries[i]);
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1099
struct queue_entry *entries;
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1108
entry_size = sizeof(*entries) + queue->priv_size;
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1109
entries = kcalloc(queue->limit, entry_size, GFP_KERNEL);
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1110
if (!entries)
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1118
entries[i].flags = 0;
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1119
entries[i].queue = queue;
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1120
entries[i].skb = NULL;
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1121
entries[i].entry_idx = i;
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1122
entries[i].priv_data =
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1123
QUEUE_ENTRY_PRIV_OFFSET(entries, i, queue->limit,
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1124
sizeof(*entries), queue->priv_size);
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1129
queue->entries = entries;
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1138
if (!queue->entries)
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1142
rt2x00queue_free_skb(&queue->entries[i]);
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1152
skb = rt2x00queue_alloc_rxskb(&queue->entries[i], GFP_KERNEL);
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1155
queue->entries[i].skb = skb;
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1207
kfree(queue->entries);
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
1208
queue->entries = NULL;
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
820
if (fn(&queue->entries[i], data))
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
825
if (fn(&queue->entries[i], data))
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
830
if (fn(&queue->entries[i], data))
drivers/net/wireless/ralink/rt2x00/rt2x00queue.c
853
entry = &queue->entries[queue->index[index]];
drivers/net/wireless/ralink/rt2x00/rt2x00queue.h
453
struct queue_entry *entries;
drivers/net/wireless/ralink/rt2x00/rt2x00usb.c
657
entry_priv = queue->entries[i].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2x00usb.c
673
bcn_priv = queue->entries[i].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2x00usb.c
689
if (!queue->entries)
drivers/net/wireless/ralink/rt2x00/rt2x00usb.c
693
entry_priv = queue->entries[i].priv_data;
drivers/net/wireless/ralink/rt2x00/rt2x00usb.c
708
bcn_priv = queue->entries[i].priv_data;
drivers/net/wireless/ralink/rt2x00/rt61pci.c
1351
entry_priv = rt2x00dev->tx[0].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt61pci.c
1357
entry_priv = rt2x00dev->tx[1].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt61pci.c
1363
entry_priv = rt2x00dev->tx[2].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt61pci.c
1369
entry_priv = rt2x00dev->tx[3].entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt61pci.c
1382
entry_priv = rt2x00dev->rx->entries[0].priv_data;
drivers/net/wireless/ralink/rt2x00/rt61pci.c
2103
entry = &queue->entries[index];
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1071
unsigned int prio, unsigned int entries)
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1078
ring = dma_alloc_coherent(&priv->pdev->dev, sizeof(*ring) * entries,
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1089
priv->tx_ring[prio].entries = entries;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1092
for (i = 0; i < entries; i++)
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1094
cpu_to_le32((u32)dma + ((i + 1) % entries) * sizeof(*ring));
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1111
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
1115
sizeof(*ring->desc) * ring->entries, ring->desc,
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
356
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
371
if (ring->entries - skb_queue_len(&ring->queue) == 2)
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
548
idx = (ring->idx + skb_queue_len(&ring->queue)) % ring->entries;
drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c
578
if (ring->entries - skb_queue_len(&ring->queue) < 2)
drivers/net/wireless/realtek/rtl818x/rtl8180/rtl8180.h
92
unsigned int entries;
drivers/net/wireless/realtek/rtlwifi/pci.c
1139
unsigned int prio, unsigned int entries)
drivers/net/wireless/realtek/rtlwifi/pci.c
1153
sizeof(*buffer_desc) * entries,
drivers/net/wireless/realtek/rtlwifi/pci.c
1170
desc = dma_alloc_coherent(&rtlpci->pdev->dev, sizeof(*desc) * entries,
drivers/net/wireless/realtek/rtlwifi/pci.c
1182
rtlpci->tx_ring[prio].entries = entries;
drivers/net/wireless/realtek/rtlwifi/pci.c
1190
for (i = 0; i < entries; i++) {
drivers/net/wireless/realtek/rtlwifi/pci.c
1192
((i + 1) % entries) *
drivers/net/wireless/realtek/rtlwifi/pci.c
1285
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/pci.c
1290
sizeof(*ring->desc) * ring->entries, ring->desc,
drivers/net/wireless/realtek/rtlwifi/pci.c
1295
sizeof(*ring->buffer_desc) * ring->entries,
drivers/net/wireless/realtek/rtlwifi/pci.c
1464
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/pci.c
1473
ring->entries = rtlpci->txringcount[i];
drivers/net/wireless/realtek/rtlwifi/pci.c
1565
ring->entries;
drivers/net/wireless/realtek/rtlwifi/pci.c
1613
if ((ring->entries - skb_queue_len(&ring->queue)) < 2 &&
drivers/net/wireless/realtek/rtlwifi/pci.c
428
(ring->entries - skb_queue_len(&ring->queue) >
drivers/net/wireless/realtek/rtlwifi/pci.c
471
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/pci.c
533
if ((ring->entries - skb_queue_len(&ring->queue)) <= 4) {
drivers/net/wireless/realtek/rtlwifi/pci.h
132
unsigned int entries;
drivers/net/wireless/realtek/rtlwifi/rtl8188ee/hw.c
85
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/rtl8192ee/trx.c
901
u16 max_tx_desc = ring->entries;
drivers/net/wireless/realtek/rtlwifi/rtl8192se/fw.c
123
idx = (ring->idx + skb_queue_len(&ring->queue)) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/rtl8723be/hw.c
47
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtlwifi/rtl8821ae/hw.c
43
ring->idx = (ring->idx + 1) % ring->entries;
drivers/net/wireless/realtek/rtw89/acpi.c
1087
tbl->entries[regd] = tmp;
drivers/net/wireless/realtek/rtw89/acpi.c
1138
tbl->entries[regd] = tmp;
drivers/net/wireless/realtek/rtw89/acpi.c
1229
hdl->load(rtwdev, data->buf, regd, &cfg->tables[i].entries[regd]);
drivers/net/wireless/realtek/rtw89/acpi.c
738
ptr_ent = &ptr->entries[geo_idx];
drivers/net/wireless/realtek/rtw89/acpi.c
779
ptr_ent = &ptr->entries[geo_idx];
drivers/net/wireless/realtek/rtw89/acpi.c
822
ptr_ent = &ptr->entries[geo_idx];
drivers/net/wireless/realtek/rtw89/acpi.c
863
ptr_ent = &ptr->entries[geo_idx];
drivers/net/wireless/realtek/rtw89/acpi.h
221
entries[RTW89_ACPI_GEO_SAR_REGD_NR_HP];
drivers/net/wireless/realtek/rtw89/acpi.h
226
entries[RTW89_ACPI_GEO_SAR_REGD_NR_HP];
drivers/net/wireless/realtek/rtw89/acpi.h
258
entries[RTW89_ACPI_GEO_SAR_REGD_NR_RT];
drivers/net/wireless/realtek/rtw89/acpi.h
263
entries[RTW89_ACPI_GEO_SAR_REGD_NR_RT];
drivers/net/wireless/realtek/rtw89/core.h
4968
struct rtw89_sar_entry_from_acpi entries[RTW89_REGD_NUM];
drivers/net/wireless/realtek/rtw89/sar.c
108
return &tbl->entries[regd];
drivers/net/wireless/realtek/rtw89/sar.c
478
ent = &tbl->entries[j];
drivers/nvme/host/core.c
2892
table->entries[state] = target;
drivers/nvme/host/pci.c
1127
dma_addr_t dma_addr, int entries)
drivers/nvme/host/pci.c
1130
sge->length = cpu_to_le32(entries * sizeof(*sge));
drivers/nvme/host/pci.c
1139
unsigned int entries = blk_rq_nr_phys_segments(req);
drivers/nvme/host/pci.c
1147
if (entries == 1 || blk_rq_dma_map_coalesce(&iod->dma_state)) {
drivers/nvme/host/pci.c
1153
if (entries <= NVME_SMALL_POOL_SIZE / sizeof(*sg_list))
drivers/nvme/host/pci.c
1163
if (WARN_ON_ONCE(mapped == entries)) {
drivers/nvme/host/pci.c
1261
unsigned int entries = req->nr_integrity_segments;
drivers/nvme/host/pci.c
1287
entries = 1;
drivers/nvme/host/pci.c
1306
(entries == 1 && !(nvme_req(req)->flags & NVME_REQ_USERCMD))) {
drivers/nvme/host/pci.c
1323
if (entries == 1) {
drivers/nvme/host/zns.c
215
ret = nvme_zone_parse_entry(ns, &report->entries[i],
drivers/nvme/target/discovery.c
109
struct nvmf_disc_rsp_page_entry *e = &hdr->entries[numrec];
drivers/nvme/target/discovery.c
149
size_t entries = 1;
drivers/nvme/target/discovery.c
154
entries++;
drivers/nvme/target/discovery.c
157
entries++;
drivers/nvme/target/discovery.c
158
return entries;
drivers/nvmem/core.c
2115
void nvmem_add_cell_lookups(struct nvmem_cell_lookup *entries, size_t nentries)
drivers/nvmem/core.c
2121
list_add_tail(&entries[i].node, &nvmem_lookup_list);
drivers/nvmem/core.c
2133
void nvmem_del_cell_lookups(struct nvmem_cell_lookup *entries, size_t nentries)
drivers/nvmem/core.c
2139
list_del(&entries[i].node);
drivers/of/dynamic.c
657
INIT_LIST_HEAD(&ocs->entries);
drivers/of/dynamic.c
684
list_for_each_entry_safe_reverse(ce, cen, &ocs->entries, node)
drivers/of/dynamic.c
705
list_for_each_entry(ce, &ocs->entries, node) {
drivers/of/dynamic.c
709
list_for_each_entry_continue_reverse(ce, &ocs->entries,
drivers/of/dynamic.c
737
list_for_each_entry(ce, &ocs->entries, node) {
drivers/of/dynamic.c
809
list_for_each_entry_reverse(ce, &ocs->entries, node) {
drivers/of/dynamic.c
813
list_for_each_entry_continue(ce, &ocs->entries, node) {
drivers/of/dynamic.c
838
list_for_each_entry_reverse(ce, &ocs->entries, node) {
drivers/of/dynamic.c
921
list_add_tail(&ce->node, &ocs->entries);
drivers/of/fdt.c
630
int *entries)
drivers/of/fdt.c
637
*entries = 0;
drivers/of/fdt.c
641
*entries = len / elen;
drivers/of/fdt.c
649
int entries;
drivers/of/fdt.c
651
prop = of_flat_dt_get_addr_size_prop(node, name, &entries);
drivers/of/fdt.c
652
if (!prop || entries != 1)
drivers/of/overlay.c
1111
list_for_each_entry(ce, &ovcs->cset.entries, node) {
drivers/of/overlay.c
1144
list_for_each_entry(remove_ce, &remove_ovcs->cset.entries, node) {
drivers/of/overlay.c
538
list_for_each_entry_continue(ce_2, &ovcs->cset.entries, node) {
drivers/of/overlay.c
572
list_for_each_entry_continue(ce_2, &ovcs->cset.entries, node) {
drivers/of/overlay.c
610
list_for_each_entry(ce_1, &ovcs->cset.entries, node) {
drivers/of/overlay.c
854
if (ovcs->cset.entries.next)
drivers/pci/hotplug/rpaphp_core.c
231
unsigned int entries;
drivers/pci/hotplug/rpaphp_core.c
241
value = of_prop_next_u32(info, NULL, &entries);
drivers/pci/hotplug/rpaphp_core.c
247
for (j = 0; j < entries; j++) {
drivers/pci/msi/api.c
107
int pci_enable_msix_range(struct pci_dev *dev, struct msix_entry *entries,
drivers/pci/msi/api.c
110
return __pci_enable_msix_range(dev, entries, minvec, maxvec, NULL, 0);
drivers/pci/msi/msi.c
624
static int msix_setup_msi_descs(struct pci_dev *dev, struct msix_entry *entries,
drivers/pci/msi/msi.c
634
desc.msi_index = entries ? entries[i].entry : i;
drivers/pci/msi/msi.c
647
static void msix_update_entries(struct pci_dev *dev, struct msix_entry *entries)
drivers/pci/msi/msi.c
651
if (entries) {
drivers/pci/msi/msi.c
653
entries->vector = desc->irq;
drivers/pci/msi/msi.c
654
entries++;
drivers/pci/msi/msi.c
670
static int __msix_setup_interrupts(struct pci_dev *__dev, struct msix_entry *entries,
drivers/pci/msi/msi.c
675
int ret = msix_setup_msi_descs(dev, entries, nvec, masks);
drivers/pci/msi/msi.c
688
msix_update_entries(dev, entries);
drivers/pci/msi/msi.c
693
static int msix_setup_interrupts(struct pci_dev *dev, struct msix_entry *entries,
drivers/pci/msi/msi.c
700
return __msix_setup_interrupts(dev, entries, nvec, masks);
drivers/pci/msi/msi.c
714
static int msix_capability_init(struct pci_dev *dev, struct msix_entry *entries,
drivers/pci/msi/msi.c
740
ret = msix_setup_interrupts(dev, entries, nvec, affd);
drivers/pci/msi/msi.c
772
static bool pci_msix_validate_entries(struct pci_dev *dev, struct msix_entry *entries, int nvec)
drivers/pci/msi/msi.c
777
if (!entries)
drivers/pci/msi/msi.c
785
if (entries[i].entry == entries[j].entry)
drivers/pci/msi/msi.c
789
if (nogap && entries[i].entry != i)
drivers/pci/msi/msi.c
795
int __pci_enable_msix_range(struct pci_dev *dev, struct msix_entry *entries, int minvec,
drivers/pci/msi/msi.c
822
if (!pci_msix_validate_entries(dev, entries, nvec))
drivers/pci/msi/msi.c
850
rc = msix_capability_init(dev, entries, nvec, affd);
drivers/pci/msi/msi.h
97
int __pci_enable_msix_range(struct pci_dev *dev, struct msix_entry *entries, int minvec,
drivers/perf/arm_brbe.c
791
struct perf_branch_entry *pbe = &branch_stack->entries[nr_filtered];
drivers/perf/arm_pmuv3.c
1356
size_t size = struct_size_t(struct perf_branch_stack, entries,
drivers/phy/cadence/phy-cadence-torrent.c
443
const struct cdns_torrent_vals_entry *entries;
drivers/phy/cadence/phy-cadence-torrent.c
476
if (tbl->entries[i].key == key)
drivers/phy/cadence/phy-cadence-torrent.c
477
return tbl->entries[i].vals;
drivers/phy/cadence/phy-cadence-torrent.c
5355
.entries = link_cmn_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5359
.entries = xcvr_diag_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5363
.entries = pcs_cmn_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5367
.entries = cmn_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5371
.entries = cdns_tx_ln_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5375
.entries = cdns_rx_ln_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5489
.entries = link_cmn_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5493
.entries = xcvr_diag_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5497
.entries = pcs_cmn_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5501
.entries = j721e_phy_pma_cmn_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5505
.entries = cmn_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5509
.entries = ti_tx_ln_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5513
.entries = cdns_rx_ln_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5817
.entries = link_cmn_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5821
.entries = xcvr_diag_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5825
.entries = pcs_cmn_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5829
.entries = j721e_phy_pma_cmn_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5833
.entries = ti_j7200_cmn_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5837
.entries = ti_j7200_tx_ln_vals_entries,
drivers/phy/cadence/phy-cadence-torrent.c
5841
.entries = ti_j7200_rx_ln_vals_entries,
drivers/phy/phy-common-props-test.c
105
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
114
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
138
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
147
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
15
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
161
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
170
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
185
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
194
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
206
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
213
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
22
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
228
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
237
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
250
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
258
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
273
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
282
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
296
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
305
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
328
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
337
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
351
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
360
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
37
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
375
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
384
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
46
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
59
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
67
node = fwnode_create_software_node(entries, NULL);
drivers/phy/phy-common-props-test.c
82
static const struct property_entry entries[] = {
drivers/phy/phy-common-props-test.c
91
node = fwnode_create_software_node(entries, NULL);
drivers/platform/chrome/cros_usbpd_logger.c
177
int entries = 0;
drivers/platform/chrome/cros_usbpd_logger.c
180
while (entries++ < CROS_USBPD_MAX_LOG_ENTRIES) {
drivers/platform/chrome/wilco_ec/event.c
109
q = kzalloc_flex(*q, entries, capacity);
drivers/platform/chrome/wilco_ec/event.c
121
return q->head == q->tail && !q->entries[q->head];
drivers/platform/chrome/wilco_ec/event.c
127
return q->head == q->tail && q->entries[q->head];
drivers/platform/chrome/wilco_ec/event.c
137
ev = q->entries[q->tail];
drivers/platform/chrome/wilco_ec/event.c
138
q->entries[q->tail] = NULL;
drivers/platform/chrome/wilco_ec/event.c
155
q->entries[q->head] = ev;
drivers/platform/chrome/wilco_ec/event.c
98
struct ec_event *entries[] __counted_by(capacity);
drivers/platform/x86/dell/dell-wmi-ddv.c
130
u64 entries;
drivers/platform/x86/dell/dell-wmi-ddv.c
268
u64 buffer_size, rem, entries;
drivers/platform/x86/dell/dell-wmi-ddv.c
288
entries = div64_u64_rem(buffer_size, entry_size, &rem);
drivers/platform/x86/dell/dell-wmi-ddv.c
294
if (!entries) {
drivers/platform/x86/dell/dell-wmi-ddv.c
300
sensors->entries = entries;
drivers/platform/x86/dell/dell-wmi-ddv.c
328
if (channel >= data->fans.entries)
drivers/platform/x86/dell/dell-wmi-ddv.c
354
if (channel >= data->temps.entries)
drivers/platform/x86/dell/dell-wmi-ddv.c
411
if (channel >= data->fans.entries)
drivers/platform/x86/dell/dell-wmi-ddv.c
442
if (channel >= data->temps.entries)
drivers/platform/x86/dell/dell-wmi-ddv.c
591
info = dell_wmi_ddv_channel_create(&wdev->dev, sensors->entries, type, config);
drivers/platform/x86/hp/hp-wmi.c
2566
min_rpm = fan_table->entries[0].cpu_rpm;
drivers/platform/x86/hp/hp-wmi.c
2567
max_rpm = fan_table->entries[fan_table->header.num_entries - 1].cpu_rpm;
drivers/platform/x86/hp/hp-wmi.c
2568
gpu_delta = fan_table->entries[0].gpu_rpm - fan_table->entries[0].cpu_rpm;
drivers/platform/x86/hp/hp-wmi.c
477
struct victus_s_fan_table_entry entries[];
drivers/rapidio/devices/tsi721.c
1903
int mbox, int entries)
drivers/rapidio/devices/tsi721.c
1909
if ((entries < TSI721_OMSGD_MIN_RING_SIZE) ||
drivers/rapidio/devices/tsi721.c
1910
(entries > (TSI721_OMSGD_RING_SIZE)) ||
drivers/rapidio/devices/tsi721.c
1911
(!is_power_of_2(entries)) || mbox >= RIO_MAX_MBOX) {
drivers/rapidio/devices/tsi721.c
1922
priv->omsg_ring[mbox].size = entries;
drivers/rapidio/devices/tsi721.c
1928
for (i = 0; i < entries; i++) {
drivers/rapidio/devices/tsi721.c
1945
(entries + 1) * sizeof(struct tsi721_omsg_desc),
drivers/rapidio/devices/tsi721.c
1957
priv->omsg_ring[mbox].sts_size = roundup_pow_of_two(entries + 1);
drivers/rapidio/devices/tsi721.c
2024
bd_ptr[entries].type_id = cpu_to_le32(DTYPE5 << 29);
drivers/rapidio/devices/tsi721.c
2025
bd_ptr[entries].msg_info = 0;
drivers/rapidio/devices/tsi721.c
2026
bd_ptr[entries].next_lo =
drivers/rapidio/devices/tsi721.c
2029
bd_ptr[entries].next_hi =
drivers/rapidio/devices/tsi721.c
2056
(entries + 1) * sizeof(struct tsi721_omsg_desc),
drivers/rapidio/devices/tsi721.c
2192
int mbox, int entries)
drivers/rapidio/devices/tsi721.c
2200
if ((entries < TSI721_IMSGD_MIN_RING_SIZE) ||
drivers/rapidio/devices/tsi721.c
2201
(entries > TSI721_IMSGD_RING_SIZE) ||
drivers/rapidio/devices/tsi721.c
2202
(!is_power_of_2(entries)) || mbox >= RIO_MAX_MBOX) {
drivers/rapidio/devices/tsi721.c
2214
priv->imsg_ring[mbox].size = entries;
drivers/rapidio/devices/tsi721.c
2225
entries * TSI721_MSG_BUFFER_SIZE,
drivers/rapidio/devices/tsi721.c
2239
entries * 8,
drivers/rapidio/devices/tsi721.c
2253
entries * sizeof(struct tsi721_imsg_desc),
drivers/rapidio/devices/tsi721.c
2266
for (i = 0; i < entries; i++)
drivers/rapidio/devices/tsi721.c
2294
iowrite32(TSI721_DMAC_DSSZ_SIZE(entries),
drivers/rapidio/devices/tsi721.c
2303
iowrite32(TSI721_DMAC_DSSZ_SIZE(entries),
drivers/rapidio/devices/tsi721.c
2344
priv->imsg_ring[mbox].fq_wrptr = entries - 1;
drivers/rapidio/devices/tsi721.c
2345
iowrite32(entries - 1, priv->regs + TSI721_IBDMAC_FQWP(ch));
drivers/rapidio/devices/tsi721.c
747
struct msix_entry entries[TSI721_VECT_MAX];
drivers/rapidio/devices/tsi721.c
751
entries[TSI721_VECT_IDB].entry = TSI721_MSIX_SR2PC_IDBQ_RCV(IDB_QUEUE);
drivers/rapidio/devices/tsi721.c
752
entries[TSI721_VECT_PWRX].entry = TSI721_MSIX_SRIO_MAC_INT;
drivers/rapidio/devices/tsi721.c
761
entries[TSI721_VECT_IMB0_RCV + i].entry =
drivers/rapidio/devices/tsi721.c
763
entries[TSI721_VECT_IMB0_INT + i].entry =
drivers/rapidio/devices/tsi721.c
765
entries[TSI721_VECT_OMB0_DONE + i].entry =
drivers/rapidio/devices/tsi721.c
767
entries[TSI721_VECT_OMB0_INT + i].entry =
drivers/rapidio/devices/tsi721.c
778
entries[TSI721_VECT_DMA0_DONE + i].entry =
drivers/rapidio/devices/tsi721.c
780
entries[TSI721_VECT_DMA0_INT + i].entry =
drivers/rapidio/devices/tsi721.c
785
err = pci_enable_msix_exact(priv->pdev, entries, ARRAY_SIZE(entries));
drivers/rapidio/devices/tsi721.c
795
priv->msix[TSI721_VECT_IDB].vector = entries[TSI721_VECT_IDB].vector;
drivers/rapidio/devices/tsi721.c
798
priv->msix[TSI721_VECT_PWRX].vector = entries[TSI721_VECT_PWRX].vector;
drivers/rapidio/devices/tsi721.c
804
entries[TSI721_VECT_IMB0_RCV + i].vector;
drivers/rapidio/devices/tsi721.c
810
entries[TSI721_VECT_IMB0_INT + i].vector;
drivers/rapidio/devices/tsi721.c
816
entries[TSI721_VECT_OMB0_DONE + i].vector;
drivers/rapidio/devices/tsi721.c
822
entries[TSI721_VECT_OMB0_INT + i].vector;
drivers/rapidio/devices/tsi721.c
831
entries[TSI721_VECT_DMA0_DONE + i].vector;
drivers/rapidio/devices/tsi721.c
837
entries[TSI721_VECT_DMA0_INT + i].vector;
drivers/rapidio/rio.c
235
int entries,
drivers/rapidio/rio.c
262
rc = mport->ops->open_inb_mbox(mport, dev_id, mbox, entries);
drivers/rapidio/rio.c
320
int entries,
drivers/rapidio/rio.c
346
rc = mport->ops->open_outb_mbox(mport, dev_id, mbox, entries);
drivers/rapidio/rio_cm.c
1618
u32 entries;
drivers/rapidio/rio_cm.c
1624
if (copy_from_user(&entries, arg, sizeof(entries)))
drivers/rapidio/rio_cm.c
1626
if (entries == 0 || entries > RIO_MAX_MPORTS)
drivers/rapidio/rio_cm.c
1628
buf = kcalloc(entries + 1, sizeof(u32), GFP_KERNEL);
drivers/rapidio/rio_cm.c
1636
if (count++ < entries) {
drivers/ras/amd/fmpm.c
108
struct cper_fru_poison_desc entries[];
drivers/ras/amd/fmpm.c
288
struct cper_fru_poison_desc *fpd_i = &rec->entries[i];
drivers/ras/amd/fmpm.c
380
fpd_dest = &rec->entries[entry];
drivers/ras/amd/fmpm.c
442
struct cper_fru_poison_desc *fpd = &rec->entries[i];
drivers/ras/amd/fmpm.c
920
fpd = &rec->entries[entry];
drivers/remoteproc/st_remoteproc.c
123
int entries;
drivers/remoteproc/st_remoteproc.c
125
entries = of_reserved_mem_region_count(np);
drivers/remoteproc/st_remoteproc.c
127
for (int index = 0; index < entries; index++) {
drivers/rpmsg/qcom_glink_rpm.c
248
id = le32_to_cpu(toc->entries[i].id);
drivers/rpmsg/qcom_glink_rpm.c
249
offset = le32_to_cpu(toc->entries[i].offset);
drivers/rpmsg/qcom_glink_rpm.c
250
size = le32_to_cpu(toc->entries[i].size);
drivers/rpmsg/qcom_glink_rpm.c
45
struct rpm_toc_entry entries[];
drivers/s390/block/dasd_int.h
241
__u8 entries; /* 0 Number of device entries */
drivers/s390/char/sclp.h
204
u32 entries[];
drivers/s390/char/sclp_early_core.c
383
if (!sccb->entries[sn])
drivers/s390/char/sclp_early_core.c
385
rn = sccb->entries[sn] >> 16;
drivers/s390/char/sclp_mem.c
146
if (sccb->entries[i])
drivers/s390/char/sclp_mem.c
147
sclp_unassign_storage(sccb->entries[i] >> 16);
drivers/s390/char/sclp_mem.c
505
if (!sccb->entries[i])
drivers/s390/char/sclp_mem.c
508
insert_increment(sccb->entries[i] >> 16, 0, 1);
drivers/s390/char/sclp_mem.c
515
if (!sccb->entries[i])
drivers/s390/char/sclp_mem.c
518
insert_increment(sccb->entries[i] >> 16, 1, 1);
drivers/s390/char/sclp_mem.c
67
u32 entries[];
drivers/s390/net/qeth_core.h
599
struct list_head entries;
drivers/s390/net/qeth_core_main.c
1605
INIT_LIST_HEAD(&card->ipato.entries);
drivers/s390/net/qeth_core_main.c
270
LIST_HEAD(entries);
drivers/s390/net/qeth_core_main.c
293
list_for_each_entry_safe(entry, tmp, &entries,
drivers/s390/net/qeth_core_main.c
302
list_add(&entry->init_list, &entries);
drivers/s390/net/qeth_core_main.c
307
list_splice(&entries, &pool->entry_list);
drivers/s390/net/qeth_l2_main.c
593
(*cb)(priv, &rr->entries[i]);
drivers/s390/net/qeth_l3_main.c
102
list_for_each_entry(ipatoe, &card->ipato.entries, entry) {
drivers/s390/net/qeth_l3_main.c
522
list_for_each_entry_safe(ipatoe, tmp, &card->ipato.entries, entry) {
drivers/s390/net/qeth_l3_main.c
541
list_for_each_entry(ipatoe, &card->ipato.entries, entry) {
drivers/s390/net/qeth_l3_main.c
553
list_add_tail(&new->entry, &card->ipato.entries);
drivers/s390/net/qeth_l3_main.c
573
list_for_each_entry_safe(ipatoe, tmp, &card->ipato.entries, entry) {
drivers/s390/net/qeth_l3_sys.c
374
list_for_each_entry(ipatoe, &card->ipato.entries, entry) {
drivers/scsi/aacraid/aacraid.h
987
u32 entries; /*Number of queue entries */
drivers/scsi/aacraid/comminit.c
272
q->entries = qsize;
drivers/scsi/aacraid/commsup.c
804
if (le32_to_cpu(*q->headers.consumer) >= q->entries)
drivers/scsi/aacraid/commsup.c
832
if (le32_to_cpu(*q->headers.consumer) >= q->entries)
drivers/scsi/arm/msgqueue.c
56
msgq->free = &msgq->entries[0];
drivers/scsi/arm/msgqueue.c
59
msgq->entries[i].next = &msgq->entries[i + 1];
drivers/scsi/arm/msgqueue.c
61
msgq->entries[NR_MESSAGES - 1].next = NULL;
drivers/scsi/arm/msgqueue.h
28
struct msgqueue_entry entries[NR_MESSAGES];
drivers/scsi/fnic/vnic_rq.h
66
#define VNIC_RQ_BUF_BLKS_NEEDED(entries) \
drivers/scsi/fnic/vnic_rq.h
67
DIV_ROUND_UP(entries, VNIC_RQ_BUF_BLK_ENTRIES)
drivers/scsi/fnic/vnic_wq.h
71
#define VNIC_WQ_BUF_BLKS_NEEDED(entries) \
drivers/scsi/fnic/vnic_wq.h
72
DIV_ROUND_UP(entries, VNIC_WQ_BUF_BLK_ENTRIES)
drivers/scsi/hisi_sas/hisi_sas_v2_hw.c
3522
int entries, entries_old = 0, time;
drivers/scsi/hisi_sas/hisi_sas_v2_hw.c
3525
entries = hisi_sas_read32(hisi_hba, CQE_SEND_CNT);
drivers/scsi/hisi_sas/hisi_sas_v2_hw.c
3526
if (entries == entries_old)
drivers/scsi/hisi_sas/hisi_sas_v2_hw.c
3529
entries_old = entries;
drivers/scsi/hisi_sas/hisi_sas_v3_hw.c
2828
int entries, entries_old = 0, time;
drivers/scsi/hisi_sas/hisi_sas_v3_hw.c
2831
entries = hisi_sas_read32(hisi_hba, CQE_SEND_CNT);
drivers/scsi/hisi_sas/hisi_sas_v3_hw.c
2832
if (entries == entries_old)
drivers/scsi/hisi_sas/hisi_sas_v3_hw.c
2835
entries_old = entries;
drivers/scsi/ipr.c
7006
int entries, found, flag, i;
drivers/scsi/ipr.c
7022
entries = be16_to_cpu(ioa_cfg->u.cfg_table64->hdr64.num_entries);
drivers/scsi/ipr.c
7024
entries = ioa_cfg->u.cfg_table->hdr.num_entries;
drivers/scsi/ipr.c
7026
for (i = 0; i < entries; i++) {
drivers/scsi/lpfc/lpfc.h
1575
u32 entries; /* storing number entries/size of ring */
drivers/scsi/lpfc/lpfc_crtn.h
97
u32 entries);
drivers/scsi/lpfc/lpfc_ct.c
490
uint32_t size, int *entries)
drivers/scsi/lpfc/lpfc_ct.c
541
*entries = i;
drivers/scsi/lpfc/lpfc_sli.c
8047
u32 entries)
drivers/scsi/lpfc/lpfc_sli.c
8049
rx_monitor->ring = kmalloc_objs(struct rx_info_entry, entries);
drivers/scsi/lpfc/lpfc_sli.c
8056
rx_monitor->entries = entries;
drivers/scsi/lpfc/lpfc_sli.c
8071
rx_monitor->entries = 0;
drivers/scsi/lpfc/lpfc_sli.c
8095
u32 ring_size = rx_monitor->entries;
drivers/scsi/lpfc/lpfc_sli.c
8133
u32 ring_size = rx_monitor->entries;
drivers/scsi/pmcraid.c
5029
cfgte = &pinstance->cfg_table->entries[i];
drivers/scsi/pmcraid.h
368
entries[PMCRAID_MAX_RESOURCES];
drivers/scsi/qla2xxx/qla_def.h
3226
}, entries);
drivers/scsi/qla2xxx/qla_def.h
3256
entries[MAX_FIBRE_DEVICES_MAX];
drivers/scsi/qla2xxx/qla_dfs.c
184
uint16_t entries;
drivers/scsi/qla2xxx/qla_dfs.c
198
&entries);
drivers/scsi/qla2xxx/qla_dfs.c
206
for (i = 0; i < entries; i++) {
drivers/scsi/qla2xxx/qla_fw.h
1389
struct mid_conf_entry_24xx entries[MAX_MULTI_ID_FABRIC];
drivers/scsi/qla2xxx/qla_fw.h
1724
__le16 entries;
drivers/scsi/qla2xxx/qla_fw.h
2190
struct mid_conf_entry_24xx entries[MAX_MULTI_ID_FABRIC];
drivers/scsi/qla2xxx/qla_gs.c
1595
qla2x00_hba_attributes(scsi_qla_host_t *vha, void *entries,
drivers/scsi/qla2xxx/qla_gs.c
1605
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1615
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1627
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1648
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1660
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1672
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1697
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1709
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1723
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1734
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1758
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1769
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1780
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1791
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1801
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1812
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1824
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1849
qla2x00_port_attributes(scsi_qla_host_t *vha, void *entries,
drivers/scsi/qla2xxx/qla_gs.c
1861
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1880
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1891
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1902
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1912
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1924
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1942
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1953
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1964
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1976
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1987
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
1998
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
2010
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
2025
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
2036
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
2047
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
2061
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
2074
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
2088
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
2101
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
2114
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
2125
eiter = entries + size;
drivers/scsi/qla2xxx/qla_gs.c
2156
void *entries;
drivers/scsi/qla2xxx/qla_gs.c
2190
entries = &ct_req->req.rhba.attrs.entry;
drivers/scsi/qla2xxx/qla_gs.c
2192
size += qla2x00_hba_attributes(vha, entries, callopt);
drivers/scsi/qla2xxx/qla_gs.c
2203
entries, size);
drivers/scsi/qla2xxx/qla_gs.c
2290
void *entries;
drivers/scsi/qla2xxx/qla_gs.c
2315
entries = ct_req->req.rprt.attrs.entry;
drivers/scsi/qla2xxx/qla_gs.c
2316
size += qla2x00_port_attributes(vha, entries, callopt);
drivers/scsi/qla2xxx/qla_gs.c
2324
entries, size);
drivers/scsi/qla2xxx/qla_gs.c
2369
void *entries;
drivers/scsi/qla2xxx/qla_gs.c
2401
entries = ct_req->req.rpa.attrs.entry;
drivers/scsi/qla2xxx/qla_gs.c
2403
size += qla2x00_port_attributes(vha, entries, callopt);
drivers/scsi/qla2xxx/qla_gs.c
2412
entries, size);
drivers/scsi/qla2xxx/qla_gs.c
3346
d = &ct_rsp->entries[i];
drivers/scsi/qla2xxx/qla_gs.c
336
gid_data = &ct_rsp->rsp.gid_pt.entries[i];
drivers/scsi/qla2xxx/qla_init.c
5872
uint16_t entries;
drivers/scsi/qla2xxx/qla_init.c
5884
entries = MAX_FIBRE_DEVICES_LOOP;
drivers/scsi/qla2xxx/qla_init.c
5889
&entries);
drivers/scsi/qla2xxx/qla_init.c
5894
"Entries in ID list (%d).\n", entries);
drivers/scsi/qla2xxx/qla_init.c
5896
ha->gid_list, entries * sizeof(*ha->gid_list));
drivers/scsi/qla2xxx/qla_init.c
5898
if (entries == 0) {
drivers/scsi/qla2xxx/qla_init.c
5940
for (index = 0; index < entries; index++) {
drivers/scsi/qla2xxx/qla_init.c
9711
int i, entries;
drivers/scsi/qla2xxx/qla_init.c
9723
entries = ha->fcp_prio_cfg->num_entries;
drivers/scsi/qla2xxx/qla_init.c
9726
for (i = 0; i < entries; i++) {
drivers/scsi/qla2xxx/qla_mbx.c
2999
uint16_t *entries)
drivers/scsi/qla2xxx/qla_mbx.c
3037
*entries = mcp->mb[1];
drivers/scsi/qla2xxx/qla_mbx.c
6830
void *id_list, dma_addr_t id_list_dma, uint16_t *entries)
drivers/scsi/qla2xxx/qla_mbx.c
6852
*entries = mc.mb[1];
drivers/scsi/qla2xxx/qla_nx.c
1513
uint32_t entries = le32_to_cpu(directory->num_entries);
drivers/scsi/qla2xxx/qla_nx.c
1515
for (i = 0; i < entries; i++) {
drivers/scsi/qla2xxx/qla_nx.c
1779
uint32_t entries;
drivers/scsi/qla2xxx/qla_nx.c
1791
entries = le32_to_cpu(ptab_desc->num_entries);
drivers/scsi/qla2xxx/qla_nx.c
1793
for (i = 0; i < entries; i++) {
drivers/scsi/qla2xxx/qla_nx2.c
943
int index, entries;
drivers/scsi/qla2xxx/qla_nx2.c
949
entries = vha->reset_tmplt.hdr->entries;
drivers/scsi/qla2xxx/qla_nx2.c
952
for (; (!vha->reset_tmplt.seq_end) && (index < entries); index++) {
drivers/scsi/qla2xxx/qla_nx2.h
222
uint16_t entries;
drivers/scsi/qla2xxx/qla_sup.c
1169
le16_to_cpu(hdr.version), le16_to_cpu(hdr.entries),
drivers/scsi/qla2xxx/qla_sup.c
1184
cnt = (sizeof(hdr) + le16_to_cpu(hdr.entries) * sizeof(*entry)) >> 1;
drivers/scsi/qla2xxx/qla_sup.c
1191
le16_to_cpu(hdr.version), le16_to_cpu(hdr.entries),
drivers/scsi/qla2xxx/qla_sup.c
1197
cnt = le16_to_cpu(hdr.entries);
drivers/scsi/qla2xxx/qla_target.c
1330
uint16_t entries;
drivers/scsi/qla2xxx/qla_target.c
1342
rc = qla24xx_gidlist_wait(vha, gid_list, gid_list_dma, &entries);
drivers/scsi/qla2xxx/qla_target.c
1353
for (i = 0; i < entries; i++) {
drivers/scsi/qla4xxx/ql4_83xx.c
1117
int index, entries;
drivers/scsi/qla4xxx/ql4_83xx.c
1123
entries = ha->reset_tmplt.hdr->entries;
drivers/scsi/qla4xxx/ql4_83xx.c
1126
for (; (!ha->reset_tmplt.seq_end) && (index < entries); index++) {
drivers/scsi/qla4xxx/ql4_83xx.h
165
__le16 entries;
drivers/scsi/qlogicpti.c
1388
#define QSIZE(entries) (((entries) + 1) * QUEUE_ENTRY_LEN)
drivers/scsi/qlogicpti.c
1427
#define QSIZE(entries) (((entries) + 1) * QUEUE_ENTRY_LEN)
drivers/scsi/qlogicpti.c
821
#define QSIZE(entries) (((entries) + 1) * QUEUE_ENTRY_LEN)
drivers/scsi/snic/vnic_wq.h
49
#define VNIC_WQ_BUF_BLK_ENTRIES(entries) \
drivers/scsi/snic/vnic_wq.h
50
((unsigned int)(entries < VNIC_WQ_BUF_DFLT_BLK_ENTRIES) ? \
drivers/scsi/snic/vnic_wq.h
54
#define VNIC_WQ_BUF_BLKS_NEEDED(entries) \
drivers/scsi/snic/vnic_wq.h
55
DIV_ROUND_UP(entries, VNIC_WQ_BUF_DFLT_BLK_ENTRIES)
drivers/scsi/snic/vnic_wq.h
56
#define VNIC_WQ_BUF_BLKS_NEEDED(entries) \
drivers/scsi/snic/vnic_wq.h
57
DIV_ROUND_UP(entries, VNIC_WQ_BUF_DFLT_BLK_ENTRIES)
drivers/sh/intc/virq.c
203
struct intc_subgroup_entry *entries[32];
drivers/sh/intc/virq.c
212
(void ***)entries, 0, ARRAY_SIZE(entries),
drivers/sh/intc/virq.c
219
entry = radix_tree_deref_slot((void **)entries[i]);
drivers/sh/intc/virq.c
255
radix_tree_replace_slot(&d->tree, (void **)entries[i],
drivers/soc/qcom/smp2p.c
240
memcpy(buf, in->entries[i].name, sizeof(buf));
drivers/soc/qcom/smp2p.c
242
entry->value = &in->entries[i].value;
drivers/soc/qcom/smp2p.c
446
memcpy(out->entries[out->valid_entries].name, buf, SMP2P_MAX_ENTRY_NAME);
drivers/soc/qcom/smp2p.c
449
entry->value = &out->entries[out->valid_entries].value;
drivers/soc/qcom/smp2p.c
78
} entries[SMP2P_MAX_ENTRY];
drivers/soc/qcom/smsm.c
532
smsm->entries = devm_kcalloc(&pdev->dev,
drivers/soc/qcom/smsm.c
536
if (!smsm->entries)
drivers/soc/qcom/smsm.c
628
entry = &smsm->entries[id];
drivers/soc/qcom/smsm.c
650
if (smsm->entries[id].domain)
drivers/soc/qcom/smsm.c
651
irq_domain_remove(smsm->entries[id].domain);
drivers/soc/qcom/smsm.c
668
if (smsm->entries[id].domain)
drivers/soc/qcom/smsm.c
669
irq_domain_remove(smsm->entries[id].domain);
drivers/soc/qcom/smsm.c
91
struct smsm_entry *entries;
drivers/spi/spi-dw-core.c
567
u32 room, entries, sts;
drivers/spi/spi-dw-core.c
590
entries = readl_relaxed(dws->regs + DW_SPI_TXFLR);
drivers/spi/spi-dw-core.c
591
if (!entries) {
drivers/spi/spi-dw-core.c
595
room = min(dws->fifo_len - entries, len);
drivers/spi/spi-dw-core.c
608
entries = readl_relaxed(dws->regs + DW_SPI_RXFLR);
drivers/spi/spi-dw-core.c
609
if (!entries) {
drivers/spi/spi-dw-core.c
617
entries = min(entries, len);
drivers/spi/spi-dw-core.c
618
for (; entries; --entries, --len)
drivers/staging/media/ipu3/include/uapi/intel-ipu3.h
2256
entries[IPU3_UAPI_YUVP2_TCC_MACC_TABLE_ELEMENTS];
drivers/staging/media/ipu3/include/uapi/intel-ipu3.h
2269
__u16 entries[IPU3_UAPI_YUVP2_TCC_INV_Y_LUT_ELEMENTS];
drivers/staging/media/ipu3/include/uapi/intel-ipu3.h
2279
__u16 entries[IPU3_UAPI_YUVP2_TCC_GAIN_PCWL_LUT_ELEMENTS];
drivers/staging/media/ipu3/include/uapi/intel-ipu3.h
2289
__s16 entries[IPU3_UAPI_YUVP2_TCC_R_SQR_LUT_ELEMENTS];
drivers/staging/media/ipu3/ipu3-abi.h
909
u16 entries[IMGU_ABI_YUVP2_YTM_LUT_ENTRIES];
drivers/staging/media/ipu3/ipu3-css-params.c
2224
acc->ytm.entries[i] = i * 32;
drivers/staging/media/ipu3/ipu3-css-params.c
2259
acc->tcc.macc_table.entries[i].a = 1024;
drivers/staging/media/ipu3/ipu3-css-params.c
2260
acc->tcc.macc_table.entries[i].b = 0;
drivers/staging/media/ipu3/ipu3-css-params.c
2261
acc->tcc.macc_table.entries[i].c = 0;
drivers/staging/media/ipu3/ipu3-css-params.c
2262
acc->tcc.macc_table.entries[i].d = 1024;
drivers/staging/media/ipu3/ipu3-css-params.c
2265
acc->tcc.inv_y_lut.entries[6] = 1023;
drivers/staging/media/ipu3/ipu3-css-params.c
2267
acc->tcc.inv_y_lut.entries[i] = 1024 >> (i - 6);
drivers/staging/vme_user/vme.c
901
INIT_LIST_HEAD(&dma_list->entries);
drivers/staging/vme_user/vme_bridge.h
57
struct list_head entries;
drivers/staging/vme_user/vme_tsi148.c
1723
list_add_tail(&entry->list, &list->entries);
drivers/staging/vme_user/vme_tsi148.c
1736
if (entry->list.prev != &list->entries) {
drivers/staging/vme_user/vme_tsi148.c
1815
entry = list_first_entry(&list->entries, struct tsi148_dma_entry,
drivers/staging/vme_user/vme_tsi148.c
1881
list_for_each_safe(pos, temp, &list->entries) {
drivers/thunderbolt/property.c
164
const struct tb_property_entry *entries;
drivers/thunderbolt/property.c
187
entries = (const struct tb_property_entry *)&block[content_offset];
drivers/thunderbolt/property.c
188
nentries = content_len / (sizeof(*entries) / 4);
drivers/thunderbolt/property.c
195
property = tb_property_parse(block, block_len, &entries[i]);
drivers/thunderbolt/property.c
28
struct tb_property_entry entries[];
drivers/thunderbolt/property.c
33
struct tb_property_entry entries[];
drivers/thunderbolt/property.c
421
entry = pe->entries;
drivers/thunderbolt/property.c
428
entry = re->entries;
drivers/tty/vt/vt_ioctl.c
1030
compat_caddr_t entries;
drivers/tty/vt/vt_ioctl.c
1042
tmp_entries = compat_ptr(tmp.entries);
drivers/tty/vt/vt_ioctl.c
498
return con_set_unimap(vc, tmp.entry_ct, tmp.entries);
drivers/tty/vt/vt_ioctl.c
503
tmp.entries);
drivers/ufs/core/ufs-mcq.c
311
u32 entries = hwq->max_entries;
drivers/ufs/core/ufs-mcq.c
314
while (entries > 0) {
drivers/ufs/core/ufs-mcq.c
317
entries--;
drivers/usb/cdns3/cdnsp-gadget.h
1303
struct cdnsp_erst_entry *entries;
drivers/usb/cdns3/cdnsp-mem.c
1034
erst->entries = dma_alloc_coherent(pdev->dev, size,
drivers/usb/cdns3/cdnsp-mem.c
1036
if (!erst->entries)
drivers/usb/cdns3/cdnsp-mem.c
1043
entry = &erst->entries[val];
drivers/usb/cdns3/cdnsp-mem.c
1058
if (erst->entries)
drivers/usb/cdns3/cdnsp-mem.c
1059
dma_free_coherent(dev, size, erst->entries,
drivers/usb/cdns3/cdnsp-mem.c
1062
erst->entries = NULL;
drivers/usb/core/message.c
455
while (io->entries--)
drivers/usb/core/message.c
456
usb_free_urb(io->urbs[io->entries]);
drivers/usb/core/message.c
504
for (i = 0, found = 0; i < io->entries; i++) {
drivers/usb/core/message.c
580
io->entries = 1;
drivers/usb/core/message.c
583
io->entries = nents;
drivers/usb/core/message.c
587
io->urbs = kmalloc_objs(*io->urbs, io->entries, mem_flags);
drivers/usb/core/message.c
595
for_each_sg(sg, sg, io->entries, i) {
drivers/usb/core/message.c
601
io->entries = i;
drivers/usb/core/message.c
644
io->entries = i + 1;
drivers/usb/core/message.c
652
io->count = io->entries;
drivers/usb/core/message.c
712
int entries = io->entries;
drivers/usb/core/message.c
717
while (i < entries && !io->status) {
drivers/usb/core/message.c
756
io->count -= entries - i;
drivers/usb/core/message.c
794
for (i = io->entries - 1; i >= 0; --i) {
drivers/usb/host/xhci-dbgcap.c
425
erst->entries = dma_alloc_coherent(dev, sizeof(*erst->entries),
drivers/usb/host/xhci-dbgcap.c
427
if (!erst->entries)
drivers/usb/host/xhci-dbgcap.c
431
erst->entries[0].seg_addr = cpu_to_le64(evt_ring->first_seg->dma);
drivers/usb/host/xhci-dbgcap.c
432
erst->entries[0].seg_size = cpu_to_le32(TRBS_PER_SEGMENT);
drivers/usb/host/xhci-dbgcap.c
433
erst->entries[0].rsvd = 0;
drivers/usb/host/xhci-dbgcap.c
439
dma_free_coherent(dev, sizeof(*erst->entries), erst->entries,
drivers/usb/host/xhci-dbgcap.c
441
erst->entries = NULL;
drivers/usb/host/xhci-mem.c
1802
erst->entries = dma_alloc_coherent(xhci_to_hcd(xhci)->self.sysdev,
drivers/usb/host/xhci-mem.c
1804
if (!erst->entries)
drivers/usb/host/xhci-mem.c
1811
entry = &erst->entries[val];
drivers/usb/host/xhci-mem.c
1853
if (ir->erst.entries)
drivers/usb/host/xhci-mem.c
1855
ir->erst.entries,
drivers/usb/host/xhci-mem.c
1857
ir->erst.entries = NULL;
drivers/usb/host/xhci.h
1394
struct xhci_erst_entry *entries;
drivers/vdpa/mlx5/net/mlx5_vnet.c
1465
ent = &irqp->entries[i];
drivers/vdpa/mlx5/net/mlx5_vnet.c
1489
if (mvq->map.virq == irqp->entries[i].map.virq) {
drivers/vdpa/mlx5/net/mlx5_vnet.c
1490
free_irq(mvq->map.virq, irqp->entries[i].dev_id);
drivers/vdpa/mlx5/net/mlx5_vnet.c
1491
irqp->entries[i].used = false;
drivers/vdpa/mlx5/net/mlx5_vnet.c
3428
if (!ndev->irqp.entries)
drivers/vdpa/mlx5/net/mlx5_vnet.c
3432
ent = ndev->irqp.entries + i;
drivers/vdpa/mlx5/net/mlx5_vnet.c
3436
kfree(ndev->irqp.entries);
drivers/vdpa/mlx5/net/mlx5_vnet.c
3812
ndev->irqp.entries = kzalloc_objs(*ndev->irqp.entries,
drivers/vdpa/mlx5/net/mlx5_vnet.c
3814
if (!ndev->irqp.entries)
drivers/vdpa/mlx5/net/mlx5_vnet.c
3819
ent = ndev->irqp.entries + i;
drivers/vdpa/mlx5/net/mlx5_vnet.h
40
struct mlx5_vdpa_irq_pool_entry *entries;
drivers/vfio/vfio_iommu_spapr_tce.c
142
unsigned long entries = size >> PAGE_SHIFT;
drivers/vfio/vfio_iommu_spapr_tce.c
148
mem = mm_iommu_get(container->mm, vaddr, entries);
drivers/vfio/vfio_iommu_spapr_tce.c
157
ret = mm_iommu_new(container->mm, vaddr, entries, &mem);
drivers/video/fbdev/aty/aty128fb.c
487
static void do_wait_for_fifo(u16 entries, struct aty128fb_par *par);
drivers/video/fbdev/aty/aty128fb.c
488
static void wait_for_fifo(u16 entries, struct aty128fb_par *par);
drivers/video/fbdev/aty/aty128fb.c
632
static void do_wait_for_fifo(u16 entries, struct aty128fb_par *par)
drivers/video/fbdev/aty/aty128fb.c
639
if (par->fifo_slots >= entries)
drivers/video/fbdev/aty/aty128fb.c
666
static void wait_for_fifo(u16 entries, struct aty128fb_par *par)
drivers/video/fbdev/aty/aty128fb.c
668
if (par->fifo_slots < entries)
drivers/video/fbdev/aty/aty128fb.c
670
par->fifo_slots -= entries;
drivers/video/fbdev/aty/atyfb.h
349
static inline void wait_for_fifo(u16 entries, struct atyfb_par *par)
drivers/video/fbdev/aty/atyfb.h
352
while (entries > fifo_space) {
drivers/video/fbdev/aty/atyfb.h
355
par->fifo_space = fifo_space - entries;
drivers/video/fbdev/aty/radeon_base.c
355
void _radeon_fifo_wait(struct radeonfb_info *rinfo, int entries)
drivers/video/fbdev/aty/radeon_base.c
360
if ((INREG(RBBM_STATUS) & 0x7f) >= entries)
drivers/video/fbdev/aty/radeonfb.h
471
void _radeon_fifo_wait(struct radeonfb_info *rinfo, int entries);
drivers/video/fbdev/aty/radeonfb.h
476
#define radeon_fifo_wait(entries) _radeon_fifo_wait(rinfo,entries)
drivers/video/fbdev/uvesafb.c
1040
struct uvesafb_pal_entry *entries;
drivers/video/fbdev/uvesafb.c
1049
entries = kmalloc_objs(*entries, cmap->len);
drivers/video/fbdev/uvesafb.c
1050
if (!entries)
drivers/video/fbdev/uvesafb.c
1054
entries[i].red = cmap->red[i] >> shift;
drivers/video/fbdev/uvesafb.c
1055
entries[i].green = cmap->green[i] >> shift;
drivers/video/fbdev/uvesafb.c
1056
entries[i].blue = cmap->blue[i] >> shift;
drivers/video/fbdev/uvesafb.c
1057
entries[i].pad = 0;
drivers/video/fbdev/uvesafb.c
1059
err = uvesafb_setpalette(entries, cmap->len, cmap->start, info);
drivers/video/fbdev/uvesafb.c
1060
kfree(entries);
drivers/video/fbdev/uvesafb.c
922
static int uvesafb_setpalette(struct uvesafb_pal_entry *entries, int count,
drivers/video/fbdev/uvesafb.c
945
outb_p(entries[i].red, dac_val);
drivers/video/fbdev/uvesafb.c
946
outb_p(entries[i].green, dac_val);
drivers/video/fbdev/uvesafb.c
947
outb_p(entries[i].blue, dac_val);
drivers/video/fbdev/uvesafb.c
959
"D" (entries), /* EDI */
drivers/video/fbdev/uvesafb.c
976
task->buf = entries;
drivers/virtio/virtio_pci_admin_legacy_io.c
229
entry = &result->entries[i];
drivers/watchdog/wdat_wdt.c
315
const struct acpi_wdat_entry *entries;
drivers/watchdog/wdat_wdt.c
385
entries = (struct acpi_wdat_entry *)(tbl + 1);
drivers/watchdog/wdat_wdt.c
386
for (i = 0; i < tbl->entries; i++) {
drivers/watchdog/wdat_wdt.c
394
action = entries[i].action;
drivers/watchdog/wdat_wdt.c
407
instr->entry = entries[i];
drivers/watchdog/wdat_wdt.c
409
gas = &entries[i].register_region;
drivers/xen/xen-pciback/pciback_ops.c
202
struct msix_entry *entries;
drivers/xen/xen-pciback/pciback_ops.c
222
entries = kmalloc_objs(*entries, op->value);
drivers/xen/xen-pciback/pciback_ops.c
223
if (entries == NULL)
drivers/xen/xen-pciback/pciback_ops.c
227
entries[i].entry = op->msix_entries[i].entry;
drivers/xen/xen-pciback/pciback_ops.c
228
entries[i].vector = op->msix_entries[i].vector;
drivers/xen/xen-pciback/pciback_ops.c
231
result = pci_enable_msix_exact(dev, entries, op->value);
drivers/xen/xen-pciback/pciback_ops.c
234
op->msix_entries[i].entry = entries[i].entry;
drivers/xen/xen-pciback/pciback_ops.c
235
if (entries[i].vector) {
drivers/xen/xen-pciback/pciback_ops.c
237
xen_pirq_from_irq(entries[i].vector);
drivers/xen/xen-pciback/pciback_ops.c
245
kfree(entries);
fs/binfmt_misc.c
793
list_add(&e->list, &misc->entries);
fs/binfmt_misc.c
893
list_for_each_entry_safe(e, next, &misc->entries, list)
fs/binfmt_misc.c
968
INIT_LIST_HEAD(&misc->entries);
fs/binfmt_misc.c
98
list_for_each_entry(e, &misc->entries, list) {
fs/btrfs/fiemap.c
250
entry = &cache->entries[cache->entries_size - 1];
fs/btrfs/fiemap.c
257
entry = &cache->entries[cache->entries_pos];
fs/btrfs/fiemap.c
46
struct btrfs_fiemap_entry *entries;
fs/btrfs/fiemap.c
649
cache.entries = kmalloc_objs(struct btrfs_fiemap_entry,
fs/btrfs/fiemap.c
653
if (!cache.entries || !backref_ctx || !path) {
fs/btrfs/fiemap.c
79
struct btrfs_fiemap_entry *entry = &cache->entries[i];
fs/btrfs/fiemap.c
874
kfree(cache.entries);
fs/btrfs/free-space-cache.c
1073
int *entries, int *bitmaps,
fs/btrfs/free-space-cache.c
1100
*entries += 1;
fs/btrfs/free-space-cache.c
1135
*entries += 1;
fs/btrfs/free-space-cache.c
1150
int entries, int bitmaps)
fs/btrfs/free-space-cache.c
1186
btrfs_set_free_space_entries(leaf, header, entries);
fs/btrfs/free-space-cache.c
1198
int *entries)
fs/btrfs/free-space-cache.c
1230
*entries += 1;
fs/btrfs/free-space-cache.c
1308
io_ctl->entries, io_ctl->bitmaps);
fs/btrfs/free-space-cache.c
1377
int entries = 0;
fs/btrfs/free-space-cache.c
1420
block_group, &entries, &bitmaps,
fs/btrfs/free-space-cache.c
1433
ret = write_pinned_extent_entries(trans, block_group, io_ctl, &entries);
fs/btrfs/free-space-cache.c
1480
io_ctl->entries = entries;
fs/btrfs/free-space-cache.h
108
int entries;
fs/btrfs/inode.c
6286
static int btrfs_filldir(void *addr, int entries, struct dir_context *ctx)
fs/btrfs/inode.c
6288
while (entries--) {
fs/btrfs/inode.c
6319
int entries = 0;
fs/btrfs/inode.c
6362
ret = btrfs_filldir(private->filldir_buf, entries, ctx);
fs/btrfs/inode.c
6366
entries = 0;
fs/btrfs/inode.c
6381
entries++;
fs/btrfs/inode.c
6391
ret = btrfs_filldir(private->filldir_buf, entries, ctx);
fs/btrfs/lru_cache.c
118
ret = mtree_insert(&cache->entries, key, head, gfp);
fs/btrfs/lru_cache.c
124
head = mtree_load(&cache->entries, key);
fs/btrfs/lru_cache.c
165
ASSERT(mtree_empty(&cache->entries));
fs/btrfs/lru_cache.c
18
mt_init(&cache->entries);
fs/btrfs/lru_cache.c
51
head = mtree_load(&cache->entries, key);
fs/btrfs/lru_cache.c
76
ASSERT(!mtree_empty(&cache->entries));
fs/btrfs/lru_cache.c
89
head = mtree_erase(&cache->entries, entry->key);
fs/btrfs/lru_cache.h
44
struct maple_tree entries;
fs/btrfs/relocation.c
3864
struct btrfs_key *entries, unsigned int num_entries)
fs/btrfs/relocation.c
3879
batch.keys = entries;
fs/btrfs/relocation.c
3891
entries += max_items;
fs/btrfs/relocation.c
4433
struct btrfs_key *entries = NULL;
fs/btrfs/relocation.c
4548
entries = kmalloc(sizeof(*entries) * max_entries, GFP_NOFS);
fs/btrfs/relocation.c
4549
if (!entries) {
fs/btrfs/relocation.c
4557
entries[num_entries].objectid = bg->start;
fs/btrfs/relocation.c
4558
entries[num_entries].type = BTRFS_IDENTITY_REMAP_KEY;
fs/btrfs/relocation.c
4559
entries[num_entries].offset = bg->length;
fs/btrfs/relocation.c
4563
entries[num_entries].objectid = bg->start;
fs/btrfs/relocation.c
4564
entries[num_entries].type = BTRFS_IDENTITY_REMAP_KEY;
fs/btrfs/relocation.c
4565
entries[num_entries].offset = space_runs[0].start - bg->start;
fs/btrfs/relocation.c
4570
entries[num_entries].objectid = space_runs[i - 1].end;
fs/btrfs/relocation.c
4571
entries[num_entries].type = BTRFS_IDENTITY_REMAP_KEY;
fs/btrfs/relocation.c
4572
entries[num_entries].offset =
fs/btrfs/relocation.c
4578
entries[num_entries].objectid =
fs/btrfs/relocation.c
4580
entries[num_entries].type = BTRFS_IDENTITY_REMAP_KEY;
fs/btrfs/relocation.c
4581
entries[num_entries].offset =
fs/btrfs/relocation.c
4592
ret = add_remap_tree_entries(trans, path, entries, num_entries);
fs/btrfs/relocation.c
4595
kfree(entries);
fs/ceph/xattr.c
424
XATTR_NAME_CEPH(dir, entries, VXATTR_FLAG_DIRSTAT),
fs/ext4/extents.c
393
unsigned short entries;
fs/ext4/extents.c
400
entries = le16_to_cpu(eh->eh_entries);
fs/ext4/extents.c
413
while (entries) {
fs/ext4/extents.c
425
entries--;
fs/ext4/extents.c
437
while (entries) {
fs/ext4/extents.c
448
entries--;
fs/ext4/namei.c
1027
p->at = p->entries = ((struct dx_node *) bh->b_data)->entries;
fs/ext4/namei.c
1349
struct dx_entry *entries = frame->entries;
fs/ext4/namei.c
1351
int count = dx_get_count(entries);
fs/ext4/namei.c
1353
ASSERT(count < dx_get_limit(entries));
fs/ext4/namei.c
1354
ASSERT(old < entries + count);
fs/ext4/namei.c
1355
memmove(new + 1, new, (char *)(entries + count) - (char *)(new));
fs/ext4/namei.c
1358
dx_set_count(entries, count + 1);
fs/ext4/namei.c
2023
dxtrace(dx_show_index("frame", frame->entries));
fs/ext4/namei.c
2216
struct dx_entry *entries;
fs/ext4/namei.c
2292
entries = root->entries;
fs/ext4/namei.c
2293
dx_set_block(entries, 1);
fs/ext4/namei.c
2294
dx_set_count(entries, 1);
fs/ext4/namei.c
2295
dx_set_limit(entries, dx_root_limit(dir, sizeof(root->info)));
fs/ext4/namei.c
2315
frame->entries = entries;
fs/ext4/namei.c
2316
frame->at = entries;
fs/ext4/namei.c
2470
struct dx_entry *entries, *at;
fs/ext4/namei.c
2482
entries = frame->entries;
fs/ext4/namei.c
2503
dx_get_count(entries), dx_get_limit(entries)));
fs/ext4/namei.c
2505
if (dx_get_count(entries) == dx_get_limit(entries)) {
fs/ext4/namei.c
2515
if (dx_get_count((frame - 1)->entries) <
fs/ext4/namei.c
2516
dx_get_limit((frame - 1)->entries)) {
fs/ext4/namei.c
2522
entries = frame->entries;
fs/ext4/namei.c
2537
icount = dx_get_count(entries);
fs/ext4/namei.c
2544
entries2 = node2->entries;
fs/ext4/namei.c
2557
unsigned hash2 = dx_get_hash(entries + icount1);
fs/ext4/namei.c
2570
memcpy((char *) entries2, (char *) (entries + icount1),
fs/ext4/namei.c
2572
dx_set_count(entries, icount1);
fs/ext4/namei.c
2577
if (at - entries >= icount1) {
fs/ext4/namei.c
2578
frame->at = at - entries - icount1 + entries2;
fs/ext4/namei.c
2579
frame->entries = entries = entries2;
fs/ext4/namei.c
2583
dxtrace(dx_show_index("node", frame->entries));
fs/ext4/namei.c
2585
((struct dx_node *) bh2->b_data)->entries));
fs/ext4/namei.c
2602
memcpy((char *) entries2, (char *) entries,
fs/ext4/namei.c
2607
dx_set_count(entries, 1);
fs/ext4/namei.c
2608
dx_set_block(entries + 0, newblock);
fs/ext4/namei.c
262
struct dx_entry entries[];
fs/ext4/namei.c
268
struct dx_entry entries[];
fs/ext4/namei.c
275
struct dx_entry *entries;
fs/ext4/namei.c
557
static inline unsigned dx_get_count(struct dx_entry *entries)
fs/ext4/namei.c
559
return le16_to_cpu(((struct dx_countlimit *) entries)->count);
fs/ext4/namei.c
562
static inline unsigned dx_get_limit(struct dx_entry *entries)
fs/ext4/namei.c
564
return le16_to_cpu(((struct dx_countlimit *) entries)->limit);
fs/ext4/namei.c
567
static inline void dx_set_count(struct dx_entry *entries, unsigned value)
fs/ext4/namei.c
569
((struct dx_countlimit *) entries)->count = cpu_to_le16(value);
fs/ext4/namei.c
572
static inline void dx_set_limit(struct dx_entry *entries, unsigned value)
fs/ext4/namei.c
574
((struct dx_countlimit *) entries)->limit = cpu_to_le16(value);
fs/ext4/namei.c
602
static void dx_show_index(char * label, struct dx_entry *entries)
fs/ext4/namei.c
604
int i, n = dx_get_count (entries);
fs/ext4/namei.c
608
i ? dx_get_hash(entries + i) : 0,
fs/ext4/namei.c
609
(unsigned long)dx_get_block(entries + i));
fs/ext4/namei.c
711
struct dx_entry *entries, int levels)
fs/ext4/namei.c
714
unsigned count = dx_get_count(entries), names = 0, space = 0, i;
fs/ext4/namei.c
718
for (i = 0; i < count; i++, entries++)
fs/ext4/namei.c
720
ext4_lblk_t block = dx_get_block(entries);
fs/ext4/namei.c
721
ext4_lblk_t hash = i ? dx_get_hash(entries): 0;
fs/ext4/namei.c
722
u32 range = i < count - 1? (dx_get_hash(entries + 1) - hash): ~hash;
fs/ext4/namei.c
729
dx_show_entries(hinfo, dir, ((struct dx_node *) bh->b_data)->entries, levels - 1):
fs/ext4/namei.c
782
struct dx_entry *at, *entries, *p, *q, *m;
fs/ext4/namei.c
854
entries = (struct dx_entry *)(((char *)&root->info) +
fs/ext4/namei.c
857
if (dx_get_limit(entries) != dx_root_limit(dir,
fs/ext4/namei.c
860
dx_get_limit(entries),
fs/ext4/namei.c
869
count = dx_get_count(entries);
fs/ext4/namei.c
870
if (!count || count > dx_get_limit(entries)) {
fs/ext4/namei.c
873
count, dx_get_limit(entries));
fs/ext4/namei.c
877
p = entries + 1;
fs/ext4/namei.c
878
q = entries + count - 1;
fs/ext4/namei.c
888
htree_rep_invariant_check(entries, p, hash, count - 1);
fs/ext4/namei.c
892
at == entries ? 0 : dx_get_hash(at),
fs/ext4/namei.c
894
frame->entries = entries;
fs/ext4/namei.c
917
entries = ((struct dx_node *) frame->bh->b_data)->entries;
fs/ext4/namei.c
919
if (dx_get_limit(entries) != dx_node_limit(dir)) {
fs/ext4/namei.c
922
dx_get_limit(entries), dx_node_limit(dir));
fs/ext4/namei.c
994
if (++(p->at) < p->entries + dx_get_count(p->entries))
fs/f2fs/f2fs.h
552
(((struct nat_journal_entry *)(jnl)->nat_j.entries)[i].ne)
fs/f2fs/f2fs.h
554
(((struct nat_journal_entry *)(jnl)->nat_j.entries)[i].nid)
fs/f2fs/f2fs.h
556
(((struct sit_journal_entry *)(jnl)->sit_j.entries)[i].se)
fs/f2fs/f2fs.h
558
(((struct sit_journal_entry *)(jnl)->sit_j.entries)[i].segno)
fs/f2fs/gc.c
1871
submitted += gc_node_segment(sbi, sum->entries,
fs/f2fs/gc.c
1874
submitted += gc_data_segment(sbi, sum->entries,
fs/f2fs/node.c
2506
blk_addr = le32_to_cpu(nat_blk->entries[i].block_addr);
fs/f2fs/node.c
3081
if (le32_to_cpu(nat_blk->entries[i].block_addr) != NULL_ADDR)
fs/f2fs/node.c
3143
raw_ne = &nat_blk->entries[nid - start_nid];
fs/f2fs/node.c
629
ne = nat_blk->entries[nid - start_nid];
fs/f2fs/segment.c
2076
int entries = SIT_VBLOCK_MAP_SIZE / sizeof(unsigned long);
fs/f2fs/segment.c
2103
for (i = 0; i < entries; i++)
fs/f2fs/segment.c
3019
int entries = SIT_VBLOCK_MAP_SIZE / sizeof(unsigned long);
fs/f2fs/segment.c
3025
for (i = 0; i < entries; i++)
fs/f2fs/segment.c
4766
&raw_sit->entries[sit_offset]);
fs/f2fs/segment.c
4768
&raw_sit->entries[sit_offset]);
fs/f2fs/segment.c
5005
sit = sit_blk->entries[SIT_ENTRY_OFFSET(sit_i, start)];
fs/f2fs/segment.h
441
rs = &raw_sit->entries[i];
fs/gfs2/dir.c
1265
struct gfs2_dirent **darr, u32 entries,
fs/gfs2/dir.c
1273
if (sort_start < entries)
fs/gfs2/dir.c
1274
sort(&darr[sort_start], entries - sort_start,
fs/gfs2/dir.c
1280
for (x = 0, y = 1; x < entries; x++, y++) {
fs/gfs2/dir.c
1284
if (y < entries) {
fs/gfs2/dir.c
1336
unsigned entries)
fs/gfs2/dir.c
1341
for (i = 0; i < entries; i++) {
fs/gfs2/dir.c
1375
unsigned entries = 0, entries2 = 0;
fs/gfs2/dir.c
1390
entries += be16_to_cpu(lf->lf_entries);
fs/gfs2/dir.c
1401
if (!entries)
fs/gfs2/dir.c
1411
larr = gfs2_alloc_sort_buffer((leaves + entries + 99) * sizeof(void *));
fs/gfs2/dir.c
1457
BUG_ON(entries2 != entries);
fs/gfs2/dir.c
1458
error = do_filldir_main(ip, ctx, darr, entries, need_sort ?
fs/gfs2/dir.c
1459
sort_offset : entries, copied);
fs/gfs2/dir.c
1903
u16 entries = be16_to_cpu(leaf->lf_entries);
fs/gfs2/dir.c
1904
if (!entries)
fs/gfs2/dir.c
1906
leaf->lf_entries = cpu_to_be16(--entries);
fs/nfs/mount_clnt.c
354
u32 entries, i;
fs/nfs/mount_clnt.c
363
entries = be32_to_cpup(p);
fs/nfs/mount_clnt.c
364
dprintk("NFS: received %u auth flavors\n", entries);
fs/nfs/mount_clnt.c
365
if (entries > NFS_MAX_SECFLAVORS)
fs/nfs/mount_clnt.c
366
entries = NFS_MAX_SECFLAVORS;
fs/nfs/mount_clnt.c
368
p = xdr_inline_decode(xdr, 4 * entries);
fs/nfs/mount_clnt.c
372
if (entries > *count)
fs/nfs/mount_clnt.c
373
entries = *count;
fs/nfs/mount_clnt.c
375
for (i = 0; i < entries; i++) {
fs/nfs_common/nfsacl.c
100
.array_len = encode_entries ? entries : 0,
fs/nfs_common/nfsacl.c
111
if (entries > NFS_ACL_MAX_ENTRIES ||
fs/nfs_common/nfsacl.c
112
xdr_encode_word(buf, base, entries))
fs/nfs_common/nfsacl.c
159
u32 entries = (acl && acl->a_count) ? max_t(int, acl->a_count, 4) : 0;
fs/nfs_common/nfsacl.c
163
.array_len = encode_entries ? entries : 0,
fs/nfs_common/nfsacl.c
175
if (entries > NFS_ACL_MAX_ENTRIES)
fs/nfs_common/nfsacl.c
177
if (xdr_stream_encode_u32(xdr, entries) < 0)
fs/nfs_common/nfsacl.c
348
u32 entries;
fs/nfs_common/nfsacl.c
351
if (xdr_decode_word(buf, base, &entries) ||
fs/nfs_common/nfsacl.c
352
entries > NFS_ACL_MAX_ENTRIES)
fs/nfs_common/nfsacl.c
354
nfsacl_desc.desc.array_maxlen = entries;
fs/nfs_common/nfsacl.c
359
if (entries != nfsacl_desc.desc.array_len ||
fs/nfs_common/nfsacl.c
367
*aclcnt = entries;
fs/nfs_common/nfsacl.c
397
u32 entries;
fs/nfs_common/nfsacl.c
399
if (xdr_stream_decode_u32(xdr, &entries) < 0)
fs/nfs_common/nfsacl.c
401
if (entries > NFS_ACL_MAX_ENTRIES)
fs/nfs_common/nfsacl.c
405
if (!xdr_inline_decode(xdr, XDR_UNIT + elem_size * entries))
fs/nfs_common/nfsacl.c
407
nfsacl_desc.desc.array_maxlen = entries;
fs/nfs_common/nfsacl.c
412
if (entries != nfsacl_desc.desc.array_len ||
fs/nfs_common/nfsacl.c
420
*aclcnt = entries;
fs/nfs_common/nfsacl.c
96
int entries = (acl && acl->a_count) ? max_t(int, acl->a_count, 4) : 0;
fs/nfsd/acl.h
44
int nfs4_acl_bytes(int entries);
fs/nfsd/nfs4acl.c
827
int nfs4_acl_bytes(int entries)
fs/nfsd/nfs4acl.c
829
return sizeof(struct nfs4_acl) + entries * sizeof(struct nfs4_ace);
fs/nfsd/nfs4state.c
1071
int entries, old_entries;
fs/nfsd/nfs4state.c
1082
if (bd->entries == 0)
fs/nfsd/nfs4state.c
1087
bd->entries -= bd->old_entries;
fs/nfsd/nfs4state.c
1088
bd->old_entries = bd->entries;
fs/nfsd/nfs4state.c
1121
if (bd->entries == 0)
fs/nfsd/nfs4state.c
1123
bd->entries += 1;
fs/nfsd/nfscache.c
414
unsigned int entries = 0;
fs/nfsd/nfscache.c
418
++entries;
fs/nfsd/nfscache.c
436
if (entries > nn->longest_chain) {
fs/nfsd/nfscache.c
437
nn->longest_chain = entries;
fs/nfsd/nfscache.c
439
} else if (entries == nn->longest_chain) {
fs/ocfs2/xattr.c
4416
char *entries, *buf, *bucket_buf = NULL;
fs/ocfs2/xattr.c
4446
entries = (char *)xh->xh_entries;
fs/ocfs2/xattr.c
4458
sort(entries, le16_to_cpu(xh->xh_count),
fs/ocfs2/xattr.c
4500
sort(entries, le16_to_cpu(xh->xh_count),
fs/ocfs2/xattr.c
4609
struct ocfs2_xattr_entry *entries = xh->xh_entries;
fs/ocfs2/xattr.c
4621
if (cmp_xe(&entries[middle - delta - 1],
fs/ocfs2/xattr.c
4622
&entries[middle - delta]))
fs/ocfs2/xattr.c
4630
if (cmp_xe(&entries[middle + delta],
fs/ocfs2/xattr.c
4631
&entries[middle + delta + 1]))
fs/overlayfs/readdir.c
290
ovl_cache_free(&cache->entries);
fs/overlayfs/readdir.c
305
ovl_cache_free(&cache->entries);
fs/overlayfs/readdir.c
39
struct list_head entries;
fs/overlayfs/readdir.c
473
list_for_each(p, &od->cache->entries) {
fs/overlayfs/readdir.c
501
INIT_LIST_HEAD(&cache->entries);
fs/overlayfs/readdir.c
504
res = ovl_dir_read_merged(dentry, &cache->entries, &cache->root);
fs/overlayfs/readdir.c
506
ovl_cache_free(&cache->entries);
fs/overlayfs/readdir.c
713
res = ovl_dir_read_impure(path, &cache->entries, &cache->root);
fs/overlayfs/readdir.c
715
ovl_cache_free(&cache->entries);
fs/overlayfs/readdir.c
719
if (list_empty(&cache->entries)) {
fs/overlayfs/readdir.c
855
while (od->cursor != &od->cache->entries) {
fs/proc/base.c
193
static unsigned int __init pid_entry_nlink(const struct pid_entry *entries,
fs/proc/base.c
201
if (S_ISDIR(entries[i].mode))
fs/proc/base.c
466
unsigned long *entries;
fs/proc/base.c
483
entries = kmalloc_array(MAX_STACK_TRACE_DEPTH, sizeof(*entries),
fs/proc/base.c
485
if (!entries)
fs/proc/base.c
492
nr_entries = stack_trace_save_tsk(task, entries,
fs/proc/base.c
496
seq_printf(m, "[<0>] %pB\n", (void *)entries[i]);
fs/proc/base.c
501
kfree(entries);
fs/select.c
1011
struct pollfd *fds = walk->entries;
fs/select.c
150
} while (entry > p->entries);
fs/select.c
173
new_table->entry = new_table->entries;
fs/select.c
837
struct pollfd entries[] __counted_by(len);
fs/select.c
898
pfd = walk->entries;
fs/select.c
98
struct poll_table_entry entries[];
fs/select.c
987
if (copy_from_user(walk->entries, ufds + nfds-todo,
fs/select.c
996
walk = walk->next = kmalloc_flex(*walk, entries, len);
fs/smb/client/cached_dir.c
233
list_for_each_entry(parent_cfid, &cfids->entries, entry) {
fs/smb/client/cached_dir.c
33
list_for_each_entry(cfid, &cfids->entries, entry) {
fs/smb/client/cached_dir.c
428
list_for_each_entry(cfid, &cfids->entries, entry) {
fs/smb/client/cached_dir.c
558
list_for_each_entry(cfid, &cfids->entries, entry) {
fs/smb/client/cached_dir.c
58
list_add(&cfid->entry, &cfids->entries);
fs/smb/client/cached_dir.c
610
list_for_each_entry_safe(cfid, q, &cfids->entries, entry) {
fs/smb/client/cached_dir.c
670
list_for_each_entry(cfid, &cfids->entries, entry) {
fs/smb/client/cached_dir.c
713
INIT_LIST_HEAD(&cfid->dirents.entries);
fs/smb/client/cached_dir.c
732
list_for_each_entry_safe(dirent, q, &cfid->dirents.entries, entry) {
fs/smb/client/cached_dir.c
770
list_for_each_entry_safe(cfid, q, &cfids->entries, entry) {
fs/smb/client/cached_dir.c
820
INIT_LIST_HEAD(&cfids->entries);
fs/smb/client/cached_dir.c
848
list_for_each_entry_safe(cfid, q, &cfids->entries, entry) {
fs/smb/client/cached_dir.h
29
struct list_head entries;
fs/smb/client/cached_dir.h
65
struct list_head entries;
fs/smb/client/cifs_debug.c
329
list_for_each_entry(cfid, &cfids->entries, entry) {
fs/smb/client/cifs_debug.c
343
if (!list_empty(&cfid->dirents.entries))
fs/smb/client/dir.c
324
list_for_each_entry(parent_cfid, &tcon->cfids->entries, entry) {
fs/smb/client/readdir.c
824
list_for_each_entry(dirent, &cde->entries, entry) {
fs/smb/client/readdir.c
909
list_add_tail(&de->entry, &cde->entries);
fs/smb/server/ndr.c
315
ret = ndr_write_int16(n, acl->entries[i].type);
fs/smb/server/ndr.c
319
ret = ndr_write_int16(n, acl->entries[i].type);
fs/smb/server/ndr.c
323
if (acl->entries[i].type == SMB_ACL_USER) {
fs/smb/server/ndr.c
325
ret = ndr_write_int64(n, acl->entries[i].uid);
fs/smb/server/ndr.c
326
} else if (acl->entries[i].type == SMB_ACL_GROUP) {
fs/smb/server/ndr.c
328
ret = ndr_write_int64(n, acl->entries[i].gid);
fs/smb/server/ndr.c
334
ret = ndr_write_int32(n, acl->entries[i].perm);
fs/smb/server/vfs.c
1360
xa_entry = smb_acl->entries;
fs/smb/server/xattr.h
79
struct xattr_acl_entry entries[] __counted_by(count);
fs/squashfs/cache.c
204
for (i = 0; i < cache->entries; i++) {
fs/squashfs/cache.c
223
struct squashfs_cache *squashfs_cache_init(char *name, int entries,
fs/squashfs/cache.c
229
if (entries == 0)
fs/squashfs/cache.c
238
cache->entry = kzalloc_objs(*(cache->entry), entries);
fs/squashfs/cache.c
246
cache->unused = entries;
fs/squashfs/cache.c
247
cache->entries = entries;
fs/squashfs/cache.c
256
for (i = 0; i < entries; i++) {
fs/squashfs/cache.c
61
for (i = cache->curr_blk, n = 0; n < cache->entries; n++) {
fs/squashfs/cache.c
66
i = (i + 1) % cache->entries;
fs/squashfs/cache.c
69
if (n == cache->entries) {
fs/squashfs/cache.c
89
for (n = 0; n < cache->entries; n++) {
fs/squashfs/cache.c
92
i = (i + 1) % cache->entries;
fs/squashfs/cache.c
95
cache->next_blk = (i + 1) % cache->entries;
fs/squashfs/file.c
140
meta->entries = 0;
fs/squashfs/file.c
255
offset = index < meta->offset + meta->entries ? index :
fs/squashfs/file.c
256
meta->offset + meta->entries - 1;
fs/squashfs/file.c
264
meta->entries);
fs/squashfs/file.c
275
for (i = meta->offset + meta->entries; i <= index &&
fs/squashfs/file.c
282
if (meta->entries == 0)
fs/squashfs/file.c
298
meta->entries++;
fs/squashfs/file.c
303
meta->offset, meta->entries);
fs/squashfs/squashfs_fs.h
223
unsigned short entries;
fs/squashfs/squashfs_fs_sb.h
17
int entries;
fs/tracefs/event_inode.c
110
entry = &ei->entries[i];
fs/tracefs/event_inode.c
221
entry = &ei->entries[i];
fs/tracefs/event_inode.c
557
const struct eventfs_entry *entry = &ei->entries[i];
fs/tracefs/event_inode.c
622
entry = &ei->entries[i];
fs/tracefs/event_inode.c
713
const struct eventfs_entry *entries,
fs/tracefs/event_inode.c
725
ei->entries = entries;
fs/tracefs/event_inode.c
757
const struct eventfs_entry *entries,
fs/tracefs/event_inode.c
787
ei->entries = entries;
fs/tracefs/internal.h
54
const struct eventfs_entry *entries;
fs/vboxsf/dir.c
109
if (ctx->pos >= cur + b->entries) {
fs/vboxsf/dir.c
110
cur += b->entries;
fs/vboxsf/utils.c
491
b->entries = 0;
fs/vboxsf/utils.c
536
u32 entries, size;
fs/vboxsf/utils.c
552
&size, buf, &entries);
fs/vboxsf/utils.c
556
b->entries += entries;
fs/vboxsf/vfsmod.h
69
size_t entries;
fs/xfs/libxfs/xfs_attr_leaf.c
1302
struct xfs_attr_leaf_entry *entries;
fs/xfs/libxfs/xfs_attr_leaf.c
1347
entries = xfs_attr3_leaf_entryp(leaf);
fs/xfs/libxfs/xfs_attr_leaf.c
1350
icnodehdr.btree[0].hashval = entries[icleafhdr.count - 1].hashval;
fs/xfs/libxfs/xfs_attr_leaf.c
2551
struct xfs_attr_leaf_entry *entries;
fs/xfs/libxfs/xfs_attr_leaf.c
2562
entries = xfs_attr3_leaf_entryp(leaf);
fs/xfs/libxfs/xfs_attr_leaf.c
2574
for (entry = &entries[probe]; span > 4; entry = &entries[probe]) {
fs/xfs/libxfs/xfs_attr_leaf.c
2852
struct xfs_attr_leaf_entry *entries;
fs/xfs/libxfs/xfs_attr_leaf.c
2856
entries = xfs_attr3_leaf_entryp(bp->b_addr);
fs/xfs/libxfs/xfs_attr_leaf.c
2861
return be32_to_cpu(entries[ichdr.count - 1].hashval);
fs/xfs/libxfs/xfs_attr_leaf.c
2871
struct xfs_attr_leaf_entry *entries;
fs/xfs/libxfs/xfs_attr_leaf.c
2876
entries = xfs_attr3_leaf_entryp(leaf);
fs/xfs/libxfs/xfs_attr_leaf.c
2877
if (entries[index].flags & XFS_ATTR_LOCAL) {
fs/xfs/libxfs/xfs_attr_leaf.c
374
struct xfs_attr_leaf_entry *entries;
fs/xfs/libxfs/xfs_attr_leaf.c
398
entries = xfs_attr3_leaf_entryp(bp->b_addr);
fs/xfs/libxfs/xfs_attr_leaf.c
399
if ((char *)&entries[ichdr.count] >
fs/xfs/libxfs/xfs_attr_leaf.c
411
for (i = 0, ent = entries; i < ichdr.count; ent++, i++) {
fs/xfs/libxfs/xfs_da_format.h
661
xfs_attr_leaf_entry_t entries[]; /* sorted on key, not name */
fs/xfs/libxfs/xfs_da_format.h
693
struct xfs_attr_leaf_entry entries[];
fs/xfs/libxfs/xfs_da_format.h
760
return &((struct xfs_attr3_leafblock *)leafp)->entries[0];
fs/xfs/libxfs/xfs_da_format.h
761
return &leafp->entries[0];
fs/xfs/libxfs/xfs_da_format.h
770
struct xfs_attr_leaf_entry *entries = xfs_attr3_leaf_entryp(leafp);
fs/xfs/libxfs/xfs_da_format.h
772
return &((char *)leafp)[be16_to_cpu(entries[idx].nameidx)];
fs/xfs/libxfs/xfs_dir2_leaf.c
1715
int entries;
fs/xfs/libxfs/xfs_dir2_leaf.c
1718
entries = hdr->count - hdr->stale;
fs/xfs/libxfs/xfs_dir2_leaf.c
1725
return hdrsize + entries * sizeof(xfs_dir2_leaf_entry_t)
fs/xfs/scrub/agheader.c
697
xfs_agblock_t *entries;
fs/xfs/scrub/agheader.c
731
sai->entries[sai->nr_entries++] = agbno;
fs/xfs/scrub/agheader.c
819
sai.entries = kvzalloc_objs(xfs_agblock_t, sai.agflcount,
fs/xfs/scrub/agheader.c
821
if (!sai.entries) {
fs/xfs/scrub/agheader.c
842
sort(sai.entries, sai.nr_entries, sizeof(sai.entries[0]),
fs/xfs/scrub/agheader.c
845
if (sai.entries[i] == sai.entries[i - 1]) {
fs/xfs/scrub/agheader.c
852
kvfree(sai.entries);
fs/xfs/scrub/attr.c
369
struct xfs_attr_leaf_entry *entries;
fs/xfs/scrub/attr.c
426
entries = xfs_attr3_leaf_entryp(leaf);
fs/xfs/scrub/attr.c
427
if ((char *)&entries[leafhdr.count] > (char *)leaf + leafhdr.firstused)
fs/xfs/scrub/attr.c
434
for (i = 0, ent = entries; i < leafhdr.count; ent++, i++) {
fs/xfs/scrub/attr_repair.c
399
struct xfs_attr_leaf_entry *entries;
fs/xfs/scrub/attr_repair.c
415
entries = xfs_attr3_leaf_entryp(leaf);
fs/xfs/scrub/attr_repair.c
418
for (i = 0, ent = entries; i < leafhdr.count; ent++, i++) {
fs/xfs/xfs_attr_list.c
335
struct xfs_attr_leaf_entry *entries;
fs/xfs/xfs_attr_list.c
372
entries = xfs_attr3_leaf_entryp(leaf);
fs/xfs/xfs_attr_list.c
374
entries[leafhdr.count - 1].hashval)) {
fs/xfs/xfs_attr_list.c
379
entries[0].hashval)) {
fs/xfs/xfs_attr_list.c
440
struct xfs_attr_leaf_entry *entries;
fs/xfs/xfs_attr_list.c
449
entries = xfs_attr3_leaf_entryp(leaf);
fs/xfs/xfs_attr_list.c
457
entry = &entries[0];
fs/xfs/xfs_attr_list.c
476
entry = &entries[0];
fs/xfs/xfs_zone_info.c
50
unsigned int entries = zi->zi_used_bucket_entries[i];
fs/xfs/xfs_zone_info.c
55
entries);
fs/xfs/xfs_zone_info.c
56
reclaimable += entries;
include/acpi/actbl1.h
1287
u32 entries;
include/acpi/actbl1.h
1400
u32 entries;
include/acpi/actbl3.h
626
u32 entries; /* Number of watchdog entries that follow */
include/cxl/features.h
61
} *entries;
include/drm/display/drm_dp_mst_helper.h
45
} *entries;
include/drm/drm_debugfs_crc.h
63
struct drm_crtc_crc_entry *entries;
include/linux/binfmts.h
101
struct list_head entries;
include/linux/bnge/hsi.h
11682
__le32 entries;
include/linux/bnxt/hsi.h
10581
__le32 entries;
include/linux/bpf_verifier.h
708
} entries[BPF_ID_MAP_SIZE];
include/linux/can/can-ml.h
65
int entries;
include/linux/dqblk_qtree.h
50
unsigned long long entries = epb;
include/linux/dqblk_qtree.h
53
for (i = 1; entries < (1ULL << 32); i++)
include/linux/dqblk_qtree.h
54
entries *= epb;
include/linux/f2fs_fs.h
383
struct f2fs_nat_entry entries[NAT_ENTRY_PER_BLOCK];
include/linux/f2fs_fs.h
424
struct f2fs_sit_entry entries[SIT_ENTRY_PER_BLOCK];
include/linux/f2fs_fs.h
491
struct nat_journal_entry entries[0];
include/linux/f2fs_fs.h
508
struct sit_journal_entry entries[0];
include/linux/f2fs_fs.h
553
struct f2fs_summary entries[0];
include/linux/io_uring_types.h
264
void **entries;
include/linux/jump_label.h
104
struct jump_entry *entries;
include/linux/kvm_host.h
1784
struct kvm_kernel_irq_routing_entry *entries, int gsi);
include/linux/kvm_host.h
2189
const struct kvm_irq_routing_entry *entries,
include/linux/memcontrol.h
137
struct mem_cgroup_threshold entries[] __counted_by(size);
include/linux/mhi.h
98
u32 entries;
include/linux/mlx4/cq.h
185
int entries, struct mlx4_mtt *mtt);
include/linux/netfilter/x_tables.h
257
unsigned char entries[] __aligned(8);
include/linux/netfilter_bridge/ebtables.h
87
char *entries;
include/linux/nvme.h
1273
__le64 entries[32];
include/linux/nvme.h
1691
struct nvmf_disc_rsp_page_entry entries[];
include/linux/nvme.h
828
struct nvme_zone_descriptor entries[];
include/linux/nvmem-consumer.h
224
nvmem_add_cell_lookups(struct nvmem_cell_lookup *entries, size_t nentries) {}
include/linux/nvmem-consumer.h
226
nvmem_del_cell_lookups(struct nvmem_cell_lookup *entries, size_t nentries) {}
include/linux/nvmem-consumer.h
86
void nvmem_add_cell_lookups(struct nvmem_cell_lookup *entries,
include/linux/nvmem-consumer.h
88
void nvmem_del_cell_lookups(struct nvmem_cell_lookup *entries,
include/linux/objpool.h
153
obj = READ_ONCE(slot->entries[head & slot->mask]);
include/linux/objpool.h
206
WRITE_ONCE(slot->entries[tail & slot->mask], obj);
include/linux/objpool.h
58
void *entries[];
include/linux/of.h
1623
struct list_head entries;
include/linux/of_fdt.h
61
int *entries);
include/linux/pci.h
1751
int pci_enable_msix_range(struct pci_dev *dev, struct msix_entry *entries,
include/linux/pci.h
1754
struct msix_entry *entries, int nvec)
include/linux/pci.h
1756
int rc = pci_enable_msix_range(dev, entries, nvec, nvec);
include/linux/pci.h
1786
struct msix_entry *entries, int minvec, int maxvec)
include/linux/pci.h
1789
struct msix_entry *entries, int nvec)
include/linux/pds/pds_adminq.h
1366
u8 entries[] __counted_by_le(num_entries);
include/linux/perf_event.h
118
struct perf_branch_entry entries[];
include/linux/perf_event.h
2129
typedef int (perf_snapshot_branch_stack_t)(struct perf_branch_entry *entries,
include/linux/rio.h
411
int mbox, int entries);
include/linux/rio.h
414
int mbox, int entries);
include/linux/stackdepot.h
157
depot_stack_handle_t stack_depot_save_flags(unsigned long *entries,
include/linux/stackdepot.h
177
depot_stack_handle_t stack_depot_save(unsigned long *entries,
include/linux/stackdepot.h
200
unsigned long **entries);
include/linux/stackdepot.h
60
unsigned long entries[CONFIG_STACKDEPOT_MAX_FRAMES]; /* Frames */
include/linux/stacktrace.h
68
int stack_trace_snprint(char *buf, size_t size, const unsigned long *entries,
include/linux/stacktrace.h
78
unsigned int filter_irq_stacks(unsigned long *entries, unsigned int nr_entries);
include/linux/stacktrace.h
84
unsigned long *entries;
include/linux/sunrpc/cache.h
112
int entries;
include/linux/syscalls.h
334
asmlinkage long sys_io_uring_setup(u32 entries,
include/linux/tracefs.h
83
const struct eventfs_entry *entries,
include/linux/tracefs.h
87
const struct eventfs_entry *entries,
include/linux/unwind_deferred_types.h
11
unsigned long entries[];
include/linux/unwind_user_types.h
25
unsigned long *entries;
include/linux/usb.h
1967
int entries;
include/net/flow_offload.h
327
struct action_gate_entry *entries;
include/net/flow_offload.h
338
struct flow_action_entry entries[] __counted_by(num_entries);
include/net/flow_offload.h
360
return entry == &action->entries[action->num_entries - 1];
include/net/flow_offload.h
364
for (__i = 0, __act = &(__actions)->entries[0]; \
include/net/flow_offload.h
366
__act = &(__actions)->entries[++__i])
include/net/flow_offload.h
393
return &action->entries[0];
include/net/neighbour.h
237
atomic_t entries;
include/net/pkt_sched.h
238
struct tc_taprio_sched_entry entries[];
include/net/tc_act/tc_gate.h
130
list_for_each_entry(entry, &p->entries, list)
include/net/tc_act/tc_gate.h
141
list_for_each_entry(entry, &p->entries, list) {
include/net/tc_act/tc_gate.h
34
struct list_head entries;
include/rdma/ib_cache.h
98
struct ib_uverbs_gid_entry *entries,
include/rdma/rdma_vt.h
184
unsigned long *entries;
include/trace/events/neigh.h
34
__field(int, entries)
include/trace/events/neigh.h
46
__entry->entries = atomic_read(&tbl->gc_entries);
include/trace/events/neigh.h
67
__entry->family, __get_str(dev), __entry->entries,
include/trace/events/xen.h
443
TP_PROTO(const void *addr, unsigned entries),
include/trace/events/xen.h
444
TP_ARGS(addr, entries),
include/trace/events/xen.h
447
__field(unsigned, entries)
include/trace/events/xen.h
450
__entry->entries = entries),
include/trace/events/xen.h
452
__entry->addr, __entry->entries)
include/uapi/drm/amdgpu_drm.h
1575
struct drm_amdgpu_info_vce_clock_table_entry entries[AMDGPU_VCE_CLOCK_TABLE_ENTRIES];
include/uapi/drm/amdgpu_drm.h
839
__u64 entries;
include/uapi/linux/kd.h
68
struct unipair __user *entries;
include/uapi/linux/kvm.h
1054
struct kvm_irq_routing_entry entries[];
include/uapi/linux/mshv.h
182
struct mshv_user_irq_entry entries[];
include/uapi/linux/net_dropmon.h
24
__u32 entries;
include/uapi/linux/net_dropmon.h
29
__u32 entries;
include/uapi/linux/netfilter/x_tables.h
148
#define XT_ENTRY_ITERATE_CONTINUE(type, entries, size, n, fn, args...) \
include/uapi/linux/netfilter/x_tables.h
156
__entry = (void *)(entries) + __i; \
include/uapi/linux/netfilter/x_tables.h
168
#define XT_ENTRY_ITERATE(type, entries, size, fn, args...) \
include/uapi/linux/netfilter/x_tables.h
169
XT_ENTRY_ITERATE_CONTINUE(type, entries, size, 0, fn, args)
include/uapi/linux/netfilter_arp/arp_tables.h
184
struct arpt_entry entries[];
include/uapi/linux/netfilter_arp/arp_tables.h
32
#define ARPT_ENTRY_ITERATE(entries, size, fn, args...) \
include/uapi/linux/netfilter_arp/arp_tables.h
33
XT_ENTRY_ITERATE(struct arpt_entry, entries, size, fn, ## args)
include/uapi/linux/netfilter_bridge/ebtables.h
264
#define EBT_ENTRY_ITERATE(entries, size, fn, args...) \
include/uapi/linux/netfilter_bridge/ebtables.h
271
__entry = (void *)(entries) + __i; \
include/uapi/linux/netfilter_bridge/ebtables.h
57
char __user *entries;
include/uapi/linux/netfilter_bridge/ebtables.h
73
char *entries;
include/uapi/linux/netfilter_ipv4/ip_tables.h
206
struct ipt_entry entries[];
include/uapi/linux/netfilter_ipv4/ip_tables.h
66
#define IPT_ENTRY_ITERATE(entries, size, fn, args...) \
include/uapi/linux/netfilter_ipv4/ip_tables.h
67
XT_ENTRY_ITERATE(struct ipt_entry, entries, size, fn, ## args)
include/uapi/linux/netfilter_ipv6/ip6_tables.h
246
struct ip6t_entry entries[];
include/uapi/linux/netfilter_ipv6/ip6_tables.h
59
#define IP6T_ENTRY_ITERATE(entries, size, fn, args...) \
include/uapi/linux/netfilter_ipv6/ip6_tables.h
60
XT_ENTRY_ITERATE(struct ip6t_entry, entries, size, fn, ## args)
include/uapi/linux/raid/md_p.h
427
struct ppl_header_entry entries[PPL_HDR_MAX_ENTRIES];
include/uapi/linux/trace_mmap.h
38
__u64 entries;
include/uapi/linux/videodev2.h
2223
__u32 entries;
include/uapi/linux/virtio_net.h
294
__virtio32 entries;
include/uapi/linux/virtio_pci.h
313
struct virtio_admin_cmd_notify_info_data entries[VIRTIO_ADMIN_CMD_MAX_NOTIFY_INFO];
include/uapi/linux/xdp_diag.h
57
__u32 entries; /*num descs */
include/uapi/rdma/mana-abi.h
87
struct rss_resp_entry entries[64];
io_uring/alloc_cache.c
10
if (!cache->entries)
io_uring/alloc_cache.c
16
kvfree(cache->entries);
io_uring/alloc_cache.c
17
cache->entries = NULL;
io_uring/alloc_cache.c
25
cache->entries = kvmalloc_array(max_nr, sizeof(void *), GFP_KERNEL);
io_uring/alloc_cache.c
26
if (!cache->entries)
io_uring/alloc_cache.h
27
cache->entries[cache->nr_cached++] = entry;
io_uring/alloc_cache.h
36
void *entry = cache->entries[--cache->nr_cached];
io_uring/io_uring.c
2011
unsigned int entries;
io_uring/io_uring.c
2016
entries = ctx->sq_entries;
io_uring/io_uring.c
2018
entries = __io_sqring_entries(ctx);
io_uring/io_uring.c
2020
entries = min(nr, entries);
io_uring/io_uring.c
2021
if (unlikely(!entries))
io_uring/io_uring.c
2024
ret = left = entries;
io_uring/io_uring.c
2822
unsigned entries = p->sq_entries;
io_uring/io_uring.c
2824
if (!entries)
io_uring/io_uring.c
2826
if (entries > IORING_MAX_ENTRIES) {
io_uring/io_uring.c
2829
entries = IORING_MAX_ENTRIES;
io_uring/io_uring.c
2840
p->sq_entries = roundup_pow_of_two(entries);
io_uring/io_uring.c
3065
static long io_uring_setup(u32 entries, struct io_uring_params __user *params)
io_uring/io_uring.c
3077
config.p.sq_entries = entries;
io_uring/io_uring.c
3104
SYSCALL_DEFINE2(io_uring_setup, u32, entries,
io_uring/io_uring.c
3113
return io_uring_setup(entries, params);
io_uring/io_uring.h
469
unsigned int entries;
io_uring/io_uring.h
472
entries = smp_load_acquire(&rings->sq.tail) - ctx->cached_sq_head;
io_uring/io_uring.h
473
return min(entries, ctx->sq_entries);
io_uring/zcrx.c
912
u32 entries;
io_uring/zcrx.c
914
entries = smp_load_acquire(&ifq->rq_ring->tail) - ifq->cached_rq_head;
io_uring/zcrx.c
915
return min(entries, ifq->rq_entries);
io_uring/zcrx.c
953
unsigned int entries;
io_uring/zcrx.c
957
entries = io_zcrx_rqring_entries(ifq);
io_uring/zcrx.c
958
entries = min_t(unsigned, entries, PP_ALLOC_CACHE_REFILL);
io_uring/zcrx.c
959
if (unlikely(!entries))
io_uring/zcrx.c
983
} while (--entries);
kernel/backtracetest.c
43
unsigned long entries[8];
kernel/backtracetest.c
49
nr_entries = stack_trace_save(entries, ARRAY_SIZE(entries), 0);
kernel/backtracetest.c
50
stack_trace_print(entries, nr_entries, 0);
kernel/bpf/arraymap.c
785
u64 entries = map->max_entries;
kernel/bpf/arraymap.c
789
usage += entries * sizeof(void *);
kernel/bpf/arraymap.c
790
usage += entries * elem_size * num_possible_cpus();
kernel/bpf/arraymap.c
794
usage += PAGE_ALIGN(entries * elem_size);
kernel/bpf/arraymap.c
796
usage += entries * elem_size;
kernel/bpf/devmap.c
91
static struct hlist_head *dev_map_create_hash(unsigned int entries,
kernel/bpf/devmap.c
97
hash = bpf_map_area_alloc((u64) entries * sizeof(*hash), numa_node);
kernel/bpf/devmap.c
99
for (i = 0; i < entries; i++)
kernel/bpf/verifier.c
17433
e = &linked_regs->entries[i];
kernel/bpf/verifier.c
19718
if (idset->entries[i].id == id) {
kernel/bpf/verifier.c
19719
idset->entries[i].cnt++;
kernel/bpf/verifier.c
19725
idset->entries[idset->num_ids].id = id;
kernel/bpf/verifier.c
19726
idset->entries[idset->num_ids].cnt = 1;
kernel/bpf/verifier.c
19737
if (idset->entries[i].id == id)
kernel/bpf/verifier.c
19738
return idset->entries[i].cnt;
kernel/bpf/verifier.c
4029
struct linked_reg entries[LINKED_REGS_MAX];
kernel/bpf/verifier.c
4035
return &s->entries[s->cnt++];
kernel/bpf/verifier.c
4053
struct linked_reg *e = &s->entries[i];
kernel/bpf/verifier.c
4076
struct linked_reg *e = &s->entries[i];
kernel/bpf/verifier.c
4359
struct linked_reg *e = &linked_regs.entries[i];
kernel/bpf/verifier.c
4372
struct linked_reg *e = &linked_regs.entries[i];
kernel/events/callchain.c
104
kfree(entries->cpu_entries[cpu]);
kernel/events/callchain.c
105
kfree(entries);
kernel/events/callchain.c
157
struct callchain_cpus_entries *entries;
kernel/events/callchain.c
163
entries = rcu_dereference(callchain_cpus_entries);
kernel/events/callchain.c
164
if (!entries) {
kernel/events/callchain.c
171
return (((void *)entries->cpu_entries[cpu]) +
kernel/events/callchain.c
52
struct callchain_cpus_entries *entries;
kernel/events/callchain.c
55
entries = container_of(head, struct callchain_cpus_entries, rcu_head);
kernel/events/callchain.c
58
kfree(entries->cpu_entries[cpu]);
kernel/events/callchain.c
60
kfree(entries);
kernel/events/callchain.c
65
struct callchain_cpus_entries *entries;
kernel/events/callchain.c
67
entries = callchain_cpus_entries;
kernel/events/callchain.c
69
call_rcu(&entries->rcu_head, release_callchain_buffers_rcu);
kernel/events/callchain.c
76
struct callchain_cpus_entries *entries;
kernel/events/callchain.c
85
entries = kzalloc(size, GFP_KERNEL);
kernel/events/callchain.c
86
if (!entries)
kernel/events/callchain.c
92
entries->cpu_entries[cpu] = kmalloc_node(size, GFP_KERNEL,
kernel/events/callchain.c
94
if (!entries->cpu_entries[cpu])
kernel/events/callchain.c
98
rcu_assign_pointer(callchain_cpus_entries, entries);
kernel/events/core.c
10344
u64 entry = deferred_event->trace->entries[i];
kernel/events/core.c
8272
perf_output_copy(handle, data->br_stack->entries, size);
kernel/jump_label.c
445
struct jump_entry *entries)
kernel/jump_label.c
449
WARN_ON_ONCE((unsigned long)entries & JUMP_TYPE_MASK);
kernel/jump_label.c
451
key->entries = entries;
kernel/jump_label.c
624
struct jump_entry *entries;
kernel/jump_label.c
686
if (!mod->entries)
kernel/jump_label.c
694
__jump_label_update(key, mod->entries, stop,
kernel/jump_label.c
751
jlm2->entries = static_key_entries(key);
kernel/jump_label.c
757
jlm->entries = iter;
kernel/jump_label.c
818
static_key_set_entries(key, jlm->entries);
kernel/livepatch/transition.c
205
static int klp_check_stack_func(struct klp_func *func, unsigned long *entries,
kernel/livepatch/transition.c
241
address = entries[i];
kernel/livepatch/transition.c
256
unsigned long *entries = this_cpu_ptr(klp_stack_entries);
kernel/livepatch/transition.c
264
ret = stack_trace_save_tsk_reliable(task, entries, MAX_STACK_ENTRIES);
kernel/livepatch/transition.c
273
ret = klp_check_stack_func(func, entries, nr_entries);
kernel/locking/lockdep.c
1868
stack_trace_print(trace->entries, trace->nr_entries, spaces);
kernel/locking/lockdep.c
545
unsigned long entries[] __aligned(sizeof(unsigned long));
kernel/locking/lockdep.c
558
memcmp(t1->entries, t2->entries,
kernel/locking/lockdep.c
559
t1->nr_entries * sizeof(t1->entries[0])) == 0;
kernel/locking/lockdep.c
587
trace->nr_entries = stack_trace_save(trace->entries, max_entries, 3);
kernel/locking/lockdep.c
589
hash = jhash(trace->entries, trace->nr_entries *
kernel/locking/lockdep.c
590
sizeof(trace->entries[0]), 0);
kernel/power/swap.c
1058
offset = handle->cur->entries[handle->k];
kernel/power/swap.c
1349
handle->cur->entries[handle->k]) {
kernel/power/swap.c
437
handle->cur->entries[handle->k++] = offset;
kernel/power/swap.c
81
sector_t entries[MAP_PAGE_ENTRIES];
kernel/printk/index.c
22
struct pi_entry **entries;
kernel/printk/index.c
27
entries = mod->printk_index_start;
kernel/printk/index.c
33
entries = __start_printk_index;
kernel/printk/index.c
40
return entries[pos];
kernel/stacktrace.c
24
void stack_trace_print(const unsigned long *entries, unsigned int nr_entries,
kernel/stacktrace.c
272
.entries = store,
kernel/stacktrace.c
29
if (WARN_ON(!entries))
kernel/stacktrace.c
296
.entries = store,
kernel/stacktrace.c
320
.entries = store,
kernel/stacktrace.c
33
printk("%*c%pS\n", 1 + spaces, ' ', (void *)entries[i]);
kernel/stacktrace.c
346
.entries = store,
kernel/stacktrace.c
366
.entries = store,
kernel/stacktrace.c
392
unsigned int filter_irq_stacks(unsigned long *entries, unsigned int nr_entries)
kernel/stacktrace.c
397
if (in_irqentry_text(entries[i])) {
kernel/stacktrace.c
47
int stack_trace_snprint(char *buf, size_t size, const unsigned long *entries,
kernel/stacktrace.c
52
if (WARN_ON(!entries))
kernel/stacktrace.c
57
(void *)entries[i]);
kernel/trace/bpf_trace.c
1527
memcpy(buf, br_stack->entries, to_copy);
kernel/trace/ring_buffer.c
1474
old_entries = local_add_return(RB_WRITE_INTCNT, &next_page->entries);
kernel/trace/ring_buffer.c
1503
(void)local_cmpxchg(&next_page->entries, old_entries, eval);
kernel/trace/ring_buffer.c
1918
unsigned long entries = 0;
kernel/trace/ring_buffer.c
1934
entries += ret;
kernel/trace/ring_buffer.c
1936
local_set(&cpu_buffer->reader_page->entries, ret);
kernel/trace/ring_buffer.c
1969
local_set(&head_page->entries, ret);
kernel/trace/ring_buffer.c
1972
entries += ret;
kernel/trace/ring_buffer.c
2054
entries += ret;
kernel/trace/ring_buffer.c
2056
local_set(&head_page->entries, ret);
kernel/trace/ring_buffer.c
2068
local_set(&cpu_buffer->entries, entries);
kernel/trace/ring_buffer.c
2080
local_set(&cpu_buffer->reader_page->entries, 0);
kernel/trace/ring_buffer.c
2085
local_set(&head_page->entries, 0);
kernel/trace/ring_buffer.c
2712
return local_read(&bpage->entries) & RB_WRITE_MASK;
kernel/trace/ring_buffer.c
3340
int entries;
kernel/trace/ring_buffer.c
3344
entries = rb_page_entries(next_page);
kernel/trace/ring_buffer.c
3372
local_add(entries, &cpu_buffer->overrun);
kernel/trace/ring_buffer.c
364
local_t entries; /* entries on this page */
kernel/trace/ring_buffer.c
4018
local_inc(&cpu_buffer->entries);
kernel/trace/ring_buffer.c
4590
local_inc(&tail_page->entries);
kernel/trace/ring_buffer.c
4762
local_dec(&bpage->entries);
kernel/trace/ring_buffer.c
4774
local_dec(&bpage->entries);
kernel/trace/ring_buffer.c
4906
return local_read(&cpu_buffer->entries) -
kernel/trace/ring_buffer.c
5241
unsigned long entries = 0;
kernel/trace/ring_buffer.c
5247
entries += rb_num_of_entries(cpu_buffer);
kernel/trace/ring_buffer.c
5250
return entries;
kernel/trace/ring_buffer.c
535
local_t entries;
kernel/trace/ring_buffer.c
5479
local_set(&cpu_buffer->reader_page->entries, 0);
kernel/trace/ring_buffer.c
6103
local_set(&page->entries, 0);
kernel/trace/ring_buffer.c
6154
meta->entries = local_read(&cpu_buffer->entries);
kernel/trace/ring_buffer.c
6187
local_set(&cpu_buffer->entries, 0);
kernel/trace/ring_buffer.c
6751
local_set(&reader->entries, 0);
kernel/trace/ring_buffer_benchmark.c
236
unsigned long long entries;
kernel/trace/ring_buffer_benchmark.c
301
entries = ring_buffer_entries(buffer);
kernel/trace/ring_buffer_benchmark.c
334
trace_printk("Entries: %lld\n", entries);
kernel/trace/ring_buffer_benchmark.c
335
trace_printk("Total: %lld\n", entries + overruns + read);
kernel/trace/trace.c
3192
unsigned long entries = 0;
kernel/trace/trace.c
3211
entries++;
kernel/trace/trace.c
3217
per_cpu_ptr(iter->array_buffer->data, cpu)->skipped_entries = entries;
kernel/trace/trace.c
3294
unsigned long *entries, int cpu)
kernel/trace/trace.c
3311
*entries = count;
kernel/trace/trace.c
3316
unsigned long *total, unsigned long *entries)
kernel/trace/trace.c
3322
*entries = 0;
kernel/trace/trace.c
3327
*entries += e;
kernel/trace/trace.c
3333
unsigned long total, entries;
kernel/trace/trace.c
3338
get_total_entries_cpu(&tr->array_buffer, &total, &entries, cpu);
kernel/trace/trace.c
3340
return entries;
kernel/trace/trace.c
3345
unsigned long total, entries;
kernel/trace/trace.c
3350
get_total_entries(&tr->array_buffer, &total, &entries);
kernel/trace/trace.c
3352
return entries;
kernel/trace/trace.c
3371
unsigned long entries;
kernel/trace/trace.c
3373
get_total_entries(buf, &total, &entries);
kernel/trace/trace.c
3375
entries, total, num_online_cpus());
kernel/trace/trace.c
3416
unsigned long entries;
kernel/trace/trace.c
3420
get_total_entries(buf, &total, &entries);
kernel/trace/trace.c
3429
entries,
kernel/trace/trace.c
5230
per_cpu_ptr(buf->data, cpu)->entries = val;
kernel/trace/trace.c
5238
per_cpu_ptr(buf->data, cpu)->entries = ring_buffer_size(buf->buffer, cpu);
kernel/trace/trace.c
5252
per_cpu_ptr(size_buf->data, cpu)->entries, cpu);
kernel/trace/trace.c
5255
per_cpu_ptr(trace_buf->data, cpu)->entries =
kernel/trace/trace.c
5256
per_cpu_ptr(size_buf->data, cpu)->entries;
kernel/trace/trace.c
5260
per_cpu_ptr(size_buf->data, cpu_id)->entries, cpu_id);
kernel/trace/trace.c
5262
per_cpu_ptr(trace_buf->data, cpu_id)->entries =
kernel/trace/trace.c
5263
per_cpu_ptr(size_buf->data, cpu_id)->entries;
kernel/trace/trace.c
5356
struct trace_mod_entry entries[];
kernel/trace/trace.c
5395
tscratch->entries[0].mod_addr > addr) {
kernel/trace/trace.c
5404
tscratch->entries[nr_entries - 1].mod_addr < addr)
kernel/trace/trace.c
5408
tscratch->entries,
kernel/trace/trace.c
5410
sizeof(tscratch->entries[0]),
kernel/trace/trace.c
5413
idx = entry - tscratch->entries;
kernel/trace/trace.c
5432
if (struct_size(tscratch, entries, tscratch->nr_entries + 1) > size)
kernel/trace/trace.c
5435
entry = &tscratch->entries[tscratch->nr_entries];
kernel/trace/trace.c
5470
memset(tscratch->entries, 0,
kernel/trace/trace.c
5471
flex_array_size(tscratch, entries, tscratch->nr_entries));
kernel/trace/trace.c
6267
size = per_cpu_ptr(tr->array_buffer.data, cpu)->entries;
kernel/trace/trace.c
6268
if (size != per_cpu_ptr(tr->array_buffer.data, cpu)->entries) {
kernel/trace/trace.c
6284
r = sprintf(buf, "%lu\n", per_cpu_ptr(tr->array_buffer.data, cpu)->entries >> 10);
kernel/trace/trace.c
6331
size += per_cpu_ptr(tr->array_buffer.data, cpu)->entries >> 10;
kernel/trace/trace.c
6367
return &tscratch->entries[index];
kernel/trace/trace.c
8057
int entries, i;
kernel/trace/trace.c
8078
entries = ring_buffer_entries_cpu(iter->array_buffer->buffer, iter->cpu_file);
kernel/trace/trace.c
8080
for (i = 0; i < spd.nr_pages_max && len && entries; i++, len -= page_size) {
kernel/trace/trace.c
8119
entries = ring_buffer_entries_cpu(iter->array_buffer->buffer, iter->cpu_file);
kernel/trace/trace.c
9274
entry = &tscratch->entries[i];
kernel/trace/trace.c
9317
if (struct_size(tscratch, entries, tscratch->nr_entries) > size)
kernel/trace/trace.c
9324
entry = &tscratch->entries[i];
kernel/trace/trace.c
9338
sort_r(tscratch->entries, nr_entries, sizeof(struct trace_mod_entry),
kernel/trace/trace.c
9385
struct_size(tscratch, entries, 128));
kernel/trace/trace.h
194
unsigned long entries;
kernel/trace/trace_events_hist.c
5305
unsigned long entries[HIST_STACKTRACE_DEPTH];
kernel/trace/trace_events_hist.c
5400
unsigned long *entries = hist_pad->entries;
kernel/trace/trace_events_hist.c
5402
memset(entries, 0, HIST_STACKTRACE_SIZE);
kernel/trace/trace_events_hist.c
5409
memcpy(entries, ++stack, n_entries * sizeof(unsigned long));
kernel/trace/trace_events_hist.c
5411
stack_trace_save(entries, HIST_STACKTRACE_DEPTH,
kernel/trace/trace_events_hist.c
5414
key = entries;
kernel/trace/tracing_map.c
1019
const struct tracing_map_sort_entry **a = &entries[i];
kernel/trace/tracing_map.c
1020
const struct tracing_map_sort_entry **b = &entries[i + 1];
kernel/trace/tracing_map.c
1035
sort(&entries[start], n_sub,
kernel/trace/tracing_map.c
1076
struct tracing_map_sort_entry *sort_entry, **entries;
kernel/trace/tracing_map.c
1079
entries = vmalloc_array(map->max_elts, sizeof(sort_entry));
kernel/trace/tracing_map.c
1080
if (!entries)
kernel/trace/tracing_map.c
1091
entries[n_entries] = create_sort_entry(entry->val->key,
kernel/trace/tracing_map.c
1093
if (!entries[n_entries++]) {
kernel/trace/tracing_map.c
1105
*sort_entries = entries;
kernel/trace/tracing_map.c
1109
detect_dups(entries, n_entries, map->key_size);
kernel/trace/tracing_map.c
1118
sort(entries, n_entries, sizeof(struct tracing_map_sort_entry *),
kernel/trace/tracing_map.c
1123
(const struct tracing_map_sort_entry **)entries,
kernel/trace/tracing_map.c
1128
*sort_entries = entries;
kernel/trace/tracing_map.c
1132
tracing_map_destroy_sort_entries(entries, n_entries);
kernel/trace/tracing_map.c
936
void tracing_map_destroy_sort_entries(struct tracing_map_sort_entry **entries,
kernel/trace/tracing_map.c
942
destroy_sort_entry(entries[i]);
kernel/trace/tracing_map.c
944
vfree(entries);
kernel/trace/tracing_map.c
999
const struct tracing_map_sort_entry **entries,
kernel/trace/tracing_map.h
282
tracing_map_destroy_sort_entries(struct tracing_map_sort_entry **entries,
kernel/unwind/deferred.c
123
info->cache = kzalloc_flex(*cache, entries, UNWIND_MAX_ENTRIES);
kernel/unwind/deferred.c
129
trace->entries = cache->entries;
kernel/unwind/deferred.c
167
trace.entries = NULL;
kernel/unwind/user.c
155
trace->entries[trace->nr++] = state.ip;
kernel/user.c
26
.entries = LIST_HEAD_INIT(init_binfmt_misc.entries),
lib/fault-inject.c
100
if (attr->reject_start <= entries[n] &&
lib/fault-inject.c
101
entries[n] < attr->reject_end)
lib/fault-inject.c
103
if (attr->require_start <= entries[n] &&
lib/fault-inject.c
104
entries[n] < attr->require_end)
lib/fault-inject.c
91
unsigned long entries[MAX_STACK_TRACE_DEPTH];
lib/fault-inject.c
98
nr_entries = stack_trace_save(entries, depth, 1);
lib/objpool.c
24
void *obj = (void *)&slot->entries[pool->capacity];
lib/objpool.c
36
slot->entries[slot->tail & slot->mask] = obj;
lib/objpool.c
68
size = struct_size(slot, entries, pool->capacity) +
lib/ref_tracker.c
256
unsigned long entries[REF_TRACKER_STACK_ENTRIES];
lib/ref_tracker.c
276
nr_entries = stack_trace_save(entries, ARRAY_SIZE(entries), 1);
lib/ref_tracker.c
277
tracker->alloc_stack_handle = stack_depot_save(entries, nr_entries, gfp);
lib/ref_tracker.c
289
unsigned long entries[REF_TRACKER_STACK_ENTRIES];
lib/ref_tracker.c
306
nr_entries = stack_trace_save(entries, ARRAY_SIZE(entries), 1);
lib/ref_tracker.c
307
stack_handle = stack_depot_save(entries, nr_entries,
lib/stackdepot.c
141
static void init_stack_table(unsigned long entries)
lib/stackdepot.c
145
for (i = 0; i < entries; i++)
lib/stackdepot.c
152
unsigned long entries = 0;
lib/stackdepot.c
188
entries = 1UL << stack_bucket_number_order;
lib/stackdepot.c
192
entries,
lib/stackdepot.c
204
if (!entries) {
lib/stackdepot.c
209
entries = stack_hash_mask + 1;
lib/stackdepot.c
211
init_stack_table(entries);
lib/stackdepot.c
219
memblock_free(stack_table, entries * sizeof(struct list_head));
lib/stackdepot.c
231
unsigned long entries;
lib/stackdepot.c
244
entries = 1UL << stack_bucket_number_order;
lib/stackdepot.c
248
entries = nr_free_buffer_pages();
lib/stackdepot.c
249
entries = roundup_pow_of_two(entries);
lib/stackdepot.c
252
entries >>= (scale - PAGE_SHIFT);
lib/stackdepot.c
254
entries <<= (PAGE_SHIFT - scale);
lib/stackdepot.c
257
if (entries < 1UL << STACK_BUCKET_NUMBER_ORDER_MIN)
lib/stackdepot.c
258
entries = 1UL << STACK_BUCKET_NUMBER_ORDER_MIN;
lib/stackdepot.c
259
if (entries > 1UL << STACK_BUCKET_NUMBER_ORDER_MAX)
lib/stackdepot.c
260
entries = 1UL << STACK_BUCKET_NUMBER_ORDER_MAX;
lib/stackdepot.c
262
pr_info("allocating hash table of %lu entries via kvcalloc\n", entries);
lib/stackdepot.c
263
stack_table = kvzalloc_objs(struct list_head, entries);
lib/stackdepot.c
270
stack_hash_mask = entries - 1;
lib/stackdepot.c
271
init_stack_table(entries);
lib/stackdepot.c
424
const size_t used = flex_array_size(s, entries, nr_entries);
lib/stackdepot.c
425
const size_t unused = sizeof(s->entries) - used;
lib/stackdepot.c
427
WARN_ON_ONCE(sizeof(s->entries) < used);
lib/stackdepot.c
434
depot_alloc_stack(unsigned long *entries, unsigned int nr_entries, u32 hash, depot_flags_t flags, void **prealloc)
lib/stackdepot.c
471
memcpy(stack->entries, entries, flex_array_size(stack, entries, nr_entries));
lib/stackdepot.c
569
static inline u32 hash_stack(unsigned long *entries, unsigned int size)
lib/stackdepot.c
571
return jhash2((u32 *)entries,
lib/stackdepot.c
572
array_size(size, sizeof(*entries)) / sizeof(u32),
lib/stackdepot.c
593
unsigned long *entries, int size,
lib/stackdepot.c
618
if (data_race(stackdepot_memcmp(entries, stack->entries, size)))
lib/stackdepot.c
641
depot_stack_handle_t stack_depot_save_flags(unsigned long *entries,
lib/stackdepot.c
667
nr_entries = filter_irq_stacks(entries, nr_entries);
lib/stackdepot.c
672
hash = hash_stack(entries, nr_entries);
lib/stackdepot.c
676
found = find_stack(bucket, entries, nr_entries, hash, depot_flags);
lib/stackdepot.c
703
found = find_stack(bucket, entries, nr_entries, hash, depot_flags);
lib/stackdepot.c
706
depot_alloc_stack(entries, nr_entries, hash, depot_flags, &prealloc);
lib/stackdepot.c
743
depot_stack_handle_t stack_depot_save(unsigned long *entries,
lib/stackdepot.c
747
return stack_depot_save_flags(entries, nr_entries, alloc_flags,
lib/stackdepot.c
761
unsigned long **entries)
lib/stackdepot.c
765
*entries = NULL;
lib/stackdepot.c
770
kmsan_unpoison_memory(entries, sizeof(*entries));
lib/stackdepot.c
783
*entries = stack->entries;
lib/stackdepot.c
810
unsigned long *entries;
lib/stackdepot.c
813
nr_entries = stack_depot_fetch(stack, &entries);
lib/stackdepot.c
815
stack_trace_print(entries, nr_entries, 0);
lib/stackdepot.c
822
unsigned long *entries;
lib/stackdepot.c
825
nr_entries = stack_depot_fetch(handle, &entries);
lib/stackdepot.c
826
return nr_entries ? stack_trace_snprint(buf, size, entries, nr_entries,
lib/test_rhashtable.c
139
unsigned int entries)
lib/test_rhashtable.c
143
for (i = 0; i < entries; i++) {
lib/test_rhashtable.c
176
static void test_bucket_stats(struct rhashtable *ht, unsigned int entries)
lib/test_rhashtable.c
203
total, atomic_read(&ht->nelems), entries, chain_len);
lib/test_rhashtable.c
205
if (total != atomic_read(&ht->nelems) || total != entries)
lib/test_rhashtable.c
210
unsigned int entries)
lib/test_rhashtable.c
221
pr_info(" Adding %d keys\n", entries);
lib/test_rhashtable.c
223
for (i = 0; i < entries; i++) {
lib/test_rhashtable.c
238
test_bucket_stats(ht, entries);
lib/test_rhashtable.c
240
test_rht_lookup(ht, array, entries);
lib/test_rhashtable.c
243
test_bucket_stats(ht, entries);
lib/test_rhashtable.c
245
pr_info(" Deleting %d keys\n", entries);
lib/test_rhashtable.c
246
for (i = 0; i < entries; i++) {
lib/test_rhashtable.c
270
static int __init test_rhltable(unsigned int entries)
lib/test_rhashtable.c
277
if (entries == 0)
lib/test_rhashtable.c
278
entries = 1;
lib/test_rhashtable.c
280
rhl_test_objects = vzalloc(array_size(entries,
lib/test_rhashtable.c
287
BITS_TO_LONGS(entries)));
lib/test_rhashtable.c
297
for (i = 0; i < entries; i++) {
lib/test_rhashtable.c
310
pr_info("test %d add/delete pairs into rhlist\n", entries);
lib/test_rhashtable.c
311
for (i = 0; i < entries; i++) {
lib/test_rhashtable.c
321
if (WARN(!h, "key not found during iteration %d of %d", i, entries)) {
lib/test_rhashtable.c
359
for (i = 0; i < entries; i++) {
lib/test_rhashtable.c
370
pr_info("test %d random rhlist add/delete operations\n", entries);
lib/test_rhashtable.c
371
for (j = 0; j < entries; j++) {
lib/test_rhashtable.c
372
u32 i = get_random_u32_below(entries);
lib/test_rhashtable.c
400
i = get_random_u32_below(entries);
lib/test_rhashtable.c
415
for (i = 0; i < entries; i++) {
lib/test_rhashtable.c
436
unsigned int entries)
lib/test_rhashtable.c
441
test_rht_params.max_size = roundup_pow_of_two(entries / 8);
lib/test_rhashtable.c
589
unsigned int entries = tdata->entries;
lib/test_rhashtable.c
592
for (i = 0; i < entries; i++) {
lib/test_rhashtable.c
629
for (i = 0; i < tdata->entries; i++) {
lib/test_rhashtable.c
653
for (i = 0; i < tdata->entries; i += step) {
lib/test_rhashtable.c
684
unsigned int entries;
lib/test_rhashtable.c
693
entries = min(parm_entries, MAX_ENTRIES);
lib/test_rhashtable.c
696
test_rht_params.max_size = max_size ? : roundup_pow_of_two(entries);
lib/test_rhashtable.c
720
time = test_rhashtable(&ht, objs, entries);
lib/test_rhashtable.c
732
test_rht_params.max_size, test_rhashtable_max(objs, entries) == 0 ?
lib/test_rhashtable.c
74
unsigned int entries;
lib/test_rhashtable.c
750
objs = vzalloc(array3_size(sizeof(struct test_obj), tcount, entries));
lib/test_rhashtable.c
757
roundup_pow_of_two(tcount * entries);
lib/test_rhashtable.c
768
tdata[i].entries = entries;
lib/test_rhashtable.c
769
tdata[i].objs = objs + i * entries;
lib/test_rhashtable.c
801
err = test_rhltable(entries / 16);
lib/tests/hashtable_test.c
125
struct hashtable_test_entry entries[3];
lib/tests/hashtable_test.c
132
entries[i].key = i;
lib/tests/hashtable_test.c
133
entries[i].data = i + 10;
lib/tests/hashtable_test.c
134
entries[i].visited = 0;
lib/tests/hashtable_test.c
135
hash_add(hash, &entries[i].node, entries[i].key);
lib/tests/hashtable_test.c
149
KUNIT_EXPECT_EQ(test, entries[j].visited, 1);
lib/tests/hashtable_test.c
154
struct hashtable_test_entry entries[3];
lib/tests/hashtable_test.c
162
entries[i].key = i;
lib/tests/hashtable_test.c
163
entries[i].data = i + 10;
lib/tests/hashtable_test.c
164
entries[i].visited = 0;
lib/tests/hashtable_test.c
165
hash_add(hash, &entries[i].node, entries[i].key);
lib/tests/hashtable_test.c
182
KUNIT_EXPECT_EQ(test, entries[j].visited, 1);
lib/tests/hashtable_test.c
187
struct hashtable_test_entry entries[4];
lib/tests/hashtable_test.c
195
entries[i].key = 0;
lib/tests/hashtable_test.c
196
entries[i].data = i;
lib/tests/hashtable_test.c
197
entries[i].visited = 0;
lib/tests/hashtable_test.c
198
hash_add(hash, &entries[i].node, entries[i].key);
lib/tests/hashtable_test.c
202
entries[3].key = 1;
lib/tests/hashtable_test.c
203
entries[3].data = 3;
lib/tests/hashtable_test.c
204
entries[3].visited = 0;
lib/tests/hashtable_test.c
205
hash_add(hash, &entries[3].node, entries[3].key);
lib/tests/hashtable_test.c
217
KUNIT_EXPECT_EQ(test, entries[j].visited, 1);
lib/tests/hashtable_test.c
232
KUNIT_EXPECT_EQ(test, entries[3].visited, 1);
lib/tests/hashtable_test.c
235
KUNIT_EXPECT_EQ(test, entries[3].visited, 0);
lib/tests/hashtable_test.c
241
struct hashtable_test_entry entries[4];
lib/tests/hashtable_test.c
250
entries[i].key = 0;
lib/tests/hashtable_test.c
251
entries[i].data = i;
lib/tests/hashtable_test.c
252
entries[i].visited = 0;
lib/tests/hashtable_test.c
253
hash_add(hash, &entries[i].node, entries[i].key);
lib/tests/hashtable_test.c
257
entries[3].key = 1;
lib/tests/hashtable_test.c
258
entries[3].data = 3;
lib/tests/hashtable_test.c
259
entries[3].visited = 0;
lib/tests/hashtable_test.c
260
hash_add(hash, &entries[3].node, entries[3].key);
lib/tests/hashtable_test.c
275
KUNIT_EXPECT_EQ(test, entries[j].visited, 1);
lib/tests/hashtable_test.c
290
KUNIT_EXPECT_EQ(test, entries[3].visited, 1);
lib/tests/hashtable_test.c
293
KUNIT_EXPECT_EQ(test, entries[3].visited, 0);
lib/tests/list-test.c
1038
struct hlist_node entries[3], *cur;
lib/tests/list-test.c
1042
hlist_add_head(&entries[0], &list);
lib/tests/list-test.c
1043
hlist_add_behind(&entries[1], &entries[0]);
lib/tests/list-test.c
1044
hlist_add_behind(&entries[2], &entries[1]);
lib/tests/list-test.c
1047
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
1057
struct hlist_node entries[3], *cur, *n;
lib/tests/list-test.c
1061
hlist_add_head(&entries[0], &list);
lib/tests/list-test.c
1062
hlist_add_behind(&entries[1], &entries[0]);
lib/tests/list-test.c
1063
hlist_add_behind(&entries[2], &entries[1]);
lib/tests/list-test.c
1066
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
1067
hlist_del(&entries[i]);
lib/tests/list-test.c
1077
struct hlist_test_struct entries[5], *cur;
lib/tests/list-test.c
1081
entries[0].data = 0;
lib/tests/list-test.c
1082
hlist_add_head(&entries[0].list, &list);
lib/tests/list-test.c
1084
entries[i].data = i;
lib/tests/list-test.c
1085
hlist_add_behind(&entries[i].list, &entries[i-1].list);
lib/tests/list-test.c
1100
struct hlist_test_struct entries[5], *cur;
lib/tests/list-test.c
1104
entries[0].data = 0;
lib/tests/list-test.c
1105
hlist_add_head(&entries[0].list, &list);
lib/tests/list-test.c
1107
entries[i].data = i;
lib/tests/list-test.c
1108
hlist_add_behind(&entries[i].list, &entries[i-1].list);
lib/tests/list-test.c
1114
cur = &entries[0];
lib/tests/list-test.c
1124
KUNIT_EXPECT_EQ(test, entries[0].data, 0);
lib/tests/list-test.c
1126
KUNIT_EXPECT_EQ(test, entries[1].data, 42);
lib/tests/list-test.c
1131
struct hlist_test_struct entries[5], *cur;
lib/tests/list-test.c
1135
entries[0].data = 0;
lib/tests/list-test.c
1136
hlist_add_head(&entries[0].list, &list);
lib/tests/list-test.c
1138
entries[i].data = i;
lib/tests/list-test.c
1139
hlist_add_behind(&entries[i].list, &entries[i-1].list);
lib/tests/list-test.c
1144
cur = &entries[0];
lib/tests/list-test.c
1154
KUNIT_EXPECT_EQ(test, entries[0].data, 42);
lib/tests/list-test.c
1159
struct hlist_test_struct entries[5], *cur;
lib/tests/list-test.c
1164
entries[0].data = 0;
lib/tests/list-test.c
1165
hlist_add_head(&entries[0].list, &list);
lib/tests/list-test.c
1167
entries[i].data = i;
lib/tests/list-test.c
1168
hlist_add_behind(&entries[i].list, &entries[i-1].list);
lib/tests/list-test.c
391
struct list_head entries[3], *cur;
lib/tests/list-test.c
396
list_add_tail(&entries[0], &list1);
lib/tests/list-test.c
397
list_add_tail(&entries[1], &list1);
lib/tests/list-test.c
398
list_add_tail(&entries[2], &list1);
lib/tests/list-test.c
401
list_cut_position(&list2, &list1, &entries[1]);
lib/tests/list-test.c
405
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
412
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
421
struct list_head entries[3], *cur;
lib/tests/list-test.c
426
list_add_tail(&entries[0], &list1);
lib/tests/list-test.c
427
list_add_tail(&entries[1], &list1);
lib/tests/list-test.c
428
list_add_tail(&entries[2], &list1);
lib/tests/list-test.c
431
list_cut_before(&list2, &list1, &entries[1]);
lib/tests/list-test.c
435
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
442
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
451
struct list_head entries[5], *cur;
lib/tests/list-test.c
456
list_add_tail(&entries[0], &list1);
lib/tests/list-test.c
457
list_add_tail(&entries[1], &list1);
lib/tests/list-test.c
458
list_add_tail(&entries[2], &list2);
lib/tests/list-test.c
459
list_add_tail(&entries[3], &list2);
lib/tests/list-test.c
460
list_add_tail(&entries[4], &list1);
lib/tests/list-test.c
463
list_splice(&list2, &entries[1]);
lib/tests/list-test.c
467
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
476
struct list_head entries[5], *cur;
lib/tests/list-test.c
481
list_add_tail(&entries[0], &list1);
lib/tests/list-test.c
482
list_add_tail(&entries[1], &list1);
lib/tests/list-test.c
483
list_add_tail(&entries[2], &list2);
lib/tests/list-test.c
484
list_add_tail(&entries[3], &list2);
lib/tests/list-test.c
485
list_add_tail(&entries[4], &list1);
lib/tests/list-test.c
488
list_splice_tail(&list2, &entries[4]);
lib/tests/list-test.c
492
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
501
struct list_head entries[5], *cur;
lib/tests/list-test.c
506
list_add_tail(&entries[0], &list1);
lib/tests/list-test.c
507
list_add_tail(&entries[1], &list1);
lib/tests/list-test.c
508
list_add_tail(&entries[2], &list2);
lib/tests/list-test.c
509
list_add_tail(&entries[3], &list2);
lib/tests/list-test.c
510
list_add_tail(&entries[4], &list1);
lib/tests/list-test.c
513
list_splice_init(&list2, &entries[1]);
lib/tests/list-test.c
517
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
528
struct list_head entries[5], *cur;
lib/tests/list-test.c
533
list_add_tail(&entries[0], &list1);
lib/tests/list-test.c
534
list_add_tail(&entries[1], &list1);
lib/tests/list-test.c
535
list_add_tail(&entries[2], &list2);
lib/tests/list-test.c
536
list_add_tail(&entries[3], &list2);
lib/tests/list-test.c
537
list_add_tail(&entries[4], &list1);
lib/tests/list-test.c
540
list_splice_tail_init(&list2, &entries[4]);
lib/tests/list-test.c
544
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
651
struct list_head entries[3], *cur;
lib/tests/list-test.c
655
list_add_tail(&entries[0], &list);
lib/tests/list-test.c
656
list_add_tail(&entries[1], &list);
lib/tests/list-test.c
657
list_add_tail(&entries[2], &list);
lib/tests/list-test.c
660
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
669
struct list_head entries[3], *cur;
lib/tests/list-test.c
673
list_add_tail(&entries[0], &list);
lib/tests/list-test.c
674
list_add_tail(&entries[1], &list);
lib/tests/list-test.c
675
list_add_tail(&entries[2], &list);
lib/tests/list-test.c
678
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
687
struct list_head entries[3], *cur, *n;
lib/tests/list-test.c
692
list_add_tail(&entries[0], &list);
lib/tests/list-test.c
693
list_add_tail(&entries[1], &list);
lib/tests/list-test.c
694
list_add_tail(&entries[2], &list);
lib/tests/list-test.c
697
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
698
list_del(&entries[i]);
lib/tests/list-test.c
708
struct list_head entries[3], *cur, *n;
lib/tests/list-test.c
712
list_add_tail(&entries[0], &list);
lib/tests/list-test.c
713
list_add_tail(&entries[1], &list);
lib/tests/list-test.c
714
list_add_tail(&entries[2], &list);
lib/tests/list-test.c
717
KUNIT_EXPECT_PTR_EQ(test, cur, &entries[i]);
lib/tests/list-test.c
718
list_del(&entries[i]);
lib/tests/list-test.c
728
struct list_test_struct entries[5], *cur;
lib/tests/list-test.c
733
entries[i].data = i;
lib/tests/list-test.c
734
list_add_tail(&entries[i].list, &list);
lib/tests/list-test.c
749
struct list_test_struct entries[5], *cur;
lib/tests/list-test.c
754
entries[i].data = i;
lib/tests/list-test.c
755
list_add_tail(&entries[i].list, &list);
mm/gup.c
2211
void **entries;
mm/gup.c
2226
pofs->entries[i] = NULL;
mm/kasan/common.c
54
unsigned long entries[KASAN_STACK_DEPTH];
mm/kasan/common.c
57
nr_entries = stack_trace_save(entries, ARRAY_SIZE(entries), 0);
mm/kasan/common.c
58
return stack_depot_save_flags(entries, nr_entries, flags, depot_flags);
mm/kasan/kasan.h
309
struct kasan_stack_ring_entry *entries;
mm/kasan/report_tags.c
59
entry = &stack_ring.entries[i % stack_ring.size];
mm/kasan/tags.c
117
entry = &stack_ring.entries[pos % stack_ring.size];
mm/kasan/tags.c
89
stack_ring.entries = memblock_alloc(
mm/kasan/tags.c
90
sizeof(stack_ring.entries[0]) * stack_ring.size,
mm/kasan/tags.c
92
if (WARN_ON(!stack_ring.entries))
mm/kmemleak.c
367
unsigned long *entries;
mm/kmemleak.c
370
nr_entries = stack_depot_fetch(object->trace_handle, &entries);
mm/kmemleak.c
380
void *ptr = (void *)entries[i];
mm/kmemleak.c
645
unsigned long entries[MAX_TRACE];
mm/kmemleak.c
655
nr_entries = stack_trace_save(entries, ARRAY_SIZE(entries), 3);
mm/kmemleak.c
656
trace_handle = stack_depot_save(entries, nr_entries, GFP_NOWAIT);
mm/kmsan/core.c
147
unsigned long entries[3];
mm/kmsan/core.c
178
entries[0] = KMSAN_CHAIN_MAGIC_ORIGIN;
mm/kmsan/core.c
179
entries[1] = kmsan_save_stack_with_flags(__GFP_HIGH, 0);
mm/kmsan/core.c
180
entries[2] = id;
mm/kmsan/core.c
186
kmsan_internal_unpoison_memory(entries, sizeof(entries), false);
mm/kmsan/core.c
187
handle = stack_depot_save(entries, ARRAY_SIZE(entries), __GFP_HIGH);
mm/kmsan/core.c
69
unsigned long entries[KMSAN_STACK_DEPTH];
mm/kmsan/core.c
73
nr_entries = stack_trace_save(entries, KMSAN_STACK_DEPTH, 0);
mm/kmsan/core.c
75
handle = stack_depot_save(entries, nr_entries, flags);
mm/kmsan/instrumentation.c
264
unsigned long entries[4];
mm/kmsan/instrumentation.c
271
entries[0] = KMSAN_ALLOCA_MAGIC_ORIGIN;
mm/kmsan/instrumentation.c
272
entries[1] = (u64)descr;
mm/kmsan/instrumentation.c
273
entries[2] = (u64)__builtin_return_address(0);
mm/kmsan/instrumentation.c
280
entries[3] = (u64)__builtin_return_address(1);
mm/kmsan/instrumentation.c
282
entries[3] = 0;
mm/kmsan/instrumentation.c
286
handle = stack_depot_save(entries, ARRAY_SIZE(entries), __GFP_HIGH);
mm/kmsan/report.c
100
nr_entries = stack_depot_fetch(origin, &entries);
mm/kmsan/report.c
102
magic = nr_entries ? entries[0] : 0;
mm/kmsan/report.c
104
descr = (char *)entries[1];
mm/kmsan/report.c
105
pc1 = (void *)entries[2];
mm/kmsan/report.c
106
pc2 = (void *)entries[3];
mm/kmsan/report.c
122
head = entries[1];
mm/kmsan/report.c
123
origin = entries[2];
mm/kmsan/report.c
140
skipnr = get_stack_skipnr(entries, nr_entries);
mm/kmsan/report.c
141
stack_trace_print(entries + skipnr, nr_entries - skipnr,
mm/kmsan/report.c
88
unsigned long *entries = NULL, *chained_entries = NULL;
mm/memcontrol-v1.c
482
for (; i >= 0 && unlikely(t->entries[i].threshold > usage); i--)
mm/memcontrol-v1.c
483
eventfd_signal(t->entries[i].eventfd);
mm/memcontrol-v1.c
494
for (; i < t->size && unlikely(t->entries[i].threshold <= usage); i++)
mm/memcontrol-v1.c
495
eventfd_signal(t->entries[i].eventfd);
mm/memcontrol-v1.c
786
new = kmalloc_flex(*new, entries, size, GFP_KERNEL_ACCOUNT);
mm/memcontrol-v1.c
795
memcpy(new->entries, thresholds->primary->entries,
mm/memcontrol-v1.c
796
flex_array_size(new, entries, size - 1));
mm/memcontrol-v1.c
799
new->entries[size - 1].eventfd = eventfd;
mm/memcontrol-v1.c
800
new->entries[size - 1].threshold = threshold;
mm/memcontrol-v1.c
803
sort(new->entries, size, sizeof(*new->entries),
mm/memcontrol-v1.c
809
if (new->entries[i].threshold <= usage) {
mm/memcontrol-v1.c
853
int i, j, size, entries;
mm/memcontrol-v1.c
873
size = entries = 0;
mm/memcontrol-v1.c
875
if (thresholds->primary->entries[i].eventfd != eventfd)
mm/memcontrol-v1.c
878
entries++;
mm/memcontrol-v1.c
884
if (!entries)
mm/memcontrol-v1.c
899
if (thresholds->primary->entries[i].eventfd == eventfd)
mm/memcontrol-v1.c
902
new->entries[j] = thresholds->primary->entries[i];
mm/memcontrol-v1.c
903
if (new->entries[j].threshold <= usage) {
mm/page_ext.c
122
int entries = ARRAY_SIZE(page_ext_ops);
mm/page_ext.c
125
for (i = 0; i < entries; i++) {
mm/page_ext.c
134
for (i = 0; i < entries; i++) {
mm/page_ext.c
148
int entries = ARRAY_SIZE(page_ext_ops);
mm/page_ext.c
150
for (i = 0; i < entries; i++) {
mm/page_owner.c
100
unsigned long entries[4];
mm/page_owner.c
103
nr_entries = stack_trace_save(entries, ARRAY_SIZE(entries), 0);
mm/page_owner.c
104
return stack_depot_save(entries, nr_entries, GFP_KERNEL);
mm/page_owner.c
157
unsigned long entries[PAGE_OWNER_STACK_DEPTH];
mm/page_owner.c
165
nr_entries = stack_trace_save(entries, ARRAY_SIZE(entries), 2);
mm/page_owner.c
166
handle = stack_depot_save(entries, nr_entries, flags);
mm/page_owner.c
896
unsigned long *entries;
mm/page_owner.c
912
entries = stack_record->entries;
mm/page_owner.c
914
seq_printf(m, " %pS\n", (void *)entries[i]);
mm/slub.c
1030
unsigned long entries[TRACK_ADDRS_COUNT];
mm/slub.c
1033
nr_entries = stack_trace_save(entries, ARRAY_SIZE(entries), 3);
mm/slub.c
1034
handle = stack_depot_save(entries, nr_entries, gfp_flags);
mm/slub.c
7942
unsigned long *entries;
mm/slub.c
7947
nr_entries = stack_depot_fetch(handle, &entries);
mm/slub.c
7949
kpp->kp_stack[i] = (void *)entries[i];
mm/slub.c
7955
nr_entries = stack_depot_fetch(handle, &entries);
mm/slub.c
7957
kpp->kp_free_stack[i] = (void *)entries[i];
mm/slub.c
9652
unsigned long *entries;
mm/slub.c
9657
nr_entries = stack_depot_fetch(handle, &entries);
mm/slub.c
9660
seq_printf(seq, " %pS\n", (void *)entries[j]);
mm/swap_table.h
11
atomic_long_t entries[SWAPFILE_CLUSTER];
net/bridge/netfilter/ebtable_broute.c
36
.entries = (char *)&initial_chain,
net/bridge/netfilter/ebtable_filter.c
43
.entries = (char *)initial_chains,
net/bridge/netfilter/ebtable_nat.c
43
.entries = (char *)initial_chains,
net/bridge/netfilter/ebtables.c
1083
EBT_ENTRY_ITERATE(table->entries, table->entries_size,
net/bridge/netfilter/ebtables.c
1086
vfree(table->entries);
net/bridge/netfilter/ebtables.c
1098
EBT_ENTRY_ITERATE(newinfo->entries, newinfo->entries_size,
net/bridge/netfilter/ebtables.c
1142
newinfo->entries = __vmalloc(tmp.entries_size, GFP_KERNEL_ACCOUNT);
net/bridge/netfilter/ebtables.c
1143
if (!newinfo->entries) {
net/bridge/netfilter/ebtables.c
1148
newinfo->entries, tmp.entries, tmp.entries_size) != 0) {
net/bridge/netfilter/ebtables.c
1157
vfree(newinfo->entries);
net/bridge/netfilter/ebtables.c
1170
EBT_ENTRY_ITERATE(table->private->entries, table->private->entries_size,
net/bridge/netfilter/ebtables.c
1174
vfree(table->private->entries);
net/bridge/netfilter/ebtables.c
1194
repl->entries == NULL || repl->entries_size == 0 ||
net/bridge/netfilter/ebtables.c
1215
memcpy(p, repl->entries, repl->entries_size);
net/bridge/netfilter/ebtables.c
1216
newinfo->entries = p;
net/bridge/netfilter/ebtables.c
1231
((char *)repl->hook_entry[i] - repl->entries);
net/bridge/netfilter/ebtables.c
1285
vfree(newinfo->entries);
net/bridge/netfilter/ebtables.c
1552
char *entries;
net/bridge/netfilter/ebtables.c
1557
entries = t->private->entries;
net/bridge/netfilter/ebtables.c
1562
entries = t->table->entries;
net/bridge/netfilter/ebtables.c
1585
return EBT_ENTRY_ITERATE(entries, entries_size,
net/bridge/netfilter/ebtables.c
1586
ebt_entry_to_user, entries, tmp.entries);
net/bridge/netfilter/ebtables.c
1602
compat_uptr_t entries;
net/bridge/netfilter/ebtables.c
1839
const void *entries = info->entries;
net/bridge/netfilter/ebtables.c
1847
return EBT_ENTRY_ITERATE(entries, size, compat_calc_entry, info,
net/bridge/netfilter/ebtables.c
1848
entries, newinfo);
net/bridge/netfilter/ebtables.c
1865
tinfo.entries = t->private->entries;
net/bridge/netfilter/ebtables.c
1870
tinfo.entries = t->table->entries;
net/bridge/netfilter/ebtables.c
1902
pos = compat_ptr(tmp.entries);
net/bridge/netfilter/ebtables.c
1903
return EBT_ENTRY_ITERATE(tinfo.entries, tinfo.entries_size,
net/bridge/netfilter/ebtables.c
224
base = private->entries;
net/bridge/netfilter/ebtables.c
2248
repl->entries = compat_ptr(tmp.entries);
net/bridge/netfilter/ebtables.c
2278
newinfo->entries = vmalloc(tmp.entries_size);
net/bridge/netfilter/ebtables.c
2279
if (!newinfo->entries) {
net/bridge/netfilter/ebtables.c
2284
newinfo->entries, tmp.entries, tmp.entries_size) != 0) {
net/bridge/netfilter/ebtables.c
2289
entries_tmp = newinfo->entries;
net/bridge/netfilter/ebtables.c
2306
newinfo->entries = vmalloc(size64);
net/bridge/netfilter/ebtables.c
2307
if (!newinfo->entries) {
net/bridge/netfilter/ebtables.c
2314
state.buf_kern_start = newinfo->entries;
net/bridge/netfilter/ebtables.c
2331
delta = usrptr - tmp.entries;
net/bridge/netfilter/ebtables.c
2344
vfree(newinfo->entries);
net/bridge/netfilter/ebtables.c
482
struct ebt_entry *e = (void *)newinfo->entries + offset;
net/bridge/netfilter/ebtables.c
491
repl->entries + offset)
net/bridge/netfilter/ebtables.c
880
if (newinfo->hook_entry[i] != (struct ebt_entries *)newinfo->entries)
net/bridge/netfilter/ebtables.c
902
ret = EBT_ENTRY_ITERATE(newinfo->entries, newinfo->entries_size,
net/bridge/netfilter/ebtables.c
945
EBT_ENTRY_ITERATE(newinfo->entries, newinfo->entries_size,
net/bridge/netfilter/ebtables.c
958
cl_s, udc_cnt, i, newinfo->entries)) {
net/bridge/netfilter/ebtables.c
976
ret = EBT_ENTRY_ITERATE(newinfo->entries, newinfo->entries_size,
net/bridge/netfilter/ebtables.c
979
EBT_ENTRY_ITERATE(newinfo->entries, newinfo->entries_size,
net/can/af_can.c
479
dev_rcv_lists->entries++;
net/can/af_can.c
556
dev_rcv_lists->entries--;
net/can/af_can.c
586
if (dev_rcv_lists->entries == 0)
net/core/drop_monitor.c
239
for (i = 0; i < msg->entries; i++) {
net/core/drop_monitor.c
246
if (msg->entries == dm_hit_limit)
net/core/drop_monitor.c
255
msg->entries++;
net/core/drop_monitor.c
309
hw_entries = kzalloc_flex(*hw_entries, entries, dm_hit_limit);
net/core/drop_monitor.c
362
rc = net_dm_hw_entry_put(msg, &hw_entries->entries[i]);
net/core/drop_monitor.c
459
hw_entry = &hw_entries->entries[i];
net/core/drop_monitor.c
469
hw_entry = &hw_entries->entries[hw_entries->num_entries];
net/core/drop_monitor.c
73
struct net_dm_hw_entry entries[];
net/core/flow_offload.c
15
rule = kzalloc_flex(*rule, action.entries, num_actions);
net/core/flow_offload.c
24
rule->action.entries[i].hw_stats = FLOW_ACTION_HW_STATS_DONT_CARE;
net/core/flow_offload.c
35
fl_action = kzalloc_flex(*fl_action, action.entries, num_actions);
net/core/flow_offload.c
44
fl_action->action.entries[i].hw_stats = FLOW_ACTION_HW_STATS_DONT_CARE;
net/core/neighbour.c
1001
if (atomic_read(&tbl->entries) < READ_ONCE(tbl->gc_thresh1))
net/core/neighbour.c
1901
if (atomic_read(&tbl->entries))
net/core/neighbour.c
2278
.ndtc_entries = atomic_read(&tbl->entries),
net/core/neighbour.c
3534
atomic_read(&tbl->entries),
net/core/neighbour.c
502
int entries, gc_thresh3;
net/core/neighbour.c
507
entries = atomic_inc_return(&tbl->gc_entries) - 1;
net/core/neighbour.c
509
if (entries >= gc_thresh3 ||
net/core/neighbour.c
510
(entries >= READ_ONCE(tbl->gc_thresh2) &&
net/core/neighbour.c
512
if (!neigh_forced_gc(tbl) && entries >= gc_thresh3) {
net/core/neighbour.c
543
atomic_inc(&tbl->entries);
net/core/neighbour.c
693
if (atomic_read(&tbl->entries) > (1 << nht->hash_shift))
net/core/neighbour.c
946
atomic_dec(&neigh->tbl->entries);
net/dsa/user.c
1397
act = &cls->rule->action.entries[0];
net/dsa/user.c
1491
act = &cls->rule->action.entries[0];
net/dsa/user.c
1526
switch (action->entries[0].id) {
net/ethtool/ioctl.c
3844
act = &flow->rule->action.entries[0];
net/ipv4/netfilter/arp_tables.c
1034
xt_entry_foreach(iter, private->entries, private->size) {
net/ipv4/netfilter/arp_tables.c
1062
struct compat_arpt_entry entries[];
net/ipv4/netfilter/arp_tables.c
1201
memset(newinfo->entries, 0, size);
net/ipv4/netfilter/arp_tables.c
1208
entry1 = newinfo->entries;
net/ipv4/netfilter/arp_tables.c
1280
loc_cpu_entry = newinfo->entries;
net/ipv4/netfilter/arp_tables.c
1357
xt_entry_foreach(iter, private->entries, total_size) {
net/ipv4/netfilter/arp_tables.c
1500
loc_cpu_entry = private->entries;
net/ipv4/netfilter/arp_tables.c
1525
loc_cpu_entry = newinfo->entries;
net/ipv4/netfilter/arp_tables.c
1526
memcpy(loc_cpu_entry, repl->entries, repl->size);
net/ipv4/netfilter/arp_tables.c
209
table_base = private->entries;
net/ipv4/netfilter/arp_tables.c
611
xt_entry_foreach(iter, t->entries, t->size) {
net/ipv4/netfilter/arp_tables.c
638
xt_entry_foreach(iter, t->entries, t->size) {
net/ipv4/netfilter/arp_tables.c
685
loc_cpu_entry = private->entries;
net/ipv4/netfilter/arp_tables.c
776
memcpy(newinfo, info, offsetof(struct xt_table_info, entries));
net/ipv4/netfilter/arp_tables.c
778
loc_cpu_entry = info->entries;
net/ipv4/netfilter/arp_tables.c
929
loc_cpu_old_entry = oldinfo->entries;
net/ipv4/netfilter/arp_tables.c
978
loc_cpu_entry = newinfo->entries;
net/ipv4/netfilter/ip_tables.c
1081
xt_entry_foreach(iter, oldinfo->entries, oldinfo->size)
net/ipv4/netfilter/ip_tables.c
1130
loc_cpu_entry = newinfo->entries;
net/ipv4/netfilter/ip_tables.c
1186
xt_entry_foreach(iter, private->entries, private->size) {
net/ipv4/netfilter/ip_tables.c
1214
struct compat_ipt_entry entries[];
net/ipv4/netfilter/ip_tables.c
1433
memset(newinfo->entries, 0, size);
net/ipv4/netfilter/ip_tables.c
1440
entry1 = newinfo->entries;
net/ipv4/netfilter/ip_tables.c
1518
loc_cpu_entry = newinfo->entries;
net/ipv4/netfilter/ip_tables.c
1567
xt_entry_foreach(iter, private->entries, total_size) {
net/ipv4/netfilter/ip_tables.c
1715
loc_cpu_entry = private->entries;
net/ipv4/netfilter/ip_tables.c
1739
loc_cpu_entry = newinfo->entries;
net/ipv4/netfilter/ip_tables.c
1740
memcpy(loc_cpu_entry, repl->entries, repl->size);
net/ipv4/netfilter/ip_tables.c
199
root = get_entry(private->entries, private->hook_entry[hook]);
net/ipv4/netfilter/ip_tables.c
262
table_base = private->entries;
net/ipv4/netfilter/ip_tables.c
750
xt_entry_foreach(iter, t->entries, t->size) {
net/ipv4/netfilter/ip_tables.c
777
xt_entry_foreach(iter, t->entries, t->size) {
net/ipv4/netfilter/ip_tables.c
825
loc_cpu_entry = private->entries;
net/ipv4/netfilter/ip_tables.c
931
memcpy(newinfo, info, offsetof(struct xt_table_info, entries));
net/ipv4/netfilter/ip_tables.c
933
loc_cpu_entry = info->entries;
net/ipv4/netfilter/iptable_filter.c
46
((struct ipt_standard *)repl->entries)[1].target.verdict =
net/ipv4/udp_tunnel_nic.c
140
if (!udp_tunnel_nic_entry_is_free(&utn->entries[i][j]))
net/ipv4/udp_tunnel_nic.c
160
if (udp_tunnel_nic_entry_is_free(&utn->entries[i][j]))
net/ipv4/udp_tunnel_nic.c
175
entry = &utn->entries[table][idx];
net/ipv4/udp_tunnel_nic.c
185
dev->udp_tunnel_nic->entries[table][idx].hw_priv = priv;
net/ipv4/udp_tunnel_nic.c
220
entry = &utn->entries[table][idx];
net/ipv4/udp_tunnel_nic.c
263
if (udp_tunnel_nic_entry_is_queued(&utn->entries[i][j]))
net/ipv4/udp_tunnel_nic.c
276
entry = &utn->entries[i][j];
net/ipv4/udp_tunnel_nic.c
346
entry = &utn->entries[i][j];
net/ipv4/udp_tunnel_nic.c
362
struct udp_tunnel_nic_table_entry *entry = &utn->entries[table][idx];
net/ipv4/udp_tunnel_nic.c
401
struct udp_tunnel_nic_table_entry *entry = &utn->entries[table][idx];
net/ipv4/udp_tunnel_nic.c
470
entry = &utn->entries[i][j];
net/ipv4/udp_tunnel_nic.c
53
struct udp_tunnel_nic_table_entry *entries[] __counted_by(n_tables);
net/ipv4/udp_tunnel_nic.c
560
entry = &utn->entries[i][j];
net/ipv4/udp_tunnel_nic.c
592
if (!udp_tunnel_nic_entry_is_present(&utn->entries[table][j]))
net/ipv4/udp_tunnel_nic.c
617
if (!udp_tunnel_nic_entry_is_present(&utn->entries[table][j]))
net/ipv4/udp_tunnel_nic.c
625
utn->entries[table][j].port) ||
net/ipv4/udp_tunnel_nic.c
627
ilog2(utn->entries[table][j].type)))
net/ipv4/udp_tunnel_nic.c
688
int adj_cnt = -utn->entries[i][j].use_cnt;
net/ipv4/udp_tunnel_nic.c
697
memset(utn->entries[i], 0, array_size(info->tables[i].n_entries,
net/ipv4/udp_tunnel_nic.c
698
sizeof(**utn->entries)));
net/ipv4/udp_tunnel_nic.c
715
udp_tunnel_nic_entry_freeze_used(&utn->entries[i][j]);
net/ipv4/udp_tunnel_nic.c
728
udp_tunnel_nic_entry_unfreeze(&utn->entries[i][j]);
net/ipv4/udp_tunnel_nic.c
756
utn = kzalloc_flex(*utn, entries, n_tables);
net/ipv4/udp_tunnel_nic.c
764
utn->entries[i] = kzalloc_objs(*utn->entries[i],
net/ipv4/udp_tunnel_nic.c
766
if (!utn->entries[i])
net/ipv4/udp_tunnel_nic.c
774
kfree(utn->entries[i]);
net/ipv4/udp_tunnel_nic.c
784
kfree(utn->entries[i]);
net/ipv6/netfilter/ip6_tables.c
1098
xt_entry_foreach(iter, oldinfo->entries, oldinfo->size)
net/ipv6/netfilter/ip6_tables.c
1147
loc_cpu_entry = newinfo->entries;
net/ipv6/netfilter/ip6_tables.c
1202
xt_entry_foreach(iter, private->entries, private->size) {
net/ipv6/netfilter/ip6_tables.c
1230
struct compat_ip6t_entry entries[];
net/ipv6/netfilter/ip6_tables.c
1448
memset(newinfo->entries, 0, size);
net/ipv6/netfilter/ip6_tables.c
1455
entry1 = newinfo->entries;
net/ipv6/netfilter/ip6_tables.c
1527
loc_cpu_entry = newinfo->entries;
net/ipv6/netfilter/ip6_tables.c
1576
xt_entry_foreach(iter, private->entries, total_size) {
net/ipv6/netfilter/ip6_tables.c
1724
loc_cpu_entry = private->entries;
net/ipv6/netfilter/ip6_tables.c
1748
loc_cpu_entry = newinfo->entries;
net/ipv6/netfilter/ip6_tables.c
1749
memcpy(loc_cpu_entry, repl->entries, repl->size);
net/ipv6/netfilter/ip6_tables.c
224
root = get_entry(private->entries, private->hook_entry[hook]);
net/ipv6/netfilter/ip6_tables.c
284
table_base = private->entries;
net/ipv6/netfilter/ip6_tables.c
767
xt_entry_foreach(iter, t->entries, t->size) {
net/ipv6/netfilter/ip6_tables.c
794
xt_entry_foreach(iter, t->entries, t->size) {
net/ipv6/netfilter/ip6_tables.c
841
loc_cpu_entry = private->entries;
net/ipv6/netfilter/ip6_tables.c
947
memcpy(newinfo, info, offsetof(struct xt_table_info, entries));
net/ipv6/netfilter/ip6_tables.c
949
loc_cpu_entry = info->entries;
net/ipv6/netfilter/ip6table_filter.c
45
((struct ip6t_standard *)repl->entries)[1].target.verdict =
net/ipv6/route.c
3370
int entries;
net/ipv6/route.c
3376
entries = dst_entries_get_slow(ops);
net/ipv6/route.c
3377
if (entries < ops->gc_thresh)
net/mac80211/ieee80211_i.h
741
atomic_t entries; /* Up to MAX_MESH_NEIGHBOURS */
net/mac80211/mesh.c
232
int entries = 0;
net/mac80211/mesh.c
244
++entries;
net/mac80211/mesh.c
246
entries == RMC_QUEUE_MAX_LEN) {
net/mac80211/mesh.c
249
--entries;
net/mac80211/mesh_pathtbl.c
806
atomic_dec(&tbl->entries);
net/mac80211/mesh_pathtbl.c
91
atomic_set(&tbl->entries, 0);
net/mac80211/rx.c
2254
for (i = 0; i < ARRAY_SIZE(cache->entries); i++)
net/mac80211/rx.c
2255
skb_queue_head_init(&cache->entries[i].skb_list);
net/mac80211/rx.c
2262
for (i = 0; i < ARRAY_SIZE(cache->entries); i++)
net/mac80211/rx.c
2263
__skb_queue_purge(&cache->entries[i].skb_list);
net/mac80211/rx.c
2273
entry = &cache->entries[cache->next++];
net/mac80211/rx.c
2308
entry = &cache->entries[idx];
net/mac80211/sta_info.h
465
struct ieee80211_fragment_entry entries[IEEE80211_FRAGMENT_MAX];
net/netfilter/ipvs/ip_vs_lblc.c
105
atomic_t entries; /* number of entries */
net/netfilter/ipvs/ip_vs_lblc.c
171
atomic_inc(&tbl->entries);
net/netfilter/ipvs/ip_vs_lblc.c
239
atomic_dec(&tbl->entries);
net/netfilter/ipvs/ip_vs_lblc.c
273
atomic_dec(&tbl->entries);
net/netfilter/ipvs/ip_vs_lblc.c
310
if (atomic_read(&tbl->entries) <= tbl->max_size) {
net/netfilter/ipvs/ip_vs_lblc.c
315
goal = (atomic_read(&tbl->entries) - tbl->max_size)*4/3;
net/netfilter/ipvs/ip_vs_lblc.c
328
atomic_dec(&tbl->entries);
net/netfilter/ipvs/ip_vs_lblc.c
369
atomic_set(&tbl->entries, 0);
net/netfilter/ipvs/ip_vs_lblcr.c
273
atomic_t entries; /* number of entries */
net/netfilter/ipvs/ip_vs_lblcr.c
334
atomic_inc(&tbl->entries);
net/netfilter/ipvs/ip_vs_lblcr.c
437
atomic_dec(&tbl->entries);
net/netfilter/ipvs/ip_vs_lblcr.c
474
if (atomic_read(&tbl->entries) <= tbl->max_size) {
net/netfilter/ipvs/ip_vs_lblcr.c
479
goal = (atomic_read(&tbl->entries) - tbl->max_size)*4/3;
net/netfilter/ipvs/ip_vs_lblcr.c
492
atomic_dec(&tbl->entries);
net/netfilter/ipvs/ip_vs_lblcr.c
532
atomic_set(&tbl->entries, 0);
net/netfilter/nf_dup_netdev.c
98
entry = &flow->rule->action.entries[ctx->num_actions++];
net/netfilter/nf_flow_table_offload.c
228
return &flow_rule->rule->action.entries[i];
net/netfilter/nf_flow_table_offload.c
849
entry = &flow_rule->rule->action.entries[i];
net/netfilter/nf_nat_core.c
1247
ret = nf_hook_entries_insert_raw(&priv->entries, ops);
net/netfilter/nf_nat_core.c
1286
nf_hook_entries_delete_raw(&priv->entries, ops);
net/netfilter/nf_nat_core.c
43
struct nf_hook_entries __rcu *entries;
net/netfilter/nf_nat_core.c
934
struct nf_hook_entries *e = rcu_dereference(lpriv->entries);
net/netfilter/nf_tables_api.c
10727
adp->entries += nf_tables_commit_audit_entrycount(trans);
net/netfilter/nf_tables_api.c
10742
audit_log_nfcfg(aubuf, adp->table->family, adp->entries,
net/netfilter/nf_tables_api.c
3814
unsigned int entries = 0;
net/netfilter/nf_tables_api.c
3838
entries++;
net/netfilter/nf_tables_api.c
3846
if (ctx->reset && entries)
net/netfilter/nf_tables_api.c
3847
audit_log_rule_reset(table, cb->seq, entries);
net/netfilter/nf_tables_api.c
78
int entries;
net/netfilter/nf_tables_api.c
8394
unsigned int entries = 0;
net/netfilter/nf_tables_api.c
8407
entries = 0;
net/netfilter/nf_tables_api.c
8429
entries++;
net/netfilter/nf_tables_api.c
8434
if (ctx->reset && entries)
net/netfilter/nf_tables_api.c
8435
audit_log_obj_reset(table, nft_base_seq(net), entries);
net/netfilter/nft_immediate.c
282
entry = &flow->rule->action.entries[ctx->num_actions++];
net/netfilter/xt_recent.c
149
t->entries--;
net/netfilter/xt_recent.c
185
if (t->entries >= ip_list_tot) {
net/netfilter/xt_recent.c
205
t->entries++;
net/netfilter/xt_recent.c
82
unsigned int entries;
net/netfilter/xt_repldata.h
23
struct type##_standard entries[]; \
net/netfilter/xt_repldata.h
26
size_t term_offset = (offsetof(typeof(*tbl), entries[nhooks]) + \
net/netfilter/xt_repldata.h
43
tbl->entries[i++] = (struct type##_standard) \
net/openvswitch/flow_table.c
790
struct mask_cache_entry *entries, *ce;
net/openvswitch/flow_table.c
813
entries = this_cpu_ptr(mc->mask_cache);
net/openvswitch/flow_table.c
820
e = &entries[index];
net/sched/act_ct.c
282
memset(&action->entries[i], 0, sizeof(action->entries[i]));
net/sched/act_ct.c
74
return &flow_action->entries[i];
net/sched/act_gate.c
223
static void release_entry_list(struct list_head *entries)
net/sched/act_gate.c
227
list_for_each_entry_safe(entry, e, entries, list) {
net/sched/act_gate.c
240
list_for_each_entry(entry, &src->entries, list) {
net/sched/act_gate.c
254
list_add_tail(&new->list, &dst->entries);
net/sched/act_gate.c
293
list_add_tail(&entry->list, &sched->entries);
net/sched/act_gate.c
302
release_entry_list(&sched->entries);
net/sched/act_gate.c
423
INIT_LIST_HEAD(&p->entries);
net/sched/act_gate.c
500
list_for_each_entry(entry, &p->entries, list)
net/sched/act_gate.c
521
gact->next_entry = list_first_entry(&p->entries,
net/sched/act_gate.c
539
release_entry_list(&p->entries);
net/sched/act_gate.c
559
release_entry_list(&p->entries);
net/sched/act_gate.c
652
list_for_each_entry(entry, &p->entries, list) {
net/sched/act_gate.c
697
entry->gate.entries = tcf_gate_get_list(act);
net/sched/act_gate.c
699
if (!entry->gate.entries)
net/sched/act_gate.c
703
entry->destructor_priv = entry->gate.entries;
net/sched/act_gate.c
95
if (list_is_last(&next->list, &p->entries))
net/sched/act_gate.c
96
next = list_first_entry(&p->entries,
net/sched/cls_api.c
3887
entry = &flow_action->entries[j];
net/sched/sch_taprio.c
1118
list_add_tail(&entry->list, &sched->entries);
net/sched/sch_taprio.c
1157
list_for_each_entry(entry, &new->entries, list)
net/sched/sch_taprio.c
1249
first = list_first_entry(&sched->entries,
net/sched/sch_taprio.c
127
list_for_each_entry(entry, &sched->entries, list) {
net/sched/sch_taprio.c
1369
list_for_each_entry(entry, &sched->entries, list) {
net/sched/sch_taprio.c
1379
__offload = kzalloc_flex(*__offload, offload.entries, num_entries);
net/sched/sch_taprio.c
1469
list_for_each_entry(entry, &sched->entries, list) {
net/sched/sch_taprio.c
1470
struct tc_taprio_sched_entry *e = &offload->entries[i];
net/sched/sch_taprio.c
150
cur = list_next_entry_circular(cur, &sched->entries, list);
net/sched/sch_taprio.c
1877
INIT_LIST_HEAD(&new_admin->entries);
net/sched/sch_taprio.c
201
list_for_each_entry_safe(entry, n, &sched->entries, list) {
net/sched/sch_taprio.c
2268
list_for_each_entry(entry, &root->entries, list) {
net/sched/sch_taprio.c
355
list_for_each_entry(entry, &sched->entries, list) {
net/sched/sch_taprio.c
80
struct list_head entries;
net/sched/sch_taprio.c
875
if (list_is_last(&entry->list, &oper->entries))
net/sched/sch_taprio.c
948
next = list_first_entry(&oper->entries, struct sched_entry,
net/sched/sch_taprio.c
955
next = list_first_entry(&oper->entries, struct sched_entry,
net/sunrpc/cache.c
138
detail->entries++;
net/sunrpc/cache.c
237
detail->entries++;
net/sunrpc/cache.c
405
cd->entries = 0;
net/sunrpc/cache.c
551
if (!detail->entries) {
net/sunrpc/cache.c
556
dprintk("RPC: %d entries in %s cache\n", detail->entries, detail->name);
net/sunrpc/cache.c
90
cd->entries --;
net/xdp/xsk.c
1143
static int xsk_init_queue(u32 entries, struct xsk_queue **queue,
net/xdp/xsk.c
1148
if (entries == 0 || *queue || !is_power_of_2(entries))
net/xdp/xsk.c
1151
q = xskq_create(entries, umem_queue);
net/xdp/xsk.c
1490
int entries;
net/xdp/xsk.c
1492
if (optlen < sizeof(entries))
net/xdp/xsk.c
1494
if (copy_from_sockptr(&entries, optval, sizeof(entries)))
net/xdp/xsk.c
1503
err = xsk_init_queue(entries, q, false);
net/xdp/xsk.c
1557
int entries;
net/xdp/xsk.c
1559
if (optlen < sizeof(entries))
net/xdp/xsk.c
1561
if (copy_from_sockptr(&entries, optval, sizeof(entries)))
net/xdp/xsk.c
1572
err = xsk_init_queue(entries, q, true);
net/xdp/xsk_buff_pool.c
58
u32 i, entries;
net/xdp/xsk_buff_pool.c
60
entries = unaligned ? umem->chunks : 0;
net/xdp/xsk_buff_pool.c
61
pool = kvzalloc_flex(*pool, free_heads, entries);
net/xdp/xsk_diag.c
31
dr.entries = queue->nentries;
net/xdp/xsk_queue.h
328
u32 entries = q->cached_prod - q->cached_cons;
net/xdp/xsk_queue.h
330
if (entries >= max)
net/xdp/xsk_queue.h
334
entries = q->cached_prod - q->cached_cons;
net/xdp/xsk_queue.h
336
return entries >= max ? max : entries;
net/xfrm/xfrm_algo.c
650
int entries;
net/xfrm/xfrm_algo.c
656
.entries = ARRAY_SIZE(aead_list),
net/xfrm/xfrm_algo.c
662
.entries = ARRAY_SIZE(aalg_list),
net/xfrm/xfrm_algo.c
668
.entries = ARRAY_SIZE(ealg_list),
net/xfrm/xfrm_algo.c
674
.entries = ARRAY_SIZE(calg_list),
net/xfrm/xfrm_algo.c
685
for (i = 0; i < algo_list->entries; i++) {
scripts/kallsyms.c
102
for (i = 0; i < entries; ++i) {
scripts/kallsyms.c
176
const struct addr_range *ranges, int entries)
scripts/kallsyms.c
181
for (i = 0; i < entries; ++i) {
scripts/kallsyms.c
97
struct addr_range *ranges, int entries)
scripts/kconfig/expr.h
290
struct list_head entries;
scripts/kconfig/mconf.c
309
struct list_head entries;
scripts/kconfig/mconf.c
326
list_for_each_entry(sp, &trail, entries) {
scripts/kconfig/mconf.c
423
list_add_tail(&stpart.entries, &trail);
scripts/kconfig/mconf.c
446
list_for_each_entry_safe(pos, tmp, &head, entries)
scripts/kconfig/mconf.c
767
list_add_tail(&stpart.entries, &trail);
scripts/kconfig/menu.c
671
list_add_tail(&jump->entries, head);
scripts/kconfig/mnconf-common.c
30
list_for_each_entry(pos, data->head, entries) {
security/integrity/ima/ima_policy.c
877
static void add_rules(struct ima_rule_entry *entries, int count,
security/integrity/ima/ima_policy.c
886
list_add_tail(&entries[i].list, &ima_default_rules);
security/integrity/ima/ima_policy.c
889
entry = kmemdup(&entries[i], sizeof(*entry),
security/integrity/ima/ima_policy.c
896
if (entries[i].action == APPRAISE) {
security/integrity/ima/ima_policy.c
897
if (entries != build_appraise_rules)
security/integrity/ima/ima_policy.c
899
ima_appraise_flag(entries[i].func);
security/integrity/ima/ima_policy.c
902
ima_appraise_flag(entries[i].func);
security/selinux/ss/sidtab.c
120
int entries = 0;
security/selinux/ss/sidtab.c
127
entries++;
security/selinux/ss/sidtab.c
147
entries, slots_used, SIDTAB_HASH_BUCKETS,
security/selinux/ss/sidtab.c
179
s->roots[l].ptr_inner->entries[0] = s->roots[l - 1];
security/selinux/ss/sidtab.c
204
entry = &entry->ptr_inner->entries[leaf_index >> capacity_shift];
security/selinux/ss/sidtab.c
222
return &entry->ptr_leaf->entries[index % SIDTAB_LEAF_ENTRIES];
security/selinux/ss/sidtab.c
394
rc = sidtab_convert_tree(&edst->ptr_inner->entries[i],
security/selinux/ss/sidtab.c
395
&esrc->ptr_inner->entries[i],
security/selinux/ss/sidtab.c
413
&esrc->ptr_leaf->entries[i].context,
security/selinux/ss/sidtab.c
414
&edst->ptr_leaf->entries[i].context,
security/selinux/ss/sidtab.c
527
sidtab_destroy_tree(node->entries[i], level - 1);
security/selinux/ss/sidtab.c
536
sidtab_destroy_entry(&node->entries[i]);
security/selinux/ss/sidtab.h
57
struct sidtab_entry entries[SIDTAB_LEAF_ENTRIES];
security/selinux/ss/sidtab.h
61
union sidtab_entry_inner entries[SIDTAB_INNER_ENTRIES];
sound/firewire/dice/dice-presonus.c
25
} *entry, entries[] = {
sound/firewire/dice/dice-presonus.c
41
for (i = 0; i < ARRAY_SIZE(entries); ++i) {
sound/firewire/dice/dice-presonus.c
42
entry = entries + i;
sound/firewire/dice/dice-presonus.c
46
if (i == ARRAY_SIZE(entries))
sound/firewire/dice/dice-tcelectronic.c
63
} *entry, entries[] = {
sound/firewire/dice/dice-tcelectronic.c
85
for (i = 0; i < ARRAY_SIZE(entries); ++i) {
sound/firewire/dice/dice-tcelectronic.c
86
entry = entries + i;
sound/firewire/dice/dice-tcelectronic.c
90
if (i == ARRAY_SIZE(entries))
sound/firewire/dice/dice-weiss.c
66
} *entry, entries[] = {
sound/firewire/dice/dice-weiss.c
90
for (i = 0; i < ARRAY_SIZE(entries); ++i) {
sound/firewire/dice/dice-weiss.c
91
entry = entries + i;
sound/firewire/dice/dice-weiss.c
95
if (i == ARRAY_SIZE(entries))
sound/firewire/tascam/tascam-hwdep.c
44
struct snd_firewire_tascam_change *entries = tscm->queue;
sound/firewire/tascam/tascam-hwdep.c
48
if (remained < sizeof(type) + sizeof(*entries)) {
sound/firewire/tascam/tascam-hwdep.c
71
length = (tail_pos - head_pos) * sizeof(*entries);
sound/firewire/tascam/tascam-hwdep.c
73
length = rounddown(remained, sizeof(*entries));
sound/firewire/tascam/tascam-hwdep.c
78
if (copy_to_user(pos, &entries[head_pos], length))
sound/pci/emu10k1/emuproc.c
304
const struct emu10k1_reg_entry *entries,
sound/pci/emu10k1/emuproc.c
308
unsigned base = entries[i].base;
sound/pci/emu10k1/emuproc.c
309
unsigned size = entries[i].size;
sound/pci/emu10k1/emuproc.c
313
const char *name = entries[i].name;
sound/pci/trident/trident.h
252
__le32 *entries; /* 16k-aligned TLB table */
sound/pci/trident/trident_main.c
2109
if (trident->tlb.entries) {
sound/pci/trident/trident_main.c
2124
if (trident->tlb.entries) {
sound/pci/trident/trident_main.c
2168
if (trident->tlb.entries)
sound/pci/trident/trident_main.c
2186
if (trident->tlb.entries)
sound/pci/trident/trident_main.c
3252
if (trident->tlb.entries) {
sound/pci/trident/trident_main.c
3296
trident->tlb.entries = (__le32 *)ALIGN((unsigned long)trident->tlb.buffer->area, SNDRV_TRIDENT_MAX_PAGES * 4);
sound/pci/trident/trident_main.c
3309
trident->tlb.entries[i] = cpu_to_le32(trident->tlb.silent_page->addr & ~(SNDRV_TRIDENT_PAGE_SIZE-1));
sound/pci/trident/trident_main.c
3411
if (trident->tlb.entries != NULL) {
sound/pci/trident/trident_main.c
3518
trident->tlb.entries = NULL;
sound/pci/trident/trident_main.c
783
if (trident->tlb.entries) {
sound/pci/trident/trident_main.c
876
if (trident->tlb.entries) {
sound/pci/trident/trident_memory.c
23
(trident)->tlb.entries[page] = cpu_to_le32((addr) & ~(SNDRV_TRIDENT_PAGE_SIZE-1))
sound/pci/trident/trident_memory.c
25
(dma_addr_t)le32_to_cpu((trident->tlb.entries[page]) & ~(SNDRV_TRIDENT_PAGE_SIZE - 1))
sound/soc/intel/avs/loader.c
67
u32 entries;
sound/soc/intel/avs/messages.h
593
struct avs_module_entry entries[];
sound/soc/intel/avs/utils.c
206
max_id = adev->mods_info->entries[idx].instance_max_count - 1;
sound/soc/intel/avs/utils.c
23
module = &adev->mods_info->entries[i];
sound/soc/intel/avs/utils.c
39
module = &adev->mods_info->entries[i];
sound/soc/intel/avs/utils.c
55
memcpy(entry, &adev->mods_info->entries[idx], sizeof(*entry));
sound/soc/intel/avs/utils.c
69
memcpy(entry, &adev->mods_info->entries[idx], sizeof(*entry));
sound/soc/intel/catpt/messages.h
187
struct catpt_module_entry entries[];
sound/soc/intel/catpt/pcm.c
1152
entry = &template->entries[j];
sound/soc/intel/catpt/pcm.c
20
struct catpt_module_entry entries[];
sound/soc/intel/catpt/pcm.c
27
.entries = {{ CATPT_MODID_PCM_SYSTEM, 0 }},
sound/soc/intel/catpt/pcm.c
34
.entries = {{ CATPT_MODID_PCM_CAPTURE, 0 }},
sound/soc/intel/catpt/pcm.c
406
stream->template->entries,
sound/soc/intel/catpt/pcm.c
41
.entries = {{ CATPT_MODID_PCM, 0 }},
sound/soc/intel/catpt/pcm.c
48
.entries = {{ CATPT_MODID_PCM_REFERENCE, 0 }},
sound/soc/intel/catpt/pcm.c
55
.entries = {{ CATPT_MODID_BLUETOOTH_RENDER, 0 }},
sound/soc/intel/catpt/pcm.c
62
.entries = {{ CATPT_MODID_BLUETOOTH_CAPTURE, 0 }},
tools/arch/x86/include/uapi/asm/kvm.h
200
struct kvm_msr_entry entries[];
tools/arch/x86/include/uapi/asm/kvm.h
248
struct kvm_cpuid_entry entries[];
tools/arch/x86/include/uapi/asm/kvm.h
270
struct kvm_cpuid_entry2 entries[];
tools/build/feature/test-backtrace.c
10
entries = backtrace(backtrace_fns, 10);
tools/build/feature/test-backtrace.c
11
backtrace_symbols_fd(backtrace_fns, entries, 1);
tools/build/feature/test-backtrace.c
8
int entries;
tools/include/io_uring/mini_liburing.h
125
static inline int io_uring_setup(unsigned int entries,
tools/include/io_uring/mini_liburing.h
128
return syscall(__NR_io_uring_setup, entries, p);
tools/include/io_uring/mini_liburing.h
139
static inline int io_uring_queue_init_params(unsigned int entries,
tools/include/io_uring/mini_liburing.h
147
fd = io_uring_setup(entries, p);
tools/include/io_uring/mini_liburing.h
160
static inline int io_uring_queue_init(unsigned int entries,
tools/include/io_uring/mini_liburing.h
169
return io_uring_queue_init_params(entries, ring, &p);
tools/include/uapi/linux/kvm.h
1054
struct kvm_irq_routing_entry entries[];
tools/lib/api/fd/array.c
118
if (!fda->entries[fd].events)
tools/lib/api/fd/array.c
121
if (fda->entries[fd].revents & revents) {
tools/lib/api/fd/array.c
125
fda->entries[fd].revents = fda->entries[fd].events = 0;
tools/lib/api/fd/array.c
138
return poll(fda->entries, fda->nr, timeout);
tools/lib/api/fd/array.c
146
printed += fprintf(fp, "%s%d", fd ? ", " : "", fda->entries[fd].fd);
tools/lib/api/fd/array.c
15
fda->entries = NULL;
tools/lib/api/fd/array.c
27
struct pollfd *entries = realloc(fda->entries, size);
tools/lib/api/fd/array.c
29
if (entries == NULL)
tools/lib/api/fd/array.c
34
free(entries);
tools/lib/api/fd/array.c
38
memset(&entries[fda->nr_alloc], 0, sizeof(struct pollfd) * nr);
tools/lib/api/fd/array.c
42
fda->entries = entries;
tools/lib/api/fd/array.c
65
free(fda->entries);
tools/lib/api/fd/array.c
84
fda->entries[fda->nr].fd = fd;
tools/lib/api/fd/array.c
85
fda->entries[fda->nr].events = revents;
tools/lib/api/fd/array.c
99
entry = &from->entries[pos];
tools/lib/api/fd/array.h
23
struct pollfd *entries;
tools/lib/perf/evlist.c
139
list_for_each_entry_from(next, &evlist->entries, node)
tools/lib/perf/evlist.c
168
list_for_each_entry_safe(evsel, n, &evlist->entries, node) {
tools/lib/perf/evlist.c
180
list_add_tail(&evsel->node, &evlist->entries);
tools/lib/perf/evlist.c
210
next = list_first_entry(&evlist->entries,
tools/lib/perf/evlist.c
218
if (&next->node == &evlist->entries)
tools/lib/perf/evlist.c
30
INIT_LIST_HEAD(&evlist->entries);
tools/lib/perf/evlist.c
744
if (evlist->pollfd.entries == NULL && perf_evlist__alloc_pollfd(evlist) < 0)
tools/lib/perf/evlist.c
809
struct perf_evsel *first = list_entry(evlist->entries.next,
tools/lib/perf/evlist.c
812
__perf_evlist__set_leader(&evlist->entries, first);
tools/lib/perf/include/internal/evlist.h
113
__perf_evlist__for_each_entry_safe(&(evlist)->entries, tmp, evsel)
tools/lib/perf/include/internal/evlist.h
117
return list_entry(evlist->entries.next, struct perf_evsel, node);
tools/lib/perf/include/internal/evlist.h
122
return list_entry(evlist->entries.prev, struct perf_evsel, node);
tools/lib/perf/include/internal/evlist.h
18
struct list_head entries;
tools/lib/perf/include/internal/evlist.h
79
__perf_evlist__for_each_entry(&(evlist)->entries, evsel)
tools/lib/perf/include/internal/evlist.h
95
__perf_evlist__for_each_entry_reverse(&(evlist)->entries, evsel)
tools/lib/perf/include/internal/xyarray.h
11
size_t entries;
tools/lib/perf/include/perf/event.h
341
struct id_index_entry entries[];
tools/lib/perf/include/perf/event.h
405
struct perf_record_thread_map_entry entries[];
tools/lib/perf/include/perf/event.h
496
struct perf_record_bpf_metadata_entry entries[];
tools/lib/perf/xyarray.c
15
xy->entries = xlen * ylen;
tools/lib/perf/xyarray.c
25
size_t n = xy->entries * xy->entry_size;
tools/objtool/special.c
141
for (entry = entries; entry->sec; entry++) {
tools/objtool/special.c
28
static const struct special_entry entries[] = {
tools/perf/builtin-annotate.c
413
struct rb_node *nd = rb_first_cached(&hists->entries), *next;
tools/perf/builtin-c2c.c
2385
struct rb_node *next = rb_first_cached(&hists->entries);
tools/perf/builtin-c2c.c
2528
nd = rb_first_cached(&c2c.hists.hists.entries);
tools/perf/builtin-c2c.c
2633
struct rb_node *nd = rb_first_cached(&hb->hists->entries);
tools/perf/builtin-daemon.c
1328
if (fda.entries[sock_pos].revents & POLLIN)
tools/perf/builtin-daemon.c
1330
if (fda.entries[file_pos].revents & POLLIN)
tools/perf/builtin-daemon.c
1332
if (fda.entries[signal_pos].revents & POLLIN)
tools/perf/builtin-kvm.c
1078
for (nd = rb_first_cached(&kvm_hists.hists.entries); nd; nd = rb_next(nd)) {
tools/perf/builtin-kvm.c
1464
if (fda->entries[nr_stdin].revents & POLLIN)
tools/perf/builtin-kvm.c
519
struct rb_node *nd = rb_first_cached(&hb->hists->entries);
tools/perf/builtin-kwork.c
1779
list_for_each_entry(evsel, &session->evlist->core.entries, core.node) {
tools/perf/builtin-record.c
1128
thread_data, pos, evlist->core.pollfd.entries[f].fd);
tools/perf/builtin-record.c
1172
struct pollfd *e_entries = evlist->core.pollfd.entries;
tools/perf/builtin-record.c
1173
struct pollfd *t_entries = thread_data->pollfd.entries;
tools/perf/builtin-record.c
1208
thread_data, ret, fda->entries[i].fd);
tools/perf/builtin-record.c
1780
if (pollfd->entries[ctlfd_pos].revents & POLLHUP) {
tools/perf/builtin-record.c
1784
pollfd->entries[ctlfd_pos].fd = -1;
tools/perf/builtin-record.c
1785
pollfd->entries[ctlfd_pos].events = 0;
tools/perf/builtin-record.c
1788
pollfd->entries[ctlfd_pos].revents = 0;
tools/perf/builtin-record.c
1810
if (!have_tracepoints(&rec->evlist->core.entries))
tools/perf/builtin-record.c
4318
evlist__splice_list_tail(rec->evlist, &def_evlist->core.entries);
tools/perf/builtin-sched.c
3214
list_for_each_entry(evsel, &evlist->core.entries, core.node) {
tools/perf/builtin-script.c
1010
printed += print_bstack_flags(fp, entries + i);
tools/perf/builtin-script.c
1021
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/builtin-script.c
1033
from = entries[i].from;
tools/perf/builtin-script.c
1034
to = entries[i].to;
tools/perf/builtin-script.c
1046
printed += print_bstack_flags(fp, entries + i);
tools/perf/builtin-script.c
1059
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/builtin-script.c
1071
from = entries[i].from;
tools/perf/builtin-script.c
1072
to = entries[i].to;
tools/perf/builtin-script.c
1088
printed += print_bstack_flags(fp, entries + i);
tools/perf/builtin-script.c
1357
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/builtin-script.c
1383
len = grab_bb(buffer, entries[nr-1].from,
tools/perf/builtin-script.c
1384
entries[nr-1].from,
tools/perf/builtin-script.c
1387
printed += ip__fprintf_sym(entries[nr - 1].from, thread,
tools/perf/builtin-script.c
1389
printed += ip__fprintf_jump(entries[nr - 1].from, &entries[nr - 1],
tools/perf/builtin-script.c
1393
printed += print_srccode(thread, x.cpumode, entries[nr - 1].from);
tools/perf/builtin-script.c
1398
if (entries[i].from || entries[i].to)
tools/perf/builtin-script.c
1400
entries[i].from,
tools/perf/builtin-script.c
1401
entries[i].to);
tools/perf/builtin-script.c
1402
start = entries[i + 1].to;
tools/perf/builtin-script.c
1403
end = entries[i].from;
tools/perf/builtin-script.c
1408
end = entries[--i].from;
tools/perf/builtin-script.c
1423
printed += ip__fprintf_jump(ip, &entries[i], &x, buffer + off, len - off, ++insn, fp,
tools/perf/builtin-script.c
1450
if (entries[0].from == sample->ip)
tools/perf/builtin-script.c
1452
if (entries[0].flags.abort)
tools/perf/builtin-script.c
1463
start = entries[0].to;
tools/perf/builtin-script.c
982
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/builtin-script.c
990
from = entries[i].from;
tools/perf/builtin-script.c
991
to = entries[i].to;
tools/perf/builtin-stat.c
1202
if (!list_empty(&evlist->core.entries)) {
tools/perf/builtin-stat.c
2101
evlist__splice_list_tail(evlist, &metric_evlist->core.entries);
tools/perf/builtin-stat.c
2107
list_sort(/*priv=*/NULL, &evlist->core.entries, default_evlist_evsel_cmp);
tools/perf/builtin-stat.c
2122
evlist__splice_list_tail(evsel_list, &evlist->core.entries);
tools/perf/builtin-top.c
1703
evlist__splice_list_tail(top.evlist, &def_evlist->core.entries);
tools/perf/builtin-top.c
416
next = rb_first_cached(&hists->entries);
tools/perf/builtin-trace.c
173
int *entries;
tools/perf/builtin-trace.c
177
pid_t *entries;
tools/perf/builtin-trace.c
2267
trace->ev_qualifier_ids.entries = malloc(nr_allocated *
tools/perf/builtin-trace.c
2268
sizeof(trace->ev_qualifier_ids.entries[0]));
tools/perf/builtin-trace.c
2270
if (trace->ev_qualifier_ids.entries == NULL) {
tools/perf/builtin-trace.c
2301
trace->ev_qualifier_ids.entries[nr_used++] = id;
tools/perf/builtin-trace.c
2310
void *entries;
tools/perf/builtin-trace.c
2313
entries = realloc(trace->ev_qualifier_ids.entries,
tools/perf/builtin-trace.c
2314
nr_allocated * sizeof(trace->ev_qualifier_ids.entries[0]));
tools/perf/builtin-trace.c
2315
if (entries == NULL) {
tools/perf/builtin-trace.c
2320
trace->ev_qualifier_ids.entries = entries;
tools/perf/builtin-trace.c
2322
trace->ev_qualifier_ids.entries[nr_used++] = id;
tools/perf/builtin-trace.c
2327
qsort(trace->ev_qualifier_ids.entries, nr_used, sizeof(int), intcmp);
tools/perf/builtin-trace.c
2333
zfree(&trace->ev_qualifier_ids.entries);
tools/perf/builtin-trace.c
2345
in_ev_qualifier = bsearch(&id, trace->ev_qualifier_ids.entries,
tools/perf/builtin-trace.c
3710
trace->ev_qualifier_ids.entries);
tools/perf/builtin-trace.c
4160
trace->filter_pids.entries);
tools/perf/builtin-trace.c
4163
trace->filter_pids.entries);
tools/perf/builtin-trace.c
4850
struct syscall_entry *entries;
tools/perf/builtin-trace.c
4852
entries = syscall__sort_stats(syscall_stats);
tools/perf/builtin-trace.c
4853
if (entries == NULL)
tools/perf/builtin-trace.c
4863
struct syscall_entry *entry = &entries[i];
tools/perf/builtin-trace.c
4900
free(entries);
tools/perf/builtin-trace.c
5033
trace->filter_pids.entries = calloc(i, sizeof(pid_t));
tools/perf/builtin-trace.c
5035
if (trace->filter_pids.entries == NULL)
tools/perf/builtin-trace.c
5038
trace->filter_pids.entries[0] = getpid();
tools/perf/builtin-trace.c
5041
trace->filter_pids.entries[i] = intlist__entry(list, i - 1)->i;
tools/perf/builtin-trace.c
5272
if (!list_empty(&trace->evlist->core.entries)) {
tools/perf/builtin-trace.c
5350
zfree(&trace->ev_qualifier_ids.entries);
tools/perf/builtin-trace.c
551
if (idx < 0 || idx >= sa->nr_entries || sa->entries[idx] == NULL) {
tools/perf/builtin-trace.c
558
return scnprintf(bf, size, "%s%s", sa->entries[idx], show_suffix ? sa->prefix : "");
tools/perf/builtin-trace.c
565
if (idx < 0 || idx >= sa->nr_entries || sa->entries[idx] == NULL) {
tools/perf/builtin-trace.c
572
return scnprintf(bf, size, "%s%s", show_prefix ? sa->prefix : "", sa->entries[idx]);
tools/perf/builtin-trace.c
616
struct strarray *sa = sas->entries[i];
tools/perf/builtin-trace.c
620
if (sa->entries[idx] == NULL)
tools/perf/builtin-trace.c
622
return scnprintf(bf, size, "%s%s", show_prefix ? sa->prefix : "", sa->entries[idx]);
tools/perf/builtin-trace.c
628
printed += scnprintf(bf + printed, size - printed, " /* %s??? */", sas->entries[0]->prefix);
tools/perf/builtin-trace.c
637
if (sa->entries[i] && strncmp(sa->entries[i], bf, size) == 0 && sa->entries[i][size] == '\0') {
tools/perf/builtin-trace.c
691
struct strarray *sa = sas->entries[i];
tools/perf/pmu-events/empty-pmu-events.c
16
const struct compact_pmu_event *entries;
tools/perf/pmu-events/empty-pmu-events.c
2611
.entries = pmu_events__common_default_core,
tools/perf/pmu-events/empty-pmu-events.c
2616
.entries = pmu_events__common_software,
tools/perf/pmu-events/empty-pmu-events.c
2621
.entries = pmu_events__common_tool,
tools/perf/pmu-events/empty-pmu-events.c
2650
.entries = pmu_metrics__common_default_core,
tools/perf/pmu-events/empty-pmu-events.c
2685
.entries = pmu_events__test_soc_cpu_default_core,
tools/perf/pmu-events/empty-pmu-events.c
2690
.entries = pmu_events__test_soc_cpu_hisi_sccl_ddrc,
tools/perf/pmu-events/empty-pmu-events.c
2695
.entries = pmu_events__test_soc_cpu_hisi_sccl_l3c,
tools/perf/pmu-events/empty-pmu-events.c
2700
.entries = pmu_events__test_soc_cpu_uncore_cbox,
tools/perf/pmu-events/empty-pmu-events.c
2705
.entries = pmu_events__test_soc_cpu_uncore_imc,
tools/perf/pmu-events/empty-pmu-events.c
2710
.entries = pmu_events__test_soc_cpu_uncore_imc_free_running,
tools/perf/pmu-events/empty-pmu-events.c
2737
.entries = pmu_metrics__test_soc_cpu_default_core,
tools/perf/pmu-events/empty-pmu-events.c
2756
.entries = pmu_events__test_soc_sys_uncore_sys_ccn_pmu,
tools/perf/pmu-events/empty-pmu-events.c
2761
.entries = pmu_events__test_soc_sys_uncore_sys_cmn_pmu,
tools/perf/pmu-events/empty-pmu-events.c
2766
.entries = pmu_events__test_soc_sys_uncore_sys_ddr_pmu,
tools/perf/pmu-events/empty-pmu-events.c
2928
decompress_event(pmu->entries[i].offset, &pe);
tools/perf/pmu-events/empty-pmu-events.c
2952
decompress_event(pmu->entries[mid].offset, &pe);
tools/perf/pmu-events/empty-pmu-events.c
3054
decompress_metric(pmu->entries[i].offset, &pm);
tools/perf/pmu-events/empty-pmu-events.c
3078
decompress_metric(pmu->entries[mid].offset, &pm);
tools/perf/tests/fdarray.c
103
if (fda->entries[_idx].fd != _fd) { \
tools/perf/tests/fdarray.c
105
__LINE__, _idx, fda->entries[1].fd, _fd); \
tools/perf/tests/fdarray.c
108
if (fda->entries[_idx].events != (_revents)) { \
tools/perf/tests/fdarray.c
110
__LINE__, _idx, fda->entries[_idx].fd, _revents); \
tools/perf/tests/fdarray.c
134
if (fda->entries == NULL) {
tools/perf/tests/fdarray.c
14
fda->entries[fd].fd = fda->nr - fd;
tools/perf/tests/fdarray.c
15
fda->entries[fd].events = revents;
tools/perf/tests/fdarray.c
16
fda->entries[fd].revents = revents;
tools/perf/tests/fdarray.c
58
fda->entries[2].revents = POLLIN;
tools/perf/tests/fdarray.c
70
fda->entries[0].revents = POLLIN;
tools/perf/tests/fdarray.c
71
fda->entries[3].revents = POLLIN;
tools/perf/tests/hists_common.c
201
root = &hists->entries;
tools/perf/tests/hists_cumulate.c
142
root_out = &hists->entries;
tools/perf/tests/hists_cumulate.c
215
root = &hists->entries.rb_root;
tools/perf/tests/hists_output.c
107
root_out = &hists->entries;
tools/perf/tests/hists_output.c
178
root = &hists->entries;
tools/perf/tests/hists_output.c
278
root = &hists->entries;
tools/perf/tests/hists_output.c
332
root = &hists->entries;
tools/perf/tests/hists_output.c
410
root = &hists->entries;
tools/perf/tests/hists_output.c
513
root = &hists->entries;
tools/perf/tests/sample-parsing.c
124
MCOMP(branch_stack->entries[i]);
tools/perf/tests/sample-parsing.c
41
#define FLAG(s) s->branch_stack->entries[i].flags
tools/perf/tests/thread-map.c
72
TEST_ASSERT_VAL("wrong pid", map->entries[0].pid == (u64) getpid());
tools/perf/tests/thread-map.c
73
TEST_ASSERT_VAL("wrong comm", !strcmp(map->entries[0].comm, NAME));
tools/perf/trace/beauty/beauty.h
14
const char * const *entries;
tools/perf/trace/beauty/beauty.h
19
.entries = array, \
tools/perf/trace/beauty/beauty.h
26
.entries = array, \
tools/perf/trace/beauty/beauty.h
49
struct strarray **entries;
tools/perf/trace/beauty/beauty.h
54
.entries = array, \
tools/perf/trace/beauty/ioctl.c
109
if (nr < strarray__perf_ioctl_cmds.nr_entries && strarray__perf_ioctl_cmds.entries[nr] != NULL)
tools/perf/trace/beauty/ioctl.c
110
return scnprintf(bf, size, "PERF_%s", strarray__perf_ioctl_cmds.entries[nr]);
tools/perf/trace/beauty/ioctl.c
120
if (nr < strarray__usbdevfs_ioctl_cmds.nr_entries && strarray__usbdevfs_ioctl_cmds.entries[nr] != NULL)
tools/perf/trace/beauty/ioctl.c
121
return scnprintf(bf, size, "USBDEVFS_%s", strarray__usbdevfs_ioctl_cmds.entries[nr]);
tools/perf/trace/beauty/ioctl.c
41
if (nr < strarray__ioctl_tty_cmd.nr_entries && strarray__ioctl_tty_cmd.entries[nr] != NULL)
tools/perf/trace/beauty/ioctl.c
42
return scnprintf(bf, size, "%s", strarray__ioctl_tty_cmd.entries[nr]);
tools/perf/trace/beauty/ioctl.c
52
if (nr < strarray__drm_ioctl_cmds.nr_entries && strarray__drm_ioctl_cmds.entries[nr] != NULL)
tools/perf/trace/beauty/ioctl.c
53
return scnprintf(bf, size, "DRM_%s", strarray__drm_ioctl_cmds.entries[nr]);
tools/perf/trace/beauty/ioctl.c
63
if (nr < strarray__sndrv_pcm_ioctl_cmds.nr_entries && strarray__sndrv_pcm_ioctl_cmds.entries[nr] != NULL)
tools/perf/trace/beauty/ioctl.c
64
return scnprintf(bf, size, "SNDRV_PCM_%s", strarray__sndrv_pcm_ioctl_cmds.entries[nr]);
tools/perf/trace/beauty/ioctl.c
74
if (nr < strarray__sndrv_ctl_ioctl_cmds.nr_entries && strarray__sndrv_ctl_ioctl_cmds.entries[nr] != NULL)
tools/perf/trace/beauty/ioctl.c
75
return scnprintf(bf, size, "SNDRV_CTL_%s", strarray__sndrv_ctl_ioctl_cmds.entries[nr]);
tools/perf/trace/beauty/ioctl.c
85
if (nr < strarray__kvm_ioctl_cmds.nr_entries && strarray__kvm_ioctl_cmds.entries[nr] != NULL)
tools/perf/trace/beauty/ioctl.c
86
return scnprintf(bf, size, "KVM_%s", strarray__kvm_ioctl_cmds.entries[nr]);
tools/perf/trace/beauty/ioctl.c
98
if (nr < s->nr_entries && s->entries[nr] != NULL)
tools/perf/trace/beauty/ioctl.c
99
return scnprintf(bf, size, "VHOST_%s", s->entries[nr]);
tools/perf/trace/beauty/mmap.c
70
if (behavior < strarray__madvise_advices.nr_entries && strarray__madvise_advices.entries[behavior] != NULL)
tools/perf/trace/beauty/mmap.c
71
return scnprintf(bf, size, "MADV_%s", strarray__madvise_advices.entries[behavior]);
tools/perf/trace/beauty/pkey_alloc.c
17
const char *s = sa->entries[0];
tools/perf/trace/beauty/pkey_alloc.c
32
if (sa->entries[i] != NULL)
tools/perf/trace/beauty/pkey_alloc.c
33
printed += scnprintf(bf + printed, size - printed, "%s%s", show_prefix ? sa->prefix : "", sa->entries[i]);
tools/perf/ui/browser.c
105
struct list_head *head = browser->entries;
tools/perf/ui/browser.c
140
struct rb_root *root = browser->entries;
tools/perf/ui/browser.c
174
browser->top = rb_first(browser->entries);
tools/perf/ui/browser.c
513
struct list_head *head = browser->entries;
tools/perf/ui/browser.c
516
if (browser->top == NULL || browser->top == browser->entries)
tools/perf/ui/browser.c
624
browser->top = browser->entries;
tools/perf/ui/browser.c
630
browser->top = (char **)browser->entries + browser->nr_entries - 1 + offset;
tools/perf/ui/browser.c
635
assert((char **)browser->top < (char **)browser->entries + browser->nr_entries);
tools/perf/ui/browser.c
636
assert((char **)browser->top >= (char **)browser->entries);
tools/perf/ui/browser.c
645
browser->top = browser->entries;
tools/perf/ui/browser.c
650
assert(pos < (char **)browser->entries + browser->nr_entries);
tools/perf/ui/browser.c
85
} while (pos != browser->entries);
tools/perf/ui/browser.c
98
} while (pos != browser->entries);
tools/perf/ui/browser.h
19
void *top, *entries;
tools/perf/ui/browsers/annotate-data.c
108
parent_list = &browser->entries;
tools/perf/ui/browsers/annotate-data.c
147
list_for_each_entry(entry, &browser->entries, node)
tools/perf/ui/browsers/annotate-data.c
159
INIT_LIST_HEAD(&browser->entries);
tools/perf/ui/browsers/annotate-data.c
164
browser->b.entries = &browser->entries;
tools/perf/ui/browsers/annotate-data.c
173
list_for_each_entry_safe(pos, tmp, &browser->entries, node) {
tools/perf/ui/browsers/annotate-data.c
220
first = list_first_entry(&browser->entries, struct browser_entry, node);
tools/perf/ui/browsers/annotate-data.c
243
last = list_last_entry(&browser->entries, struct browser_entry, node);
tools/perf/ui/browsers/annotate-data.c
273
entry = list_first_entry(&browser->entries, typeof(*entry), node);
tools/perf/ui/browsers/annotate-data.c
281
entry = list_last_entry(&browser->entries, typeof(*entry), node);
tools/perf/ui/browsers/annotate-data.c
310
if (uib->top == NULL || uib->top == uib->entries)
tools/perf/ui/browsers/annotate-data.c
34
struct list_head entries;
tools/perf/ui/browsers/annotate.c
1227
browser.b.entries = ¬es->src->source;
tools/perf/ui/browsers/annotate.c
285
struct rb_root *root = &browser->entries;
tools/perf/ui/browsers/annotate.c
30
struct rb_root entries;
tools/perf/ui/browsers/annotate.c
346
browser->entries = RB_ROOT;
tools/perf/ui/browsers/annotate.c
379
browser->curr_hot = rb_last(&browser->entries);
tools/perf/ui/browsers/annotate.c
387
struct list_head *head = browser->b.entries;
tools/perf/ui/browsers/annotate.c
406
list_for_each_entry_continue(it, browser->b.entries, node) {
tools/perf/ui/browsers/annotate.c
413
list_for_each_entry_continue_reverse(it, browser->b.entries, node) {
tools/perf/ui/browsers/annotate.c
479
browser->b.entries = ¬es->src->source;
tools/perf/ui/browsers/annotate.c
943
nd = rb_last(&browser->entries);
tools/perf/ui/browsers/annotate.c
951
nd = rb_first(&browser->entries);
tools/perf/ui/browsers/header.c
86
.entries = (void *)argv,
tools/perf/ui/browsers/hists.c
1852
browser->top = rb_first_cached(&hb->hists->entries);
tools/perf/ui/browsers/hists.c
1964
nd = hists__filter_entries(rb_first(browser->entries),
tools/perf/ui/browsers/hists.c
1971
nd = rb_hierarchy_last(rb_last(browser->entries));
tools/perf/ui/browsers/hists.c
2194
struct rb_node *nd = hists__filter_entries(rb_first(browser->b.entries),
tools/perf/ui/browsers/hists.c
2941
struct rb_node *nd = rb_first_cached(&hb->hists->entries);
tools/perf/ui/browsers/hists.c
2961
struct rb_node *nd = rb_first_cached(&hb->hists->entries);
tools/perf/ui/browsers/hists.c
3562
if (pos->core.node.next == &evlist->core.entries)
tools/perf/ui/browsers/hists.c
3568
if (pos->core.node.prev == &evlist->core.entries)
tools/perf/ui/browsers/hists.c
3621
.entries = &evlist->core.entries,
tools/perf/ui/browsers/hists.c
609
nd = rb_first_cached(&browser->hists->entries);
tools/perf/ui/browsers/hists.c
68
for (nd = rb_first_cached(&hists->entries);
tools/perf/ui/browsers/hists.c
751
browser->b.entries = &browser->hists->entries;
tools/perf/ui/browsers/map.c
111
.entries = dso__symbols(map__dso(map)),
tools/perf/ui/browsers/map.c
122
for (nd = rb_first(mb.b.entries); nd; nd = rb_next(nd)) {
tools/perf/ui/gtk/hists.c
358
for (nd = rb_first_cached(&hists->entries); nd; nd = rb_next(nd)) {
tools/perf/ui/gtk/hists.c
583
perf_gtk__add_hierarchy_entries(hists, &hists->entries, store,
tools/perf/ui/hist.c
176
total += hists->mem_stat_total[mem_stat_idx].entries[i];
tools/perf/ui/hist.c
180
u64 val = he->mem_stat[mem_stat_idx].entries[i];
tools/perf/ui/hist.c
182
if (hists->mem_stat_total[mem_stat_idx].entries[i] == 0)
tools/perf/ui/hist.c
436
if (hists->mem_stat_total[mem_stat_idx].entries[i])
tools/perf/ui/hist.c
462
if (hists->mem_stat_total[mem_stat_idx].entries[i] == 0)
tools/perf/ui/stdio/hist.c
869
for (nd = rb_first_cached(&hists->entries); nd;
tools/perf/ui/tui/util.c
66
.entries = (void *)argv,
tools/perf/util/arm-spe.c
409
bstack->entries[i].from = record->from_ip;
tools/perf/util/arm-spe.c
410
bstack->entries[i].to = record->to_ip;
tools/perf/util/arm-spe.c
412
bs_flags = &bstack->entries[i].flags;
tools/perf/util/arm-spe.c
461
bs_flags = &bstack->entries[i].flags;
tools/perf/util/arm-spe.c
463
bstack->entries[i].to = record->prev_br_tgt;
tools/perf/util/arm64-frame-pointer-unwind-support.c
32
struct entries *entries = arg;
tools/perf/util/arm64-frame-pointer-unwind-support.c
34
entries->stack[entries->length++] = entry->ip;
tools/perf/util/arm64-frame-pointer-unwind-support.c
41
struct entries entries = {};
tools/perf/util/arm64-frame-pointer-unwind-support.c
64
ret = unwind__get_entries(add_entry, &entries, thread, sample, 2, true);
tools/perf/util/arm64-frame-pointer-unwind-support.c
67
if (ret || entries.length != 2)
tools/perf/util/arm64-frame-pointer-unwind-support.c
70
return callchain_param.order == ORDER_CALLER ? entries.stack[0] : entries.stack[1];
tools/perf/util/auxtrace.c
1026
auxtrace_index->entries[nr].file_offset =
tools/perf/util/auxtrace.c
1028
auxtrace_index->entries[nr].sz = bswap_64(ent.sz);
tools/perf/util/auxtrace.c
1030
auxtrace_index->entries[nr].file_offset = ent.file_offset;
tools/perf/util/auxtrace.c
1031
auxtrace_index->entries[nr].sz = ent.sz;
tools/perf/util/auxtrace.c
1086
ent = &auxtrace_index->entries[i];
tools/perf/util/auxtrace.c
967
auxtrace_index->entries[nr].file_offset = file_offset;
tools/perf/util/auxtrace.c
968
auxtrace_index->entries[nr].sz = event->header.size;
tools/perf/util/auxtrace.c
981
ent.file_offset = auxtrace_index->entries[i].file_offset;
tools/perf/util/auxtrace.c
982
ent.sz = auxtrace_index->entries[i].sz;
tools/perf/util/auxtrace.h
189
struct auxtrace_index_entry entries[PERF_AUXTRACE_INDEX_ENTRY_COUNT];
tools/perf/util/block-info.c
443
struct rb_node *next = rb_first_cached(&hists->entries);
tools/perf/util/bpf-event.c
326
entry = &bpf_metadata_event->entries[nr_entries];
tools/perf/util/bpf-event.c
367
nr_variables * sizeof(metadata->event->bpf_metadata.entries[0]);
tools/perf/util/branch.h
52
struct branch_entry entries[];
tools/perf/util/callchain.c
1782
struct rb_node *next = rb_first_cached(&hists->entries);
tools/perf/util/cgroup.c
370
if (list_empty(&evlist->core.entries)) {
tools/perf/util/cgroup.c
440
evlist__splice_list_tail(orig_list, &evlist->core.entries);
tools/perf/util/cgroup.c
494
evlist__splice_list_tail(evlist, &tmp_list->core.entries);
tools/perf/util/cgroup.c
498
if (list_empty(&evlist->core.entries)) {
tools/perf/util/cs-etm.c
1310
memcpy(&bs_dst->entries[0],
tools/perf/util/cs-etm.c
1311
&bs_src->entries[tidq->last_branch_pos],
tools/perf/util/cs-etm.c
1322
memcpy(&bs_dst->entries[nr],
tools/perf/util/cs-etm.c
1323
&bs_src->entries[0],
tools/perf/util/cs-etm.c
1410
be = &bs->entries[tidq->last_branch_pos];
tools/perf/util/cs-etm.c
1626
struct branch_entry entries;
tools/perf/util/cs-etm.c
1661
.entries = {
tools/perf/util/cs-etm.c
3162
ent = &auxtrace_index->entries[i];
tools/perf/util/dwarf-regs-arch/dwarf-regs-x86.c
145
static int get_regnum(const struct dwarf_regs_idx *entries, size_t num_entries, const char *name)
tools/perf/util/dwarf-regs-arch/dwarf-regs-x86.c
152
if (!strcmp(entries[i].name, name))
tools/perf/util/dwarf-regs-arch/dwarf-regs-x86.c
153
return entries[i].dwarf_regnum;
tools/perf/util/event.c
523
metadata->entries[i].key,
tools/perf/util/event.c
524
metadata->entries[i].value);
tools/perf/util/evlist.c
1750
list_splice(&move, &evlist->core.entries);
tools/perf/util/evlist.c
2051
struct pollfd *entries = evlist->core.pollfd.entries;
tools/perf/util/evlist.c
2056
entries[evlist->ctl_fd.pos].fd = -1;
tools/perf/util/evlist.c
2057
entries[evlist->ctl_fd.pos].events = 0;
tools/perf/util/evlist.c
2058
entries[evlist->ctl_fd.pos].revents = 0;
tools/perf/util/evlist.c
2239
struct pollfd *entries = evlist->core.pollfd.entries;
tools/perf/util/evlist.c
2241
if (!evlist__ctlfd_initialized(evlist) || !entries[ctlfd_pos].revents)
tools/perf/util/evlist.c
2244
if (entries[ctlfd_pos].revents & POLLIN) {
tools/perf/util/evlist.c
2273
if (entries[ctlfd_pos].revents & (POLLHUP | POLLERR))
tools/perf/util/evlist.c
2276
entries[ctlfd_pos].revents = 0;
tools/perf/util/evlist.c
2461
struct pollfd *entries;
tools/perf/util/evlist.c
2467
entries = eet->evlist->core.pollfd.entries;
tools/perf/util/evlist.c
2468
revents = entries[eet->pollfd_pos].revents;
tools/perf/util/evlist.c
2469
entries[eet->pollfd_pos].revents = 0;
tools/perf/util/evlist.h
238
return list_empty(&evlist->core.entries);
tools/perf/util/evlist.h
280
__evlist__for_each_entry(&(evlist)->core.entries, evsel)
tools/perf/util/evlist.h
296
__evlist__for_each_entry_continue(&(evlist)->core.entries, evsel)
tools/perf/util/evlist.h
312
__evlist__for_each_entry_from(&(evlist)->core.entries, evsel)
tools/perf/util/evlist.h
328
__evlist__for_each_entry_reverse(&(evlist)->core.entries, evsel)
tools/perf/util/evlist.h
346
__evlist__for_each_entry_safe(&(evlist)->core.entries, tmp, evsel)
tools/perf/util/evsel.c
3436
e = &data->branch_stack->entries[0];
tools/perf/util/evsel.h
499
for_each_group_member_head(_evsel, _leader, &(_leader)->evlist->core.entries)
tools/perf/util/evsel.h
509
for_each_group_evsel_head(_evsel, _leader, &(_leader)->evlist->core.entries)
tools/perf/util/header.c
316
return read_tracing_data(ff->fd, &evlist->core.entries);
tools/perf/util/hist.c
1905
node = rb_first_cached(&hists->entries);
tools/perf/util/hist.c
2015
static void __hists__insert_output_entry(struct rb_root_cached *entries,
tools/perf/util/hist.c
2020
struct rb_node **p = &entries->rb_root.rb_node;
tools/perf/util/hist.c
2052
rb_insert_color_cached(&he->rb_node, entries, leftmost);
tools/perf/util/hist.c
2083
&hists->entries,
tools/perf/util/hist.c
2096
hists->entries = RB_ROOT_CACHED;
tools/perf/util/hist.c
2105
__hists__insert_output_entry(&hists->entries, n, min_callchain_hits, use_callchain);
tools/perf/util/hist.c
2340
for (nd = rb_first_cached(&hists->entries); nd; nd = rb_next(nd)) {
tools/perf/util/hist.c
2401
nd = rb_first_cached(&hists->entries);
tools/perf/util/hist.c
2445
nd = rb_first_cached(&hists->entries);
tools/perf/util/hist.c
2450
rb_erase_cached(&h->rb_node, &hists->entries);
tools/perf/util/hist.c
2455
hists->entries = new_root;
tools/perf/util/hist.c
265
struct rb_node *next = rb_first_cached(&hists->entries);
tools/perf/util/hist.c
2829
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/util/hist.c
2832
if (bs && bs->nr && entries[0].flags.cycles) {
tools/perf/util/hist.c
3017
hists->entries = RB_ROOT_CACHED;
tools/perf/util/hist.c
356
he->mem_stat[i].entries[idx] += period;
tools/perf/util/hist.c
357
hists->mem_stat_total[i].entries[idx] += period;
tools/perf/util/hist.c
370
dst->mem_stat[i].entries[k] += src->mem_stat[i].entries[k];
tools/perf/util/hist.c
386
dst->mem_stat[i].entries[k] = src->mem_stat[i].entries[k];
tools/perf/util/hist.c
398
he->mem_stat[i].entries[k] = (he->mem_stat[i].entries[k] * 7) / 8;
tools/perf/util/hist.c
458
root_out = &hists->entries;
tools/perf/util/hist.c
473
struct rb_node *next = rb_first_cached(&hists->entries);
tools/perf/util/hist.c
489
struct rb_node *next = rb_first_cached(&hists->entries);
tools/perf/util/hist.c
502
struct rb_node *next = rb_first_cached(&hists->entries);
tools/perf/util/hist.h
109
u64 entries[MEM_STAT_LEN];
tools/perf/util/hist.h
115
struct rb_root_cached entries;
tools/perf/util/intel-pt.c
1773
struct branch_entry entries;
tools/perf/util/intel-pt.c
1797
.entries = {
tools/perf/util/intel-pt.c
2260
to = &br_stack->entries[0].from;
tools/perf/util/intlist.h
48
struct rb_node *rn = rb_first_cached(&ilist->rblist.entries);
tools/perf/util/jitdump.c
322
jr->info.entries[n].addr = bswap_64(jr->info.entries[n].addr);
tools/perf/util/jitdump.c
323
jr->info.entries[n].lineno = bswap_32(jr->info.entries[n].lineno);
tools/perf/util/jitdump.c
324
jr->info.entries[n].discrim = bswap_32(jr->info.entries[n].discrim);
tools/perf/util/jitdump.c
664
memcpy(data, &jr->info.entries, sz);
tools/perf/util/jitdump.h
104
struct debug_entry entries[];
tools/perf/util/machine.c
2246
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/util/machine.c
2254
ip__resolve_ams(al->thread, &bi[i].to, entries[i].to);
tools/perf/util/machine.c
2255
ip__resolve_ams(al->thread, &bi[i].from, entries[i].from);
tools/perf/util/machine.c
2256
bi[i].flags = entries[i].flags;
tools/perf/util/machine.c
2403
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/util/machine.c
2433
ip = entries[0].to;
tools/perf/util/machine.c
2434
flags = &entries[0].flags;
tools/perf/util/machine.c
2435
*branch_from = entries[0].from;
tools/perf/util/machine.c
2459
ip = entries[i].from;
tools/perf/util/machine.c
2460
flags = &entries[i].flags;
tools/perf/util/machine.c
2474
ip = entries[i].from;
tools/perf/util/machine.c
2475
flags = &entries[i].flags;
tools/perf/util/machine.c
2493
ip = entries[0].to;
tools/perf/util/machine.c
2494
flags = &entries[0].flags;
tools/perf/util/machine.c
2495
*branch_from = entries[0].from;
tools/perf/util/machine.c
2789
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/util/machine.c
2842
be[i] = entries[i];
tools/perf/util/machine.c
2861
be[i] = entries[branch->nr - i - 1];
tools/perf/util/mem2node.c
101
entries = tmp_entries;
tools/perf/util/mem2node.c
105
entries[i].node, entries[i].start, entries[i].end);
tools/perf/util/mem2node.c
107
phys_entry__insert(&entries[i], &map->root);
tools/perf/util/mem2node.c
110
map->entries = entries;
tools/perf/util/mem2node.c
116
zfree(&map->entries);
tools/perf/util/mem2node.c
50
struct phys_entry *entries, *tmp_entries;
tools/perf/util/mem2node.c
62
entries = zalloc(sizeof(*entries) * max);
tools/perf/util/mem2node.c
63
if (!entries)
tools/perf/util/mem2node.c
84
struct phys_entry *prev = &entries[j - 1];
tools/perf/util/mem2node.c
93
phys_entry__init(&entries[j++], start, bsize, n->node);
tools/perf/util/mem2node.c
98
tmp_entries = realloc(entries, sizeof(*entries) * j);
tools/perf/util/mem2node.h
12
struct phys_entry *entries;
tools/perf/util/metricgroup.c
1539
evlist__splice_list_tail(perf_evlist, &combined_evlist->core.entries);
tools/perf/util/metricgroup.c
1545
evlist__splice_list_tail(perf_evlist, &m->evlist->core.entries);
tools/perf/util/parse-events.c
2554
if (last->core.node.prev == &evlist->core.entries)
tools/perf/util/print-events.c
300
for (node = rb_first_cached(&groups.entries); node; node = next) {
tools/perf/util/print-events.c
76
list_for_each_entry(ent, &pcache->entries, node) {
tools/perf/util/probe-file.c
1031
list_for_each_entry_safe(entry, tmp, &pcache->entries, node) {
tools/perf/util/probe-file.c
1073
if (!list_empty(&pcache->entries)) {
tools/perf/util/probe-file.c
533
list_add_tail(&entry->node, &pcache->entries);
tools/perf/util/probe-file.c
556
INIT_LIST_HEAD(&pcache->entries);
tools/perf/util/probe-file.c
566
list_for_each_entry_safe(entry, n, &pcache->entries, node) {
tools/perf/util/probe-file.c
712
list_add_tail(&entry->node, &pcache->entries);
tools/perf/util/probe-file.c
923
list_add_tail(&entry->node, &pcache->entries);
tools/perf/util/probe-file.h
21
struct list_head entries;
tools/perf/util/probe-file.h
36
list_for_each_entry(entry, &pcache->entries, node)
tools/perf/util/pstack.c
18
void *entries[];
tools/perf/util/pstack.c
45
if (pstack->entries[i] == key) {
tools/perf/util/pstack.c
47
memmove(pstack->entries + i,
tools/perf/util/pstack.c
48
pstack->entries + i + 1,
tools/perf/util/pstack.c
63
pstack->entries[pstack->top++] = key;
tools/perf/util/pstack.c
70
return pstack->entries[pstack->top - 1];
tools/perf/util/python.c
1315
for (node = rb_first_cached(&pevlist->evlist.metric_events.entries); node;
tools/perf/util/python.c
1421
for (node = rb_first_cached(&pevlist->evlist.metric_events.entries);
tools/perf/util/python.c
1528
file = PyFile_FromFd(evlist->core.pollfd.entries[i].fd, "perf", "r", -1,
tools/perf/util/python.c
2041
for (node = rb_first_cached(&pevlist->evlist.metric_events.entries); node;
tools/perf/util/rblist.c
103
rblist->entries = RB_ROOT_CACHED;
tools/perf/util/rblist.c
112
struct rb_node *pos, *next = rb_first_cached(&rblist->entries);
tools/perf/util/rblist.c
133
for (node = rb_first_cached(&rblist->entries); node;
tools/perf/util/rblist.c
15
struct rb_node **p = &rblist->entries.rb_root.rb_node;
tools/perf/util/rblist.c
40
rb_insert_color_cached(new_node, &rblist->entries, leftmost);
tools/perf/util/rblist.c
48
rb_erase_cached(rb_node, &rblist->entries);
tools/perf/util/rblist.c
57
struct rb_node **p = &rblist->entries.rb_root.rb_node;
tools/perf/util/rblist.c
82
&rblist->entries, leftmost);
tools/perf/util/rblist.h
23
struct rb_root_cached entries;
tools/perf/util/scripting-engines/trace-event-python.c
488
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/util/scripting-engines/trace-event-python.c
509
PyLong_FromUnsignedLongLong(entries[i].from));
tools/perf/util/scripting-engines/trace-event-python.c
511
PyLong_FromUnsignedLongLong(entries[i].to));
tools/perf/util/scripting-engines/trace-event-python.c
513
PyBool_FromLong(entries[i].flags.mispred));
tools/perf/util/scripting-engines/trace-event-python.c
515
PyBool_FromLong(entries[i].flags.predicted));
tools/perf/util/scripting-engines/trace-event-python.c
517
PyBool_FromLong(entries[i].flags.in_tx));
tools/perf/util/scripting-engines/trace-event-python.c
519
PyBool_FromLong(entries[i].flags.abort));
tools/perf/util/scripting-engines/trace-event-python.c
521
PyLong_FromUnsignedLongLong(entries[i].flags.cycles));
tools/perf/util/scripting-engines/trace-event-python.c
525
entries[i].from, &al);
tools/perf/util/scripting-engines/trace-event-python.c
531
entries[i].to, &al);
tools/perf/util/scripting-engines/trace-event-python.c
576
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/util/scripting-engines/trace-event-python.c
598
entries[i].from, &al);
tools/perf/util/scripting-engines/trace-event-python.c
604
entries[i].to, &al);
tools/perf/util/scripting-engines/trace-event-python.c
609
get_br_mspred(&entries[i].flags, bf, sizeof(bf));
tools/perf/util/scripting-engines/trace-event-python.c
613
if (entries[i].flags.in_tx) {
tools/perf/util/scripting-engines/trace-event-python.c
621
if (entries[i].flags.abort) {
tools/perf/util/session.c
2892
struct id_index_entry *e = &ie->entries[i];
tools/perf/util/session.c
616
event->thread_map.entries[i].pid = bswap_64(event->thread_map.entries[i].pid);
tools/perf/util/session.c
817
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/util/session.c
854
(int)(kernel_callchain_nr), entries[0].to);
tools/perf/util/session.c
857
(int)(i + kernel_callchain_nr + 1), entries[i].from);
tools/perf/util/session.c
906
struct branch_entry *entries = perf_sample__branch_entries(sample);
tools/perf/util/session.c
927
struct branch_entry *e = &entries[i];
tools/perf/util/stat.c
544
alias = list_prepare_entry(evsel, &(evlist->core.entries), core.node);
tools/perf/util/stat.c
545
list_for_each_entry_continue(alias, &evlist->core.entries, core.node) {
tools/perf/util/stream.c
114
struct rb_node *next = rb_first_cached(&hists->entries);
tools/perf/util/strlist.h
58
struct rb_node *rn = rb_first_cached(&slist->rblist.entries);
tools/perf/util/synthetic-events.c
1221
size += threads->nr * sizeof(event->thread_map.entries[0]);
tools/perf/util/synthetic-events.c
1232
struct perf_record_thread_map_entry *entry = &event->thread_map.entries[i];
tools/perf/util/synthetic-events.c
1910
e = &ev->id_index.entries[i];
tools/perf/util/synthetic-events.c
2222
tdata = tracing_data_get(&evlist->core.entries, fd, true);
tools/perf/util/synthetic-events.c
2484
if (have_tracepoints(&evlist->core.entries)) {
tools/perf/util/thread-stack.c
393
be = &bs->entries[ts->br_stack_pos];
tools/perf/util/thread-stack.c
637
be = &dst->entries[0];
tools/perf/util/thread-stack.c
639
memcpy(be, &src->entries[ts->br_stack_pos], bsz * nr);
tools/perf/util/thread-stack.c
643
be = &dst->entries[nr];
tools/perf/util/thread-stack.c
645
memcpy(be, &src->entries[0], bsz * ts->br_stack_pos);
tools/perf/util/thread-stack.c
697
spos = &src->entries[ts->br_stack_pos];
tools/perf/util/thread-stack.c
698
ssz = &src->entries[ts->br_stack_sz];
tools/perf/util/thread-stack.c
700
d = &dst->entries[0];
tools/perf/util/thread-stack.c
716
for (s = &src->entries[0]; s < spos && nr < sz; s++) {
tools/perf/util/thread-stack.c
740
for (s = &src->entries[0]; s < spos && nr < sz; s++) {
tools/perf/util/thread_map.c
347
perf_thread_map__set_pid(threads, i, (pid_t) event->entries[i].pid);
tools/perf/util/thread_map.c
348
threads->map[i].comm = strndup(event->entries[i].comm, 16);
tools/perf/util/unwind-libdw.c
154
struct unwind_entry *e = &ui->entries[ui->idx++];
tools/perf/util/unwind-libdw.c
361
ui = zalloc(sizeof(*ui) + sizeof(ui->entries[0]) * max_stack);
tools/perf/util/unwind-libdw.c
422
err = ui->entries[j].ip ? ui->cb(&ui->entries[j], ui->arg) : 0;
tools/perf/util/unwind-libdw.c
430
map_symbol__exit(&ui->entries[i].ms);
tools/perf/util/unwind-libdw.h
26
struct unwind_entry entries[];
tools/power/x86/turbostat/turbostat.c
2215
} entries[IDX_COUNT];
tools/power/x86/turbostat/turbostat.c
6714
msr_last = per_cpu_msr_sum[cpu].entries[idx].last;
tools/power/x86/turbostat/turbostat.c
6716
*msr = msr_last + per_cpu_msr_sum[cpu].entries[idx].sum;
tools/power/x86/turbostat/turbostat.c
6749
msr_last = per_cpu_msr_sum[cpu].entries[i].last;
tools/power/x86/turbostat/turbostat.c
6750
per_cpu_msr_sum[cpu].entries[i].last = msr_cur & 0xffffffff;
tools/power/x86/turbostat/turbostat.c
6753
per_cpu_msr_sum[cpu].entries[i].sum += msr_last;
tools/sched_ext/scx_userland.c
217
LIST_INSERT_HEAD(&vruntime_head, curr, entries);
tools/sched_ext/scx_userland.c
221
LIST_FOREACH(enqueued, &vruntime_head, entries) {
tools/sched_ext/scx_userland.c
223
LIST_INSERT_BEFORE(enqueued, curr, entries);
tools/sched_ext/scx_userland.c
229
LIST_INSERT_AFTER(prev, curr, entries);
tools/sched_ext/scx_userland.c
271
LIST_REMOVE(task, entries);
tools/sched_ext/scx_userland.c
279
LIST_INSERT_HEAD(&vruntime_head, task, entries);
tools/sched_ext/scx_userland.c
69
LIST_ENTRY(enqueued_task) entries;
tools/testing/cxl/test/mem.c
135
.entries = cpu_to_le16(1),
tools/testing/selftests/bpf/prog_tests/fd_htab_lookup.c
100
static int setup_htab(int fd, unsigned int entries)
tools/testing/selftests/bpf/prog_tests/fd_htab_lookup.c
104
for (i = 0; i < entries; i++) {
tools/testing/selftests/bpf/prog_tests/fd_htab_lookup.c
11
unsigned int entries;
tools/testing/selftests/bpf/prog_tests/fd_htab_lookup.c
155
ctx.entries = 8;
tools/testing/selftests/bpf/prog_tests/fd_htab_lookup.c
157
err = setup_htab(ctx.fd, ctx.entries);
tools/testing/selftests/bpf/prog_tests/fd_htab_lookup.c
25
for (j = 0; j < ctx->entries; j++) {
tools/testing/selftests/bpf/prog_tests/fd_htab_lookup.c
70
for (j = 0; j < ctx->entries; j++) {
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
119
static void test_percpu_map_op_cpu_flag(struct bpf_map *map, void *keys, size_t key_sz, u32 entries,
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
132
values = calloc(entries, value_sz_cpus);
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
136
values_percpu = calloc(entries, roundup(value_sz, 8) * nr_cpus);
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
142
value_sz_total = value_sz_cpus * entries;
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
191
for (i = 0; i < entries; i++) {
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
199
for (i = 0; i < entries; i++) {
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
224
count = entries;
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
234
count = entries;
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
239
if (!ASSERT_EQ(count, entries, "bpf_map_update_batch count"))
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
243
for (i = 0; i < entries; i++)
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
246
count = entries;
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
251
if (!ASSERT_EQ(count, entries, "bpf_map_update_batch count"))
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
256
count = entries;
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
257
memset(values, 0, entries * value_sz);
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
261
if (!ASSERT_EQ(count, entries, "bpf_map_lookup_batch count"))
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
264
for (i = 0; i < entries; i++)
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
271
count = entries;
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
273
memset(values_percpu, 0, roundup(value_sz, 8) * nr_cpus * entries);
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
278
if (!ASSERT_EQ(count, entries, "bpf_map_lookup_batch count"))
tools/testing/selftests/bpf/prog_tests/percpu_alloc.c
281
for (i = 0; i < entries; i++) {
tools/testing/selftests/bpf/progs/bpf_iter_task_stack.c
22
retlen = bpf_get_task_stack(task, entries,
tools/testing/selftests/bpf/progs/bpf_iter_task_stack.c
31
BPF_SEQ_PRINTF(seq, "[<0>] %pB\n", (void *)entries[i]);
tools/testing/selftests/bpf/progs/bpf_iter_task_stack.c
51
res = bpf_get_task_stack(task, entries,
tools/testing/selftests/bpf/progs/bpf_iter_task_stack.c
66
bpf_seq_write(seq, &entries, buf_sz);
tools/testing/selftests/bpf/progs/bpf_iter_task_stack.c
9
unsigned long entries[MAX_STACK_TRACE_DEPTH] = {};
tools/testing/selftests/bpf/progs/get_branch_snapshot.c
16
struct perf_branch_entry entries[ENTRY_CNT] = {};
tools/testing/selftests/bpf/progs/get_branch_snapshot.c
28
total_entries = bpf_get_branch_snapshot(entries, sizeof(entries), 0);
tools/testing/selftests/bpf/progs/get_branch_snapshot.c
34
if (gbs_in_range(entries[i].from) && gbs_in_range(entries[i].to))
tools/testing/selftests/bpf/progs/strobemeta.h
112
struct strobe_map_entry entries[STROBE_MAX_MAP_ENTRIES];
tools/testing/selftests/bpf/progs/strobemeta.h
434
map.entries[i].key);
tools/testing/selftests/bpf/progs/strobemeta.h
441
map.entries[i].val);
tools/testing/selftests/bpf/progs/test_perf_branches.c
25
__u64 entries[4 * 3] = {0};
tools/testing/selftests/bpf/progs/test_perf_branches.c
31
written_stack = bpf_read_branch_records(ctx, entries, sizeof(entries), 0);
tools/testing/selftests/bpf/xsk.h
108
__u32 entries = r->cached_prod - r->cached_cons;
tools/testing/selftests/bpf/xsk.h
110
if (entries == 0) {
tools/testing/selftests/bpf/xsk.h
112
entries = r->cached_prod - r->cached_cons;
tools/testing/selftests/bpf/xsk.h
115
return (entries > nb) ? nb : entries;
tools/testing/selftests/bpf/xsk.h
144
__u32 entries = xsk_cons_nb_avail(cons, nb);
tools/testing/selftests/bpf/xsk.h
146
if (entries > 0) {
tools/testing/selftests/bpf/xsk.h
148
cons->cached_cons += entries;
tools/testing/selftests/bpf/xsk.h
151
return entries;
tools/testing/selftests/kvm/include/x86/processor.h
904
r, r < 0 || r >= msrs->nmsrs ? -1 : msrs->entries[r].index);
tools/testing/selftests/kvm/include/x86/processor.h
912
r, r < 0 || r >= msrs->nmsrs ? -1 : msrs->entries[r].index);
tools/testing/selftests/kvm/lib/kvm_util.c
1924
routing->entries[i].gsi = gsi;
tools/testing/selftests/kvm/lib/kvm_util.c
1925
routing->entries[i].type = KVM_IRQ_ROUTING_IRQCHIP;
tools/testing/selftests/kvm/lib/kvm_util.c
1926
routing->entries[i].flags = 0;
tools/testing/selftests/kvm/lib/kvm_util.c
1927
routing->entries[i].u.irqchip.irqchip = 0;
tools/testing/selftests/kvm/lib/kvm_util.c
1928
routing->entries[i].u.irqchip.pin = pin;
tools/testing/selftests/kvm/lib/x86/hyperv.c
46
if (cpuid_sys->entries[i].function >= 0x40000000 &&
tools/testing/selftests/kvm/lib/x86/hyperv.c
47
cpuid_sys->entries[i].function < 0x40000100)
tools/testing/selftests/kvm/lib/x86/hyperv.c
49
cpuid_full->entries[nent] = cpuid_sys->entries[i];
tools/testing/selftests/kvm/lib/x86/hyperv.c
53
memcpy(&cpuid_full->entries[nent], cpuid_hv->entries,
tools/testing/selftests/kvm/lib/x86/processor.c
1245
state = malloc(sizeof(*state) + msr_list->nmsrs * sizeof(state->msrs.entries[0]));
tools/testing/selftests/kvm/lib/x86/processor.c
1271
state->msrs.entries[i].index = msr_list->indices[i];
tools/testing/selftests/kvm/lib/x86/processor.c
1331
if (cpuid->entries[i].function == function &&
tools/testing/selftests/kvm/lib/x86/processor.c
1332
cpuid->entries[i].index == index)
tools/testing/selftests/kvm/lib/x86/processor.c
1333
return &cpuid->entries[i];
tools/testing/selftests/kvm/lib/x86/processor.c
914
entry = &cpuid->entries[i];
tools/testing/selftests/kvm/s390/irq_routing.c
43
routing->entries[0] = ue;
tools/testing/selftests/kvm/s390/irq_routing.c
44
routing->entries[0].u.adapter.summary_addr = (uintptr_t)mem;
tools/testing/selftests/kvm/s390/irq_routing.c
45
routing->entries[0].u.adapter.ind_addr = (uintptr_t)mem;
tools/testing/selftests/kvm/s390/irq_routing.c
47
routing->entries[0].u.adapter.summary_offset = 4096 * 8;
tools/testing/selftests/kvm/s390/irq_routing.c
51
routing->entries[0].u.adapter.summary_offset -= 4;
tools/testing/selftests/kvm/s390/irq_routing.c
55
routing->entries[0].u.adapter.ind_offset = 4096 * 8;
tools/testing/selftests/kvm/s390/irq_routing.c
59
routing->entries[0].u.adapter.ind_offset -= 4;
tools/testing/selftests/kvm/s390/ucontrol_test.c
791
routing->entries[0] = ue;
tools/testing/selftests/kvm/x86/cpuid_test.c
145
int size = sizeof(*cpuid) + cpuid->nent * sizeof(cpuid->entries[0]);
tools/testing/selftests/kvm/x86/cpuid_test.c
33
__cpuid(guest_cpuid->entries[i].function,
tools/testing/selftests/kvm/x86/cpuid_test.c
34
guest_cpuid->entries[i].index,
tools/testing/selftests/kvm/x86/cpuid_test.c
37
GUEST_ASSERT_EQ(eax, guest_cpuid->entries[i].eax);
tools/testing/selftests/kvm/x86/cpuid_test.c
38
GUEST_ASSERT_EQ(ebx, guest_cpuid->entries[i].ebx);
tools/testing/selftests/kvm/x86/cpuid_test.c
39
GUEST_ASSERT_EQ(ecx, guest_cpuid->entries[i].ecx);
tools/testing/selftests/kvm/x86/cpuid_test.c
40
GUEST_ASSERT_EQ(edx, guest_cpuid->entries[i].edx);
tools/testing/selftests/kvm/x86/cpuid_test.c
95
e1 = &cpuid1->entries[i];
tools/testing/selftests/kvm/x86/cpuid_test.c
96
e2 = &cpuid2->entries[i];
tools/testing/selftests/kvm/x86/hyperv_cpuid.c
44
const struct kvm_cpuid_entry2 *entry = &hv_cpuid_entries->entries[i];
tools/testing/selftests/kvm/x86/xen_shinfo_test.c
166
struct kvm_irq_routing_entry entries[2];
tools/testing/selftests/kvm/x86/xen_shinfo_test.c
555
irq_routes.entries[0].gsi = 32;
tools/testing/selftests/kvm/x86/xen_shinfo_test.c
556
irq_routes.entries[0].type = KVM_IRQ_ROUTING_XEN_EVTCHN;
tools/testing/selftests/kvm/x86/xen_shinfo_test.c
557
irq_routes.entries[0].u.xen_evtchn.port = EVTCHN_TEST1;
tools/testing/selftests/kvm/x86/xen_shinfo_test.c
558
irq_routes.entries[0].u.xen_evtchn.vcpu = vcpu->id;
tools/testing/selftests/kvm/x86/xen_shinfo_test.c
559
irq_routes.entries[0].u.xen_evtchn.priority = KVM_IRQ_ROUTING_XEN_EVTCHN_PRIO_2LEVEL;
tools/testing/selftests/kvm/x86/xen_shinfo_test.c
561
irq_routes.entries[1].gsi = 33;
tools/testing/selftests/kvm/x86/xen_shinfo_test.c
562
irq_routes.entries[1].type = KVM_IRQ_ROUTING_XEN_EVTCHN;
tools/testing/selftests/kvm/x86/xen_shinfo_test.c
563
irq_routes.entries[1].u.xen_evtchn.port = EVTCHN_TEST2;
tools/testing/selftests/kvm/x86/xen_shinfo_test.c
564
irq_routes.entries[1].u.xen_evtchn.vcpu = vcpu->id;
tools/testing/selftests/kvm/x86/xen_shinfo_test.c
565
irq_routes.entries[1].u.xen_evtchn.priority = KVM_IRQ_ROUTING_XEN_EVTCHN_PRIO_2LEVEL;
tools/testing/selftests/ring-buffer/map_test.c
188
ASSERT_EQ(desc->meta->entries, 0);
tools/testing/selftests/ring-buffer/map_test.c
204
ASSERT_EQ(desc->meta->entries, 16);
tools/testing/selftests/x86/lam.c
473
int sys_uring_setup(unsigned int entries, struct io_uring_params *p)
tools/testing/selftests/x86/lam.c
475
return (int)syscall(__NR_io_uring_setup, entries, p);
tools/thermal/tmon/tui.c
111
int entries = ptdata.nr_cooling_dev + 1;
tools/thermal/tmon/tui.c
112
int rows = max(DIAG_DEV_ROWS, (entries + 1) / 2);
tools/thermal/tmon/tui.c
113
return min(rows, entries);
tools/tracing/latency/latency-collector.c
222
struct entry entries[QUEUE_SIZE];
tools/tracing/latency/latency-collector.c
826
q->entries[q->next_prod_idx] = *e;
tools/tracing/latency/latency-collector.c
832
struct entry e = q->entries[q->next_cons_idx];
tools/tracing/rtla/src/common.h
67
int entries;
tools/tracing/rtla/src/osnoise_hist.c
105
int entries = data->entries;
tools/tracing/rtla/src/osnoise_hist.c
122
if (bucket < entries)
tools/tracing/rtla/src/osnoise_hist.c
125
hist[entries] += count;
tools/tracing/rtla/src/osnoise_hist.c
30
int entries;
tools/tracing/rtla/src/osnoise_hist.c
347
for (bucket = 0; bucket < data->entries; bucket++) {
tools/tracing/rtla/src/osnoise_hist.c
396
data->hist[cpu].samples[data->entries]);
tools/tracing/rtla/src/osnoise_hist.c
479
params->common.hist.entries = 256;
tools/tracing/rtla/src/osnoise_hist.c
536
params->common.hist.entries = get_llong_from_str(optarg);
tools/tracing/rtla/src/osnoise_hist.c
537
if (params->common.hist.entries < 10 ||
tools/tracing/rtla/src/osnoise_hist.c
538
params->common.hist.entries > 9999999)
tools/tracing/rtla/src/osnoise_hist.c
65
*osnoise_alloc_histogram(int nr_cpus, int entries, int bucket_size)
tools/tracing/rtla/src/osnoise_hist.c
658
tool->data = osnoise_alloc_histogram(nr_cpus, params->hist.entries,
tools/tracing/rtla/src/osnoise_hist.c
74
data->entries = entries;
tools/tracing/rtla/src/osnoise_hist.c
83
data->hist[cpu].samples = calloc(1, sizeof(*data->hist->samples) * (entries + 1));
tools/tracing/rtla/src/timerlat.bpf.c
109
if (bucket >= entries)
tools/tracing/rtla/src/timerlat.bpf.c
57
const volatile int entries = 256;
tools/tracing/rtla/src/timerlat.bpf.c
88
if (entries == 0)
tools/tracing/rtla/src/timerlat.bpf.c
92
if (bucket >= entries)
tools/tracing/rtla/src/timerlat_bpf.c
29
bpf->rodata->entries = params->common.hist.entries;
tools/tracing/rtla/src/timerlat_bpf.c
34
if (params->common.hist.entries != 0) {
tools/tracing/rtla/src/timerlat_bpf.c
39
bpf_map__set_max_entries(bpf->maps.hist_irq, params->common.hist.entries);
tools/tracing/rtla/src/timerlat_bpf.c
40
bpf_map__set_max_entries(bpf->maps.hist_thread, params->common.hist.entries);
tools/tracing/rtla/src/timerlat_bpf.c
41
bpf_map__set_max_entries(bpf->maps.hist_user, params->common.hist.entries);
tools/tracing/rtla/src/timerlat_hist.c
1042
tool->data = timerlat_alloc_histogram(nr_cpus, params->hist.entries,
tools/tracing/rtla/src/timerlat_hist.c
106
data->hist[cpu].irq = calloc(1, sizeof(*data->hist->irq) * (entries + 1));
tools/tracing/rtla/src/timerlat_hist.c
110
data->hist[cpu].thread = calloc(1, sizeof(*data->hist->thread) * (entries + 1));
tools/tracing/rtla/src/timerlat_hist.c
114
data->hist[cpu].user = calloc(1, sizeof(*data->hist->user) * (entries + 1));
tools/tracing/rtla/src/timerlat_hist.c
143
int entries = data->entries;
tools/tracing/rtla/src/timerlat_hist.c
172
if (bucket < entries)
tools/tracing/rtla/src/timerlat_hist.c
175
hist[entries]++;
tools/tracing/rtla/src/timerlat_hist.c
212
for (i = 0; i < data->entries; i++) {
tools/tracing/rtla/src/timerlat_hist.c
275
data->hist[i].irq[data->entries] = value_irq[i];
tools/tracing/rtla/src/timerlat_hist.c
276
data->hist[i].thread[data->entries] = value_thread[i];
tools/tracing/rtla/src/timerlat_hist.c
277
data->hist[i].user[data->entries] = value_user[i];
tools/tracing/rtla/src/timerlat_hist.c
45
int entries;
tools/tracing/rtla/src/timerlat_hist.c
623
for (bucket = 0; bucket < data->entries; bucket++) {
tools/tracing/rtla/src/timerlat_hist.c
675
data->hist[cpu].irq[data->entries]);
tools/tracing/rtla/src/timerlat_hist.c
679
data->hist[cpu].thread[data->entries]);
tools/tracing/rtla/src/timerlat_hist.c
683
data->hist[cpu].user[data->entries]);
tools/tracing/rtla/src/timerlat_hist.c
785
params->common.hist.entries = 256;
tools/tracing/rtla/src/timerlat_hist.c
858
params->common.hist.entries = get_llong_from_str(optarg);
tools/tracing/rtla/src/timerlat_hist.c
859
if (params->common.hist.entries < 10 ||
tools/tracing/rtla/src/timerlat_hist.c
86
*timerlat_alloc_histogram(int nr_cpus, int entries, int bucket_size)
tools/tracing/rtla/src/timerlat_hist.c
860
params->common.hist.entries > 9999999)
tools/tracing/rtla/src/timerlat_hist.c
95
data->entries = entries;
virt/kvm/eventfd.c
275
struct kvm_kernel_irq_routing_entry entries[KVM_NR_IRQCHIPS];
virt/kvm/eventfd.c
280
n_entries = kvm_irq_map_gsi(kvm, entries, irqfd->gsi);
virt/kvm/eventfd.c
284
e = entries;
virt/kvm/irqchip.c
22
struct kvm_kernel_irq_routing_entry *entries, int gsi)
virt/kvm/irqchip.c
32
entries[n] = *e;
virt/kvm/kvm_main.c
5308
struct kvm_irq_routing_entry *entries = NULL;
virt/kvm/kvm_main.c
5322
entries = vmemdup_array_user(urouting->entries,
virt/kvm/kvm_main.c
5323
routing.nr, sizeof(*entries));
virt/kvm/kvm_main.c
5324
if (IS_ERR(entries)) {
virt/kvm/kvm_main.c
5325
r = PTR_ERR(entries);
virt/kvm/kvm_main.c
5329
r = kvm_set_irq_routing(kvm, entries, routing.nr,
virt/kvm/kvm_main.c
5331
kvfree(entries);