arch/alpha/include/asm/cacheflush.h
46
mm->context[smp_processor_id()] = 0;
arch/alpha/include/asm/mmu_context.h
147
mmc = next_mm->context[cpu];
arch/alpha/include/asm/mmu_context.h
150
next_mm->context[cpu] = mmc;
arch/alpha/include/asm/mmu_context.h
176
if (!mm->context[cpu]) \
arch/alpha/include/asm/mmu_context.h
200
mm->context[i] = 0;
arch/alpha/include/asm/tlbflush.h
58
unsigned long *mmc = &mm->context[smp_processor_id()];
arch/alpha/kernel/smp.c
646
if (mm->context[cpu])
arch/alpha/kernel/smp.c
647
mm->context[cpu] = 0;
arch/alpha/kernel/smp.c
693
if (mm->context[cpu])
arch/alpha/kernel/smp.c
694
mm->context[cpu] = 0;
arch/alpha/kernel/smp.c
747
if (mm->context[cpu])
arch/alpha/kernel/smp.c
748
mm->context[cpu] = 0;
arch/alpha/mm/fault.c
48
next_mm->context[smp_processor_id()] = mmc;
arch/alpha/mm/tlbflush.c
105
if (READ_ONCE(mm->context[cpu]))
arch/alpha/mm/tlbflush.c
106
WRITE_ONCE(mm->context[cpu], 0);
arch/arc/include/asm/entry-arcv2.h
237
; INPUT: r0 has STAT32 of calling context
arch/arc/include/asm/entry-arcv2.h
258
; INPUT: r0 has STAT32 of calling context
arch/arc/include/asm/mmu_context.h
49
#define asid_mm(mm, cpu) mm->context.asid[cpu]
arch/arc/kernel/asm-offsets.c
42
DEFINE(MM_CTXT, offsetof(struct mm_struct, context));
arch/arm/include/asm/elf.h
147
(elf_addr_t)current->mm->context.vdso); \
arch/arm/include/asm/mmu.h
27
#define ASID(mm) ((unsigned int)((mm)->context.id.counter & ~ASID_MASK))
arch/arm/include/asm/mmu_context.h
30
unlikely(atomic_read(&mm->context.vmalloc_seq) !=
arch/arm/include/asm/mmu_context.h
31
atomic_read(&init_mm.context.vmalloc_seq)))
arch/arm/include/asm/mmu_context.h
44
atomic64_set(&mm->context.id, 0);
arch/arm/include/asm/mmu_context.h
75
mm->context.switch_pending = 1;
arch/arm/include/asm/mmu_context.h
87
if (mm && mm->context.switch_pending) {
arch/arm/include/asm/mmu_context.h
95
if (mm->context.switch_pending) {
arch/arm/include/asm/mmu_context.h
96
mm->context.switch_pending = 0;
arch/arm/kernel/asm-offsets.c
108
DEFINE(MM_CONTEXT_ID, offsetof(struct mm_struct, context.id.counter));
arch/arm/kernel/process.c
385
current->mm->context.sigpage = new_vma->vm_start;
arch/arm/kernel/process.c
430
mm->context.sigpage = addr;
arch/arm/kernel/signal.c
142
struct sigcontext context;
arch/arm/kernel/signal.c
151
err |= __copy_from_user(&context, &sf->uc.uc_mcontext, sizeof(context));
arch/arm/kernel/signal.c
153
regs->ARM_r0 = context.arm_r0;
arch/arm/kernel/signal.c
154
regs->ARM_r1 = context.arm_r1;
arch/arm/kernel/signal.c
155
regs->ARM_r2 = context.arm_r2;
arch/arm/kernel/signal.c
156
regs->ARM_r3 = context.arm_r3;
arch/arm/kernel/signal.c
157
regs->ARM_r4 = context.arm_r4;
arch/arm/kernel/signal.c
158
regs->ARM_r5 = context.arm_r5;
arch/arm/kernel/signal.c
159
regs->ARM_r6 = context.arm_r6;
arch/arm/kernel/signal.c
160
regs->ARM_r7 = context.arm_r7;
arch/arm/kernel/signal.c
161
regs->ARM_r8 = context.arm_r8;
arch/arm/kernel/signal.c
162
regs->ARM_r9 = context.arm_r9;
arch/arm/kernel/signal.c
163
regs->ARM_r10 = context.arm_r10;
arch/arm/kernel/signal.c
164
regs->ARM_fp = context.arm_fp;
arch/arm/kernel/signal.c
165
regs->ARM_ip = context.arm_ip;
arch/arm/kernel/signal.c
166
regs->ARM_sp = context.arm_sp;
arch/arm/kernel/signal.c
167
regs->ARM_lr = context.arm_lr;
arch/arm/kernel/signal.c
168
regs->ARM_pc = context.arm_pc;
arch/arm/kernel/signal.c
169
regs->ARM_cpsr = context.arm_cpsr;
arch/arm/kernel/signal.c
254
struct sigcontext context;
arch/arm/kernel/signal.c
257
context = (struct sigcontext) {
arch/arm/kernel/signal.c
282
err |= __copy_to_user(&sf->uc.uc_mcontext, &context, sizeof(context));
arch/arm/kernel/signal.c
420
retcode = mm->context.sigpage + signal_return_offset +
arch/arm/kernel/traps.c
962
atomic_inc_return_release(&init_mm.context.vmalloc_seq);
arch/arm/kernel/vdso.c
213
mm->context.vdso = 0;
arch/arm/kernel/vdso.c
230
mm->context.vdso = addr;
arch/arm/kernel/vdso.c
40
current->mm->context.vdso = new_vma->vm_start;
arch/arm/mach-omap2/clockdomain.h
142
u32 context;
arch/arm/mach-omap2/cm33xx.c
364
clkdm->context = am33xx_cm_read_reg_bits(clkdm->cm_inst,
arch/arm/mach-omap2/cm33xx.c
379
switch (clkdm->context) {
arch/arm/mach-omap2/cminst44xx.c
489
clkdm->context = omap4_cminst_read_inst_reg(clkdm->prcm_partition,
arch/arm/mach-omap2/cminst44xx.c
493
clkdm->context &= OMAP4430_MODULEMODE_MASK;
arch/arm/mach-omap2/cminst44xx.c
505
switch (clkdm->context) {
arch/arm/mach-omap2/powerdomain.h
144
u32 context;
arch/arm/mach-omap2/prm33xx.c
345
pwrdm->context = am33xx_prm_read_reg(pwrdm->prcm_offs,
arch/arm/mach-omap2/prm33xx.c
351
pwrdm->context &= ~AM33XX_LOWPOWERSTATECHANGE_MASK;
arch/arm/mach-omap2/prm33xx.c
361
am33xx_prm_write_reg(pwrdm->context, pwrdm->prcm_offs,
arch/arm/mach-omap2/prm33xx.c
366
ctrl = OMAP_POWERSTATEST_MASK & pwrdm->context;
arch/arm/mach-omap2/prm44xx.c
685
pwrdm->context = omap4_prminst_read_inst_reg(pwrdm->prcm_partition,
arch/arm/mach-omap2/prm44xx.c
693
pwrdm->context &= ~OMAP4430_LOWPOWERSTATECHANGE_MASK;
arch/arm/mach-omap2/prm44xx.c
711
omap4_prminst_write_inst_reg(pwrdm->context,
arch/arm/mach-omap2/prm44xx.c
718
ctrl = OMAP_POWERSTATEST_MASK & pwrdm->context;
arch/arm/mm/context.c
192
u64 asid = atomic64_read(&mm->context.id);
arch/arm/mm/context.c
252
asid = atomic64_read(&mm->context.id);
arch/arm/mm/context.c
259
asid = atomic64_read(&mm->context.id);
arch/arm/mm/context.c
262
atomic64_set(&mm->context.id, asid);
arch/arm/mm/context.c
59
context_id = mm->context.id.counter;
arch/arm/mm/ioremap.c
144
seq = atomic_read_acquire(&init_mm.context.vmalloc_seq);
arch/arm/mm/ioremap.c
158
atomic_set_release(&mm->context.vmalloc_seq, seq);
arch/arm/mm/ioremap.c
159
} while (seq != atomic_read(&init_mm.context.vmalloc_seq));
arch/arm/mm/ioremap.c
190
atomic_inc_return_release(&init_mm.context.vmalloc_seq);
arch/arm64/include/asm/elf.h
171
(elf_addr_t)current->mm->context.vdso); \
arch/arm64/include/asm/elf.h
238
(Elf64_Off)current->mm->context.vdso); \
arch/arm64/include/asm/mmu.h
64
#define ASID(mm) (atomic64_read(&(mm)->context.id) & 0xffff)
arch/arm64/include/asm/mmu_context.h
169
atomic64_set(&mm->context.id, 0);
arch/arm64/include/asm/mmu_context.h
170
refcount_set(&mm->context.pinned, 0);
arch/arm64/include/asm/mmu_context.h
173
mm->context.pkey_allocation_map = BIT(0);
arch/arm64/include/asm/mmu_context.h
182
mm->context.pkey_allocation_map = oldmm->context.pkey_allocation_map;
arch/arm64/include/asm/pkeys.h
43
#define mm_pkey_allocation_map(mm) (mm)->context.pkey_allocation_map
arch/arm64/kernel/idle.c
25
struct arm_cpuidle_irq_context context;
arch/arm64/kernel/idle.c
27
arm_cpuidle_save_irq_context(&context);
arch/arm64/kernel/idle.c
32
arm_cpuidle_restore_irq_context(&context);
arch/arm64/kernel/probes/uprobes.c
49
if (mm->context.flags & MMCF_AARCH32)
arch/arm64/kernel/process.c
834
current->mm->context.flags = mmflags;
arch/arm64/kernel/signal.c
1452
sigtramp = VDSO_SYMBOL(current->mm->context.vdso, sigtramp);
arch/arm64/kernel/signal32.c
349
retcode = (unsigned long)current->mm->context.sigpage +
arch/arm64/kernel/suspend.c
102
struct arm_cpuidle_irq_context context;
arch/arm64/kernel/suspend.c
138
arm_cpuidle_save_irq_context(&context);
arch/arm64/kernel/suspend.c
162
arm_cpuidle_restore_irq_context(&context);
arch/arm64/kernel/vdso.c
128
mm->context.vdso = (void *)vdso_base;
arch/arm64/kernel/vdso.c
140
mm->context.vdso = NULL;
arch/arm64/kernel/vdso.c
160
current->mm->context.sigpage = (void *)new_vma->vm_start;
arch/arm64/kernel/vdso.c
288
mm->context.sigpage = (void *)addr;
arch/arm64/kernel/vdso.c
63
current->mm->context.vdso = (void *)new_vma->vm_start;
arch/arm64/mm/context.c
161
u64 asid = atomic64_read(&mm->context.id);
arch/arm64/mm/context.c
179
if (refcount_read(&mm->context.pinned))
arch/arm64/mm/context.c
224
asid = atomic64_read(&mm->context.id);
arch/arm64/mm/context.c
248
asid = atomic64_read(&mm->context.id);
arch/arm64/mm/context.c
251
atomic64_set(&mm->context.id, asid);
arch/arm64/mm/context.c
283
asid = atomic64_read(&mm->context.id);
arch/arm64/mm/context.c
285
if (refcount_inc_not_zero(&mm->context.pinned))
arch/arm64/mm/context.c
299
atomic64_set(&mm->context.id, asid);
arch/arm64/mm/context.c
304
refcount_set(&mm->context.pinned, 1);
arch/arm64/mm/context.c
322
u64 asid = atomic64_read(&mm->context.id);
arch/arm64/mm/context.c
329
if (refcount_dec_and_test(&mm->context.pinned)) {
arch/csky/abiv2/cacheflush.c
42
cpumask_t *mask = &mm->context.icache_stale_mask;
arch/csky/abiv2/cacheflush.c
72
mask = &mm->context.icache_stale_mask;
arch/csky/include/asm/mmu_context.h
17
#define cpu_asid(mm) (atomic64_read(&mm->context.asid) & ASID_MASK)
arch/csky/include/asm/mmu_context.h
19
#define init_new_context(tsk,mm) ({ atomic64_set(&(mm)->context.asid, 0); 0; })
arch/csky/include/asm/mmu_context.h
32
setup_pgd(next->pgd, next->context.asid.counter);
arch/csky/kernel/signal.c
159
current->mm->context.vdso, rt_sigreturn);
arch/csky/kernel/vdso.c
65
mm->context.vdso = (void *)vdso_base;
arch/csky/kernel/vdso.c
75
mm->context.vdso = NULL;
arch/csky/mm/context.c
21
asid_check_context(&asid_info, &mm->context.asid, cpu, mm);
arch/hexagon/include/asm/mmu_context.h
38
if (next->context.generation < prev->context.generation) {
arch/hexagon/include/asm/mmu_context.h
42
next->context.generation = prev->context.generation;
arch/hexagon/include/asm/mmu_context.h
45
__vmnewmap((void *)next->context.ptbase);
arch/hexagon/include/asm/pgalloc.h
36
mm->context.generation = kmap_generation;
arch/hexagon/include/asm/pgalloc.h
39
mm->context.ptbase = __pa(pgd);
arch/hexagon/include/asm/pgalloc.h
73
mm->context.generation = kmap_generation;
arch/hexagon/include/asm/pgalloc.h
74
current->active_mm->context.generation = kmap_generation;
arch/hexagon/kernel/signal.c
100
struct hexagon_vdso *vdso = current->mm->context.vdso;
arch/hexagon/kernel/vdso.c
83
mm->context.vdso = (void *)vdso_base;
arch/hexagon/kernel/vdso.c
92
if (vma->vm_mm && vma->vm_start == (long)vma->vm_mm->context.vdso)
arch/hexagon/mm/init.c
75
init_mm.context.ptbase = __pa(init_mm.pgd);
arch/hexagon/mm/vm_tlb.c
31
if (mm->context.ptbase == current->active_mm->context.ptbase)
arch/hexagon/mm/vm_tlb.c
60
if (current->active_mm->context.ptbase == mm->context.ptbase)
arch/hexagon/mm/vm_tlb.c
71
if (mm->context.ptbase == current->active_mm->context.ptbase)
arch/loongarch/include/asm/elf.h
345
(unsigned long)current->mm->context.vdso); \
arch/loongarch/include/asm/mmu_context.h
34
#define cpu_context(cpu, mm) ((mm)->context.asid[cpu])
arch/loongarch/kernel/asm-offsets.c
195
OFFSET(MM_CONTEXT, mm_struct, context);
arch/loongarch/kernel/signal.c
987
void *vdso = current->mm->context.vdso;
arch/loongarch/kernel/vdso.c
120
mm->context.vdso = (void *)vdso_addr;
arch/loongarch/kernel/vdso.c
30
current->mm->context.vdso = (void *)(new_vma->vm_start);
arch/loongarch/kvm/main.c
208
struct kvm_context *context;
arch/loongarch/kvm/main.c
210
context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);
arch/loongarch/kvm/main.c
211
vpid = context->vpid_cache + 1;
arch/loongarch/kvm/main.c
223
context->vpid_cache = vpid;
arch/loongarch/kvm/main.c
232
struct kvm_context *context;
arch/loongarch/kvm/main.c
239
context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);
arch/loongarch/kvm/main.c
250
old = context->vpid_cache & ~vpid_mask;
arch/loongarch/kvm/main.c
353
struct kvm_context *context;
arch/loongarch/kvm/main.c
399
context = per_cpu_ptr(vmcs, cpu);
arch/loongarch/kvm/main.c
400
context->vpid_cache = vpid_mask + 1;
arch/loongarch/kvm/main.c
401
context->last_vcpu = NULL;
arch/loongarch/kvm/vcpu.c
1596
struct kvm_context *context;
arch/loongarch/kvm/vcpu.c
1608
context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);
arch/loongarch/kvm/vcpu.c
1609
if (context->last_vcpu == vcpu)
arch/loongarch/kvm/vcpu.c
1610
context->last_vcpu = NULL;
arch/loongarch/kvm/vcpu.c
1617
struct kvm_context *context;
arch/loongarch/kvm/vcpu.c
1630
context = per_cpu_ptr(vcpu->kvm->arch.vmcs, cpu);
arch/loongarch/kvm/vcpu.c
1631
if (migrated || (context->last_vcpu != vcpu))
arch/loongarch/kvm/vcpu.c
1633
context->last_vcpu = vcpu;
arch/loongarch/kvm/vcpu.c
43
struct kvm_context *context;
arch/loongarch/kvm/vcpu.c
45
context = this_cpu_ptr(vcpu->kvm->arch.vmcs);
arch/loongarch/kvm/vcpu.c
46
context->perf_cntr[0] = read_csr_perfcntr0();
arch/loongarch/kvm/vcpu.c
47
context->perf_cntr[1] = read_csr_perfcntr1();
arch/loongarch/kvm/vcpu.c
48
context->perf_cntr[2] = read_csr_perfcntr2();
arch/loongarch/kvm/vcpu.c
49
context->perf_cntr[3] = read_csr_perfcntr3();
arch/loongarch/kvm/vcpu.c
50
context->perf_ctrl[0] = write_csr_perfctrl0(0);
arch/loongarch/kvm/vcpu.c
51
context->perf_ctrl[1] = write_csr_perfctrl1(0);
arch/loongarch/kvm/vcpu.c
52
context->perf_ctrl[2] = write_csr_perfctrl2(0);
arch/loongarch/kvm/vcpu.c
53
context->perf_ctrl[3] = write_csr_perfctrl3(0);
arch/loongarch/kvm/vcpu.c
58
struct kvm_context *context;
arch/loongarch/kvm/vcpu.c
60
context = this_cpu_ptr(vcpu->kvm->arch.vmcs);
arch/loongarch/kvm/vcpu.c
61
write_csr_perfcntr0(context->perf_cntr[0]);
arch/loongarch/kvm/vcpu.c
62
write_csr_perfcntr1(context->perf_cntr[1]);
arch/loongarch/kvm/vcpu.c
63
write_csr_perfcntr2(context->perf_cntr[2]);
arch/loongarch/kvm/vcpu.c
64
write_csr_perfcntr3(context->perf_cntr[3]);
arch/loongarch/kvm/vcpu.c
65
write_csr_perfctrl0(context->perf_ctrl[0]);
arch/loongarch/kvm/vcpu.c
66
write_csr_perfctrl1(context->perf_ctrl[1]);
arch/loongarch/kvm/vcpu.c
67
write_csr_perfctrl2(context->perf_ctrl[2]);
arch/loongarch/kvm/vcpu.c
68
write_csr_perfctrl3(context->perf_ctrl[3]);
arch/m68k/include/asm/mmu_context.h
146
asid = mm->context & 0xff;
arch/m68k/include/asm/mmu_context.h
174
extern void clear_context(unsigned long context);
arch/m68k/include/asm/mmu_context.h
181
mm->context = SUN3_INVALID_CONTEXT;
arch/m68k/include/asm/mmu_context.h
189
if (mm->context == SUN3_INVALID_CONTEXT)
arch/m68k/include/asm/mmu_context.h
190
mm->context = get_free_context(mm);
arch/m68k/include/asm/mmu_context.h
197
if (mm->context != SUN3_INVALID_CONTEXT)
arch/m68k/include/asm/mmu_context.h
198
clear_context(mm->context);
arch/m68k/include/asm/mmu_context.h
204
sun3_put_context(mm->context);
arch/m68k/include/asm/mmu_context.h
230
mm->context = virt_to_phys(mm->pgd);
arch/m68k/include/asm/mmu_context.h
237
0x80000000 | _PAGE_TABLE, mm->context
arch/m68k/include/asm/mmu_context.h
275
asm volatile ("movec %0,%%urp" : : "r" (mm->context));
arch/m68k/include/asm/mmu_context.h
305
next_mm->context = virt_to_phys(next_mm->pgd);
arch/m68k/include/asm/mmu_context.h
32
if (mm->context != NO_CONTEXT)
arch/m68k/include/asm/mmu_context.h
45
mm->context = ctx;
arch/m68k/include/asm/mmu_context.h
52
#define init_new_context(tsk, mm) (((mm)->context = NO_CONTEXT), 0)
arch/m68k/include/asm/mmu_context.h
60
if (mm->context != NO_CONTEXT) {
arch/m68k/include/asm/mmu_context.h
61
clear_bit(mm->context, context_map);
arch/m68k/include/asm/mmu_context.h
62
mm->context = NO_CONTEXT;
arch/m68k/include/asm/mmu_context.h
67
static inline void set_context(mm_context_t context, pgd_t *pgd)
arch/m68k/include/asm/mmu_context.h
69
__asm__ __volatile__ ("movec %0,%%asid" : : "d" (context));
arch/m68k/include/asm/mmu_context.h
76
set_context(tsk->mm->context, next->pgd);
arch/m68k/include/asm/mmu_context.h
88
set_context(mm->context, mm->pgd);
arch/m68k/include/asm/oplib.h
184
int context, char *program_counter);
arch/m68k/include/asm/oplib.h
210
extern void prom_putsegment(int context, unsigned long virt_addr,
arch/m68k/include/asm/tlbflush.h
146
sun3_put_context(mm->context);
arch/m68k/include/asm/tlbflush.h
171
sun3_put_context(vma->vm_mm->context);
arch/m68k/include/asm/tlbflush.h
194
sun3_put_context(mm->context);
arch/m68k/include/asm/tlbflush.h
200
if(pmeg_ctx[seg] == mm->context) {
arch/m68k/kernel/signal.c
682
struct sigcontext context;
arch/m68k/kernel/signal.c
690
if (copy_from_user(&context, usc, sizeof(context)))
arch/m68k/kernel/signal.c
694
regs->d0 = context.sc_d0;
arch/m68k/kernel/signal.c
695
regs->d1 = context.sc_d1;
arch/m68k/kernel/signal.c
696
regs->a0 = context.sc_a0;
arch/m68k/kernel/signal.c
697
regs->a1 = context.sc_a1;
arch/m68k/kernel/signal.c
698
regs->sr = (regs->sr & 0xff00) | (context.sc_sr & 0xff);
arch/m68k/kernel/signal.c
699
regs->pc = context.sc_pc;
arch/m68k/kernel/signal.c
701
wrusp(context.sc_usp);
arch/m68k/kernel/signal.c
702
formatvec = context.sc_formatvec;
arch/m68k/kernel/signal.c
704
if (restore_fpu_state(&context))
arch/m68k/kernel/signal.c
882
struct sigcontext context;
arch/m68k/kernel/signal.c
905
setup_sigcontext(&context, regs, set->sig[0]);
arch/m68k/kernel/signal.c
906
err |= copy_to_user (&frame->sc, &context, sizeof(context));
arch/m68k/mm/mcfmmu.c
126
asid = mm->context & 0xff;
arch/m68k/mm/sun3kmap.c
25
extern void mmu_emu_map_pmeg (int context, int vaddr);
arch/m68k/sun3/mmu_emu.c
209
void clear_context(unsigned long context)
arch/m68k/sun3/mmu_emu.c
214
if (context) {
arch/m68k/sun3/mmu_emu.c
215
if (!ctx_alloc[context])
arch/m68k/sun3/mmu_emu.c
218
ctx_alloc[context]->context = SUN3_INVALID_CONTEXT;
arch/m68k/sun3/mmu_emu.c
219
ctx_alloc[context] = (struct mm_struct *)0;
arch/m68k/sun3/mmu_emu.c
225
sun3_put_context(context);
arch/m68k/sun3/mmu_emu.c
228
if ((pmeg_ctx[i] == context) && (pmeg_alloc[i] == 1)) {
arch/m68k/sun3/mmu_emu.c
283
inline void mmu_emu_map_pmeg (int context, int vaddr)
arch/m68k/sun3/mmu_emu.c
298
curr_pmeg, context, vaddr);
arch/m68k/sun3/mmu_emu.c
305
sun3_put_context(context);
arch/m68k/sun3/mmu_emu.c
318
sun3_put_context(context);
arch/m68k/sun3/mmu_emu.c
325
pmeg_ctx[curr_pmeg] = context;
arch/m68k/sun3/mmu_emu.c
358
unsigned char context;
arch/m68k/sun3/mmu_emu.c
364
context = 0;
arch/m68k/sun3/mmu_emu.c
366
context = current->mm->context;
arch/m68k/sun3/mmu_emu.c
401
mmu_emu_map_pmeg (context, vaddr);
arch/microblaze/include/asm/mmu_context_mm.h
107
if (mm->context != NO_CONTEXT) {
arch/microblaze/include/asm/mmu_context_mm.h
108
clear_bit(mm->context, context_map);
arch/microblaze/include/asm/mmu_context_mm.h
109
mm->context = NO_CONTEXT;
arch/microblaze/include/asm/mmu_context_mm.h
119
set_context(next->context, next->pgd);
arch/microblaze/include/asm/mmu_context_mm.h
132
set_context(mm->context, mm->pgd);
arch/microblaze/include/asm/mmu_context_mm.h
50
extern void set_context(mm_context_t context, pgd_t *pgd);
arch/microblaze/include/asm/mmu_context_mm.h
81
if (mm->context != NO_CONTEXT)
arch/microblaze/include/asm/mmu_context_mm.h
92
mm->context = ctx;
arch/microblaze/include/asm/mmu_context_mm.h
99
# define init_new_context(tsk, mm) (((mm)->context = NO_CONTEXT), 0)
arch/mips/include/asm/elf.h
464
(unsigned long)current->mm->context.vdso); \
arch/mips/include/asm/kvm_host.h
631
__BUILD_KVM_RW_HW(context, l, MIPS_CP0_TLB_CONTEXT, 0)
arch/mips/include/asm/mmu_context.h
109
return atomic64_read(&mm->context.mmid);
arch/mips/include/asm/mmu_context.h
111
return mm->context.asid[cpu];
arch/mips/include/asm/mmu_context.h
118
atomic64_set(&mm->context.mmid, ctx);
arch/mips/include/asm/mmu_context.h
120
mm->context.asid[cpu] = ctx;
arch/mips/include/asm/mmu_context.h
148
mm->context.bd_emupage_allocmap = NULL;
arch/mips/include/asm/mmu_context.h
149
spin_lock_init(&mm->context.bd_emupage_lock);
arch/mips/include/asm/mmu_context.h
150
init_waitqueue_head(&mm->context.bd_emupage_queue);
arch/mips/include/asm/sn/sn0/hubio.h
715
context: 15, /* Bit vector:
arch/mips/include/asm/sn/sn0/hubio.h
727
#define icrbd_context icrbd_field_s.context
arch/mips/kernel/asm-offsets.c
202
OFFSET(MM_CONTEXT, mm_struct, context);
arch/mips/kernel/signal.c
821
void *vdso = current->mm->context.vdso;
arch/mips/kernel/vdso.c
177
mm->context.vdso = (void *)vdso_addr;
arch/mips/kvm/entry.c
309
offsetof(struct kvm, arch.gpa_mm.context.asid));
arch/mips/math-emu/dsemul.c
124
mm_context_t *mm_ctx = &mm->context;
arch/mips/math-emu/dsemul.c
204
mm_context_t *mm_ctx = &mm->context;
arch/mips/math-emu/dsemul.c
77
mm_context_t *mm_ctx = &current->mm->context;
arch/nios2/include/asm/mmu_context.h
39
mm->context = 0;
arch/nios2/mm/mmu_context.c
108
next->context = get_new_context();
arch/nios2/mm/mmu_context.c
109
set_context(next->context);
arch/nios2/mm/mmu_context.c
113
unsigned long get_pid_from_context(mm_context_t *context)
arch/nios2/mm/mmu_context.c
115
return CTX_PID((*context));
arch/nios2/mm/mmu_context.c
53
static void set_context(mm_context_t context)
arch/nios2/mm/mmu_context.c
55
set_mmu_pid(CTX_PID(context));
arch/nios2/mm/mmu_context.c
89
if (unlikely(CTX_VERSION(next->context) !=
arch/nios2/mm/mmu_context.c
91
next->context = get_new_context();
arch/nios2/mm/mmu_context.c
97
set_context(next->context);
arch/nios2/mm/tlb.c
105
unsigned long mmu_pid = get_pid_from_context(&vma->vm_mm->context);
arch/nios2/mm/tlb.c
115
unsigned long mmu_pid = get_pid_from_context(&vma->vm_mm->context);
arch/nios2/mm/tlb.c
260
unsigned long mmu_pid = get_pid_from_context(&mm->context);
arch/nios2/mm/tlb.c
263
memset(&mm->context, 0, sizeof(mm_context_t));
arch/openrisc/mm/tlb.c
171
mm->context = NO_CONTEXT;
arch/parisc/include/asm/elf.h
359
#define VDSO_CURRENT_BASE current->mm->context.vdso_base
arch/parisc/include/asm/mmu_context.h
23
mm->context.space_id = alloc_sid();
arch/parisc/include/asm/mmu_context.h
31
free_sid(mm->context.space_id);
arch/parisc/include/asm/mmu_context.h
32
mm->context.space_id = 0;
arch/parisc/include/asm/mmu_context.h
35
static inline unsigned long __space_to_prot(mm_context_t context)
arch/parisc/include/asm/mmu_context.h
38
return context.space_id << 1;
arch/parisc/include/asm/mmu_context.h
40
return context.space_id >> (SPACEID_SHIFT - 1);
arch/parisc/include/asm/mmu_context.h
44
static inline void load_context(mm_context_t context)
arch/parisc/include/asm/mmu_context.h
46
mtsp(context.space_id, SR_USER);
arch/parisc/include/asm/mmu_context.h
47
mtctl(__space_to_prot(context), 8);
arch/parisc/include/asm/mmu_context.h
61
load_context(next->context);
arch/parisc/include/asm/mmu_context.h
92
if (next->context.space_id == 0)
arch/parisc/include/asm/mmu_context.h
93
next->context.space_id = alloc_sid();
arch/parisc/include/asm/pgtable.h
58
mtsp(mm->context.space_id, SR_TEMP1);
arch/parisc/include/asm/processor.h
243
__u32 spaceid = (__u32)current->mm->context.space_id; \
arch/parisc/include/asm/tlbflush.h
20
__flush_tlb_range((vma)->vm_mm->context.space_id, start, end)
arch/parisc/include/asm/tlbflush.h
55
if (mm->context != 0)
arch/parisc/include/asm/tlbflush.h
56
free_sid(mm->context);
arch/parisc/include/asm/tlbflush.h
57
mm->context = alloc_sid();
arch/parisc/include/asm/tlbflush.h
59
load_context(mm->context);
arch/parisc/include/asm/vdso.h
12
#define VDSO64_SYMBOL(tsk, name) ((tsk)->mm->context.vdso_base + (vdso64_offset_##name))
arch/parisc/include/asm/vdso.h
13
#define VDSO32_SYMBOL(tsk, name) ((tsk)->mm->context.vdso_base + (vdso32_offset_##name))
arch/parisc/kernel/vdso.c
32
current->mm->context.vdso_base = vma->vm_start;
arch/parisc/kernel/vdso.c
91
current->mm->context.vdso_base = vdso_text_start;
arch/powerpc/include/asm/book3s/32/mmu-hash.h
183
#define INIT_MM_CONTEXT(mm) .context.sr0 = SR_NX
arch/powerpc/include/asm/book3s/32/pgtable.h
229
extern int flush_hash_pages(unsigned context, unsigned long va,
arch/powerpc/include/asm/book3s/32/pgtable.h
233
extern void add_hash_page(unsigned context, unsigned long va,
arch/powerpc/include/asm/book3s/32/pgtable.h
242
flush_hash_pages(mm->context.id, addr, ptephys, 1);
arch/powerpc/include/asm/book3s/64/mmu-hash.h
777
static inline unsigned long get_vsid(unsigned long context, unsigned long ea,
arch/powerpc/include/asm/book3s/64/mmu-hash.h
795
protovsid = (context << ESID_BITS) |
arch/powerpc/include/asm/book3s/64/mmu-hash.h
801
protovsid = (context << ESID_BITS_1T) |
arch/powerpc/include/asm/book3s/64/mmu-hash.h
845
unsigned long context;
arch/powerpc/include/asm/book3s/64/mmu-hash.h
850
context = get_kernel_context(ea);
arch/powerpc/include/asm/book3s/64/mmu-hash.h
851
return get_vsid(context, ea, ssize);
arch/powerpc/include/asm/book3s/64/mmu.h
285
unsigned long context = get_user_context(ctx, ea);
arch/powerpc/include/asm/book3s/64/mmu.h
287
return get_vsid(context, ea, ssize);
arch/powerpc/include/asm/elf.h
170
VDSO_AUX_ENT(AT_SYSINFO_EHDR, (unsigned long)current->mm->context.vdso);\
arch/powerpc/include/asm/mmu_context.h
122
atomic_inc(&mm->context.active_cpus);
arch/powerpc/include/asm/mmu_context.h
127
VM_WARN_ON_ONCE(atomic_read(&mm->context.active_cpus) <= 0);
arch/powerpc/include/asm/mmu_context.h
128
atomic_dec(&mm->context.active_cpus);
arch/powerpc/include/asm/mmu_context.h
138
if (atomic_inc_return(&mm->context.copros) == 1)
arch/powerpc/include/asm/mmu_context.h
167
c = atomic_dec_if_positive(&mm->context.copros);
arch/powerpc/include/asm/mmu_context.h
189
atomic_inc(&mm->context.vas_windows);
arch/powerpc/include/asm/mmu_context.h
198
v = atomic_dec_if_positive(&mm->context.vas_windows);
arch/powerpc/include/asm/mmu_context.h
83
VM_WARN_ON(mm->context.extended_id[index]);
arch/powerpc/include/asm/mmu_context.h
84
mm->context.extended_id[index] = context_id;
arch/powerpc/include/asm/mmu_context.h
92
context_id = get_user_context(&mm->context, ea);
arch/powerpc/include/asm/nohash/tlbflush.h
37
unsigned int pid = READ_ONCE(mm->context.id);
arch/powerpc/include/asm/pkeys.h
52
#define mm_pkey_allocation_map(mm) (mm->context.pkey_allocation_map)
arch/powerpc/include/asm/tlb.h
62
if (atomic_read(&mm->context.active_cpus) > 1)
arch/powerpc/kernel/paca.c
315
mm_context_t *context = &mm->context;
arch/powerpc/kernel/paca.c
317
VM_BUG_ON(!mm_ctx_slb_addr_limit(context));
arch/powerpc/kernel/paca.c
318
memcpy(&get_paca()->mm_ctx_low_slices_psize, mm_ctx_low_slices(context),
arch/powerpc/kernel/paca.c
320
memcpy(&get_paca()->mm_ctx_high_slices_psize, mm_ctx_high_slices(context),
arch/powerpc/kernel/paca.c
321
TASK_SLICE_ARRAY_SZ(context));
arch/powerpc/kernel/process.c
1301
atomic_read(&new->mm->context.vas_windows)))
arch/powerpc/kernel/signal_32.c
785
if (tsk->mm->context.vdso) {
arch/powerpc/kernel/signal_32.c
786
tramp = VDSO32_SYMBOL(tsk->mm->context.vdso, sigtramp_rt32);
arch/powerpc/kernel/signal_32.c
881
if (tsk->mm->context.vdso) {
arch/powerpc/kernel/signal_32.c
882
tramp = VDSO32_SYMBOL(tsk->mm->context.vdso, sigtramp32);
arch/powerpc/kernel/signal_64.c
924
if (tsk->mm->context.vdso) {
arch/powerpc/kernel/signal_64.c
925
regs_set_return_ip(regs, VDSO64_SYMBOL(tsk->mm->context.vdso, sigtramp_rt64));
arch/powerpc/kernel/vdso.c
151
mm->context.vdso = (void __user *)vdso_base + vvar_size;
arch/powerpc/kernel/vdso.c
161
mm->context.vdso = NULL;
arch/powerpc/kernel/vdso.c
52
current->mm->context.vdso = (void __user *)new_vma->vm_start;
arch/powerpc/kernel/vdso.c
76
if (vma->vm_start != (unsigned long)mm->context.vdso)
arch/powerpc/kernel/vdso.c
79
mm->context.vdso = NULL;
arch/powerpc/mm/book3s32/kuap.c
11
init_mm.context.sr0 |= SR_KS;
arch/powerpc/mm/book3s32/mmu.c
309
add_hash_page(mm->context.id, ea, pmd_val(*pmd));
arch/powerpc/mm/book3s32/mmu_context.c
116
long id = next->context.id;
arch/powerpc/mm/book3s32/mmu_context.c
123
update_user_segments(next->context.sr0);
arch/powerpc/mm/book3s32/mmu_context.c
71
mm->context.id = __init_new_context();
arch/powerpc/mm/book3s32/mmu_context.c
72
mm->context.sr0 = CTX_TO_VSID(mm->context.id, 0);
arch/powerpc/mm/book3s32/mmu_context.c
75
mm->context.sr0 |= SR_NX;
arch/powerpc/mm/book3s32/mmu_context.c
77
mm->context.sr0 |= SR_KS;
arch/powerpc/mm/book3s32/mmu_context.c
97
if (mm->context.id != NO_CONTEXT) {
arch/powerpc/mm/book3s32/mmu_context.c
98
__destroy_context(mm->context.id);
arch/powerpc/mm/book3s32/mmu_context.c
99
mm->context.id = NO_CONTEXT;
arch/powerpc/mm/book3s32/tlb.c
105
flush_hash_pages(mm->context.id, vmaddr, pmd_val(*pmd), 1);
arch/powerpc/mm/book3s32/tlb.c
55
unsigned int ctx = mm->context.id;
arch/powerpc/mm/book3s64/hash_pgtable.c
335
vsid = get_user_vsid(&mm->context, addr, ssize);
arch/powerpc/mm/book3s64/hash_tlb.c
91
vsid = get_user_vsid(&mm->context, addr, ssize);
arch/powerpc/mm/book3s64/hash_utils.c
1533
init_mm.context.hash_context = &init_hash_mm_context;
arch/powerpc/mm/book3s64/hash_utils.c
1534
mm_ctx_set_slb_addr_limit(&init_mm.context, SLB_ADDR_LIMIT_DEFAULT);
arch/powerpc/mm/book3s64/hash_utils.c
1643
struct subpage_prot_table *spt = mm_ctx_subpage_prot(&mm->context);
arch/powerpc/mm/book3s64/hash_utils.c
1747
vsid = get_user_vsid(&mm->context, ea, ssize);
arch/powerpc/mm/book3s64/hash_utils.c
2020
if (unlikely(psize != mm_ctx_user_psize(&mm->context)))
arch/powerpc/mm/book3s64/hash_utils.c
2056
vsid = get_user_vsid(&mm->context, ea, ssize);
arch/powerpc/mm/book3s64/hash_utils.c
2093
if (mm_ctx_user_psize(&mm->context) == MMU_PAGE_64K)
arch/powerpc/mm/book3s64/hash_utils.c
2106
mm_ctx_user_psize(&mm->context),
arch/powerpc/mm/book3s64/hash_utils.c
2107
mm_ctx_user_psize(&mm->context),
arch/powerpc/mm/book3s64/iommu_api.c
132
list_for_each_entry_rcu(mem2, &mm->context.iommu_group_mem_list, next,
arch/powerpc/mm/book3s64/iommu_api.c
165
list_add_rcu(&mem->next, &mm->context.iommu_group_mem_list);
arch/powerpc/mm/book3s64/iommu_api.c
294
list_for_each_entry_rcu(mem, &mm->context.iommu_group_mem_list, next) {
arch/powerpc/mm/book3s64/iommu_api.c
315
list_for_each_entry_rcu(mem, &mm->context.iommu_group_mem_list, next,
arch/powerpc/mm/book3s64/iommu_api.c
361
list_for_each_entry_rcu(mem, &mm->context.iommu_group_mem_list, next) {
arch/powerpc/mm/book3s64/iommu_api.c
401
INIT_LIST_HEAD_RCU(&mm->context.iommu_group_mem_list);
arch/powerpc/mm/book3s64/iommu_api.c
52
return !list_empty(&mm->context.iommu_group_mem_list);
arch/powerpc/mm/book3s64/mmu_context.c
100
if (!mm->context.hash_context)
arch/powerpc/mm/book3s64/mmu_context.c
117
if (mm->context.id == 0) {
arch/powerpc/mm/book3s64/mmu_context.c
118
memset(mm->context.hash_context, 0, sizeof(struct hash_mm_context));
arch/powerpc/mm/book3s64/mmu_context.c
122
memcpy(mm->context.hash_context, current->mm->context.hash_context, sizeof(struct hash_mm_context));
arch/powerpc/mm/book3s64/mmu_context.c
125
if (current->mm->context.hash_context->spt) {
arch/powerpc/mm/book3s64/mmu_context.c
126
mm->context.hash_context->spt = kmalloc_obj(struct subpage_prot_table);
arch/powerpc/mm/book3s64/mmu_context.c
127
if (!mm->context.hash_context->spt) {
arch/powerpc/mm/book3s64/mmu_context.c
128
kfree(mm->context.hash_context);
arch/powerpc/mm/book3s64/mmu_context.c
135
index = realloc_context_ids(&mm->context);
arch/powerpc/mm/book3s64/mmu_context.c
138
kfree(mm->context.hash_context->spt);
arch/powerpc/mm/book3s64/mmu_context.c
140
kfree(mm->context.hash_context);
arch/powerpc/mm/book3s64/mmu_context.c
185
mm->context.hash_context = NULL;
arch/powerpc/mm/book3s64/mmu_context.c
203
mm->context.id = index;
arch/powerpc/mm/book3s64/mmu_context.c
205
mm->context.pte_frag = NULL;
arch/powerpc/mm/book3s64/mmu_context.c
206
mm->context.pmd_frag = NULL;
arch/powerpc/mm/book3s64/mmu_context.c
210
atomic_set(&mm->context.active_cpus, 0);
arch/powerpc/mm/book3s64/mmu_context.c
211
atomic_set(&mm->context.copros, 0);
arch/powerpc/mm/book3s64/mmu_context.c
261
frag = mm->context.pte_frag;
arch/powerpc/mm/book3s64/mmu_context.c
265
frag = mm->context.pmd_frag;
arch/powerpc/mm/book3s64/mmu_context.c
274
WARN_ON_ONCE(!list_empty(&mm->context.iommu_group_mem_list));
arch/powerpc/mm/book3s64/mmu_context.c
290
process_tb[mm->context.id].prtb0 = 0;
arch/powerpc/mm/book3s64/mmu_context.c
293
destroy_contexts(&mm->context);
arch/powerpc/mm/book3s64/mmu_context.c
294
mm->context.id = MMU_NO_CONTEXT;
arch/powerpc/mm/book3s64/mmu_context.c
316
process_tb[mm->context.id].prtb0 = 0;
arch/powerpc/mm/book3s64/mmu_context.c
323
mtspr(SPRN_PID, next->context.id);
arch/powerpc/mm/book3s64/mmu_context.c
99
mm->context.hash_context = kmalloc_obj(struct hash_mm_context);
arch/powerpc/mm/book3s64/pgtable.c
400
ret = mm->context.pmd_frag;
arch/powerpc/mm/book3s64/pgtable.c
408
mm->context.pmd_frag = pmd_frag;
arch/powerpc/mm/book3s64/pgtable.c
446
if (likely(!mm->context.pmd_frag)) {
arch/powerpc/mm/book3s64/pgtable.c
448
mm->context.pmd_frag = ret + PMD_FRAG_SIZE;
arch/powerpc/mm/book3s64/pkeys.c
312
mm->context.execute_only_pkey = execute_only_key;
arch/powerpc/mm/book3s64/pkeys.c
376
return mm->context.execute_only_pkey;
arch/powerpc/mm/book3s64/pkeys.c
385
return (vma_pkey(vma) == vma->vm_mm->context.execute_only_pkey);
arch/powerpc/mm/book3s64/pkeys.c
468
mm->context.execute_only_pkey = oldmm->context.execute_only_pkey;
arch/powerpc/mm/book3s64/radix_hugetlbpage.c
59
atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_pgtable.c
1574
atomic_read(&mm->context.copros) > 0) {
arch/powerpc/mm/book3s64/radix_pgtable.c
1609
(atomic_read(&mm->context.copros) > 0))
arch/powerpc/mm/book3s64/radix_pgtable.c
487
init_mm.context.id = mmu_base_pid;
arch/powerpc/mm/book3s64/radix_tlb.c
1028
pid = mm->context.id;
arch/powerpc/mm/book3s64/radix_tlb.c
1059
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
1246
pid = mm->context.id;
arch/powerpc/mm/book3s64/radix_tlb.c
1270
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
1321
pid = mm->context.id;
arch/powerpc/mm/book3s64/radix_tlb.c
1348
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
380
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
513
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
567
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
584
unsigned long pid = mm->context.id;
arch/powerpc/mm/book3s64/radix_tlb.c
598
unsigned long pid = mm->context.id;
arch/powerpc/mm/book3s64/radix_tlb.c
618
unsigned long pid = mm->context.id;
arch/powerpc/mm/book3s64/radix_tlb.c
654
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
665
unsigned long pid = mm->context.id;
arch/powerpc/mm/book3s64/radix_tlb.c
767
int active_cpus = atomic_read(&mm->context.active_cpus);
arch/powerpc/mm/book3s64/radix_tlb.c
788
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
839
pid = mm->context.id;
arch/powerpc/mm/book3s64/radix_tlb.c
857
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
880
pid = mm->context.id;
arch/powerpc/mm/book3s64/radix_tlb.c
895
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/radix_tlb.c
920
pid = mm->context.id;
arch/powerpc/mm/book3s64/radix_tlb.c
937
if (atomic_read(&mm->context.copros) > 0)
arch/powerpc/mm/book3s64/slb.c
596
static long slb_insert_entry(unsigned long ea, unsigned long context,
arch/powerpc/mm/book3s64/slb.c
603
vsid = get_vsid(context, ea, ssize);
arch/powerpc/mm/book3s64/slb.c
664
unsigned long context;
arch/powerpc/mm/book3s64/slb.c
706
context = get_kernel_context(ea);
arch/powerpc/mm/book3s64/slb.c
708
return slb_insert_entry(ea, context, flags, ssize, true);
arch/powerpc/mm/book3s64/slb.c
713
unsigned long context;
arch/powerpc/mm/book3s64/slb.c
722
if (ea >= mm_ctx_slb_addr_limit(&mm->context))
arch/powerpc/mm/book3s64/slb.c
725
context = get_user_context(&mm->context, ea);
arch/powerpc/mm/book3s64/slb.c
726
if (!context)
arch/powerpc/mm/book3s64/slb.c
739
return slb_insert_entry(ea, context, flags, ssize, false);
arch/powerpc/mm/book3s64/slice.c
200
psize_mask = slice_mask_for_size(&mm->context, psize);
arch/powerpc/mm/book3s64/slice.c
207
lpsizes = mm_ctx_low_slices(&mm->context);
arch/powerpc/mm/book3s64/slice.c
217
old_mask = slice_mask_for_size(&mm->context, old_psize);
arch/powerpc/mm/book3s64/slice.c
226
hpsizes = mm_ctx_high_slices(&mm->context);
arch/powerpc/mm/book3s64/slice.c
227
for (i = 0; i < GET_HIGH_SLICE_INDEX(mm_ctx_slb_addr_limit(&mm->context)); i++) {
arch/powerpc/mm/book3s64/slice.c
236
old_mask = slice_mask_for_size(&mm->context, old_psize);
arch/powerpc/mm/book3s64/slice.c
246
(unsigned long)mm_ctx_low_slices(&mm->context),
arch/powerpc/mm/book3s64/slice.c
247
(unsigned long)mm_ctx_high_slices(&mm->context));
arch/powerpc/mm/book3s64/slice.c
343
addr += mm_ctx_slb_addr_limit(&mm->context) - DEFAULT_MAP_WINDOW;
arch/powerpc/mm/book3s64/slice.c
455
if (high_limit > mm_ctx_slb_addr_limit(&mm->context)) {
arch/powerpc/mm/book3s64/slice.c
461
mm_ctx_set_slb_addr_limit(&mm->context, high_limit);
arch/powerpc/mm/book3s64/slice.c
468
BUG_ON(mm_ctx_slb_addr_limit(&mm->context) == 0);
arch/powerpc/mm/book3s64/slice.c
488
maskp = slice_mask_for_size(&mm->context, psize);
arch/powerpc/mm/book3s64/slice.c
515
compat_maskp = slice_mask_for_size(&mm->context, MMU_PAGE_4K);
arch/powerpc/mm/book3s64/slice.c
667
psize = mm_ctx_user_psize(¤t->mm->context);
arch/powerpc/mm/book3s64/slice.c
687
psize = mm_ctx_user_psize(¤t->mm->context);
arch/powerpc/mm/book3s64/slice.c
700
psizes = mm_ctx_low_slices(&mm->context);
arch/powerpc/mm/book3s64/slice.c
703
psizes = mm_ctx_high_slices(&mm->context);
arch/powerpc/mm/book3s64/slice.c
724
mm_ctx_set_slb_addr_limit(&mm->context, SLB_ADDR_LIMIT_DEFAULT);
arch/powerpc/mm/book3s64/slice.c
725
mm_ctx_set_user_psize(&mm->context, psize);
arch/powerpc/mm/book3s64/slice.c
730
lpsizes = mm_ctx_low_slices(&mm->context);
arch/powerpc/mm/book3s64/slice.c
733
hpsizes = mm_ctx_high_slices(&mm->context);
arch/powerpc/mm/book3s64/slice.c
739
mask = slice_mask_for_size(&mm->context, psize);
arch/powerpc/mm/book3s64/slice.c
754
mm_ctx_set_slb_addr_limit(&mm->context, DEFAULT_MAP_WINDOW);
arch/powerpc/mm/book3s64/slice.c
792
unsigned int psize = mm_ctx_user_psize(&mm->context);
arch/powerpc/mm/book3s64/slice.c
796
maskp = slice_mask_for_size(&mm->context, psize);
arch/powerpc/mm/book3s64/slice.c
803
compat_maskp = slice_mask_for_size(&mm->context, MMU_PAGE_4K);
arch/powerpc/mm/book3s64/slice.c
91
if ((mm_ctx_slb_addr_limit(&mm->context) - len) < addr)
arch/powerpc/mm/book3s64/subpage_prot.c
101
spt = mm_ctx_subpage_prot(&mm->context);
arch/powerpc/mm/book3s64/subpage_prot.c
218
spt = mm_ctx_subpage_prot(&mm->context);
arch/powerpc/mm/book3s64/subpage_prot.c
229
mm->context.hash_context->spt = spt;
arch/powerpc/mm/book3s64/subpage_prot.c
24
struct subpage_prot_table *spt = mm_ctx_subpage_prot(&mm->context);
arch/powerpc/mm/copro_fault.c
93
vsid = get_user_vsid(&mm->context, ea, ssize);
arch/powerpc/mm/mmu_context.c
112
void *frag = pte_frag_get(&mm->context);
arch/powerpc/mm/mmu_context.c
22
tsk->thread.sr0 = mm->context.sr0;
arch/powerpc/mm/mmu_context.c
25
tsk->thread.pid = mm->context.id;
arch/powerpc/mm/mmu_context.c
35
tsk->thread.pid = mm->context.id;
arch/powerpc/mm/nohash/e500_hugetlbpage.c
144
if (unlikely(book3e_tlb_exists(ea, mm->context.id))) {
arch/powerpc/mm/nohash/e500_hugetlbpage.c
154
mas1 = MAS1_VALID | MAS1_TID(mm->context.id) | MAS1_TSIZE(tsize);
arch/powerpc/mm/nohash/mmu_context.c
112
if (mm->context.active) {
arch/powerpc/mm/nohash/mmu_context.c
120
mm->context.id = MMU_NO_CONTEXT;
arch/powerpc/mm/nohash/mmu_context.c
160
mm->context.id = MMU_NO_CONTEXT;
arch/powerpc/mm/nohash/mmu_context.c
194
mm->context.id = MMU_NO_CONTEXT;
arch/powerpc/mm/nohash/mmu_context.c
231
next->context.active++;
arch/powerpc/mm/nohash/mmu_context.c
233
WARN_ON(prev->context.active < 1);
arch/powerpc/mm/nohash/mmu_context.c
234
prev->context.active--;
arch/powerpc/mm/nohash/mmu_context.c
241
id = next->context.id;
arch/powerpc/mm/nohash/mmu_context.c
276
next->context.id = id;
arch/powerpc/mm/nohash/mmu_context.c
309
mm->context.id = MMU_NO_CONTEXT;
arch/powerpc/mm/nohash/mmu_context.c
310
mm->context.active = 0;
arch/powerpc/mm/nohash/mmu_context.c
311
pte_frag_set(&mm->context, NULL);
arch/powerpc/mm/nohash/mmu_context.c
323
if (mm->context.id == MMU_NO_CONTEXT)
arch/powerpc/mm/nohash/mmu_context.c
326
WARN_ON(mm->context.active != 0);
arch/powerpc/mm/nohash/mmu_context.c
329
id = mm->context.id;
arch/powerpc/mm/nohash/mmu_context.c
332
mm->context.id = MMU_NO_CONTEXT;
arch/powerpc/mm/nohash/mmu_context.c
375
init_mm.context.active = NR_CPUS;
arch/powerpc/mm/nohash/tlb.c
133
pid = mm->context.id;
arch/powerpc/mm/nohash/tlb.c
146
pid = mm ? mm->context.id : 0;
arch/powerpc/mm/nohash/tlb.c
218
pid = mm->context.id;
arch/powerpc/mm/nohash/tlb.c
247
pid = mm->context.id;
arch/powerpc/mm/pgtable-frag.c
41
ret = pte_frag_get(&mm->context);
arch/powerpc/mm/pgtable-frag.c
49
pte_frag_set(&mm->context, pte_frag);
arch/powerpc/mm/pgtable-frag.c
87
if (likely(!pte_frag_get(&mm->context))) {
arch/powerpc/mm/pgtable-frag.c
89
pte_frag_set(&mm->context, ret + PTE_FRAG_SIZE);
arch/powerpc/perf/callchain_32.c
62
if (current->mm->context.vdso &&
arch/powerpc/perf/callchain_32.c
63
nip == VDSO32_SYMBOL(current->mm->context.vdso, sigtramp32))
arch/powerpc/perf/callchain_32.c
73
if (current->mm->context.vdso &&
arch/powerpc/perf/callchain_32.c
74
nip == VDSO32_SYMBOL(current->mm->context.vdso, sigtramp_rt32))
arch/powerpc/perf/callchain_64.c
44
if (current->mm->context.vdso &&
arch/powerpc/perf/callchain_64.c
45
nip == VDSO64_SYMBOL(current->mm->context.vdso, sigtramp_rt64))
arch/powerpc/platforms/44x/warp.c
118
static irqreturn_t temp_isr(int irq, void *context)
arch/riscv/include/asm/elf.h
94
(elf_addr_t)(ulong)current->mm->context.vdso); \
arch/riscv/include/asm/kvm_vcpu_fp.h
18
void __kvm_riscv_fp_f_save(struct kvm_cpu_context *context);
arch/riscv/include/asm/kvm_vcpu_fp.h
19
void __kvm_riscv_fp_f_restore(struct kvm_cpu_context *context);
arch/riscv/include/asm/kvm_vcpu_fp.h
20
void __kvm_riscv_fp_d_save(struct kvm_cpu_context *context);
arch/riscv/include/asm/kvm_vcpu_fp.h
21
void __kvm_riscv_fp_d_restore(struct kvm_cpu_context *context);
arch/riscv/include/asm/kvm_vcpu_vector.h
19
static __always_inline void __kvm_riscv_vector_save(struct kvm_cpu_context *context)
arch/riscv/include/asm/kvm_vcpu_vector.h
21
__riscv_v_vstate_save(&context->vector, context->vector.datap);
arch/riscv/include/asm/kvm_vcpu_vector.h
24
static __always_inline void __kvm_riscv_vector_restore(struct kvm_cpu_context *context)
arch/riscv/include/asm/kvm_vcpu_vector.h
26
__riscv_v_vstate_restore(&context->vector, context->vector.datap);
arch/riscv/include/asm/mmu_context.h
24
next->context.pmlen = 0;
arch/riscv/include/asm/mmu_context.h
34
atomic_long_set(&mm->context.id, 0);
arch/riscv/include/asm/mmu_context.h
37
clear_bit(MM_CONTEXT_LOCK_PMLEN, &mm->context.flags);
arch/riscv/include/asm/mmu_context.h
47
return -1UL >> mm->context.pmlen;
arch/riscv/include/asm/suspend.h
34
int __cpu_suspend_enter(struct suspend_context *context);
arch/riscv/include/asm/suspend.h
40
unsigned long context));
arch/riscv/include/asm/suspend.h
43
int __cpu_resume_enter(unsigned long hartid, unsigned long context);
arch/riscv/include/asm/suspend.h
46
void suspend_save_csrs(struct suspend_context *context);
arch/riscv/include/asm/suspend.h
47
void suspend_restore_csrs(struct suspend_context *context);
arch/riscv/include/asm/switch_to.h
97
bool stale_mm = task->mm && task->mm->context.force_icache_flush;
arch/riscv/include/asm/sync_core.h
21
cpumask_setall(&mm->context.icache_stale_mask);
arch/riscv/include/asm/uaccess.h
19
u8 pmlen = mm->context.pmlen;
arch/riscv/kernel/compat_signal.c
221
current->mm->context.vdso, rt_sigreturn);
arch/riscv/kernel/process.c
248
set_bit(MM_CONTEXT_LOCK_PMLEN, &p->mm->context.flags);
arch/riscv/kernel/process.c
358
if (test_bit(MM_CONTEXT_LOCK_PMLEN, &mm->context.flags) && mm->context.pmlen != pmlen) {
arch/riscv/kernel/process.c
364
mm->context.pmlen = pmlen;
arch/riscv/kernel/process.c
395
if (task->mm->context.pmlen)
arch/riscv/kernel/signal.c
439
current->mm->context.vdso, rt_sigreturn);
arch/riscv/kernel/suspend.c
106
suspend_restore_csrs(&context);
arch/riscv/kernel/suspend.c
15
void suspend_save_csrs(struct suspend_context *context)
arch/riscv/kernel/suspend.c
18
context->envcfg = csr_read(CSR_ENVCFG);
arch/riscv/kernel/suspend.c
19
context->tvec = csr_read(CSR_TVEC);
arch/riscv/kernel/suspend.c
20
context->ie = csr_read(CSR_IE);
arch/riscv/kernel/suspend.c
34
context->stimecmp = csr_read(CSR_STIMECMP);
arch/riscv/kernel/suspend.c
36
context->stimecmph = csr_read(CSR_STIMECMPH);
arch/riscv/kernel/suspend.c
40
context->satp = csr_read(CSR_SATP);
arch/riscv/kernel/suspend.c
44
void suspend_restore_csrs(struct suspend_context *context)
arch/riscv/kernel/suspend.c
48
csr_write(CSR_ENVCFG, context->envcfg);
arch/riscv/kernel/suspend.c
49
csr_write(CSR_TVEC, context->tvec);
arch/riscv/kernel/suspend.c
50
csr_write(CSR_IE, context->ie);
arch/riscv/kernel/suspend.c
56
csr_write(CSR_STIMECMPH, context->stimecmph);
arch/riscv/kernel/suspend.c
58
csr_write(CSR_STIMECMP, context->stimecmp);
arch/riscv/kernel/suspend.c
61
csr_write(CSR_SATP, context->satp);
arch/riscv/kernel/suspend.c
68
unsigned long context))
arch/riscv/kernel/suspend.c
71
struct suspend_context context = { 0 };
arch/riscv/kernel/suspend.c
78
suspend_save_csrs(&context);
arch/riscv/kernel/suspend.c
88
if (__cpu_suspend_enter(&context)) {
arch/riscv/kernel/suspend.c
91
(ulong)&context);
arch/riscv/kernel/vdso.c
140
mm->context.vdso = (void *)vdso_base;
arch/riscv/kernel/vdso.c
153
mm->context.vdso = NULL;
arch/riscv/kernel/vdso.c
39
current->mm->context.vdso = (void *)new_vma->vm_start;
arch/riscv/mm/cacheflush.c
192
mask = ¤t->mm->context.icache_stale_mask;
arch/riscv/mm/cacheflush.c
253
current->mm->context.force_icache_flush = true;
arch/riscv/mm/cacheflush.c
266
current->mm->context.force_icache_flush = false;
arch/riscv/mm/cacheflush.c
65
mask = &mm->context.icache_stale_mask;
arch/riscv/mm/context.c
150
cntx = atomic_long_read(&mm->context.id);
arch/riscv/mm/context.c
178
cntx = atomic_long_read(&mm->context.id);
arch/riscv/mm/context.c
181
atomic_long_set(&mm->context.id, cntx);
arch/riscv/mm/context.c
302
if (cpumask_test_and_clear_cpu(cpu, &mm->context.icache_stale_mask)) {
arch/riscv/mm/context.c
97
unsigned long cntx = atomic_long_read(&mm->context.id);
arch/riscv/mm/tlbflush.c
115
return mm ? cntx2asid(atomic_long_read(&mm->context.id)) : FLUSH_TLB_NO_ASID;
arch/s390/boot/vmem.c
554
init_mm.context.asce = get_lowcore()->kernel_asce.val;
arch/s390/include/asm/elf.h
237
(unsigned long)current->mm->context.vdso_base); \
arch/s390/include/asm/mmu.h
29
.context.lock = __SPIN_LOCK_UNLOCKED(name.context.lock), \
arch/s390/include/asm/mmu.h
30
.context.gmap_list = LIST_HEAD_INIT(name.context.gmap_list),
arch/s390/include/asm/mmu_context.h
104
while (atomic_read(&mm->context.flush_count))
arch/s390/include/asm/mmu_context.h
25
spin_lock_init(&mm->context.lock);
arch/s390/include/asm/mmu_context.h
26
INIT_LIST_HEAD(&mm->context.gmap_list);
arch/s390/include/asm/mmu_context.h
27
cpumask_clear(&mm->context.cpu_attach_mask);
arch/s390/include/asm/mmu_context.h
28
atomic_set(&mm->context.flush_count, 0);
arch/s390/include/asm/mmu_context.h
29
atomic_set(&mm->context.protected_count, 0);
arch/s390/include/asm/mmu_context.h
30
mm->context.gmap_asce = 0;
arch/s390/include/asm/mmu_context.h
31
mm->context.flush_mm = 0;
arch/s390/include/asm/mmu_context.h
33
mm->context.allow_cow_sharing = 1;
arch/s390/include/asm/mmu_context.h
35
switch (mm->context.asce_limit) {
arch/s390/include/asm/mmu_context.h
41
VM_BUG_ON(mm->context.asce_limit);
arch/s390/include/asm/mmu_context.h
43
mm->context.asce_limit = _REGION2_SIZE;
arch/s390/include/asm/mmu_context.h
61
mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH |
arch/s390/include/asm/mmu_context.h
75
get_lowcore()->user_asce.val = next->context.asce;
arch/s390/include/asm/mmu_context.h
76
cpumask_set_cpu(cpu, &next->context.cpu_attach_mask);
arch/s390/include/asm/mmu_context.h
81
cpumask_clear_cpu(cpu, &prev->context.cpu_attach_mask);
arch/s390/include/asm/pgalloc.h
42
if (addr + len > mm->context.asce_limit &&
arch/s390/include/asm/pgtable.h
1464
return end <= current->mm->context.asce_limit;
arch/s390/include/asm/pgtable.h
531
return mm->context.asce_limit <= _REGION1_SIZE;
arch/s390/include/asm/pgtable.h
537
return mm->context.asce_limit <= _REGION2_SIZE;
arch/s390/include/asm/pgtable.h
543
return mm->context.asce_limit <= _REGION3_SIZE;
arch/s390/include/asm/pgtable.h
550
if (unlikely(atomic_read(&mm->context.protected_count)))
arch/s390/include/asm/pgtable.h
595
if (!mm->context.allow_cow_sharing)
arch/s390/include/asm/tlb.h
101
tlb->mm->context.flush_mm = 1;
arch/s390/include/asm/tlb.h
120
tlb->mm->context.flush_mm = 1;
arch/s390/include/asm/tlb.h
138
tlb->mm->context.flush_mm = 1;
arch/s390/include/asm/tlb.h
82
tlb->mm->context.flush_mm = 1;
arch/s390/include/asm/tlbflush.h
52
atomic_inc(&mm->context.flush_count);
arch/s390/include/asm/tlbflush.h
54
cpumask_copy(mm_cpumask(mm), &mm->context.cpu_attach_mask);
arch/s390/include/asm/tlbflush.h
56
gmap_asce = READ_ONCE(mm->context.gmap_asce);
arch/s390/include/asm/tlbflush.h
60
__tlb_flush_idte(mm->context.asce);
arch/s390/include/asm/tlbflush.h
65
atomic_dec(&mm->context.flush_count);
arch/s390/include/asm/tlbflush.h
71
__tlb_flush_idte(init_mm.context.asce);
arch/s390/include/asm/tlbflush.h
76
spin_lock(&mm->context.lock);
arch/s390/include/asm/tlbflush.h
77
if (mm->context.flush_mm) {
arch/s390/include/asm/tlbflush.h
78
mm->context.flush_mm = 0;
arch/s390/include/asm/tlbflush.h
81
spin_unlock(&mm->context.lock);
arch/s390/include/asm/vdso-symbols.h
7
#define VDSO_SYMBOL(tsk, name) ((tsk)->mm->context.vdso_base + (vdso_offset_##name))
arch/s390/kernel/smp.c
247
cpumask_set_cpu(cpu, &init_mm.context.cpu_attach_mask);
arch/s390/kernel/smp.c
927
cpumask_clear_cpu(cpu, &init_mm.context.cpu_attach_mask);
arch/s390/kernel/stacktrace.c
90
if (ip >= current->mm->context.asce_limit)
arch/s390/kernel/stacktrace.c
97
return in_range(ip, current->mm->context.vdso_base, vdso_text_size());
arch/s390/kernel/vdso.c
30
current->mm->context.vdso_base = vma->vm_start;
arch/s390/kernel/vdso.c
76
current->mm->context.vdso_base = vdso_text_start;
arch/s390/kvm/gaccess.c
1106
struct cmpxchg_key_context *context = f->priv;
arch/s390/kvm/gaccess.c
1108
context->exception = __cmpxchg_with_key(__va(PFN_PHYS(f->pfn) | context->offset),
arch/s390/kvm/gaccess.c
1109
context->old, context->new, context->len,
arch/s390/kvm/gaccess.c
1110
context->access_key);
arch/s390/kvm/gaccess.c
1140
struct cmpxchg_key_context context = {
arch/s390/kvm/gaccess.c
1149
.priv = &context,
arch/s390/kvm/gaccess.c
1163
*success = !context.exception;
arch/s390/kvm/gaccess.c
1164
if (context.exception == 1)
arch/s390/kvm/gaccess.c
1166
return context.exception;
arch/s390/kvm/gaccess.c
868
struct acc_page_key_context *context = f->priv;
arch/s390/kvm/gaccess.c
872
ptr = __va(PFN_PHYS(f->pfn) | context->offset);
arch/s390/kvm/gaccess.c
874
if (context->store)
arch/s390/kvm/gaccess.c
875
r = mvcos_key(ptr, context->data, context->len, context->access_key, 0);
arch/s390/kvm/gaccess.c
877
r = mvcos_key(context->data, ptr, context->len, 0, context->access_key);
arch/s390/kvm/gaccess.c
879
context->exception = r;
arch/s390/kvm/gaccess.c
885
struct acc_page_key_context context = {
arch/s390/kvm/gaccess.c
894
.priv = &context,
arch/s390/kvm/gaccess.c
900
if (KVM_BUG_ON((len + context.offset) > PAGE_SIZE, kvm))
arch/s390/kvm/gaccess.c
906
return context.exception;
arch/s390/kvm/gmap.c
1161
struct gmap_protect_asce_top_level *context)
arch/s390/kvm/gmap.c
1168
if (kvm_s390_array_needs_retry_safe(sg->kvm, context->seq, context->f))
arch/s390/kvm/gmap.c
1178
if (!context->f[i].valid)
arch/s390/kvm/gmap.c
1180
rc = gmap_protect_rmap(mc, sg, context->f[i].gfn, 0, context->f[i].pfn,
arch/s390/kvm/gmap.c
1181
TABLE_TYPE_REGION1 + 1, context->f[i].writable);
arch/s390/kvm/gmap.c
1188
kvm_s390_release_faultin_array(sg->kvm, context->f, false);
arch/s390/kvm/gmap.c
1193
struct gmap_protect_asce_top_level *context)
arch/s390/kvm/gmap.c
1197
if (kvm_s390_array_needs_retry_unsafe(sg->kvm, context->seq, context->f))
arch/s390/kvm/gmap.c
1206
rc = __gmap_protect_asce_top_level(mc, sg, context);
arch/s390/kvm/gmap.c
1215
struct gmap_protect_asce_top_level context = {};
arch/s390/kvm/gmap.c
1221
context.seq = sg->kvm->mmu_invalidate_seq;
arch/s390/kvm/gmap.c
1225
rc = kvm_s390_get_guest_pages(sg->kvm, context.f, asce.rsto, asce.dt + 1, false);
arch/s390/kvm/gmap.c
1229
rc = _gmap_protect_asce_top_level(mc, sg, &context);
arch/s390/kvm/gmap.c
1231
kvm_s390_release_faultin_array(sg->kvm, context.f, true);
arch/s390/kvm/pv.c
400
atomic_dec(&kvm->mm->context.protected_count);
arch/s390/kvm/pv.c
524
atomic_dec(&kvm->mm->context.protected_count);
arch/s390/kvm/pv.c
563
if (!atomic_inc_not_zero(&kvm->mm->context.protected_count))
arch/s390/kvm/pv.c
610
atomic_dec(&kvm->mm->context.protected_count);
arch/s390/kvm/pv.c
70
return atomic_read(&mm->context.protected_count) > 1;
arch/s390/kvm/pv.c
736
atomic_inc(&kvm->mm->context.protected_count);
arch/s390/kvm/pv.c
741
atomic_dec(&kvm->mm->context.protected_count);
arch/s390/mm/gmap_helpers.c
286
if (!mm->context.allow_cow_sharing)
arch/s390/mm/gmap_helpers.c
289
mm->context.allow_cow_sharing = 0;
arch/s390/mm/gmap_helpers.c
301
mm->context.allow_cow_sharing = 1;
arch/s390/mm/init.c
158
cpumask_set_cpu(0, &init_mm.context.cpu_attach_mask);
arch/s390/mm/pgalloc.c
100
mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH |
arch/s390/mm/pgalloc.c
47
asce.val = mm->context.asce;
arch/s390/mm/pgalloc.c
59
unsigned long asce_limit = mm->context.asce_limit;
arch/s390/mm/pgalloc.c
90
mm->context.asce_limit = _REGION1_SIZE;
arch/s390/mm/pgalloc.c
91
mm->context.asce = __pa(mm->pgd) | _ASCE_TABLE_LENGTH |
arch/s390/mm/pgalloc.c
99
mm->context.asce_limit = TASK_SIZE_MAX;
arch/s390/mm/pgtable.c
107
atomic_inc(&mm->context.flush_count);
arch/s390/mm/pgtable.c
108
if (cpumask_equal(&mm->context.cpu_attach_mask,
arch/s390/mm/pgtable.c
111
mm->context.flush_mm = 1;
arch/s390/mm/pgtable.c
114
atomic_dec(&mm->context.flush_count);
arch/s390/mm/pgtable.c
139
atomic_inc(&mm->context.flush_count);
arch/s390/mm/pgtable.c
151
atomic_dec(&mm->context.flush_count);
arch/s390/mm/pgtable.c
185
__pmdp_idte(addr, pmdp, IDTE_NODAT | IDTE_GUEST_ASCE, mm->context.asce, IDTE_LOCAL);
arch/s390/mm/pgtable.c
195
mm->context.asce, IDTE_GLOBAL);
arch/s390/mm/pgtable.c
209
atomic_inc(&mm->context.flush_count);
arch/s390/mm/pgtable.c
215
atomic_dec(&mm->context.flush_count);
arch/s390/mm/pgtable.c
227
atomic_inc(&mm->context.flush_count);
arch/s390/mm/pgtable.c
228
if (cpumask_equal(&mm->context.cpu_attach_mask,
arch/s390/mm/pgtable.c
231
mm->context.flush_mm = 1;
arch/s390/mm/pgtable.c
235
atomic_dec(&mm->context.flush_count);
arch/s390/mm/pgtable.c
270
mm->context.asce, IDTE_LOCAL);
arch/s390/mm/pgtable.c
280
mm->context.asce, IDTE_GLOBAL);
arch/s390/mm/pgtable.c
293
atomic_inc(&mm->context.flush_count);
arch/s390/mm/pgtable.c
299
atomic_dec(&mm->context.flush_count);
arch/s390/mm/pgtable.c
46
asce = READ_ONCE(mm->context.gmap_asce);
arch/s390/mm/pgtable.c
50
asce = asce ? : mm->context.asce;
arch/s390/mm/pgtable.c
66
asce = READ_ONCE(mm->context.gmap_asce);
arch/s390/mm/pgtable.c
70
asce = asce ? : mm->context.asce;
arch/s390/mm/pgtable.c
88
atomic_inc(&mm->context.flush_count);
arch/s390/mm/pgtable.c
94
atomic_dec(&mm->context.flush_count);
arch/sh/include/asm/elf.h
176
#define VDSO_BASE ((unsigned long)current->mm->context.vdso)
arch/sh/include/asm/mmu_context.h
40
#define cpu_context(cpu, mm) ((mm)->context.id[cpu])
arch/sh/kernel/dumpstack.c
105
context, &graph);
arch/sh/kernel/dumpstack.c
92
struct thread_info *context;
arch/sh/kernel/dumpstack.c
95
context = (struct thread_info *)
arch/sh/kernel/signal_32.c
291
} else if (likely(current->mm->context.vdso)) {
arch/sh/kernel/signal_32.c
361
} else if (likely(current->mm->context.vdso)) {
arch/sh/kernel/vsyscall/vsyscall.c
108
current->mm->context.vdso = (void *)addr;
arch/sh/kernel/vsyscall/vsyscall.c
118
if (vma->vm_mm && vma->vm_start == (long)vma->vm_mm->context.vdso)
arch/sparc/include/asm/elf_64.h
222
(unsigned long)current->mm->context.vdso); \
arch/sparc/include/asm/mman.h
36
if (!current->mm->context.adi) {
arch/sparc/include/asm/mman.h
39
current->mm->context.adi = true;
arch/sparc/include/asm/mmu_context_64.h
124
tsb_context_switch_ctx(mm, CTX_HWBITS(mm->context));
arch/sparc/include/asm/mmu_context_64.h
132
__flush_tlb_mm(CTX_HWBITS(mm->context),
arch/sparc/include/asm/mmu_context_64.h
135
spin_unlock_irqrestore(&mm->context.lock, flags);
arch/sparc/include/asm/mmu_context_64.h
179
if (current && current->mm && current->mm->context.adi) {
arch/sparc/include/asm/mmu_context_64.h
41
&mm->context.tsb_block[MM_TSB_BASE],
arch/sparc/include/asm/mmu_context_64.h
43
(mm->context.tsb_block[MM_TSB_HUGE].tsb ?
arch/sparc/include/asm/mmu_context_64.h
44
&mm->context.tsb_block[MM_TSB_HUGE] :
arch/sparc/include/asm/mmu_context_64.h
49
, __pa(&mm->context.tsb_descr[MM_TSB_BASE]),
arch/sparc/include/asm/mmu_context_64.h
74
: "r" (CTX_HWBITS((__mm)->context)), \
arch/sparc/include/asm/mmu_context_64.h
89
spin_lock_irqsave(&mm->context.lock, flags);
arch/sparc/include/asm/mmu_context_64.h
90
ctx_valid = CTX_VALID(mm->context);
arch/sparc/include/asm/oplib_32.h
107
int context, char *program_counter);
arch/sparc/include/asm/pgtsrmmu.h
114
void srmmu_set_context(int context);
arch/sparc/include/asm/tlb_64.h
19
#define do_flush_tlb_mm(mm) __flush_tlb_mm(CTX_HWBITS(mm->context), SECONDARY_CONTEXT)
arch/sparc/include/asm/tlbflush_64.h
48
void __flush_tlb_page(unsigned long context, unsigned long vaddr);
arch/sparc/include/asm/tlbflush_64.h
55
__flush_tlb_page(CTX_HWBITS(mm->context), vaddr);
arch/sparc/kernel/adi_64.c
135
if (mm->context.tag_store) {
arch/sparc/kernel/adi_64.c
136
tag_desc = mm->context.tag_store;
arch/sparc/kernel/adi_64.c
137
spin_lock_irqsave(&mm->context.tag_lock, flags);
arch/sparc/kernel/adi_64.c
144
spin_unlock_irqrestore(&mm->context.tag_lock, flags);
arch/sparc/kernel/adi_64.c
174
spin_lock_irqsave(&mm->context.tag_lock, flags);
arch/sparc/kernel/adi_64.c
175
if (mm->context.tag_store) {
arch/sparc/kernel/adi_64.c
176
tag_desc = mm->context.tag_store;
arch/sparc/kernel/adi_64.c
205
mm->context.tag_store = kzalloc(size, GFP_NOWAIT);
arch/sparc/kernel/adi_64.c
206
if (mm->context.tag_store == NULL) {
arch/sparc/kernel/adi_64.c
210
tag_desc = mm->context.tag_store;
arch/sparc/kernel/adi_64.c
213
open_desc = mm->context.tag_store;
arch/sparc/kernel/adi_64.c
295
spin_unlock_irqrestore(&mm->context.tag_lock, flags);
arch/sparc/kernel/adi_64.c
304
spin_lock_irqsave(&mm->context.tag_lock, flags);
arch/sparc/kernel/adi_64.c
312
if (tag_desc != mm->context.tag_store) {
arch/sparc/kernel/adi_64.c
317
spin_unlock_irqrestore(&mm->context.tag_lock, flags);
arch/sparc/kernel/asm-offsets.c
54
DEFINE(AOFF_mm_context, offsetof(struct mm_struct, context));
arch/sparc/kernel/entry.h
165
unsigned long context);
arch/sparc/kernel/smp_64.c
1063
u32 ctx = CTX_HWBITS(mm->context);
arch/sparc/kernel/smp_64.c
1091
u32 ctx = CTX_HWBITS(mm->context);
arch/sparc/kernel/smp_64.c
1110
unsigned long context = CTX_HWBITS(mm->context);
arch/sparc/kernel/smp_64.c
1115
context, vaddr, 0,
arch/sparc/kernel/smp_64.c
1118
__flush_tlb_page(context, vaddr);
arch/sparc/kernel/traps_64.c
2658
unsigned long context)
arch/sparc/kernel/traps_64.c
2686
__func__, addr, context);
arch/sparc/kernel/unaligned_32.c
228
(current->mm ? current->mm->context :
arch/sparc/kernel/unaligned_32.c
229
current->active_mm->context));
arch/sparc/kernel/unaligned_64.c
279
(current->mm ? CTX_HWBITS(current->mm->context) :
arch/sparc/kernel/unaligned_64.c
280
CTX_HWBITS(current->active_mm->context)));
arch/sparc/mm/fault_32.c
51
(tsk->mm ? tsk->mm->context : tsk->active_mm->context));
arch/sparc/mm/fault_64.c
468
mm_rss -= (mm->context.thp_pte_count * (HPAGE_SIZE / PAGE_SIZE));
arch/sparc/mm/fault_64.c
471
mm->context.tsb_block[MM_TSB_BASE].tsb_rss_limit))
arch/sparc/mm/fault_64.c
474
mm_rss = mm->context.hugetlb_pte_count + mm->context.thp_pte_count;
arch/sparc/mm/fault_64.c
477
mm->context.tsb_block[MM_TSB_HUGE].tsb_rss_limit)) {
arch/sparc/mm/fault_64.c
478
if (mm->context.tsb_block[MM_TSB_HUGE].tsb)
arch/sparc/mm/fault_64.c
53
CTX_HWBITS(tsk->mm->context) :
arch/sparc/mm/fault_64.c
54
CTX_HWBITS(tsk->active_mm->context)));
arch/sparc/mm/hugetlbpage.c
260
mm->context.hugetlb_pte_count += nptes;
arch/sparc/mm/hugetlbpage.c
304
mm->context.hugetlb_pte_count -= nptes;
arch/sparc/mm/init_64.c
2955
spin_lock_irqsave(&mm->context.lock, flags);
arch/sparc/mm/init_64.c
2957
if (mm->context.tsb_block[MM_TSB_HUGE].tsb != NULL)
arch/sparc/mm/init_64.c
2961
spin_unlock_irqrestore(&mm->context.lock, flags);
arch/sparc/mm/init_64.c
2992
tp = &mm->context.tsb_block[MM_TSB_HUGE];
arch/sparc/mm/init_64.c
3007
ctx = mm->context.sparc64_ctx_val;
arch/sparc/mm/init_64.c
3012
if (ctx != mm->context.sparc64_ctx_val) {
arch/sparc/mm/init_64.c
3024
mm->context.sparc64_ctx_val = ctx;
arch/sparc/mm/init_64.c
321
struct tsb *tsb = mm->context.tsb_block[tsb_index].tsb;
arch/sparc/mm/init_64.c
328
(mm->context.tsb_block[tsb_index].tsb_nentries - 1UL));
arch/sparc/mm/init_64.c
413
spin_lock_irqsave(&mm->context.lock, flags);
arch/sparc/mm/init_64.c
417
if (mm->context.hugetlb_pte_count || mm->context.thp_pte_count) {
arch/sparc/mm/init_64.c
454
spin_unlock_irqrestore(&mm->context.lock, flags);
arch/sparc/mm/init_64.c
798
old_ctx = mm->context.sparc64_ctx_val;
arch/sparc/mm/init_64.c
802
mm->context.sparc64_ctx_val = new_ctx;
arch/sparc/mm/init_64.c
825
if (unlikely(CTX_VALID(mm->context)))
arch/sparc/mm/init_64.c
827
orig_pgsz_bits = (mm->context.sparc64_ctx_val & CTX_PGSZ_MASK);
arch/sparc/mm/init_64.c
837
if (mm->context.sparc64_ctx_val)
arch/sparc/mm/init_64.c
842
mm->context.sparc64_ctx_val = new_ctx | orig_pgsz_bits;
arch/sparc/mm/srmmu.c
1000
srmmu_ctxd_set(&srmmu_context_table[mm->context], srmmu_swapper_pg_dir);
arch/sparc/mm/srmmu.c
1003
free_context(mm->context);
arch/sparc/mm/srmmu.c
1005
mm->context = NO_CONTEXT;
arch/sparc/mm/srmmu.c
1652
if (mm->context != NO_CONTEXT) {
arch/sparc/mm/srmmu.c
1661
if (mm->context != NO_CONTEXT) {
arch/sparc/mm/srmmu.c
1678
if (mm->context != NO_CONTEXT) {
arch/sparc/mm/srmmu.c
1692
if (mm->context != NO_CONTEXT) {
arch/sparc/mm/srmmu.c
1704
if (mm->context != NO_CONTEXT) {
arch/sparc/mm/srmmu.c
1715
if (mm->context != NO_CONTEXT) {
arch/sparc/mm/srmmu.c
416
mm->context = ctxp->ctx_number;
arch/sparc/mm/srmmu.c
429
ctxp->ctx_mm->context = NO_CONTEXT;
arch/sparc/mm/srmmu.c
431
mm->context = ctxp->ctx_number;
arch/sparc/mm/srmmu.c
434
static inline void free_context(int context)
arch/sparc/mm/srmmu.c
438
ctx_old = ctx_list_pool + context;
arch/sparc/mm/srmmu.c
469
if (mm->context == NO_CONTEXT) {
arch/sparc/mm/srmmu.c
473
srmmu_ctxd_set(&srmmu_context_table[mm->context], mm->pgd);
arch/sparc/mm/srmmu.c
482
srmmu_set_context(mm->context);
arch/sparc/mm/srmmu.c
590
if ((ctx1 = vma->vm_mm->context) != -1) {
arch/sparc/mm/srmmu.c
75
#define FLUSH_BEGIN(mm) if ((mm)->context != NO_CONTEXT) {
arch/sparc/mm/srmmu.c
906
init_mm.context = (unsigned long) NO_CONTEXT;
arch/sparc/mm/srmmu.c
990
mm->context = NO_CONTEXT;
arch/sparc/mm/srmmu.c
998
if (mm->context != NO_CONTEXT) {
arch/sparc/mm/tlb.c
196
mm->context.hugetlb_pte_count++;
arch/sparc/mm/tlb.c
198
mm->context.thp_pte_count++;
arch/sparc/mm/tlb.c
201
mm->context.hugetlb_pte_count--;
arch/sparc/mm/tlb.c
203
mm->context.thp_pte_count--;
arch/sparc/mm/tlb.c
273
(vma->vm_mm)->context.thp_pte_count--;
arch/sparc/mm/tlb.c
35
if (CTX_VALID(mm->context)) {
arch/sparc/mm/tlb.c
43
__flush_tlb_pending(CTX_HWBITS(tb->mm->context),
arch/sparc/mm/tsb.c
124
spin_lock_irqsave(&mm->context.lock, flags);
arch/sparc/mm/tsb.c
127
base = (unsigned long) mm->context.tsb_block[MM_TSB_BASE].tsb;
arch/sparc/mm/tsb.c
128
nentries = mm->context.tsb_block[MM_TSB_BASE].tsb_nentries;
arch/sparc/mm/tsb.c
140
else if (mm->context.tsb_block[MM_TSB_HUGE].tsb) {
arch/sparc/mm/tsb.c
141
base = (unsigned long) mm->context.tsb_block[MM_TSB_HUGE].tsb;
arch/sparc/mm/tsb.c
142
nentries = mm->context.tsb_block[MM_TSB_HUGE].tsb_nentries;
arch/sparc/mm/tsb.c
149
spin_unlock_irqrestore(&mm->context.lock, flags);
arch/sparc/mm/tsb.c
157
spin_lock_irqsave(&mm->context.lock, flags);
arch/sparc/mm/tsb.c
160
base = (unsigned long) mm->context.tsb_block[MM_TSB_BASE].tsb;
arch/sparc/mm/tsb.c
161
nentries = mm->context.tsb_block[MM_TSB_BASE].tsb_nentries;
arch/sparc/mm/tsb.c
174
else if (mm->context.tsb_block[MM_TSB_HUGE].tsb) {
arch/sparc/mm/tsb.c
175
base = (unsigned long) mm->context.tsb_block[MM_TSB_HUGE].tsb;
arch/sparc/mm/tsb.c
176
nentries = mm->context.tsb_block[MM_TSB_HUGE].tsb_nentries;
arch/sparc/mm/tsb.c
183
spin_unlock_irqrestore(&mm->context.lock, flags);
arch/sparc/mm/tsb.c
199
mm->context.tsb_block[tsb_idx].tsb_nentries =
arch/sparc/mm/tsb.c
216
tsb_paddr = __pa(mm->context.tsb_block[tsb_idx].tsb);
arch/sparc/mm/tsb.c
277
mm->context.tsb_block[tsb_idx].tsb_reg_val = tsb_reg;
arch/sparc/mm/tsb.c
278
mm->context.tsb_block[tsb_idx].tsb_map_vaddr = 0;
arch/sparc/mm/tsb.c
279
mm->context.tsb_block[tsb_idx].tsb_map_pte = 0;
arch/sparc/mm/tsb.c
285
mm->context.tsb_block[tsb_idx].tsb_reg_val = tsb_reg;
arch/sparc/mm/tsb.c
286
mm->context.tsb_block[tsb_idx].tsb_map_vaddr = base;
arch/sparc/mm/tsb.c
287
mm->context.tsb_block[tsb_idx].tsb_map_pte = tte;
arch/sparc/mm/tsb.c
292
struct hv_tsb_descr *hp = &mm->context.tsb_descr[tsb_idx];
arch/sparc/mm/tsb.c
432
if (mm->context.tsb_block[tsb_index].tsb == NULL &&
arch/sparc/mm/tsb.c
443
if (mm->context.tsb_block[tsb_index].tsb != NULL)
arch/sparc/mm/tsb.c
444
mm->context.tsb_block[tsb_index].tsb_rss_limit = ~0UL;
arch/sparc/mm/tsb.c
473
spin_lock_irqsave(&mm->context.lock, flags);
arch/sparc/mm/tsb.c
475
old_tsb = mm->context.tsb_block[tsb_index].tsb;
arch/sparc/mm/tsb.c
477
(mm->context.tsb_block[tsb_index].tsb_reg_val & 0x7UL);
arch/sparc/mm/tsb.c
478
old_size = (mm->context.tsb_block[tsb_index].tsb_nentries *
arch/sparc/mm/tsb.c
487
(rss < mm->context.tsb_block[tsb_index].tsb_rss_limit))) {
arch/sparc/mm/tsb.c
488
spin_unlock_irqrestore(&mm->context.lock, flags);
arch/sparc/mm/tsb.c
494
mm->context.tsb_block[tsb_index].tsb_rss_limit = new_rss_limit;
arch/sparc/mm/tsb.c
514
mm->context.tsb_block[tsb_index].tsb = new_tsb;
arch/sparc/mm/tsb.c
517
spin_unlock_irqrestore(&mm->context.lock, flags);
arch/sparc/mm/tsb.c
545
spin_lock_init(&mm->context.lock);
arch/sparc/mm/tsb.c
547
mm->context.sparc64_ctx_val = 0UL;
arch/sparc/mm/tsb.c
549
mm->context.tag_store = NULL;
arch/sparc/mm/tsb.c
550
spin_lock_init(&mm->context.tag_lock);
arch/sparc/mm/tsb.c
557
saved_hugetlb_pte_count = mm->context.hugetlb_pte_count;
arch/sparc/mm/tsb.c
558
saved_thp_pte_count = mm->context.thp_pte_count;
arch/sparc/mm/tsb.c
559
mm->context.hugetlb_pte_count = 0;
arch/sparc/mm/tsb.c
560
mm->context.thp_pte_count = 0;
arch/sparc/mm/tsb.c
570
mm->context.tsb_block[i].tsb = NULL;
arch/sparc/mm/tsb.c
584
if (unlikely(!mm->context.tsb_block[MM_TSB_BASE].tsb))
arch/sparc/mm/tsb.c
607
tsb_destroy_one(&mm->context.tsb_block[i]);
arch/sparc/mm/tsb.c
611
if (CTX_VALID(mm->context)) {
arch/sparc/mm/tsb.c
612
unsigned long nr = CTX_NRBITS(mm->context);
arch/sparc/mm/tsb.c
619
if (mm->context.tag_store) {
arch/sparc/mm/tsb.c
624
tag_desc = mm->context.tag_store;
arch/sparc/mm/tsb.c
632
kfree(mm->context.tag_store);
arch/sparc/mm/tsb.c
633
mm->context.tag_store = NULL;
arch/sparc/power/hibernate.c
40
tsb_context_switch_ctx(mm, CTX_HWBITS(mm->context));
arch/sparc/vdso/vma.c
394
current->mm->context.vdso = (void __user *)text_start;
arch/sparc/vdso/vma.c
424
current->mm->context.vdso = NULL;
arch/um/drivers/mconsole.h
52
enum mc_context context;
arch/um/drivers/mconsole_kern.c
87
if (req.cmd->context == MCONSOLE_INTR)
arch/um/include/asm/mmu.h
27
.context = { \
arch/um/include/asm/mmu.h
28
.turnstile = __MUTEX_INITIALIZER(mm.context.turnstile), \
arch/um/include/asm/mmu.h
29
.sync_tlb_lock = __SPIN_LOCK_INITIALIZER(mm.context.sync_tlb_lock), \
arch/um/include/asm/pgtable.h
230
guard(spinlock_irqsave)(&mm->context.sync_tlb_lock);
arch/um/include/asm/pgtable.h
232
if (!mm->context.sync_tlb_range_to) {
arch/um/include/asm/pgtable.h
233
mm->context.sync_tlb_range_from = start;
arch/um/include/asm/pgtable.h
234
mm->context.sync_tlb_range_to = end;
arch/um/include/asm/pgtable.h
236
if (start < mm->context.sync_tlb_range_from)
arch/um/include/asm/pgtable.h
237
mm->context.sync_tlb_range_from = start;
arch/um/include/asm/pgtable.h
238
if (end > mm->context.sync_tlb_range_to)
arch/um/include/asm/pgtable.h
239
mm->context.sync_tlb_range_to = end;
arch/um/kernel/reboot.c
32
pid = t->mm->context.id.pid;
arch/um/kernel/skas/mmu.c
45
struct mm_id *new_id = &mm->context.id;
arch/um/kernel/skas/mmu.c
49
mutex_init(&mm->context.turnstile);
arch/um/kernel/skas/mmu.c
50
spin_lock_init(&mm->context.sync_tlb_lock);
arch/um/kernel/skas/mmu.c
62
list_add(&mm->context.list, &mm_list);
arch/um/kernel/skas/mmu.c
82
struct mm_context *mmu = &mm->context;
arch/um/kernel/skas/mmu.c
99
list_del(&mm->context.list);
arch/um/kernel/skas/process.c
50
return current->mm->context.id.stack;
arch/um/kernel/skas/process.c
58
return ¤t->mm->context.id;
arch/um/kernel/smp.c
69
os_alarm_process(current->mm->context.id.pid);
arch/um/kernel/time.c
876
os_alarm_process(get_current()->mm->context.id.pid);
arch/um/kernel/tlb.c
168
guard(spinlock_irqsave)(&mm->context.sync_tlb_lock);
arch/um/kernel/tlb.c
170
if (mm->context.sync_tlb_range_to == 0)
arch/um/kernel/tlb.c
173
ops.mm_idp = &mm->context.id;
arch/um/kernel/tlb.c
182
addr = mm->context.sync_tlb_range_from;
arch/um/kernel/tlb.c
185
next = pgd_addr_end(addr, mm->context.sync_tlb_range_to);
arch/um/kernel/tlb.c
195
((addr < mm->context.sync_tlb_range_to) && !ret));
arch/um/kernel/tlb.c
200
mm->context.sync_tlb_range_from = 0;
arch/um/kernel/tlb.c
201
mm->context.sync_tlb_range_to = 0;
arch/x86/boot/compressed/ident_map_64.c
120
mapping_info.context = &pgt_data;
arch/x86/boot/compressed/ident_map_64.c
201
pte = (pte_t *)info->alloc_pgt_page(info->context);
arch/x86/boot/compressed/ident_map_64.c
49
static void *alloc_pgt_page(void *context)
arch/x86/boot/compressed/ident_map_64.c
51
struct alloc_pgt_data *pages = (struct alloc_pgt_data *)context;
arch/x86/entry/syscall_32.c
321
unsigned long landing_pad = (unsigned long)current->mm->context.vdso +
arch/x86/entry/vdso/extable.c
15
const struct vdso_image *image = current->mm->context.vdso_image;
arch/x86/entry/vdso/extable.c
28
if (!current->mm->context.vdso)
arch/x86/entry/vdso/extable.c
31
base = (unsigned long)current->mm->context.vdso + image->extable_base;
arch/x86/entry/vdso/vma.c
189
current->mm->context.vdso = (void __user *)text_start;
arch/x86/entry/vdso/vma.c
190
current->mm->context.vdso_image = image;
arch/x86/entry/vdso/vma.c
263
const struct vdso_image *image = current->mm->context.vdso_image;
arch/x86/entry/vdso/vma.c
264
unsigned long vdso = (unsigned long) current->mm->context.vdso;
arch/x86/entry/vdso/vma.c
55
const struct vdso_image *image = vma->vm_mm->context.vdso_image;
arch/x86/entry/vdso/vma.c
70
(unsigned long)current->mm->context.vdso;
arch/x86/entry/vdso/vma.c
79
const struct vdso_image *image = current->mm->context.vdso_image;
arch/x86/entry/vdso/vma.c
82
current->mm->context.vdso = (void __user *)new_vma->vm_start;
arch/x86/entry/vsyscall/vsyscall_64.c
306
if (!mm || !test_bit(MM_CONTEXT_HAS_VSYSCALL, &mm->context.flags))
arch/x86/events/core.c
2589
if (atomic_inc_return(&mm->context.perf_rdpmc_allowed) == 1)
arch/x86/events/core.c
2598
if (atomic_dec_and_test(&mm->context.perf_rdpmc_allowed))
arch/x86/events/core.c
2901
ldt = smp_load_acquire(¤t->mm->context.ldt);
arch/x86/include/asm/elf.h
335
(unsigned long __force)current->mm->context.vdso); \
arch/x86/include/asm/elf.h
344
(unsigned long __force)current->mm->context.vdso); \
arch/x86/include/asm/elf.h
360
#define VDSO_CURRENT_BASE ((unsigned long)current->mm->context.vdso)
arch/x86/include/asm/elf.h
363
((unsigned long)current->mm->context.vdso + \
arch/x86/include/asm/init.h
8
void *context; /* context for alloc_pgt_page */
arch/x86/include/asm/mmu.h
87
.context = { \
arch/x86/include/asm/mmu.h
89
.lock = __MUTEX_INITIALIZER(mm.context.lock), \
arch/x86/include/asm/mmu_context.h
102
mm->context.lam_cr3_mask = oldmm->context.lam_cr3_mask;
arch/x86/include/asm/mmu_context.h
103
mm->context.untag_mask = oldmm->context.untag_mask;
arch/x86/include/asm/mmu_context.h
109
return mm->context.untag_mask;
arch/x86/include/asm/mmu_context.h
114
mm->context.untag_mask = -1UL;
arch/x86/include/asm/mmu_context.h
121
test_bit(MM_CONTEXT_FORCE_TAGGED_SVA, &mm->context.flags);
arch/x86/include/asm/mmu_context.h
153
mutex_init(&mm->context.lock);
arch/x86/include/asm/mmu_context.h
155
mm->context.ctx_id = atomic64_inc_return(&last_mm_ctx_id);
arch/x86/include/asm/mmu_context.h
156
atomic64_set(&mm->context.tlb_gen, 0);
arch/x86/include/asm/mmu_context.h
157
mm->context.next_trim_cpumask = jiffies + HZ;
arch/x86/include/asm/mmu_context.h
162
mm->context.pkey_allocation_map = 0x1;
arch/x86/include/asm/mmu_context.h
164
mm->context.execute_only_pkey = -1;
arch/x86/include/asm/mmu_context.h
216
mm->context.pkey_allocation_map = oldmm->context.pkey_allocation_map;
arch/x86/include/asm/mmu_context.h
217
mm->context.execute_only_pkey = oldmm->context.execute_only_pkey;
arch/x86/include/asm/mmu_context.h
239
!test_bit(MM_CONTEXT_UPROBE_IA32, &mm->context.flags);
arch/x86/include/asm/mmu_context.h
250
return test_bit(MM_CONTEXT_NOTRACK, &mm->context.flags);
arch/x86/include/asm/mmu_context.h
255
set_bit(MM_CONTEXT_NOTRACK, &mm->context.flags);
arch/x86/include/asm/mmu_context.h
57
mm->context.ldt = NULL;
arch/x86/include/asm/mmu_context.h
58
init_rwsem(&mm->context.ldt_usr_sem);
arch/x86/include/asm/mmu_context.h
97
return READ_ONCE(mm->context.lam_cr3_mask);
arch/x86/include/asm/pkeys.h
46
#define mm_pkey_allocation_map(mm) (mm->context.pkey_allocation_map)
arch/x86/include/asm/pkeys.h
71
if (pkey == mm->context.execute_only_pkey)
arch/x86/include/asm/tlbflush.h
256
asid = smp_load_acquire(&mm->context.global_asid);
arch/x86/include/asm/tlbflush.h
267
mm->context.global_asid = 0;
arch/x86/include/asm/tlbflush.h
268
mm->context.asid_transition = false;
arch/x86/include/asm/tlbflush.h
279
mm->context.asid_transition = true;
arch/x86/include/asm/tlbflush.h
280
smp_store_release(&mm->context.global_asid, asid);
arch/x86/include/asm/tlbflush.h
285
WRITE_ONCE(mm->context.asid_transition, false);
arch/x86/include/asm/tlbflush.h
293
return mm && READ_ONCE(mm->context.asid_transition);
arch/x86/include/asm/tlbflush.h
346
return atomic64_inc_return(&mm->context.tlb_gen);
arch/x86/include/asm/uaccess_64.h
46
return addr & (mm)->context.untag_mask;
arch/x86/kernel/cpu/mce/inject.c
208
int context = MCJ_CTX(m->inject_flags);
arch/x86/kernel/cpu/mce/inject.c
214
switch (context) {
arch/x86/kernel/cpu/mce/inject.c
242
int context = MCJ_CTX(m->inject_flags);
arch/x86/kernel/cpu/mce/inject.c
246
if (context == MCJ_CTX_RANDOM)
arch/x86/kernel/cpu/mce/severity.c
383
enum context ctx = error_context(m, regs);
arch/x86/kernel/cpu/mce/severity.c
395
if (s->context && ctx != s->context)
arch/x86/kernel/cpu/mce/severity.c
45
unsigned char context;
arch/x86/kernel/cpu/mce/severity.c
56
#define KERNEL .context = IN_KERNEL
arch/x86/kernel/cpu/mce/severity.c
57
#define USER .context = IN_USER
arch/x86/kernel/cpu/mce/severity.c
58
#define KERNEL_RECOV .context = IN_KERNEL_RECOV
arch/x86/kernel/ldt.c
109
if (unlikely((unsigned long)prev->context.ldt |
arch/x86/kernel/ldt.c
110
(unsigned long)next->context.ldt))
arch/x86/kernel/ldt.c
193
if (mm->context.ldt) {
arch/x86/kernel/ldt.c
243
if (boot_cpu_has(X86_FEATURE_PTI) && !mm->context.ldt)
arch/x86/kernel/ldt.c
268
if (boot_cpu_has(X86_FEATURE_PTI) && !mm->context.ldt)
arch/x86/kernel/ldt.c
423
mutex_lock(&mm->context.lock);
arch/x86/kernel/ldt.c
426
smp_store_release(&mm->context.ldt, ldt);
arch/x86/kernel/ldt.c
431
mutex_unlock(&mm->context.lock);
arch/x86/kernel/ldt.c
459
mutex_lock(&old_mm->context.lock);
arch/x86/kernel/ldt.c
460
if (!old_mm->context.ldt)
arch/x86/kernel/ldt.c
463
new_ldt = alloc_ldt_struct(old_mm->context.ldt->nr_entries);
arch/x86/kernel/ldt.c
469
memcpy(new_ldt->entries, old_mm->context.ldt->entries,
arch/x86/kernel/ldt.c
47
ldt = READ_ONCE(mm->context.ldt);
arch/x86/kernel/ldt.c
479
mm->context.ldt = new_ldt;
arch/x86/kernel/ldt.c
482
mutex_unlock(&old_mm->context.lock);
arch/x86/kernel/ldt.c
493
free_ldt_struct(mm->context.ldt);
arch/x86/kernel/ldt.c
494
mm->context.ldt = NULL;
arch/x86/kernel/ldt.c
508
down_read(&mm->context.ldt_usr_sem);
arch/x86/kernel/ldt.c
510
if (!mm->context.ldt) {
arch/x86/kernel/ldt.c
518
entries_size = mm->context.ldt->nr_entries * LDT_ENTRY_SIZE;
arch/x86/kernel/ldt.c
522
if (copy_to_user(ptr, mm->context.ldt->entries, entries_size)) {
arch/x86/kernel/ldt.c
537
up_read(&mm->context.ldt_usr_sem);
arch/x86/kernel/ldt.c
619
if (down_write_killable(&mm->context.ldt_usr_sem))
arch/x86/kernel/ldt.c
622
old_ldt = mm->context.ldt;
arch/x86/kernel/ldt.c
662
up_write(&mm->context.ldt_usr_sem);
arch/x86/kernel/machine_kexec_64.c
243
.context = image,
arch/x86/kernel/process.c
204
set_bit(MM_CONTEXT_LOCK_LAM, &p->mm->context.flags);
arch/x86/kernel/process_64.c
440
mutex_lock(&task->mm->context.lock);
arch/x86/kernel/process_64.c
441
ldt = task->mm->context.ldt;
arch/x86/kernel/process_64.c
446
mutex_unlock(&task->mm->context.lock);
arch/x86/kernel/process_64.c
726
__set_bit(MM_CONTEXT_HAS_VSYSCALL, ¤t->mm->context.flags);
arch/x86/kernel/process_64.c
739
current->mm->context.flags = 0;
arch/x86/kernel/process_64.c
763
__set_bit(MM_CONTEXT_UPROBE_IA32, ¤t->mm->context.flags);
arch/x86/kernel/process_64.c
816
mm->context.lam_cr3_mask = X86_CR3_LAM_U57;
arch/x86/kernel/process_64.c
817
mm->context.untag_mask = ~GENMASK(62, 57);
arch/x86/kernel/process_64.c
825
set_bit(MM_CONTEXT_LOCK_LAM, &mm->context.flags);
arch/x86/kernel/process_64.c
838
!test_bit(MM_CONTEXT_FORCE_TAGGED_SVA, &mm->context.flags))
arch/x86/kernel/process_64.c
848
if (test_bit(MM_CONTEXT_LOCK_LAM, &mm->context.flags)) {
arch/x86/kernel/process_64.c
955
return put_user(task->mm->context.untag_mask,
arch/x86/kernel/process_64.c
962
set_bit(MM_CONTEXT_FORCE_TAGGED_SVA, &task->mm->context.flags);
arch/x86/kernel/signal_32.c
283
if (current->mm->context.vdso)
arch/x86/kernel/signal_32.c
284
restorer = current->mm->context.vdso +
arch/x86/kernel/signal_32.c
370
restorer = current->mm->context.vdso +
arch/x86/kernel/step.c
38
mutex_lock(&child->mm->context.lock);
arch/x86/kernel/step.c
39
if (unlikely(!child->mm->context.ldt ||
arch/x86/kernel/step.c
40
seg >= child->mm->context.ldt->nr_entries))
arch/x86/kernel/step.c
43
desc = &child->mm->context.ldt->entries[seg];
arch/x86/kernel/step.c
51
mutex_unlock(&child->mm->context.lock);
arch/x86/kernel/umip.c
269
down_read(¤t->mm->context.ldt_usr_sem);
arch/x86/kernel/umip.c
270
if (current->mm->context.ldt)
arch/x86/kernel/umip.c
274
up_read(¤t->mm->context.ldt_usr_sem);
arch/x86/kvm/mmu/mmu.c
5103
static void nonpaging_init_context(struct kvm_mmu *context)
arch/x86/kvm/mmu/mmu.c
5105
context->page_fault = nonpaging_page_fault;
arch/x86/kvm/mmu/mmu.c
5106
context->gva_to_gpa = nonpaging_gva_to_gpa;
arch/x86/kvm/mmu/mmu.c
5107
context->sync_spte = NULL;
arch/x86/kvm/mmu/mmu.c
5372
struct kvm_mmu *context)
arch/x86/kvm/mmu/mmu.c
5374
__reset_rsvds_bits_mask(&context->guest_rsvd_check,
arch/x86/kvm/mmu/mmu.c
5376
context->cpu_role.base.level, is_efer_nx(context),
arch/x86/kvm/mmu/mmu.c
5378
is_cr4_pse(context),
arch/x86/kvm/mmu/mmu.c
5421
struct kvm_mmu *context, bool execonly, int huge_page_level)
arch/x86/kvm/mmu/mmu.c
5423
__reset_rsvds_bits_mask_ept(&context->guest_rsvd_check,
arch/x86/kvm/mmu/mmu.c
5439
struct kvm_mmu *context)
arch/x86/kvm/mmu/mmu.c
5448
WARN_ON_ONCE(context->root_role.level < PT32E_ROOT_LEVEL);
arch/x86/kvm/mmu/mmu.c
5450
shadow_zero_check = &context->shadow_zero_check;
arch/x86/kvm/mmu/mmu.c
5452
context->root_role.level,
arch/x86/kvm/mmu/mmu.c
5453
context->root_role.efer_nx,
arch/x86/kvm/mmu/mmu.c
5460
for (i = context->root_role.level; --i >= 0;) {
arch/x86/kvm/mmu/mmu.c
5485
static void reset_tdp_shadow_zero_bits_mask(struct kvm_mmu *context)
arch/x86/kvm/mmu/mmu.c
5490
shadow_zero_check = &context->shadow_zero_check;
arch/x86/kvm/mmu/mmu.c
5494
context->root_role.level, true,
arch/x86/kvm/mmu/mmu.c
5505
for (i = context->root_role.level; --i >= 0;) {
arch/x86/kvm/mmu/mmu.c
5516
reset_ept_shadow_zero_bits_mask(struct kvm_mmu *context, bool execonly)
arch/x86/kvm/mmu/mmu.c
5518
__reset_rsvds_bits_mask_ept(&context->shadow_zero_check,
arch/x86/kvm/mmu/mmu.c
5684
static void paging64_init_context(struct kvm_mmu *context)
arch/x86/kvm/mmu/mmu.c
5686
context->page_fault = paging64_page_fault;
arch/x86/kvm/mmu/mmu.c
5687
context->gva_to_gpa = paging64_gva_to_gpa;
arch/x86/kvm/mmu/mmu.c
5688
context->sync_spte = paging64_sync_spte;
arch/x86/kvm/mmu/mmu.c
5691
static void paging32_init_context(struct kvm_mmu *context)
arch/x86/kvm/mmu/mmu.c
5693
context->page_fault = paging32_page_fault;
arch/x86/kvm/mmu/mmu.c
5694
context->gva_to_gpa = paging32_gva_to_gpa;
arch/x86/kvm/mmu/mmu.c
5695
context->sync_spte = paging32_sync_spte;
arch/x86/kvm/mmu/mmu.c
5800
struct kvm_mmu *context = &vcpu->arch.root_mmu;
arch/x86/kvm/mmu/mmu.c
5803
if (cpu_role.as_u64 == context->cpu_role.as_u64 &&
arch/x86/kvm/mmu/mmu.c
5804
root_role.word == context->root_role.word)
arch/x86/kvm/mmu/mmu.c
5807
context->cpu_role.as_u64 = cpu_role.as_u64;
arch/x86/kvm/mmu/mmu.c
5808
context->root_role.word = root_role.word;
arch/x86/kvm/mmu/mmu.c
5809
context->page_fault = kvm_tdp_page_fault;
arch/x86/kvm/mmu/mmu.c
5810
context->sync_spte = NULL;
arch/x86/kvm/mmu/mmu.c
5811
context->get_guest_pgd = get_guest_cr3;
arch/x86/kvm/mmu/mmu.c
5812
context->get_pdptr = kvm_pdptr_read;
arch/x86/kvm/mmu/mmu.c
5813
context->inject_page_fault = kvm_inject_page_fault;
arch/x86/kvm/mmu/mmu.c
5815
if (!is_cr0_pg(context))
arch/x86/kvm/mmu/mmu.c
5816
context->gva_to_gpa = nonpaging_gva_to_gpa;
arch/x86/kvm/mmu/mmu.c
5817
else if (is_cr4_pae(context))
arch/x86/kvm/mmu/mmu.c
5818
context->gva_to_gpa = paging64_gva_to_gpa;
arch/x86/kvm/mmu/mmu.c
5820
context->gva_to_gpa = paging32_gva_to_gpa;
arch/x86/kvm/mmu/mmu.c
5822
reset_guest_paging_metadata(vcpu, context);
arch/x86/kvm/mmu/mmu.c
5823
reset_tdp_shadow_zero_bits_mask(context);
arch/x86/kvm/mmu/mmu.c
5826
static void shadow_mmu_init_context(struct kvm_vcpu *vcpu, struct kvm_mmu *context,
arch/x86/kvm/mmu/mmu.c
5830
if (cpu_role.as_u64 == context->cpu_role.as_u64 &&
arch/x86/kvm/mmu/mmu.c
5831
root_role.word == context->root_role.word)
arch/x86/kvm/mmu/mmu.c
5834
context->cpu_role.as_u64 = cpu_role.as_u64;
arch/x86/kvm/mmu/mmu.c
5835
context->root_role.word = root_role.word;
arch/x86/kvm/mmu/mmu.c
5837
if (!is_cr0_pg(context))
arch/x86/kvm/mmu/mmu.c
5838
nonpaging_init_context(context);
arch/x86/kvm/mmu/mmu.c
5839
else if (is_cr4_pae(context))
arch/x86/kvm/mmu/mmu.c
5840
paging64_init_context(context);
arch/x86/kvm/mmu/mmu.c
5842
paging32_init_context(context);
arch/x86/kvm/mmu/mmu.c
5844
reset_guest_paging_metadata(vcpu, context);
arch/x86/kvm/mmu/mmu.c
5845
reset_shadow_zero_bits_mask(vcpu, context);
arch/x86/kvm/mmu/mmu.c
5851
struct kvm_mmu *context = &vcpu->arch.root_mmu;
arch/x86/kvm/mmu/mmu.c
5870
shadow_mmu_init_context(vcpu, context, cpu_role, root_role);
arch/x86/kvm/mmu/mmu.c
5876
struct kvm_mmu *context = &vcpu->arch.guest_mmu;
arch/x86/kvm/mmu/mmu.c
5894
shadow_mmu_init_context(vcpu, context, cpu_role, root_role);
arch/x86/kvm/mmu/mmu.c
5928
struct kvm_mmu *context = &vcpu->arch.guest_mmu;
arch/x86/kvm/mmu/mmu.c
5934
if (new_mode.as_u64 != context->cpu_role.as_u64) {
arch/x86/kvm/mmu/mmu.c
5936
context->cpu_role.as_u64 = new_mode.as_u64;
arch/x86/kvm/mmu/mmu.c
5937
context->root_role.word = new_mode.base.word;
arch/x86/kvm/mmu/mmu.c
5939
context->page_fault = ept_page_fault;
arch/x86/kvm/mmu/mmu.c
5940
context->gva_to_gpa = ept_gva_to_gpa;
arch/x86/kvm/mmu/mmu.c
5941
context->sync_spte = ept_sync_spte;
arch/x86/kvm/mmu/mmu.c
5943
update_permission_bitmask(context, true);
arch/x86/kvm/mmu/mmu.c
5944
context->pkru_mask = 0;
arch/x86/kvm/mmu/mmu.c
5945
reset_rsvds_bits_mask_ept(vcpu, context, execonly, huge_page_level);
arch/x86/kvm/mmu/mmu.c
5946
reset_ept_shadow_zero_bits_mask(context, execonly);
arch/x86/kvm/mmu/mmu.c
5956
struct kvm_mmu *context = &vcpu->arch.root_mmu;
arch/x86/kvm/mmu/mmu.c
5960
context->get_guest_pgd = get_guest_cr3;
arch/x86/kvm/mmu/mmu.c
5961
context->get_pdptr = kvm_pdptr_read;
arch/x86/kvm/mmu/mmu.c
5962
context->inject_page_fault = kvm_inject_page_fault;
arch/x86/kvm/svm/sev.c
2178
void *context;
arch/x86/kvm/svm/sev.c
2182
context = snp_alloc_firmware_page(GFP_KERNEL_ACCOUNT);
arch/x86/kvm/svm/sev.c
2183
if (!context)
arch/x86/kvm/svm/sev.c
2186
data.address = __psp_pa(context);
arch/x86/kvm/svm/sev.c
2191
snp_free_firmware_page(context);
arch/x86/kvm/svm/sev.c
2195
return context;
arch/x86/lib/insn-eval.c
641
mutex_lock(¤t->mm->context.lock);
arch/x86/lib/insn-eval.c
642
ldt = current->mm->context.ldt;
arch/x86/lib/insn-eval.c
648
mutex_unlock(¤t->mm->context.lock);
arch/x86/math-emu/fpu_system.h
30
mutex_lock(¤t->mm->context.lock);
arch/x86/math-emu/fpu_system.h
31
if (current->mm->context.ldt && seg < current->mm->context.ldt->nr_entries)
arch/x86/math-emu/fpu_system.h
32
ret = current->mm->context.ldt->entries[seg];
arch/x86/math-emu/fpu_system.h
33
mutex_unlock(¤t->mm->context.lock);
arch/x86/mm/ident_map.c
11
info->free_pgt_page(pte, info->context);
arch/x86/mm/ident_map.c
135
pmd = (pmd_t *)info->alloc_pgt_page(info->context);
arch/x86/mm/ident_map.c
164
pud = (pud_t *)info->alloc_pgt_page(info->context);
arch/x86/mm/ident_map.c
206
p4d = (p4d_t *)info->alloc_pgt_page(info->context);
arch/x86/mm/ident_map.c
29
info->free_pgt_page(pmd, info->context);
arch/x86/mm/ident_map.c
47
info->free_pgt_page(pud, info->context);
arch/x86/mm/ident_map.c
63
info->free_pgt_page(p4d, info->context);
arch/x86/mm/ident_map.c
77
info->free_pgt_page(pgd, info->context);
arch/x86/mm/pkeys.c
17
int execute_only_pkey = mm->context.execute_only_pkey;
arch/x86/mm/pkeys.c
58
mm->context.execute_only_pkey = execute_only_pkey;
arch/x86/mm/pkeys.c
67
if (vma_pkey(vma) != vma->vm_mm->context.execute_only_pkey)
arch/x86/mm/tlb.c
1083
u64 tlb_gen = atomic64_read(&init_mm.context.tlb_gen);
arch/x86/mm/tlb.c
1109
this_cpu_write(cpu_tlbstate.ctxs[0].ctx_id, mm->context.ctx_id);
arch/x86/mm/tlb.c
1172
loaded_mm->context.ctx_id);
arch/x86/mm/tlb.c
1205
mm_tlb_gen = atomic64_read(&loaded_mm->context.tlb_gen);
arch/x86/mm/tlb.c
1337
if (time_after(jiffies, READ_ONCE(mm->context.next_trim_cpumask))) {
arch/x86/mm/tlb.c
1338
WRITE_ONCE(mm->context.next_trim_cpumask, jiffies + HZ);
arch/x86/mm/tlb.c
255
next->context.ctx_id)
arch/x86/mm/tlb.c
416
__set_bit(mm->context.global_asid, global_asid_freed);
arch/x86/mm/tlb.c
418
mm->context.global_asid = 0;
arch/x86/mm/tlb.c
757
atomic_read(&mm->context.perf_rdpmc_allowed))) {
arch/x86/mm/tlb.c
845
next->context.ctx_id);
arch/x86/mm/tlb.c
864
next_tlb_gen = atomic64_read(&next->context.tlb_gen);
arch/x86/mm/tlb.c
891
next_tlb_gen = atomic64_read(&next->context.tlb_gen);
arch/x86/mm/tlb.c
940
next_tlb_gen = atomic64_read(&next->context.tlb_gen);
arch/x86/mm/tlb.c
949
this_cpu_write(cpu_tlbstate.ctxs[ns.asid].ctx_id, next->context.ctx_id);
arch/x86/pci/mmconfig-shared.c
416
void *context, void **rv)
arch/x86/pci/mmconfig-shared.c
418
struct resource *mcfg_res = context;
arch/x86/pci/mmconfig-shared.c
421
check_mcfg_resource, context);
arch/x86/platform/olpc/olpc-xo15-sci.c
133
static u32 xo15_sci_gpe_handler(acpi_handle gpe_device, u32 gpe, void *context)
arch/x86/power/hibernate_64.c
87
static void *alloc_pgt_page(void *context)
arch/x86/um/tls_32.c
35
struct stub_data *data = (void *)task->mm->context.id.stack;
arch/x86/um/tls_32.c
43
ret = os_set_thread_area(info, task->mm->context.id.pid);
arch/xtensa/include/asm/mmu_context.h
104
set_rasid_register(ASID_INSERT(mm->context.asid[cpu]));
arch/xtensa/include/asm/mmu_context.h
120
mm->context.asid[cpu] = NO_CONTEXT;
arch/xtensa/include/asm/mmu_context.h
122
mm->context.cpu = -1;
arch/xtensa/include/asm/mmu_context.h
130
int migrated = next->context.cpu != cpu;
arch/xtensa/include/asm/mmu_context.h
134
next->context.cpu = cpu;
arch/xtensa/include/asm/mmu_context.h
82
mm->context.asid[cpu] = asid;
arch/xtensa/include/asm/mmu_context.h
83
mm->context.cpu = cpu;
arch/xtensa/include/asm/mmu_context.h
93
unsigned long asid = mm->context.asid[cpu];
arch/xtensa/kernel/asm-offsets.c
121
DEFINE(MM_CONTEXT, offsetof (struct mm_struct, context));
arch/xtensa/mm/tlb.c
100
(unsigned long)mm->context.asid[cpu], start, end);
arch/xtensa/mm/tlb.c
106
set_rasid_register(ASID_INSERT(mm->context.asid[cpu]));
arch/xtensa/mm/tlb.c
134
if (mm->context.asid[cpu] == NO_CONTEXT)
arch/xtensa/mm/tlb.c
140
set_rasid_register(ASID_INSERT(mm->context.asid[cpu]));
arch/xtensa/mm/tlb.c
71
mm->context.asid[cpu] = NO_CONTEXT;
arch/xtensa/mm/tlb.c
75
mm->context.asid[cpu] = NO_CONTEXT;
arch/xtensa/mm/tlb.c
76
mm->context.cpu = -1;
arch/xtensa/mm/tlb.c
96
if (mm->context.asid[cpu] == NO_CONTEXT)
crypto/asymmetric_keys/mscode_parser.c
116
int mscode_note_digest(void *context, size_t hdrlen,
crypto/asymmetric_keys/mscode_parser.c
120
struct pefile_context *ctx = context;
crypto/asymmetric_keys/mscode_parser.c
36
int mscode_note_content_type(void *context, size_t hdrlen,
crypto/asymmetric_keys/mscode_parser.c
68
int mscode_note_digest_algo(void *context, size_t hdrlen,
crypto/asymmetric_keys/mscode_parser.c
72
struct pefile_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
218
int pkcs7_note_OID(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
222
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
237
int pkcs7_sig_note_digest_algo(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
241
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
287
int pkcs7_sig_note_pkey_algo(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
291
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
339
int pkcs7_check_content_type(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
343
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
356
int pkcs7_note_signeddata_version(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
360
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
390
int pkcs7_note_signerinfo_version(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
394
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
433
int pkcs7_extract_cert(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
437
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
473
int pkcs7_note_certificate_list(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
477
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
491
int pkcs7_note_content(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
495
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
511
int pkcs7_note_data(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
515
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
528
int pkcs7_sig_note_authenticated_attr(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
532
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
611
int pkcs7_sig_note_set_of_authattrs(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
615
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
639
int pkcs7_sig_note_serial(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
643
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
652
int pkcs7_sig_note_issuer(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
656
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
665
int pkcs7_sig_note_skid(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
669
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
681
int pkcs7_sig_note_signature(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
685
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs7_parser.c
698
int pkcs7_note_signed_info(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs7_parser.c
702
struct pkcs7_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs8_parser.c
33
int pkcs8_note_OID(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs8_parser.c
37
struct pkcs8_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs8_parser.c
53
int pkcs8_note_version(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs8_parser.c
67
int pkcs8_note_algo(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs8_parser.c
71
struct pkcs8_parse_context *ctx = context;
crypto/asymmetric_keys/pkcs8_parser.c
83
int pkcs8_note_key(void *context, size_t hdrlen,
crypto/asymmetric_keys/pkcs8_parser.c
87
struct pkcs8_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
141
int x509_note_OID(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
145
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
161
int x509_note_tbs_certificate(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
165
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
178
int x509_note_sig_algo(void *context, size_t hdrlen, unsigned char tag,
crypto/asymmetric_keys/x509_cert_parser.c
181
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
297
int x509_note_signature(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
301
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
336
int x509_note_serial(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
340
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
349
int x509_extract_name_segment(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
353
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
452
int x509_note_issuer(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
456
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
472
int x509_note_subject(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
476
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
485
int x509_note_params(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
489
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
506
int x509_extract_key_data(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
510
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
570
int x509_process_extension(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
574
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
762
int x509_note_not_before(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
766
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
770
int x509_note_not_after(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
774
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
781
int x509_akid_note_kid(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
785
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
804
int x509_akid_note_name(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
808
struct x509_parse_context *ctx = context;
crypto/asymmetric_keys/x509_cert_parser.c
820
int x509_akid_note_serial(void *context, size_t hdrlen,
crypto/asymmetric_keys/x509_cert_parser.c
824
struct x509_parse_context *ctx = context;
crypto/ecdsa-x962.c
58
int ecdsa_get_signature_r(void *context, size_t hdrlen, unsigned char tag,
crypto/ecdsa-x962.c
61
struct ecdsa_x962_signature_ctx *sig_ctx = context;
crypto/ecdsa-x962.c
67
int ecdsa_get_signature_s(void *context, size_t hdrlen, unsigned char tag,
crypto/ecdsa-x962.c
70
struct ecdsa_x962_signature_ctx *sig_ctx = context;
crypto/ecrdsa.c
142
int ecrdsa_param_curve(void *context, size_t hdrlen, unsigned char tag,
crypto/ecrdsa.c
145
struct ecrdsa_ctx *ctx = context;
crypto/ecrdsa.c
155
int ecrdsa_param_digest(void *context, size_t hdrlen, unsigned char tag,
crypto/ecrdsa.c
158
struct ecrdsa_ctx *ctx = context;
crypto/ecrdsa.c
166
int ecrdsa_parse_pub_key(void *context, size_t hdrlen, unsigned char tag,
crypto/ecrdsa.c
169
struct ecrdsa_ctx *ctx = context;
crypto/krb5/rfc8009_aes2.c
31
const struct krb5_buffer *context,
crypto/krb5/rfc8009_aes2.c
60
data.len = 4 + label->len + 1 + context->len + 4;
crypto/krb5/rfc8009_aes2.c
82
memcpy(p, context->data, context->len);
crypto/krb5/rfc8009_aes2.c
83
p += context->len;
crypto/rsa_helper.c
106
int rsa_get_dp(void *context, size_t hdrlen, unsigned char tag,
crypto/rsa_helper.c
109
struct rsa_key *key = context;
crypto/rsa_helper.c
121
int rsa_get_dq(void *context, size_t hdrlen, unsigned char tag,
crypto/rsa_helper.c
124
struct rsa_key *key = context;
crypto/rsa_helper.c
136
int rsa_get_qinv(void *context, size_t hdrlen, unsigned char tag,
crypto/rsa_helper.c
139
struct rsa_key *key = context;
crypto/rsa_helper.c
16
int rsa_get_n(void *context, size_t hdrlen, unsigned char tag,
crypto/rsa_helper.c
19
struct rsa_key *key = context;
crypto/rsa_helper.c
46
int rsa_get_e(void *context, size_t hdrlen, unsigned char tag,
crypto/rsa_helper.c
49
struct rsa_key *key = context;
crypto/rsa_helper.c
61
int rsa_get_d(void *context, size_t hdrlen, unsigned char tag,
crypto/rsa_helper.c
64
struct rsa_key *key = context;
crypto/rsa_helper.c
76
int rsa_get_p(void *context, size_t hdrlen, unsigned char tag,
crypto/rsa_helper.c
79
struct rsa_key *key = context;
crypto/rsa_helper.c
91
int rsa_get_q(void *context, size_t hdrlen, unsigned char tag,
crypto/rsa_helper.c
94
struct rsa_key *key = context;
drivers/accel/qaic/sahara.c
226
static bool is_streaming(struct sahara_context *context)
drivers/accel/qaic/sahara.c
228
return !context->non_streaming;
drivers/accel/qaic/sahara.c
231
static int sahara_find_image(struct sahara_context *context, u32 image_id)
drivers/accel/qaic/sahara.c
235
if (image_id == context->active_image_id)
drivers/accel/qaic/sahara.c
238
if (context->active_image_id != SAHARA_IMAGE_ID_NONE) {
drivers/accel/qaic/sahara.c
239
dev_err(&context->mhi_dev->dev, "image id %d is not valid as %d is active\n",
drivers/accel/qaic/sahara.c
240
image_id, context->active_image_id);
drivers/accel/qaic/sahara.c
244
if (image_id >= context->table_size || !context->image_table[image_id]) {
drivers/accel/qaic/sahara.c
245
dev_err(&context->mhi_dev->dev, "request for unknown image: %d\n", image_id);
drivers/accel/qaic/sahara.c
254
ret = firmware_request_nowarn(&context->firmware,
drivers/accel/qaic/sahara.c
255
context->image_table[image_id],
drivers/accel/qaic/sahara.c
256
&context->mhi_dev->dev);
drivers/accel/qaic/sahara.c
258
dev_dbg(&context->mhi_dev->dev, "request for image id %d / file %s failed %d\n",
drivers/accel/qaic/sahara.c
259
image_id, context->image_table[image_id], ret);
drivers/accel/qaic/sahara.c
263
context->active_image_id = image_id;
drivers/accel/qaic/sahara.c
268
static void sahara_release_image(struct sahara_context *context)
drivers/accel/qaic/sahara.c
270
if (context->active_image_id != SAHARA_IMAGE_ID_NONE)
drivers/accel/qaic/sahara.c
271
release_firmware(context->firmware);
drivers/accel/qaic/sahara.c
272
context->active_image_id = SAHARA_IMAGE_ID_NONE;
drivers/accel/qaic/sahara.c
275
static void sahara_send_reset(struct sahara_context *context)
drivers/accel/qaic/sahara.c
279
context->is_mem_dump_mode = false;
drivers/accel/qaic/sahara.c
280
context->read_data_offset = 0;
drivers/accel/qaic/sahara.c
281
context->read_data_length = 0;
drivers/accel/qaic/sahara.c
283
context->tx[0]->cmd = cpu_to_le32(SAHARA_RESET_CMD);
drivers/accel/qaic/sahara.c
284
context->tx[0]->length = cpu_to_le32(SAHARA_RESET_LENGTH);
drivers/accel/qaic/sahara.c
286
ret = mhi_queue_buf(context->mhi_dev, DMA_TO_DEVICE, context->tx[0],
drivers/accel/qaic/sahara.c
289
dev_err(&context->mhi_dev->dev, "Unable to send reset response %d\n", ret);
drivers/accel/qaic/sahara.c
292
static void sahara_hello(struct sahara_context *context)
drivers/accel/qaic/sahara.c
296
dev_dbg(&context->mhi_dev->dev,
drivers/accel/qaic/sahara.c
298
le32_to_cpu(context->rx->length),
drivers/accel/qaic/sahara.c
299
le32_to_cpu(context->rx->hello.version),
drivers/accel/qaic/sahara.c
300
le32_to_cpu(context->rx->hello.version_compat),
drivers/accel/qaic/sahara.c
301
le32_to_cpu(context->rx->hello.max_length),
drivers/accel/qaic/sahara.c
302
le32_to_cpu(context->rx->hello.mode));
drivers/accel/qaic/sahara.c
304
if (le32_to_cpu(context->rx->length) != SAHARA_HELLO_LENGTH) {
drivers/accel/qaic/sahara.c
305
dev_err(&context->mhi_dev->dev, "Malformed hello packet - length %d\n",
drivers/accel/qaic/sahara.c
306
le32_to_cpu(context->rx->length));
drivers/accel/qaic/sahara.c
309
if (le32_to_cpu(context->rx->hello.version) != SAHARA_VERSION) {
drivers/accel/qaic/sahara.c
310
dev_err(&context->mhi_dev->dev, "Unsupported hello packet - version %d\n",
drivers/accel/qaic/sahara.c
311
le32_to_cpu(context->rx->hello.version));
drivers/accel/qaic/sahara.c
315
if (le32_to_cpu(context->rx->hello.mode) != SAHARA_MODE_IMAGE_TX_PENDING &&
drivers/accel/qaic/sahara.c
316
le32_to_cpu(context->rx->hello.mode) != SAHARA_MODE_IMAGE_TX_COMPLETE &&
drivers/accel/qaic/sahara.c
317
le32_to_cpu(context->rx->hello.mode) != SAHARA_MODE_MEMORY_DEBUG) {
drivers/accel/qaic/sahara.c
318
dev_err(&context->mhi_dev->dev, "Unsupported hello packet - mode %d\n",
drivers/accel/qaic/sahara.c
319
le32_to_cpu(context->rx->hello.mode));
drivers/accel/qaic/sahara.c
323
context->tx[0]->cmd = cpu_to_le32(SAHARA_HELLO_RESP_CMD);
drivers/accel/qaic/sahara.c
324
context->tx[0]->length = cpu_to_le32(SAHARA_HELLO_LENGTH);
drivers/accel/qaic/sahara.c
325
context->tx[0]->hello_resp.version = cpu_to_le32(SAHARA_VERSION);
drivers/accel/qaic/sahara.c
326
context->tx[0]->hello_resp.version_compat = cpu_to_le32(SAHARA_VERSION);
drivers/accel/qaic/sahara.c
327
context->tx[0]->hello_resp.status = cpu_to_le32(SAHARA_SUCCESS);
drivers/accel/qaic/sahara.c
328
context->tx[0]->hello_resp.mode = context->rx->hello_resp.mode;
drivers/accel/qaic/sahara.c
330
ret = mhi_queue_buf(context->mhi_dev, DMA_TO_DEVICE, context->tx[0],
drivers/accel/qaic/sahara.c
333
dev_err(&context->mhi_dev->dev, "Unable to send hello response %d\n", ret);
drivers/accel/qaic/sahara.c
336
static int read_data_helper(struct sahara_context *context, int buf_index)
drivers/accel/qaic/sahara.c
342
pkt_data_len = min(context->read_data_length, SAHARA_PACKET_MAX_SIZE);
drivers/accel/qaic/sahara.c
344
memcpy(context->tx[buf_index],
drivers/accel/qaic/sahara.c
345
&context->firmware->data[context->read_data_offset],
drivers/accel/qaic/sahara.c
348
context->read_data_offset += pkt_data_len;
drivers/accel/qaic/sahara.c
349
context->read_data_length -= pkt_data_len;
drivers/accel/qaic/sahara.c
351
if (is_streaming(context) || !context->read_data_length)
drivers/accel/qaic/sahara.c
356
ret = mhi_queue_buf(context->mhi_dev, DMA_TO_DEVICE,
drivers/accel/qaic/sahara.c
357
context->tx[buf_index], pkt_data_len, mhi_flag);
drivers/accel/qaic/sahara.c
359
dev_err(&context->mhi_dev->dev, "Unable to send read_data response %d\n", ret);
drivers/accel/qaic/sahara.c
366
static void sahara_read_data(struct sahara_context *context)
drivers/accel/qaic/sahara.c
372
dev_dbg(&context->mhi_dev->dev,
drivers/accel/qaic/sahara.c
374
le32_to_cpu(context->rx->length),
drivers/accel/qaic/sahara.c
375
le32_to_cpu(context->rx->read_data.image),
drivers/accel/qaic/sahara.c
376
le32_to_cpu(context->rx->read_data.offset),
drivers/accel/qaic/sahara.c
377
le32_to_cpu(context->rx->read_data.length));
drivers/accel/qaic/sahara.c
379
if (le32_to_cpu(context->rx->length) != SAHARA_READ_DATA_LENGTH) {
drivers/accel/qaic/sahara.c
380
dev_err(&context->mhi_dev->dev, "Malformed read_data packet - length %d\n",
drivers/accel/qaic/sahara.c
381
le32_to_cpu(context->rx->length));
drivers/accel/qaic/sahara.c
385
image_id = le32_to_cpu(context->rx->read_data.image);
drivers/accel/qaic/sahara.c
386
data_offset = le32_to_cpu(context->rx->read_data.offset);
drivers/accel/qaic/sahara.c
387
data_len = le32_to_cpu(context->rx->read_data.length);
drivers/accel/qaic/sahara.c
389
ret = sahara_find_image(context, image_id);
drivers/accel/qaic/sahara.c
391
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
404
if (context->non_streaming && data_len > SAHARA_TRANSFER_MAX_SIZE) {
drivers/accel/qaic/sahara.c
405
dev_err(&context->mhi_dev->dev, "Malformed read_data packet - data len %d exceeds max xfer size %d\n",
drivers/accel/qaic/sahara.c
407
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
411
if (data_offset >= context->firmware->size) {
drivers/accel/qaic/sahara.c
412
dev_err(&context->mhi_dev->dev, "Malformed read_data packet - data offset %d exceeds file size %zu\n",
drivers/accel/qaic/sahara.c
413
data_offset, context->firmware->size);
drivers/accel/qaic/sahara.c
414
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
418
if (size_add(data_offset, data_len) > context->firmware->size) {
drivers/accel/qaic/sahara.c
419
dev_err(&context->mhi_dev->dev, "Malformed read_data packet - data offset %d and length %d exceeds file size %zu\n",
drivers/accel/qaic/sahara.c
420
data_offset, data_len, context->firmware->size);
drivers/accel/qaic/sahara.c
421
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
425
context->read_data_offset = data_offset;
drivers/accel/qaic/sahara.c
426
context->read_data_length = data_len;
drivers/accel/qaic/sahara.c
428
if (is_streaming(context)) {
drivers/accel/qaic/sahara.c
429
schedule_work(&context->read_data_work);
drivers/accel/qaic/sahara.c
433
for (i = 0; i < SAHARA_NUM_TX_BUF && context->read_data_length; ++i) {
drivers/accel/qaic/sahara.c
434
ret = read_data_helper(context, i);
drivers/accel/qaic/sahara.c
440
static void sahara_end_of_image(struct sahara_context *context)
drivers/accel/qaic/sahara.c
444
dev_dbg(&context->mhi_dev->dev,
drivers/accel/qaic/sahara.c
446
le32_to_cpu(context->rx->length),
drivers/accel/qaic/sahara.c
447
le32_to_cpu(context->rx->end_of_image.image),
drivers/accel/qaic/sahara.c
448
le32_to_cpu(context->rx->end_of_image.status));
drivers/accel/qaic/sahara.c
450
if (le32_to_cpu(context->rx->length) != SAHARA_END_OF_IMAGE_LENGTH) {
drivers/accel/qaic/sahara.c
451
dev_err(&context->mhi_dev->dev, "Malformed end_of_image packet - length %d\n",
drivers/accel/qaic/sahara.c
452
le32_to_cpu(context->rx->length));
drivers/accel/qaic/sahara.c
456
if (context->active_image_id != SAHARA_IMAGE_ID_NONE &&
drivers/accel/qaic/sahara.c
457
le32_to_cpu(context->rx->end_of_image.image) != context->active_image_id) {
drivers/accel/qaic/sahara.c
458
dev_err(&context->mhi_dev->dev, "Malformed end_of_image packet - image %d is not the active image\n",
drivers/accel/qaic/sahara.c
459
le32_to_cpu(context->rx->end_of_image.image));
drivers/accel/qaic/sahara.c
463
sahara_release_image(context);
drivers/accel/qaic/sahara.c
465
if (le32_to_cpu(context->rx->end_of_image.status))
drivers/accel/qaic/sahara.c
468
context->tx[0]->cmd = cpu_to_le32(SAHARA_DONE_CMD);
drivers/accel/qaic/sahara.c
469
context->tx[0]->length = cpu_to_le32(SAHARA_DONE_LENGTH);
drivers/accel/qaic/sahara.c
471
ret = mhi_queue_buf(context->mhi_dev, DMA_TO_DEVICE, context->tx[0],
drivers/accel/qaic/sahara.c
474
dev_dbg(&context->mhi_dev->dev, "Unable to send done response %d\n", ret);
drivers/accel/qaic/sahara.c
477
static void sahara_memory_debug64(struct sahara_context *context)
drivers/accel/qaic/sahara.c
481
dev_dbg(&context->mhi_dev->dev,
drivers/accel/qaic/sahara.c
483
le32_to_cpu(context->rx->length),
drivers/accel/qaic/sahara.c
484
le64_to_cpu(context->rx->memory_debug64.table_address),
drivers/accel/qaic/sahara.c
485
le64_to_cpu(context->rx->memory_debug64.table_length));
drivers/accel/qaic/sahara.c
487
if (le32_to_cpu(context->rx->length) != SAHARA_MEM_DEBUG64_LENGTH) {
drivers/accel/qaic/sahara.c
488
dev_err(&context->mhi_dev->dev, "Malformed memory debug64 packet - length %d\n",
drivers/accel/qaic/sahara.c
489
le32_to_cpu(context->rx->length));
drivers/accel/qaic/sahara.c
493
context->dump_table_address = le64_to_cpu(context->rx->memory_debug64.table_address);
drivers/accel/qaic/sahara.c
494
context->dump_table_length = le64_to_cpu(context->rx->memory_debug64.table_length);
drivers/accel/qaic/sahara.c
496
if (context->dump_table_length % sizeof(struct sahara_debug_table_entry64) != 0 ||
drivers/accel/qaic/sahara.c
497
!context->dump_table_length) {
drivers/accel/qaic/sahara.c
498
dev_err(&context->mhi_dev->dev, "Malformed memory debug64 packet - table length %lld\n",
drivers/accel/qaic/sahara.c
499
context->dump_table_length);
drivers/accel/qaic/sahara.c
511
context->is_mem_dump_mode = true;
drivers/accel/qaic/sahara.c
518
if (context->dump_table_length > SAHARA_PACKET_MAX_SIZE) {
drivers/accel/qaic/sahara.c
519
dev_err(&context->mhi_dev->dev, "Memory dump table length %lld exceeds supported size. Discarding dump\n",
drivers/accel/qaic/sahara.c
520
context->dump_table_length);
drivers/accel/qaic/sahara.c
521
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
525
context->tx[0]->cmd = cpu_to_le32(SAHARA_MEM_READ64_CMD);
drivers/accel/qaic/sahara.c
526
context->tx[0]->length = cpu_to_le32(SAHARA_MEM_READ64_LENGTH);
drivers/accel/qaic/sahara.c
527
context->tx[0]->memory_read64.memory_address = cpu_to_le64(context->dump_table_address);
drivers/accel/qaic/sahara.c
528
context->tx[0]->memory_read64.memory_length = cpu_to_le64(context->dump_table_length);
drivers/accel/qaic/sahara.c
530
context->rx_size_requested = context->dump_table_length;
drivers/accel/qaic/sahara.c
532
ret = mhi_queue_buf(context->mhi_dev, DMA_TO_DEVICE, context->tx[0],
drivers/accel/qaic/sahara.c
535
dev_err(&context->mhi_dev->dev, "Unable to send read for dump table %d\n", ret);
drivers/accel/qaic/sahara.c
540
struct sahara_context *context = container_of(work, struct sahara_context, fw_work);
drivers/accel/qaic/sahara.c
543
switch (le32_to_cpu(context->rx->cmd)) {
drivers/accel/qaic/sahara.c
545
sahara_hello(context);
drivers/accel/qaic/sahara.c
548
sahara_read_data(context);
drivers/accel/qaic/sahara.c
551
sahara_end_of_image(context);
drivers/accel/qaic/sahara.c
560
sahara_memory_debug64(context);
drivers/accel/qaic/sahara.c
563
dev_err(&context->mhi_dev->dev, "Unknown command %d\n",
drivers/accel/qaic/sahara.c
564
le32_to_cpu(context->rx->cmd));
drivers/accel/qaic/sahara.c
568
ret = mhi_queue_buf(context->mhi_dev, DMA_FROM_DEVICE, context->rx,
drivers/accel/qaic/sahara.c
571
dev_err(&context->mhi_dev->dev, "Unable to requeue rx buf %d\n", ret);
drivers/accel/qaic/sahara.c
574
static void sahara_parse_dump_table(struct sahara_context *context)
drivers/accel/qaic/sahara.c
585
table_nents = context->dump_table_length / sizeof(*dev_table);
drivers/accel/qaic/sahara.c
586
context->dump_images_left = table_nents;
drivers/accel/qaic/sahara.c
589
dev_table = (struct sahara_debug_table_entry64 *)(context->rx);
drivers/accel/qaic/sahara.c
599
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
603
dev_dbg(&context->mhi_dev->dev,
drivers/accel/qaic/sahara.c
615
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
620
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
625
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
629
context->mem_dump_sz = dump_length;
drivers/accel/qaic/sahara.c
630
context->mem_dump = vzalloc(dump_length);
drivers/accel/qaic/sahara.c
631
if (!context->mem_dump) {
drivers/accel/qaic/sahara.c
633
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
638
dump_meta = context->mem_dump;
drivers/accel/qaic/sahara.c
642
dump_meta->table_size = context->dump_table_length;
drivers/accel/qaic/sahara.c
644
image_out_table = context->mem_dump + sizeof(*dump_meta);
drivers/accel/qaic/sahara.c
656
context->mem_dump_freespace = &image_out_table[i];
drivers/accel/qaic/sahara.c
659
context->dump_table_length = 0;
drivers/accel/qaic/sahara.c
662
context->dump_image = &image_out_table[0];
drivers/accel/qaic/sahara.c
663
dump_length = min_t(u64, context->dump_image->length, SAHARA_READ_MAX_SIZE);
drivers/accel/qaic/sahara.c
668
context->dump_image_offset = dump_length;
drivers/accel/qaic/sahara.c
670
context->tx[0]->cmd = cpu_to_le32(SAHARA_MEM_READ64_CMD);
drivers/accel/qaic/sahara.c
671
context->tx[0]->length = cpu_to_le32(SAHARA_MEM_READ64_LENGTH);
drivers/accel/qaic/sahara.c
672
context->tx[0]->memory_read64.memory_address = cpu_to_le64(context->dump_image->address);
drivers/accel/qaic/sahara.c
673
context->tx[0]->memory_read64.memory_length = cpu_to_le64(dump_length);
drivers/accel/qaic/sahara.c
675
context->rx_size_requested = dump_length;
drivers/accel/qaic/sahara.c
677
ret = mhi_queue_buf(context->mhi_dev, DMA_TO_DEVICE, context->tx[0],
drivers/accel/qaic/sahara.c
680
dev_err(&context->mhi_dev->dev, "Unable to send read for dump content %d\n", ret);
drivers/accel/qaic/sahara.c
683
static void sahara_parse_dump_image(struct sahara_context *context)
drivers/accel/qaic/sahara.c
688
memcpy(context->mem_dump_freespace, context->rx, context->rx_size);
drivers/accel/qaic/sahara.c
689
context->mem_dump_freespace += context->rx_size;
drivers/accel/qaic/sahara.c
691
if (context->dump_image_offset >= context->dump_image->length) {
drivers/accel/qaic/sahara.c
693
context->dump_image++;
drivers/accel/qaic/sahara.c
694
context->dump_images_left--;
drivers/accel/qaic/sahara.c
695
context->dump_image_offset = 0;
drivers/accel/qaic/sahara.c
697
if (!context->dump_images_left) {
drivers/accel/qaic/sahara.c
699
dev_coredumpv(context->mhi_dev->mhi_cntrl->cntrl_dev,
drivers/accel/qaic/sahara.c
700
context->mem_dump,
drivers/accel/qaic/sahara.c
701
context->mem_dump_sz,
drivers/accel/qaic/sahara.c
703
context->mem_dump = NULL;
drivers/accel/qaic/sahara.c
704
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
710
dump_length = context->dump_image->length - context->dump_image_offset;
drivers/accel/qaic/sahara.c
716
context->tx[0]->cmd = cpu_to_le32(SAHARA_MEM_READ64_CMD);
drivers/accel/qaic/sahara.c
717
context->tx[0]->length = cpu_to_le32(SAHARA_MEM_READ64_LENGTH);
drivers/accel/qaic/sahara.c
718
context->tx[0]->memory_read64.memory_address =
drivers/accel/qaic/sahara.c
719
cpu_to_le64(context->dump_image->address + context->dump_image_offset);
drivers/accel/qaic/sahara.c
720
context->tx[0]->memory_read64.memory_length = cpu_to_le64(dump_length);
drivers/accel/qaic/sahara.c
722
context->dump_image_offset += dump_length;
drivers/accel/qaic/sahara.c
723
context->rx_size_requested = dump_length;
drivers/accel/qaic/sahara.c
725
ret = mhi_queue_buf(context->mhi_dev, DMA_TO_DEVICE, context->tx[0],
drivers/accel/qaic/sahara.c
728
dev_err(&context->mhi_dev->dev,
drivers/accel/qaic/sahara.c
734
struct sahara_context *context = container_of(work, struct sahara_context, dump_work);
drivers/accel/qaic/sahara.c
741
if (context->rx_size != context->rx_size_requested &&
drivers/accel/qaic/sahara.c
742
context->rx_size != SAHARA_END_OF_IMAGE_LENGTH) {
drivers/accel/qaic/sahara.c
743
dev_err(&context->mhi_dev->dev,
drivers/accel/qaic/sahara.c
745
context->rx_size_requested,
drivers/accel/qaic/sahara.c
746
context->rx_size);
drivers/accel/qaic/sahara.c
750
if (context->rx_size == SAHARA_END_OF_IMAGE_LENGTH &&
drivers/accel/qaic/sahara.c
751
le32_to_cpu(context->rx->cmd) == SAHARA_END_OF_IMAGE_CMD) {
drivers/accel/qaic/sahara.c
752
dev_err(&context->mhi_dev->dev,
drivers/accel/qaic/sahara.c
754
le32_to_cpu(context->rx->end_of_image.status));
drivers/accel/qaic/sahara.c
758
if (context->rx_size == SAHARA_END_OF_IMAGE_LENGTH &&
drivers/accel/qaic/sahara.c
759
le32_to_cpu(context->rx->cmd) != SAHARA_END_OF_IMAGE_CMD) {
drivers/accel/qaic/sahara.c
760
dev_err(&context->mhi_dev->dev,
drivers/accel/qaic/sahara.c
762
le32_to_cpu(context->rx->cmd));
drivers/accel/qaic/sahara.c
772
if (context->dump_table_length)
drivers/accel/qaic/sahara.c
773
sahara_parse_dump_table(context);
drivers/accel/qaic/sahara.c
775
sahara_parse_dump_image(context);
drivers/accel/qaic/sahara.c
777
ret = mhi_queue_buf(context->mhi_dev, DMA_FROM_DEVICE, context->rx,
drivers/accel/qaic/sahara.c
780
dev_err(&context->mhi_dev->dev, "Unable to requeue rx buf %d\n", ret);
drivers/accel/qaic/sahara.c
785
vfree(context->mem_dump);
drivers/accel/qaic/sahara.c
786
context->mem_dump = NULL;
drivers/accel/qaic/sahara.c
787
sahara_send_reset(context);
drivers/accel/qaic/sahara.c
792
struct sahara_context *context = container_of(work, struct sahara_context, read_data_work);
drivers/accel/qaic/sahara.c
794
read_data_helper(context, 0);
drivers/accel/qaic/sahara.c
799
struct sahara_context *context;
drivers/accel/qaic/sahara.c
803
context = devm_kzalloc(&mhi_dev->dev, sizeof(*context), GFP_KERNEL);
drivers/accel/qaic/sahara.c
804
if (!context)
drivers/accel/qaic/sahara.c
807
context->rx = devm_kzalloc(&mhi_dev->dev, SAHARA_PACKET_MAX_SIZE, GFP_KERNEL);
drivers/accel/qaic/sahara.c
808
if (!context->rx)
drivers/accel/qaic/sahara.c
812
context->image_table = aic200_image_table;
drivers/accel/qaic/sahara.c
813
context->table_size = ARRAY_SIZE(aic200_image_table);
drivers/accel/qaic/sahara.c
815
context->image_table = aic100_image_table;
drivers/accel/qaic/sahara.c
816
context->table_size = ARRAY_SIZE(aic100_image_table);
drivers/accel/qaic/sahara.c
817
context->non_streaming = true;
drivers/accel/qaic/sahara.c
848
context->tx[i] = devm_kzalloc(&mhi_dev->dev,
drivers/accel/qaic/sahara.c
851
if (!context->tx[i])
drivers/accel/qaic/sahara.c
853
if (is_streaming(context))
drivers/accel/qaic/sahara.c
857
context->mhi_dev = mhi_dev;
drivers/accel/qaic/sahara.c
858
INIT_WORK(&context->fw_work, sahara_processing);
drivers/accel/qaic/sahara.c
859
INIT_WORK(&context->dump_work, sahara_dump_processing);
drivers/accel/qaic/sahara.c
860
INIT_WORK(&context->read_data_work, sahara_read_data_processing);
drivers/accel/qaic/sahara.c
862
context->active_image_id = SAHARA_IMAGE_ID_NONE;
drivers/accel/qaic/sahara.c
863
dev_set_drvdata(&mhi_dev->dev, context);
drivers/accel/qaic/sahara.c
869
ret = mhi_queue_buf(mhi_dev, DMA_FROM_DEVICE, context->rx, SAHARA_PACKET_MAX_SIZE, MHI_EOT);
drivers/accel/qaic/sahara.c
880
struct sahara_context *context = dev_get_drvdata(&mhi_dev->dev);
drivers/accel/qaic/sahara.c
882
cancel_work_sync(&context->fw_work);
drivers/accel/qaic/sahara.c
883
cancel_work_sync(&context->dump_work);
drivers/accel/qaic/sahara.c
884
vfree(context->mem_dump);
drivers/accel/qaic/sahara.c
885
sahara_release_image(context);
drivers/accel/qaic/sahara.c
891
struct sahara_context *context = dev_get_drvdata(&mhi_dev->dev);
drivers/accel/qaic/sahara.c
893
if (!mhi_result->transaction_status && context->read_data_length && is_streaming(context))
drivers/accel/qaic/sahara.c
894
schedule_work(&context->read_data_work);
drivers/accel/qaic/sahara.c
899
struct sahara_context *context = dev_get_drvdata(&mhi_dev->dev);
drivers/accel/qaic/sahara.c
902
context->rx_size = mhi_result->bytes_xferd;
drivers/accel/qaic/sahara.c
903
if (context->is_mem_dump_mode)
drivers/accel/qaic/sahara.c
904
schedule_work(&context->dump_work);
drivers/accel/qaic/sahara.c
906
schedule_work(&context->fw_work);
drivers/acpi/acpi_dbg.c
386
void *context;
drivers/acpi/acpi_dbg.c
392
context = acpi_aml_io.context;
drivers/acpi/acpi_dbg.c
397
function(context);
drivers/acpi/acpi_dbg.c
418
static int acpi_aml_create_thread(acpi_osd_exec_callback function, void *context)
drivers/acpi/acpi_dbg.c
424
acpi_aml_io.context = context;
drivers/acpi/acpi_dbg.c
58
void *context;
drivers/acpi/acpi_memhotplug.c
61
acpi_memory_get_resource(struct acpi_resource *resource, void *context)
drivers/acpi/acpi_memhotplug.c
63
struct acpi_memory_device *mem_device = context;
drivers/acpi/acpi_processor.c
577
void *context, void **rv)
drivers/acpi/acpi_processor.c
725
void *context,
drivers/acpi/acpi_video.c
1685
acpi_video_bus_match(acpi_handle handle, u32 level, void *context,
drivers/acpi/acpi_video.c
1688
struct acpi_device *device = context;
drivers/acpi/acpica/acdebug.h
246
void ACPI_SYSTEM_XFACE acpi_db_execute_thread(void *context);
drivers/acpi/acpica/acdispat.h
238
union acpi_generic_state *state, void *context);
drivers/acpi/acpica/acevents.h
106
void *context);
drivers/acpi/acpica/acevents.h
127
u32 level, void *context, void **return_value);
drivers/acpi/acpica/acevents.h
133
acpi_ev_walk_gpe_list(acpi_gpe_callback gpe_walk_callback, void *context);
drivers/acpi/acpica/acevents.h
137
struct acpi_gpe_block_info *gpe_block, void *context);
drivers/acpi/acpica/acevents.h
148
void *context);
drivers/acpi/acpica/acevents.h
168
acpi_adr_space_setup setup, void *context);
drivers/acpi/acpica/acevents.h
243
u32 ACPI_SYSTEM_XFACE acpi_ev_gpe_xrupt_handler(void *context);
drivers/acpi/acpica/achware.h
111
void *context);
drivers/acpi/acpica/achware.h
92
struct acpi_gpe_block_info *gpe_block, void *context);
drivers/acpi/acpica/achware.h
98
struct acpi_gpe_block_info *gpe_block, void *context);
drivers/acpi/acpica/aclocal.h
414
void *context; /* Context to be passed to handler */
drivers/acpi/acpica/aclocal.h
421
void *context; /* Context to be passed to handler */
drivers/acpi/acpica/aclocal.h
522
gpe_block, void *context);
drivers/acpi/acpica/aclocal.h
528
void *context; /* Context to be passed to handler */
drivers/acpi/acpica/aclocal.h
662
void *context;
drivers/acpi/acpica/acnamesp.h
191
u32 level, void *context, void **return_value);
drivers/acpi/acpica/acnamesp.h
63
u32 level, void *context, void **return_value);
drivers/acpi/acpica/acnamesp.h
83
void *context, void **return_value);
drivers/acpi/acpica/acobject.h
308
void *context;
drivers/acpi/acpica/acobject.h
318
void *context;
drivers/acpi/acpica/acresrc.h
211
u32 offset, u8 resource_index, void **context);
drivers/acpi/acpica/acstruct.h
118
void *context;
drivers/acpi/acpica/acutils.h
126
u8 resource_index, void **context);
drivers/acpi/acpica/acutils.h
132
void *context);
drivers/acpi/acpica/acutils.h
566
acpi_pkg_callback walk_callback, void *context);
drivers/acpi/acpica/acutils.h
598
void **context);
drivers/acpi/acpica/dbcmds.c
30
acpi_db_resource_callback(struct acpi_resource *resource, void *context);
drivers/acpi/acpica/dbcmds.c
34
u32 nesting_level, void *context, void **return_value);
drivers/acpi/acpica/dbcmds.c
733
acpi_db_resource_callback(struct acpi_resource *resource, void *context)
drivers/acpi/acpica/dbcmds.c
753
u32 nesting_level, void *context, void **return_value)
drivers/acpi/acpica/dbdisply.c
1057
void *context, void **return_value)
drivers/acpi/acpica/dbdisply.c
29
void *context, void **return_value);
drivers/acpi/acpica/dbexec.c
28
static void ACPI_SYSTEM_XFACE acpi_db_method_thread(void *context);
drivers/acpi/acpica/dbexec.c
293
u32 nesting_level, void *context, void **return_value)
drivers/acpi/acpica/dbexec.c
32
u32 nesting_level, void *context, void **return_value);
drivers/acpi/acpica/dbexec.c
34
static void ACPI_SYSTEM_XFACE acpi_db_single_execution_thread(void *context);
drivers/acpi/acpica/dbexec.c
507
static void ACPI_SYSTEM_XFACE acpi_db_method_thread(void *context)
drivers/acpi/acpica/dbexec.c
510
struct acpi_db_method_info *info = context;
drivers/acpi/acpica/dbexec.c
611
static void ACPI_SYSTEM_XFACE acpi_db_single_execution_thread(void *context)
drivers/acpi/acpica/dbexec.c
613
struct acpi_db_method_info *info = context;
drivers/acpi/acpica/dbinput.c
1212
void ACPI_SYSTEM_XFACE acpi_db_execute_thread(void *context)
drivers/acpi/acpica/dbmethod.c
22
u32 nesting_level, void *context, void **return_value);
drivers/acpi/acpica/dbmethod.c
449
u32 nesting_level, void *context, void **return_value)
drivers/acpi/acpica/dbmethod.c
454
(struct acpi_db_execute_walk *)context;
drivers/acpi/acpica/dbmethod.c
499
void *context, void **return_value)
drivers/acpi/acpica/dbmethod.c
504
(struct acpi_db_execute_walk *)context;
drivers/acpi/acpica/dbnames.c
22
void *context, void **return_value);
drivers/acpi/acpica/dbnames.c
27
void *context, void **return_value);
drivers/acpi/acpica/dbnames.c
294
void *context, void **return_value)
drivers/acpi/acpica/dbnames.c
297
char *requested_name = (char *)context;
drivers/acpi/acpica/dbnames.c
32
void *context, void **return_value);
drivers/acpi/acpica/dbnames.c
37
void *context, void **return_value);
drivers/acpi/acpica/dbnames.c
398
void *context, void **return_value)
drivers/acpi/acpica/dbnames.c
402
u32 *count = (u32 *)context;
drivers/acpi/acpica/dbnames.c
41
u32 nesting_level, void *context, void **return_value);
drivers/acpi/acpica/dbnames.c
46
void *context, void **return_value);
drivers/acpi/acpica/dbnames.c
490
void *context, void **return_value)
drivers/acpi/acpica/dbnames.c
492
struct acpi_object_info *info = (struct acpi_object_info *)context;
drivers/acpi/acpica/dbnames.c
50
u32 nesting_level, void *context, void **return_value);
drivers/acpi/acpica/dbnames.c
520
u32 nesting_level, void *context, void **return_value)
drivers/acpi/acpica/dbnames.c
524
(struct acpi_region_walk_info *)context;
drivers/acpi/acpica/dbnames.c
605
void *context, void **return_value)
drivers/acpi/acpica/dbnames.c
607
struct acpi_walk_info *info = (struct acpi_walk_info *)context;
drivers/acpi/acpica/dbnames.c
766
u32 nesting_level, void *context, void **return_value)
drivers/acpi/acpica/dbnames.c
769
(struct acpi_integrity_info *)context;
drivers/acpi/acpica/dbnames.c
866
void *context, void **return_value)
drivers/acpi/acpica/dbnames.c
869
(union acpi_operand_object *)context;
drivers/acpi/acpica/dbnames.c
934
u32 nesting_level, void *context, void **return_value)
drivers/acpi/acpica/dbstats.c
216
void *context, void **return_value)
drivers/acpi/acpica/dbstats.c
24
void *context, void **return_value);
drivers/acpi/acpica/dbtest.c
23
u32 nesting_level, void *context, void **return_value);
drivers/acpi/acpica/dbtest.c
241
u32 nesting_level, void *context, void **return_value)
drivers/acpi/acpica/dbtest.c
53
void *context, void **return_value);
drivers/acpi/acpica/dbtest.c
973
void *context, void **return_value)
drivers/acpi/acpica/dbtest.c
978
(struct acpi_db_execute_walk *)context;
drivers/acpi/acpica/dsinit.c
23
u32 level, void *context, void **return_value);
drivers/acpi/acpica/dsinit.c
47
u32 level, void *context, void **return_value)
drivers/acpi/acpica/dsinit.c
50
(struct acpi_init_walk_info *)context;
drivers/acpi/acpica/dspkginit.c
326
union acpi_generic_state *state, void *context)
drivers/acpi/acpica/dspkginit.c
342
if (context) {
drivers/acpi/acpica/dspkginit.c
346
element_ptr = (union acpi_operand_object **)context;
drivers/acpi/acpica/evevent.c
137
acpi_gbl_fixed_event_handlers[i].context = NULL;
drivers/acpi/acpica/evevent.c
265
handler) (acpi_gbl_fixed_event_handlers[event].context));
drivers/acpi/acpica/evglock.c
119
static u32 acpi_ev_global_lock_handler(void *context)
drivers/acpi/acpica/evglock.c
19
static u32 acpi_ev_global_lock_handler(void *context);
drivers/acpi/acpica/evgpe.c
19
static void ACPI_SYSTEM_XFACE acpi_ev_asynch_execute_gpe_method(void *context);
drivers/acpi/acpica/evgpe.c
21
static void ACPI_SYSTEM_XFACE acpi_ev_asynch_enable_gpe(void *context);
drivers/acpi/acpica/evgpe.c
455
static void ACPI_SYSTEM_XFACE acpi_ev_asynch_execute_gpe_method(void *context)
drivers/acpi/acpica/evgpe.c
457
struct acpi_gpe_event_info *gpe_event_info = context;
drivers/acpi/acpica/evgpe.c
552
static void ACPI_SYSTEM_XFACE acpi_ev_asynch_enable_gpe(void *context)
drivers/acpi/acpica/evgpe.c
554
struct acpi_gpe_event_info *gpe_event_info = context;
drivers/acpi/acpica/evgpe.c
718
gpe_handler_info->context);
drivers/acpi/acpica/evgpe.c
808
context);
drivers/acpi/acpica/evgpeblk.c
420
void *context)
drivers/acpi/acpica/evgpeblk.c
428
u8 *is_polling_needed = context;
drivers/acpi/acpica/evgpeinit.c
293
u32 level, void *context, void **return_value)
drivers/acpi/acpica/evgpeinit.c
298
ACPI_CAST_PTR(struct acpi_gpe_walk_info, context);
drivers/acpi/acpica/evgpeutil.c
272
void *context)
drivers/acpi/acpica/evgpeutil.c
31
acpi_ev_walk_gpe_list(acpi_gpe_callback gpe_walk_callback, void *context)
drivers/acpi/acpica/evgpeutil.c
56
context);
drivers/acpi/acpica/evgpeutil.c
90
struct acpi_gpe_block_info *gpe_block, void *context)
drivers/acpi/acpica/evgpeutil.c
92
struct acpi_gpe_device_info *info = context;
drivers/acpi/acpica/evhandler.c
166
u32 level, void *context, void **return_value)
drivers/acpi/acpica/evhandler.c
176
handler_obj = (union acpi_operand_object *)context;
drivers/acpi/acpica/evhandler.c
22
u32 level, void *context, void **return_value);
drivers/acpi/acpica/evhandler.c
331
acpi_adr_space_setup setup, void *context)
drivers/acpi/acpica/evhandler.c
504
handler_obj->address_space.context = context;
drivers/acpi/acpica/evmisc.c
161
static void ACPI_SYSTEM_XFACE acpi_ev_notify_dispatch(void *context)
drivers/acpi/acpica/evmisc.c
163
union acpi_generic_state *info = (union acpi_generic_state *)context;
drivers/acpi/acpica/evmisc.c
173
info->notify.global->context);
drivers/acpi/acpica/evmisc.c
182
handler_obj->notify.context);
drivers/acpi/acpica/evmisc.c
19
static void ACPI_SYSTEM_XFACE acpi_ev_notify_dispatch(void *context);
drivers/acpi/acpica/evregion.c
115
struct acpi_connection_info *context;
drivers/acpi/acpica/evregion.c
141
context = handler_desc->address_space.context;
drivers/acpi/acpica/evregion.c
170
handler_desc->address_space.context;
drivers/acpi/acpica/evregion.c
181
handler_desc->address_space.context;
drivers/acpi/acpica/evregion.c
195
context, &region_context);
drivers/acpi/acpica/evregion.c
271
context && field_obj) {
drivers/acpi/acpica/evregion.c
283
context->connection = field_obj->field.resource_buffer;
drivers/acpi/acpica/evregion.c
284
context->length = field_obj->field.resource_length;
drivers/acpi/acpica/evregion.c
285
context->access_length = field_obj->field.access_length;
drivers/acpi/acpica/evregion.c
29
u32 level, void *context, void **return_value);
drivers/acpi/acpica/evregion.c
295
status = handler(function, address, bit_width, value, context,
drivers/acpi/acpica/evregion.c
434
context, region_context);
drivers/acpi/acpica/evregion.c
756
u32 level, void *context, void **return_value)
drivers/acpi/acpica/evregion.c
763
info = ACPI_CAST_PTR(struct acpi_reg_walk_info, context);
drivers/acpi/acpica/evsci.c
120
u32 ACPI_SYSTEM_XFACE acpi_ev_gpe_xrupt_handler(void *context)
drivers/acpi/acpica/evsci.c
122
struct acpi_gpe_xrupt_info *gpe_xrupt_list = context;
drivers/acpi/acpica/evsci.c
17
static u32 ACPI_SYSTEM_XFACE acpi_ev_sci_xrupt_handler(void *context);
drivers/acpi/acpica/evsci.c
54
int_status |= sci_handler->address(sci_handler->context);
drivers/acpi/acpica/evsci.c
76
static u32 ACPI_SYSTEM_XFACE acpi_ev_sci_xrupt_handler(void *context)
drivers/acpi/acpica/evsci.c
78
struct acpi_gpe_xrupt_info *gpe_xrupt_list = context;
drivers/acpi/acpica/evxface.c
168
handler_obj->notify.context = context;
drivers/acpi/acpica/evxface.c
253
acpi_gbl_global_notify[i].context = NULL;
drivers/acpi/acpica/evxface.c
27
acpi_gpe_handler address, void *context);
drivers/acpi/acpica/evxface.c
389
acpi_status acpi_install_sci_handler(acpi_sci_handler address, void *context)
drivers/acpi/acpica/evxface.c
410
new_sci_handler->context = context;
drivers/acpi/acpica/evxface.c
534
acpi_install_global_event_handler(acpi_gbl_event_handler handler, void *context)
drivers/acpi/acpica/evxface.c
559
acpi_gbl_global_event_handler_context = context;
drivers/acpi/acpica/evxface.c
585
acpi_event_handler handler, void *context)
drivers/acpi/acpica/evxface.c
59
acpi_notify_handler handler, void *context)
drivers/acpi/acpica/evxface.c
612
acpi_gbl_fixed_event_handlers[event].context = context;
drivers/acpi/acpica/evxface.c
625
acpi_gbl_fixed_event_handlers[event].context = NULL;
drivers/acpi/acpica/evxface.c
677
acpi_gbl_fixed_event_handlers[event].context = NULL;
drivers/acpi/acpica/evxface.c
720
acpi_gpe_handler address, void *context)
drivers/acpi/acpica/evxface.c
769
handler->context = context;
drivers/acpi/acpica/evxface.c
842
u32 type, acpi_gpe_handler address, void *context)
drivers/acpi/acpica/evxface.c
849
FALSE, address, context);
drivers/acpi/acpica/evxface.c
876
u32 type, acpi_gpe_handler address, void *context)
drivers/acpi/acpica/evxface.c
883
TRUE, address, context);
drivers/acpi/acpica/evxface.c
98
acpi_gbl_global_notify[i].context = context;
drivers/acpi/acpica/evxfregn.c
101
acpi_adr_space_setup setup, void *context)
drivers/acpi/acpica/evxfregn.c
105
context, TRUE);
drivers/acpi/acpica/evxfregn.c
114
void *context)
drivers/acpi/acpica/evxfregn.c
118
context, FALSE);
drivers/acpi/acpica/evxfregn.c
50
void *context, u8 run_reg)
drivers/acpi/acpica/evxfregn.c
80
context);
drivers/acpi/acpica/exdump.c
214
{ACPI_EXD_POINTER, ACPI_EXD_OFFSET(address_space.context), "Context"}
drivers/acpi/acpica/exdump.c
222
{ACPI_EXD_POINTER, ACPI_EXD_OFFSET(notify.context), "Context"},
drivers/acpi/acpica/hwgpe.c
21
void *context);
drivers/acpi/acpica/hwgpe.c
367
struct acpi_gpe_block_info *gpe_block, void *context)
drivers/acpi/acpica/hwgpe.c
404
struct acpi_gpe_block_info *gpe_block, void *context)
drivers/acpi/acpica/hwgpe.c
442
void *context)
drivers/acpi/acpica/hwgpe.c
490
void *context)
drivers/acpi/acpica/hwgpe.c
540
void *context)
drivers/acpi/acpica/hwgpe.c
542
struct acpi_gpe_block_status_context *c = context;
drivers/acpi/acpica/hwgpe.c
659
struct acpi_gpe_block_status_context context = {
drivers/acpi/acpica/hwgpe.c
673
context.gpe_skip_register_info = gpe_event_info->register_info;
drivers/acpi/acpica/hwgpe.c
674
context.gpe_skip_mask = acpi_hw_get_gpe_register_bit(gpe_event_info);
drivers/acpi/acpica/hwgpe.c
679
(void)acpi_ev_walk_gpe_list(acpi_hw_get_gpe_block_status, &context);
drivers/acpi/acpica/hwgpe.c
680
return (context.retval != 0);
drivers/acpi/acpica/nsdump.c
141
u32 level, void *context, void **return_value)
drivers/acpi/acpica/nsdump.c
143
struct acpi_walk_info *info = (struct acpi_walk_info *)context;
drivers/acpi/acpica/nsdump.c
24
u32 level, void *context, void **return_value);
drivers/acpi/acpica/nsdump.c
31
u32 level, void *context, void **return_value);
drivers/acpi/acpica/nsdump.c
35
u32 level, void *context, void **return_value);
drivers/acpi/acpica/nsdump.c
676
u32 level, void *context, void **return_value)
drivers/acpi/acpica/nsdump.c
678
u32 max_level = *((u32 *)context);
drivers/acpi/acpica/nsdump.c
713
u32 level, void *context, void **return_value)
drivers/acpi/acpica/nsdump.c
715
u32 *max_level = (u32 *)context;
drivers/acpi/acpica/nsdumpdv.c
36
u32 level, void *context, void **return_value)
drivers/acpi/acpica/nsdumpdv.c
46
acpi_ns_dump_one_object(obj_handle, level, context, return_value);
drivers/acpi/acpica/nsinit.c
23
u32 level, void *context, void **return_value);
drivers/acpi/acpica/nsinit.c
261
u32 level, void *context, void **return_value)
drivers/acpi/acpica/nsinit.c
27
u32 nesting_level, void *context, void **return_value);
drivers/acpi/acpica/nsinit.c
31
u32 nesting_level, void *context, void **return_value);
drivers/acpi/acpica/nsinit.c
317
u32 level, void *context, void **return_value)
drivers/acpi/acpica/nsinit.c
322
(struct acpi_init_walk_info *)context;
drivers/acpi/acpica/nsinit.c
451
u32 nesting_level, void *context, void **return_value)
drivers/acpi/acpica/nsinit.c
454
ACPI_CAST_PTR(struct acpi_device_walk_info, context);
drivers/acpi/acpica/nsinit.c
516
u32 nesting_level, void *context, void **return_value)
drivers/acpi/acpica/nsinit.c
519
ACPI_CAST_PTR(struct acpi_device_walk_info, context);
drivers/acpi/acpica/nswalk.c
156
void *context, void **return_value)
drivers/acpi/acpica/nswalk.c
237
level, context,
drivers/acpi/acpica/nswalk.c
244
level, context,
drivers/acpi/acpica/nsxfeval.c
559
void *context, void **return_value)
drivers/acpi/acpica/nsxfeval.c
609
context, return_value);
drivers/acpi/acpica/nsxfeval.c
637
void *context, void **return_value)
drivers/acpi/acpica/nsxfeval.c
639
struct acpi_get_devices_info *info = context;
drivers/acpi/acpica/nsxfeval.c
741
info->context, return_value);
drivers/acpi/acpica/nsxfeval.c
773
void *context, void **return_value)
drivers/acpi/acpica/nsxfeval.c
791
info.context = context;
drivers/acpi/acpica/rslist.c
32
u32 offset, u8 resource_index, void **context)
drivers/acpi/acpica/rslist.c
35
ACPI_CAST_INDIRECT_PTR(struct acpi_resource, context);
drivers/acpi/acpica/rsxface.c
35
acpi_rs_match_vendor_resource(struct acpi_resource *resource, void *context);
drivers/acpi/acpica/rsxface.c
442
acpi_rs_match_vendor_resource(struct acpi_resource *resource, void *context)
drivers/acpi/acpica/rsxface.c
444
struct acpi_vendor_walk_info *info = context;
drivers/acpi/acpica/rsxface.c
508
void *context)
drivers/acpi/acpica/rsxface.c
547
status = user_function(resource, context);
drivers/acpi/acpica/rsxface.c
596
acpi_walk_resource_callback user_function, void *context)
drivers/acpi/acpica/rsxface.c
623
status = acpi_walk_resource_buffer(&buffer, user_function, context);
drivers/acpi/acpica/tbxface.c
457
acpi_install_table_handler(acpi_table_handler handler, void *context)
drivers/acpi/acpica/tbxface.c
482
acpi_gbl_table_handler_context = context;
drivers/acpi/acpica/utcopy.c
215
void *context)
drivers/acpi/acpica/utcopy.c
218
struct acpi_pkg_info *info = (struct acpi_pkg_info *)context;
drivers/acpi/acpica/utcopy.c
28
void *context);
drivers/acpi/acpica/utcopy.c
50
void *context);
drivers/acpi/acpica/utcopy.c
794
void *context)
drivers/acpi/acpica/utdelete.c
246
context,
drivers/acpi/acpica/utmisc.c
199
acpi_pkg_callback walk_callback, void *context)
drivers/acpi/acpica/utmisc.c
238
this_source_obj, state, context);
drivers/acpi/acpica/utmisc.c
278
this_source_obj, state, context);
drivers/acpi/acpica/utobject.c
30
union acpi_generic_state *state, void *context);
drivers/acpi/acpica/utobject.c
557
union acpi_generic_state *state, void *context)
drivers/acpi/acpica/utobject.c
560
struct acpi_pkg_info *info = (struct acpi_pkg_info *)context;
drivers/acpi/acpica/utresrc.c
144
acpi_walk_aml_callback user_function, void **context)
drivers/acpi/acpica/utresrc.c
190
context);
drivers/acpi/acpica/utresrc.c
218
*context = aml;
drivers/acpi/acpica/utresrc.c
239
user_function(end_tag, 2, offset, resource_index, context);
drivers/acpi/apei/apei-base.c
781
struct acpi_osc_context context = {
drivers/acpi/apei/apei-base.c
793
|| ACPI_FAILURE(acpi_run_osc(handle, &context)))
drivers/acpi/apei/apei-base.c
796
kfree(context.ret.pointer);
drivers/acpi/arm64/iort.c
146
(struct acpi_iort_node *node, void *context);
drivers/acpi/arm64/iort.c
229
void *context)
drivers/acpi/arm64/iort.c
251
ACPI_SUCCESS(callback(iort_node, context)))
drivers/acpi/arm64/iort.c
262
void *context)
drivers/acpi/arm64/iort.c
264
struct device *dev = context;
drivers/acpi/arm64/iort.c
328
static acpi_status iort_match_iwb_callback(struct acpi_iort_node *node, void *context)
drivers/acpi/arm64/iort.c
331
u32 *id = context;
drivers/acpi/bus.c
1189
static int acpi_dev_for_one_check(struct device *dev, void *context)
drivers/acpi/bus.c
1191
struct acpi_dev_walk_context *adwc = context;
drivers/acpi/bus.c
1382
static acpi_status acpi_bus_table_handler(u32 event, void *table, void *context)
drivers/acpi/bus.c
1387
return acpi_sysfs_table_handler(event, table, context);
drivers/acpi/bus.c
139
void *context)
drivers/acpi/bus.c
287
acpi_status acpi_run_osc(acpi_handle handle, struct acpi_osc_context *context)
drivers/acpi/bus.c
296
if (!context || !context->cap.pointer ||
drivers/acpi/bus.c
297
context->cap.length < 2 * sizeof(u32) ||
drivers/acpi/bus.c
298
guid_parse(context->uuid_str, &guid))
drivers/acpi/bus.c
301
ret = acpi_eval_osc(handle, &guid, context->rev, &context->cap,
drivers/acpi/bus.c
309
if (acpi_osc_error_check(handle, &guid, context->rev, &context->cap, retbuf)) {
drivers/acpi/bus.c
314
context->ret.length = out_obj->buffer.length;
drivers/acpi/bus.c
315
context->ret.pointer = kmemdup(retbuf, context->ret.length, GFP_KERNEL);
drivers/acpi/bus.c
316
if (!context->ret.pointer) {
drivers/acpi/bus.c
660
acpi_notify_handler handler, void *context)
drivers/acpi/bus.c
665
handler, context);
drivers/acpi/device_pm.c
546
if (adev->wakeup.context.func) {
drivers/acpi/device_pm.c
548
adev->wakeup.context.func,
drivers/acpi/device_pm.c
549
dev_name(adev->wakeup.context.dev));
drivers/acpi/device_pm.c
550
adev->wakeup.context.func(&adev->wakeup.context);
drivers/acpi/device_pm.c
571
void (*func)(struct acpi_device_wakeup_context *context))
drivers/acpi/device_pm.c
590
adev->wakeup.context.dev = dev;
drivers/acpi/device_pm.c
591
adev->wakeup.context.func = func;
drivers/acpi/device_pm.c
620
adev->wakeup.context.func = NULL;
drivers/acpi/device_pm.c
621
adev->wakeup.context.dev = NULL;
drivers/acpi/device_pm.c
836
static void acpi_pm_notify_work_func(struct acpi_device_wakeup_context *context)
drivers/acpi/device_pm.c
838
struct device *dev = context->dev;
drivers/acpi/ec.c
1412
ec_parse_io_ports(struct acpi_resource *resource, void *context);
drivers/acpi/ec.c
1444
void *context, void **return_value)
drivers/acpi/ec.c
1448
struct acpi_ec *ec = context;
drivers/acpi/ec.c
1460
ec_parse_device(acpi_handle handle, u32 Level, void *context, void **retval)
drivers/acpi/ec.c
1464
struct acpi_ec *ec = context;
drivers/acpi/ec.c
1776
ec_parse_io_ports(struct acpi_resource *resource, void *context)
drivers/acpi/ec.c
1778
struct acpi_ec *ec = context;
drivers/acpi/evged.c
69
void *context)
drivers/acpi/evged.c
75
struct acpi_ged_device *geddev = context;
drivers/acpi/fan_core.c
462
static void acpi_fan_notify_handler(acpi_handle handle, u32 event, void *context)
drivers/acpi/fan_core.c
464
struct device *dev = context;
drivers/acpi/internal.h
85
acpi_status acpi_sysfs_table_handler(u32 event, void *table, void *context);
drivers/acpi/ioapic.c
128
ioapic->root_handle = (acpi_handle)context;
drivers/acpi/ioapic.c
98
void *context, void **rv)
drivers/acpi/irq.c
198
void *context)
drivers/acpi/irq.c
200
struct acpi_irq_parse_one_ctx *ctx = context;
drivers/acpi/mipi-disco-img.c
37
static void acpi_mipi_data_tag(acpi_handle handle, void *context)
drivers/acpi/mipi-disco-img.c
63
static acpi_status parse_csi2_resource(struct acpi_resource *res, void *context)
drivers/acpi/mipi-disco-img.c
65
struct csi2_resources_walk_data *crwd = context;
drivers/acpi/osl.c
1093
acpi_osd_exec_callback function, void *context)
drivers/acpi/osl.c
1100
function, context));
drivers/acpi/osl.c
1103
ret = acpi_debugger_create_thread(function, context);
drivers/acpi/osl.c
1125
dpc->context = context;
drivers/acpi/osl.c
47
void *context;
drivers/acpi/osl.c
558
void *context)
drivers/acpi/osl.c
580
acpi_irq_context = context;
drivers/acpi/osl.c
879
dpc->function(dpc->context);
drivers/acpi/osl.c
918
int acpi_debugger_create_thread(acpi_osd_exec_callback function, void *context)
drivers/acpi/osl.c
939
ret = func(function, context);
drivers/acpi/pci_link.c
174
void *context)
drivers/acpi/pci_link.c
176
int *irq = context;
drivers/acpi/pci_link.c
85
void *context)
drivers/acpi/pci_link.c
87
struct acpi_pci_link *link = context;
drivers/acpi/pci_root.c
225
struct acpi_osc_context context = {
drivers/acpi/pci_root.c
233
status = acpi_run_osc(root->device->handle, &context);
drivers/acpi/pci_root.c
235
*pci_control = acpi_osc_ctx_get_pci_control(&context);
drivers/acpi/pci_root.c
237
*cxl_control = acpi_osc_ctx_get_cxl_control(&context);
drivers/acpi/pci_root.c
238
kfree(context.ret.pointer);
drivers/acpi/pci_slot.c
84
register_slot(acpi_handle handle, u32 lvl, void *context, void **rv)
drivers/acpi/pci_slot.c
91
struct pci_bus *pci_bus = context;
drivers/acpi/prmt.c
272
struct prm_context_buffer context;
drivers/acpi/prmt.c
278
memset(&context, 0, sizeof(context));
drivers/acpi/prmt.c
279
ACPI_COPY_NAMESEG(context.signature, "PRMC");
drivers/acpi/prmt.c
280
context.identifier = handler->guid;
drivers/acpi/prmt.c
281
context.static_data_buffer = handler->static_data_buffer_addr;
drivers/acpi/prmt.c
282
context.mmio_ranges = module->mmio_info;
drivers/acpi/prmt.c
286
&context);
drivers/acpi/prmt.c
312
struct prm_context_buffer context;
drivers/acpi/prmt.c
336
ACPI_COPY_NAMESEG(context.signature, "PRMC");
drivers/acpi/prmt.c
337
context.revision = 0x0;
drivers/acpi/prmt.c
338
context.reserved = 0x0;
drivers/acpi/prmt.c
339
context.identifier = handler->guid;
drivers/acpi/prmt.c
340
context.static_data_buffer = handler->static_data_buffer_addr;
drivers/acpi/prmt.c
341
context.mmio_ranges = module->mmio_info;
drivers/acpi/prmt.c
345
&context);
drivers/acpi/processor_pdc.c
103
early_init_pdc(acpi_handle handle, u32 lvl, void *context, void **rv)
drivers/acpi/property.c
397
static void acpi_nondev_subnode_tag(acpi_handle handle, void *context)
drivers/acpi/resource.c
1143
void *context, void **ret)
drivers/acpi/resource.c
1145
struct resource *res = context;
drivers/acpi/resource.c
909
void *context)
drivers/acpi/resource.c
911
struct res_proc_context *c = context;
drivers/acpi/riscv/irq.c
172
void *context, void **return_value)
drivers/acpi/riscv/irq.c
199
void *context, void **return_value)
drivers/acpi/riscv/irq.c
272
static acpi_status riscv_acpi_irq_get_parent(struct acpi_resource *ares, void *context)
drivers/acpi/riscv/irq.c
274
struct acpi_irq_dep_ctx *ctx = context;
drivers/acpi/riscv/rimt.c
149
void *context)
drivers/acpi/riscv/rimt.c
171
ACPI_SUCCESS(rimt_match_node_callback(rimt_node, context)))
drivers/acpi/riscv/rimt.c
65
void *context)
drivers/acpi/riscv/rimt.c
68
struct device *dev = context;
drivers/acpi/sbs.c
604
static void acpi_sbs_callback(void *context)
drivers/acpi/sbs.c
607
struct acpi_sbs *sbs = context;
drivers/acpi/sbshc.c
181
smbus_alarm_callback callback, void *context)
drivers/acpi/sbshc.c
185
hc->context = context;
drivers/acpi/sbshc.c
196
hc->context = NULL;
drivers/acpi/sbshc.c
204
static inline void acpi_smbus_callback(void *context)
drivers/acpi/sbshc.c
206
struct acpi_smb_hc *hc = context;
drivers/acpi/sbshc.c
208
hc->callback(hc->context);
drivers/acpi/sbshc.c
211
static int smbus_alarm(void *context)
drivers/acpi/sbshc.c
213
struct acpi_smb_hc *hc = context;
drivers/acpi/sbshc.c
31
void *context;
drivers/acpi/sbshc.h
24
typedef void (*smbus_alarm_callback)(void *context);
drivers/acpi/sbshc.h
31
smbus_alarm_callback callback, void *context);
drivers/acpi/scan.c
1268
acpi_backlight_cap_match(acpi_handle handle, u32 level, void *context,
drivers/acpi/scan.c
1271
long *cap = context;
drivers/acpi/scan.c
1919
void *context)
drivers/acpi/scan.c
1921
struct resource *res = context;
drivers/acpi/scan.c
606
static void acpi_scan_drop_device(acpi_handle handle, void *context)
drivers/acpi/scan.c
609
struct acpi_device *adev = context;
drivers/acpi/sysfs.c
382
acpi_status acpi_sysfs_table_handler(u32 event, void *table, void *context)
drivers/acpi/sysfs.c
638
u32 event_number, void *context)
drivers/acpi/video_detect.c
64
find_video(acpi_handle handle, u32 lvl, void *context, void **rv)
drivers/acpi/video_detect.c
67
long *cap = context;
drivers/acpi/wakeup.c
111
int acpi_register_wakeup_handler(int wake_irq, bool (*wakeup)(void *context),
drivers/acpi/wakeup.c
112
void *context)
drivers/acpi/wakeup.c
128
handler->context = context;
drivers/acpi/wakeup.c
143
void acpi_unregister_wakeup_handler(bool (*wakeup)(void *context),
drivers/acpi/wakeup.c
144
void *context)
drivers/acpi/wakeup.c
150
if (handler->wakeup == wakeup && handler->context == context) {
drivers/acpi/wakeup.c
166
if (handler->wakeup(handler->context))
drivers/acpi/wakeup.c
17
bool (*wakeup)(void *context);
drivers/acpi/wakeup.c
18
void *context;
drivers/android/binder.c
1074
offset = (node == proc->context->binder_context_mgr_node) ? 0 : 1;
drivers/android/binder.c
3004
const char *context = proc->context->name;
drivers/android/binder.c
3012
trace_binder_netlink_report(context, t, data_size, error);
drivers/android/binder.c
3023
nla_put_string(skb, BINDER_A_REPORT_CONTEXT, context) ||
drivers/android/binder.c
3079
struct binder_context *context = proc->context;
drivers/android/binder.c
3098
strscpy(e->context_name, proc->context->name, BINDERFS_MAX_NAME);
drivers/android/binder.c
3213
mutex_lock(&context->context_mgr_node_lock);
drivers/android/binder.c
3214
target_node = context->binder_context_mgr_node;
drivers/android/binder.c
3221
mutex_unlock(&context->context_mgr_node_lock);
drivers/android/binder.c
3397
if (lsmctx.context) {
drivers/android/binder.c
3407
lsmctx.context, lsmctx.len);
drivers/android/binder.c
3413
lsmctx.context = NULL;
drivers/android/binder.c
3849
if (lsmctx.context)
drivers/android/binder.c
4121
struct binder_context *context = proc->context;
drivers/android/binder.c
4157
mutex_lock(&context->context_mgr_node_lock);
drivers/android/binder.c
4158
ctx_mgr_node = context->binder_context_mgr_node;
drivers/android/binder.c
4163
mutex_unlock(&context->context_mgr_node_lock);
drivers/android/binder.c
4170
mutex_unlock(&context->context_mgr_node_lock);
drivers/android/binder.c
5317
device = container_of(proc->context, struct binder_device, context);
drivers/android/binder.c
5320
kfree(proc->context->name);
drivers/android/binder.c
5511
struct binder_context *context = proc->context;
drivers/android/binder.c
5515
guard(mutex)(&context->context_mgr_node_lock);
drivers/android/binder.c
5516
if (context->binder_context_mgr_node) {
drivers/android/binder.c
5523
if (uid_valid(context->binder_context_mgr_uid)) {
drivers/android/binder.c
5524
if (!uid_eq(context->binder_context_mgr_uid, curr_euid)) {
drivers/android/binder.c
5528
context->binder_context_mgr_uid));
drivers/android/binder.c
5532
context->binder_context_mgr_uid = curr_euid;
drivers/android/binder.c
5542
context->binder_context_mgr_node = new_node;
drivers/android/binder.c
5552
struct binder_context *context = proc->context;
drivers/android/binder.c
5563
mutex_lock(&context->context_mgr_node_lock);
drivers/android/binder.c
5564
if (!context->binder_context_mgr_node ||
drivers/android/binder.c
5565
context->binder_context_mgr_node->proc != proc) {
drivers/android/binder.c
5566
mutex_unlock(&context->context_mgr_node_lock);
drivers/android/binder.c
5569
mutex_unlock(&context->context_mgr_node_lock);
drivers/android/binder.c
6086
proc->context = &binder_dev->context;
drivers/android/binder.c
6262
struct binder_context *context = proc->context;
drivers/android/binder.c
6270
mutex_lock(&context->context_mgr_node_lock);
drivers/android/binder.c
6271
if (context->binder_context_mgr_node &&
drivers/android/binder.c
6272
context->binder_context_mgr_node->proc == proc) {
drivers/android/binder.c
6276
context->binder_context_mgr_node = NULL;
drivers/android/binder.c
6278
mutex_unlock(&context->context_mgr_node_lock);
drivers/android/binder.c
6630
seq_printf(m, "context %s\n", proc->context->name);
drivers/android/binder.c
6794
seq_printf(m, "context %s\n", proc->context->name);
drivers/android/binder.c
7076
binder_device->context.binder_context_mgr_uid = INVALID_UID;
drivers/android/binder.c
7077
binder_device->context.name = name;
drivers/android/binder.c
7078
mutex_init(&binder_device->context.context_mgr_node_lock);
drivers/android/binder.c
834
node == node->proc->context->binder_context_mgr_node &&
drivers/android/binder_internal.h
36
struct binder_context context;
drivers/android/binder_internal.h
447
struct binder_context *context;
drivers/android/binder_trace.h
406
TP_PROTO(const char *context,
drivers/android/binder_trace.h
410
TP_ARGS(context, t, data_size, error),
drivers/android/binder_trace.h
412
__field(const char *, context)
drivers/android/binder_trace.h
424
__entry->context = context;
drivers/android/binder_trace.h
438
__entry->context, __entry->error, __entry->is_reply,
drivers/android/binderfs.c
171
device->context.binder_context_mgr_uid = INVALID_UID;
drivers/android/binderfs.c
172
device->context.name = name;
drivers/android/binderfs.c
175
mutex_init(&device->context.context_mgr_node_lock);
drivers/android/binderfs.c
267
kfree(device->context.name);
drivers/ata/libata-acpi.c
183
struct ata_acpi_hotplug_context *context;
drivers/ata/libata-acpi.c
197
context = kzalloc_obj(*context);
drivers/ata/libata-acpi.c
198
if (!context)
drivers/ata/libata-acpi.c
201
context->data.ap = ap;
drivers/ata/libata-acpi.c
202
acpi_initialize_hp_context(adev, &context->hp, ata_acpi_ap_notify_dock,
drivers/ata/libata-acpi.c
212
struct ata_acpi_hotplug_context *context;
drivers/ata/libata-acpi.c
239
context = kzalloc_obj(*context);
drivers/ata/libata-acpi.c
240
if (!context)
drivers/ata/libata-acpi.c
243
context->data.dev = dev;
drivers/ata/libata-acpi.c
244
acpi_initialize_hp_context(adev, &context->hp, ata_acpi_dev_notify_dock,
drivers/ata/libata-acpi.c
50
#define ata_hotplug_data(context) (container_of((context), struct ata_acpi_hotplug_context, hp)->data)
drivers/ata/libata-zpodd.c
239
static void zpodd_wake_dev(acpi_handle handle, u32 event, void *context)
drivers/ata/libata-zpodd.c
241
struct ata_device *ata_dev = context;
drivers/ata/sata_sil24.c
1013
u32 context, cerr;
drivers/ata/sata_sil24.c
1033
context = readl(port + PORT_CONTEXT);
drivers/ata/sata_sil24.c
1034
pmp = (context >> 5) & 0xf;
drivers/base/firmware_loader/main.c
1138
void *context;
drivers/base/firmware_loader/main.c
1139
void (*cont)(const struct firmware *fw, void *context);
drivers/base/firmware_loader/main.c
1152
fw_work->cont(fw, fw_work->context);
drivers/base/firmware_loader/main.c
1163
const char *name, struct device *device, gfp_t gfp, void *context,
drivers/base/firmware_loader/main.c
1164
void (*cont)(const struct firmware *fw, void *context), bool nowarn)
drivers/base/firmware_loader/main.c
1179
fw_work->context = context;
drivers/base/firmware_loader/main.c
1228
const char *name, struct device *device, gfp_t gfp, void *context,
drivers/base/firmware_loader/main.c
1229
void (*cont)(const struct firmware *fw, void *context))
drivers/base/firmware_loader/main.c
1232
context, cont, false);
drivers/base/firmware_loader/main.c
1254
struct device *device, gfp_t gfp, void *context,
drivers/base/firmware_loader/main.c
1255
void (*cont)(const struct firmware *fw, void *context))
drivers/base/firmware_loader/main.c
1258
gfp, context, cont, true);
drivers/base/regmap/internal.h
111
int (*reg_read)(void *context, unsigned int reg, unsigned int *val);
drivers/base/regmap/internal.h
112
int (*reg_write)(void *context, unsigned int reg, unsigned int val);
drivers/base/regmap/internal.h
113
int (*reg_update_bits)(void *context, unsigned int reg,
drivers/base/regmap/internal.h
116
int (*read)(void *context, const void *reg_buf, size_t reg_size,
drivers/base/regmap/internal.h
118
int (*write)(void *context, const void *data, size_t count);
drivers/base/regmap/regmap-ac97.c
44
static int regmap_ac97_reg_read(void *context, unsigned int reg,
drivers/base/regmap/regmap-ac97.c
47
struct snd_ac97 *ac97 = context;
drivers/base/regmap/regmap-ac97.c
54
static int regmap_ac97_reg_write(void *context, unsigned int reg,
drivers/base/regmap/regmap-ac97.c
57
struct snd_ac97 *ac97 = context;
drivers/base/regmap/regmap-fsi.c
107
static int regmap_fsi16le_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-fsi.c
115
return fsi_slave_write(context, reg, &v, sizeof(v));
drivers/base/regmap/regmap-fsi.c
123
static int regmap_fsi8_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-fsi.c
128
ret = fsi_slave_read(context, reg, &v, sizeof(v));
drivers/base/regmap/regmap-fsi.c
136
static int regmap_fsi8_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-fsi.c
144
return fsi_slave_write(context, reg, &v, sizeof(v));
drivers/base/regmap/regmap-fsi.c
15
static int regmap_fsi32_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-fsi.c
20
ret = fsi_slave_read(context, reg, &v, sizeof(v));
drivers/base/regmap/regmap-fsi.c
28
static int regmap_fsi32_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-fsi.c
32
return fsi_slave_write(context, reg, &v, sizeof(v));
drivers/base/regmap/regmap-fsi.c
40
static int regmap_fsi32le_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-fsi.c
45
ret = fsi_slave_read(context, reg, &v, sizeof(v));
drivers/base/regmap/regmap-fsi.c
53
static int regmap_fsi32le_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-fsi.c
57
return fsi_slave_write(context, reg, &v, sizeof(v));
drivers/base/regmap/regmap-fsi.c
65
static int regmap_fsi16_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-fsi.c
70
ret = fsi_slave_read(context, reg, &v, sizeof(v));
drivers/base/regmap/regmap-fsi.c
78
static int regmap_fsi16_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-fsi.c
86
return fsi_slave_write(context, reg, &v, sizeof(v));
drivers/base/regmap/regmap-fsi.c
94
static int regmap_fsi16le_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-fsi.c
99
ret = fsi_slave_read(context, reg, &v, sizeof(v));
drivers/base/regmap/regmap-i2c.c
106
static int regmap_smbus_word_write_swapped(void *context, unsigned int reg,
drivers/base/regmap/regmap-i2c.c
109
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
123
static int regmap_i2c_write(void *context, const void *data, size_t count)
drivers/base/regmap/regmap-i2c.c
125
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
138
static int regmap_i2c_gather_write(void *context,
drivers/base/regmap/regmap-i2c.c
142
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
15
static int regmap_smbus_byte_reg_read(void *context, unsigned int reg,
drivers/base/regmap/regmap-i2c.c
172
static int regmap_i2c_read(void *context,
drivers/base/regmap/regmap-i2c.c
176
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
18
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
208
static int regmap_i2c_smbus_i2c_write(void *context, const void *data,
drivers/base/regmap/regmap-i2c.c
211
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
222
static int regmap_i2c_smbus_i2c_read(void *context, const void *reg,
drivers/base/regmap/regmap-i2c.c
226
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
249
static int regmap_i2c_smbus_i2c_write_reg16(void *context, const void *data,
drivers/base/regmap/regmap-i2c.c
252
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
263
static int regmap_i2c_smbus_i2c_read_reg16(void *context, const void *reg,
drivers/base/regmap/regmap-i2c.c
267
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
34
static int regmap_smbus_byte_reg_write(void *context, unsigned int reg,
drivers/base/regmap/regmap-i2c.c
37
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
51
static int regmap_smbus_word_reg_read(void *context, unsigned int reg,
drivers/base/regmap/regmap-i2c.c
54
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
70
static int regmap_smbus_word_reg_write(void *context, unsigned int reg,
drivers/base/regmap/regmap-i2c.c
73
struct device *dev = context;
drivers/base/regmap/regmap-i2c.c
87
static int regmap_smbus_word_read_swapped(void *context, unsigned int reg,
drivers/base/regmap/regmap-i2c.c
90
struct device *dev = context;
drivers/base/regmap/regmap-i3c.c
10
static int regmap_i3c_write(void *context, const void *data, size_t count)
drivers/base/regmap/regmap-i3c.c
12
struct device *dev = context;
drivers/base/regmap/regmap-i3c.c
25
static int regmap_i3c_read(void *context,
drivers/base/regmap/regmap-i3c.c
29
struct device *dev = context;
drivers/base/regmap/regmap-mdio.c
13
static int regmap_mdio_c22_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-mdio.c
15
struct mdio_device *mdio_dev = context;
drivers/base/regmap/regmap-mdio.c
30
static int regmap_mdio_c22_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-mdio.c
32
struct mdio_device *mdio_dev = context;
drivers/base/regmap/regmap-mdio.c
45
static int regmap_mdio_c45_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-mdio.c
47
struct mdio_device *mdio_dev = context;
drivers/base/regmap/regmap-mdio.c
66
static int regmap_mdio_c45_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-mdio.c
68
struct mdio_device *mdio_dev = context;
drivers/base/regmap/regmap-mmio.c
151
static int regmap_mmio_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-mmio.c
153
struct regmap_mmio_context *ctx = context;
drivers/base/regmap/regmap-mmio.c
170
static int regmap_mmio_noinc_write(void *context, unsigned int reg,
drivers/base/regmap/regmap-mmio.c
173
struct regmap_mmio_context *ctx = context;
drivers/base/regmap/regmap-mmio.c
311
static int regmap_mmio_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-mmio.c
313
struct regmap_mmio_context *ctx = context;
drivers/base/regmap/regmap-mmio.c
330
static int regmap_mmio_noinc_read(void *context, unsigned int reg,
drivers/base/regmap/regmap-mmio.c
333
struct regmap_mmio_context *ctx = context;
drivers/base/regmap/regmap-mmio.c
385
static void regmap_mmio_free_context(void *context)
drivers/base/regmap/regmap-mmio.c
387
struct regmap_mmio_context *ctx = context;
drivers/base/regmap/regmap-mmio.c
394
kfree(context);
drivers/base/regmap/regmap-ram.c
19
static int regmap_ram_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-ram.c
21
struct regmap_ram_data *data = context;
drivers/base/regmap/regmap-ram.c
29
static int regmap_ram_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-ram.c
31
struct regmap_ram_data *data = context;
drivers/base/regmap/regmap-ram.c
39
static void regmap_ram_free_context(void *context)
drivers/base/regmap/regmap-ram.c
41
struct regmap_ram_data *data = context;
drivers/base/regmap/regmap-raw-ram.c
29
static int regmap_raw_ram_gather_write(void *context,
drivers/base/regmap/regmap-raw-ram.c
33
struct regmap_ram_data *data = context;
drivers/base/regmap/regmap-raw-ram.c
57
static int regmap_raw_ram_write(void *context, const void *data, size_t count)
drivers/base/regmap/regmap-raw-ram.c
59
return regmap_raw_ram_gather_write(context, data, 2,
drivers/base/regmap/regmap-raw-ram.c
63
static int regmap_raw_ram_read(void *context,
drivers/base/regmap/regmap-raw-ram.c
67
struct regmap_ram_data *data = context;
drivers/base/regmap/regmap-raw-ram.c
92
static void regmap_raw_ram_free_context(void *context)
drivers/base/regmap/regmap-raw-ram.c
94
struct regmap_ram_data *data = context;
drivers/base/regmap/regmap-sccb.c
40
static int regmap_sccb_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-sccb.c
42
struct device *dev = context;
drivers/base/regmap/regmap-sccb.c
75
static int regmap_sccb_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-sccb.c
77
struct device *dev = context;
drivers/base/regmap/regmap-sdw-mbq.c
100
struct regmap_mbq_context *ctx = context;
drivers/base/regmap/regmap-sdw-mbq.c
157
static int regmap_sdw_mbq_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-sdw-mbq.c
159
struct regmap_mbq_context *ctx = context;
drivers/base/regmap/regmap-sdw-mbq.c
98
static int regmap_sdw_mbq_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-sdw.c
12
static int regmap_sdw_write(void *context, const void *val_buf, size_t val_size)
drivers/base/regmap/regmap-sdw.c
14
struct device *dev = context;
drivers/base/regmap/regmap-sdw.c
23
static int regmap_sdw_gather_write(void *context,
drivers/base/regmap/regmap-sdw.c
27
struct device *dev = context;
drivers/base/regmap/regmap-sdw.c
34
static int regmap_sdw_read(void *context,
drivers/base/regmap/regmap-sdw.c
38
struct device *dev = context;
drivers/base/regmap/regmap-slimbus.c
10
static int regmap_slimbus_write(void *context, const void *data, size_t count)
drivers/base/regmap/regmap-slimbus.c
12
struct slim_device *sdev = context;
drivers/base/regmap/regmap-slimbus.c
17
static int regmap_slimbus_read(void *context, const void *reg, size_t reg_size,
drivers/base/regmap/regmap-slimbus.c
20
struct slim_device *sdev = context;
drivers/base/regmap/regmap-spi-avmm.c
546
static int do_reg_access(void *context, bool is_read, unsigned int reg,
drivers/base/regmap/regmap-spi-avmm.c
549
struct spi_avmm_bridge *br = context;
drivers/base/regmap/regmap-spi-avmm.c
578
static int regmap_spi_avmm_gather_write(void *context,
drivers/base/regmap/regmap-spi-avmm.c
588
return do_reg_access(context, false, *(u32 *)reg_buf, (u32 *)val_buf,
drivers/base/regmap/regmap-spi-avmm.c
592
static int regmap_spi_avmm_write(void *context, const void *data, size_t bytes)
drivers/base/regmap/regmap-spi-avmm.c
597
return regmap_spi_avmm_gather_write(context, data, SPI_AVMM_REG_SIZE,
drivers/base/regmap/regmap-spi-avmm.c
602
static int regmap_spi_avmm_read(void *context,
drivers/base/regmap/regmap-spi-avmm.c
612
return do_reg_access(context, true, *(u32 *)reg_buf, val_buf,
drivers/base/regmap/regmap-spi-avmm.c
651
static void spi_avmm_bridge_ctx_free(void *context)
drivers/base/regmap/regmap-spi-avmm.c
653
kfree(context);
drivers/base/regmap/regmap-spi.c
28
static int regmap_spi_write(void *context, const void *data, size_t count)
drivers/base/regmap/regmap-spi.c
30
struct device *dev = context;
drivers/base/regmap/regmap-spi.c
36
static int regmap_spi_gather_write(void *context,
drivers/base/regmap/regmap-spi.c
40
struct device *dev = context;
drivers/base/regmap/regmap-spi.c
53
static int regmap_spi_async_write(void *context,
drivers/base/regmap/regmap-spi.c
61
struct device *dev = context;
drivers/base/regmap/regmap-spi.c
75
async->m.context = async;
drivers/base/regmap/regmap-spi.c
91
static int regmap_spi_read(void *context,
drivers/base/regmap/regmap-spi.c
95
struct device *dev = context;
drivers/base/regmap/regmap-spmi.c
105
static int regmap_spmi_ext_read(void *context,
drivers/base/regmap/regmap-spmi.c
124
err = spmi_ext_register_read(context, addr, val, len);
drivers/base/regmap/regmap-spmi.c
136
err = spmi_ext_register_readl(context, addr, val, len);
drivers/base/regmap/regmap-spmi.c
149
static int regmap_spmi_ext_gather_write(void *context,
drivers/base/regmap/regmap-spmi.c
16
static int regmap_spmi_base_read(void *context,
drivers/base/regmap/regmap-spmi.c
164
err = spmi_ext_register_write(context, addr, val, len);
drivers/base/regmap/regmap-spmi.c
176
err = spmi_ext_register_writel(context, addr, val, len);
drivers/base/regmap/regmap-spmi.c
189
static int regmap_spmi_ext_write(void *context, const void *data,
drivers/base/regmap/regmap-spmi.c
193
return regmap_spmi_ext_gather_write(context, data, 2, data + 2,
drivers/base/regmap/regmap-spmi.c
26
err = spmi_register_read(context, addr++, val++);
drivers/base/regmap/regmap-spmi.c
31
static int regmap_spmi_base_gather_write(void *context,
drivers/base/regmap/regmap-spmi.c
46
err = spmi_register_zero_write(context, *data);
drivers/base/regmap/regmap-spmi.c
56
err = spmi_register_write(context, addr, *data);
drivers/base/regmap/regmap-spmi.c
69
static int regmap_spmi_base_write(void *context, const void *data,
drivers/base/regmap/regmap-spmi.c
73
return regmap_spmi_base_gather_write(context, data, 1, data + 1,
drivers/base/regmap/regmap-w1.c
121
static int w1_reg_a16_v16_read(void *context, unsigned int reg,
drivers/base/regmap/regmap-w1.c
124
struct device *dev = context;
drivers/base/regmap/regmap-w1.c
146
static int w1_reg_a16_v16_write(void *context, unsigned int reg,
drivers/base/regmap/regmap-w1.c
149
struct device *dev = context;
drivers/base/regmap/regmap-w1.c
21
static int w1_reg_a8_v8_read(void *context, unsigned int reg, unsigned int *val)
drivers/base/regmap/regmap-w1.c
23
struct device *dev = context;
drivers/base/regmap/regmap-w1.c
43
static int w1_reg_a8_v8_write(void *context, unsigned int reg, unsigned int val)
drivers/base/regmap/regmap-w1.c
45
struct device *dev = context;
drivers/base/regmap/regmap-w1.c
69
static int w1_reg_a8_v16_read(void *context, unsigned int reg,
drivers/base/regmap/regmap-w1.c
72
struct device *dev = context;
drivers/base/regmap/regmap-w1.c
93
static int w1_reg_a8_v16_write(void *context, unsigned int reg,
drivers/base/regmap/regmap-w1.c
96
struct device *dev = context;
drivers/base/regmap/regmap.c
1873
static int _regmap_bus_formatted_write(void *context, unsigned int reg,
drivers/base/regmap/regmap.c
1878
struct regmap *map = context;
drivers/base/regmap/regmap.c
1901
static int _regmap_bus_reg_write(void *context, unsigned int reg,
drivers/base/regmap/regmap.c
1904
struct regmap *map = context;
drivers/base/regmap/regmap.c
1919
static int _regmap_bus_raw_write(void *context, unsigned int reg,
drivers/base/regmap/regmap.c
1922
struct regmap *map = context;
drivers/base/regmap/regmap.c
1945
void *context = _regmap_map_get_context(map);
drivers/base/regmap/regmap.c
1960
ret = map->reg_write(context, reg, val);
drivers/base/regmap/regmap.c
2789
static int _regmap_bus_reg_read(void *context, unsigned int reg,
drivers/base/regmap/regmap.c
2792
struct regmap *map = context;
drivers/base/regmap/regmap.c
2807
static int _regmap_bus_read(void *context, unsigned int reg,
drivers/base/regmap/regmap.c
2811
struct regmap *map = context;
drivers/base/regmap/regmap.c
2829
void *context = _regmap_map_get_context(map);
drivers/base/regmap/regmap.c
2843
ret = map->reg_read(context, reg, val);
drivers/base/regmap/regmap.c
49
static int _regmap_bus_reg_read(void *context, unsigned int reg,
drivers/base/regmap/regmap.c
51
static int _regmap_bus_read(void *context, unsigned int reg,
drivers/base/regmap/regmap.c
53
static int _regmap_bus_formatted_write(void *context, unsigned int reg,
drivers/base/regmap/regmap.c
55
static int _regmap_bus_reg_write(void *context, unsigned int reg,
drivers/base/regmap/regmap.c
57
static int _regmap_bus_raw_write(void *context, unsigned int reg,
drivers/block/drbd/drbd_actlog.c
367
buffer->context[i] = cpu_to_be32(extent_nr);
drivers/block/drbd/drbd_actlog.c
370
buffer->context[i] = cpu_to_be32(LC_FREE);
drivers/block/drbd/drbd_actlog.c
79
__be32 context[AL_CONTEXT_PER_TRANSACTION];
drivers/block/zram/backend_842.c
21
kfree(ctx->context);
drivers/block/zram/backend_842.c
26
ctx->context = kmalloc(SW842_MEM_COMPRESS, GFP_KERNEL);
drivers/block/zram/backend_842.c
27
if (!ctx->context)
drivers/block/zram/backend_842.c
39
ctx->context);
drivers/block/zram/backend_deflate.c
118
struct deflate_ctx *zctx = ctx->context;
drivers/block/zram/backend_deflate.c
35
struct deflate_ctx *zctx = ctx->context;
drivers/block/zram/backend_deflate.c
61
ctx->context = zctx;
drivers/block/zram/backend_deflate.c
92
struct deflate_ctx *zctx = ctx->context;
drivers/block/zram/backend_lz4.c
29
struct lz4_ctx *zctx = ctx->context;
drivers/block/zram/backend_lz4.c
48
ctx->context = zctx;
drivers/block/zram/backend_lz4.c
73
struct lz4_ctx *zctx = ctx->context;
drivers/block/zram/backend_lz4.c
98
struct lz4_ctx *zctx = ctx->context;
drivers/block/zram/backend_lz4hc.c
29
struct lz4hc_ctx *zctx = ctx->context;
drivers/block/zram/backend_lz4hc.c
48
ctx->context = zctx;
drivers/block/zram/backend_lz4hc.c
73
struct lz4hc_ctx *zctx = ctx->context;
drivers/block/zram/backend_lz4hc.c
99
struct lz4hc_ctx *zctx = ctx->context;
drivers/block/zram/backend_lzo.c
20
ctx->context = kzalloc(LZO1X_MEM_COMPRESS, GFP_KERNEL);
drivers/block/zram/backend_lzo.c
21
if (!ctx->context)
drivers/block/zram/backend_lzo.c
28
kfree(ctx->context);
drivers/block/zram/backend_lzo.c
37
&req->dst_len, ctx->context);
drivers/block/zram/backend_lzorle.c
20
ctx->context = kzalloc(LZO1X_MEM_COMPRESS, GFP_KERNEL);
drivers/block/zram/backend_lzorle.c
21
if (!ctx->context)
drivers/block/zram/backend_lzorle.c
28
kfree(ctx->context);
drivers/block/zram/backend_lzorle.c
37
&req->dst_len, ctx->context);
drivers/block/zram/backend_zstd.c
129
ctx->context = zctx;
drivers/block/zram/backend_zstd.c
173
struct zstd_ctx *zctx = ctx->context;
drivers/block/zram/backend_zstd.c
194
struct zstd_ctx *zctx = ctx->context;
drivers/block/zram/backend_zstd.c
94
struct zstd_ctx *zctx = ctx->context;
drivers/block/zram/zcomp.h
37
void *context;
drivers/bluetooth/bcm203x.c
65
struct bcm203x_data *data = urb->context;
drivers/bluetooth/bfusb.c
175
struct sk_buff *skb = (struct sk_buff *) urb->context;
drivers/bluetooth/bfusb.c
335
struct sk_buff *skb = (struct sk_buff *) urb->context;
drivers/bluetooth/bpa10x.c
49
struct sk_buff *skb = urb->context;
drivers/bluetooth/bpa10x.c
87
struct hci_dev *hdev = urb->context;
drivers/bluetooth/btintel.c
721
static int regmap_ibt_read(void *context, const void *addr, size_t reg_size,
drivers/bluetooth/btintel.c
724
struct regmap_ibt_context *ctx = context;
drivers/bluetooth/btintel.c
785
static int regmap_ibt_gather_write(void *context,
drivers/bluetooth/btintel.c
789
struct regmap_ibt_context *ctx = context;
drivers/bluetooth/btintel.c
839
static int regmap_ibt_write(void *context, const void *data, size_t count)
drivers/bluetooth/btintel.c
847
return regmap_ibt_gather_write(context, data, 4, data + 4, count - 4);
drivers/bluetooth/btintel.c
850
static void regmap_ibt_free_context(void *context)
drivers/bluetooth/btintel.c
852
kfree(context);
drivers/bluetooth/btmtk.c
1118
struct hci_dev *hdev = urb->context;
drivers/bluetooth/btmtk.c
447
struct hci_dev *hdev = urb->context;
drivers/bluetooth/btusb.c
1430
struct hci_dev *hdev = urb->context;
drivers/bluetooth/btusb.c
1554
struct hci_dev *hdev = urb->context;
drivers/bluetooth/btusb.c
1643
struct hci_dev *hdev = urb->context;
drivers/bluetooth/btusb.c
1808
struct hci_dev *hdev = urb->context;
drivers/bluetooth/btusb.c
1895
struct sk_buff *skb = urb->context;
drivers/bluetooth/btusb.c
1926
struct sk_buff *skb = urb->context;
drivers/bus/sunxi-rsb.c
430
static int regmap_sunxi_rsb_reg_read(void *context, unsigned int reg,
drivers/bus/sunxi-rsb.c
433
struct sunxi_rsb_ctx *ctx = context;
drivers/bus/sunxi-rsb.c
442
static int regmap_sunxi_rsb_reg_write(void *context, unsigned int reg,
drivers/bus/sunxi-rsb.c
445
struct sunxi_rsb_ctx *ctx = context;
drivers/bus/sunxi-rsb.c
451
static void regmap_sunxi_rsb_free_ctx(void *context)
drivers/bus/sunxi-rsb.c
453
struct sunxi_rsb_ctx *ctx = context;
drivers/bus/vexpress-config.c
127
bridge->ops->regmap_exit(regmap, bridge->context);
drivers/bus/vexpress-config.c
145
regmap = (bridge->ops->regmap_init)(dev, bridge->context);
drivers/bus/vexpress-config.c
219
static int vexpress_syscfg_read(void *context, unsigned int index,
drivers/bus/vexpress-config.c
222
struct vexpress_syscfg_func *func = context;
drivers/bus/vexpress-config.c
227
static int vexpress_syscfg_write(void *context, unsigned int index,
drivers/bus/vexpress-config.c
230
struct vexpress_syscfg_func *func = context;
drivers/bus/vexpress-config.c
248
void *context)
drivers/bus/vexpress-config.c
251
struct vexpress_syscfg *syscfg = context;
drivers/bus/vexpress-config.c
328
static void vexpress_syscfg_regmap_exit(struct regmap *regmap, void *context)
drivers/bus/vexpress-config.c
330
struct vexpress_syscfg *syscfg = context;
drivers/bus/vexpress-config.c
373
bridge->context = syscfg;
drivers/bus/vexpress-config.c
61
struct regmap * (*regmap_init)(struct device *dev, void *context);
drivers/bus/vexpress-config.c
62
void (*regmap_exit)(struct regmap *regmap, void *context);
drivers/bus/vexpress-config.c
67
void *context;
drivers/cdx/controller/mcdi.c
48
static void cdx_mcdi_cmd_work(struct work_struct *context);
drivers/cdx/controller/mcdi.c
632
static void cdx_mcdi_cmd_work(struct work_struct *context)
drivers/cdx/controller/mcdi.c
635
container_of(context, struct cdx_mcdi_cmd, work);
drivers/char/ipmi/ipmi_si_platform.c
59
u32 gpe_number, void *context)
drivers/char/ipmi/ipmi_si_platform.c
61
struct si_sm_io *io = context;
drivers/char/xillybus/xillyusb.c
648
struct xillybuffer *xb = urb->context;
drivers/char/xillybus/xillyusb.c
678
struct xillybuffer *xb = urb->context;
drivers/clk/clk-cdce925.c
517
void *context, const void *data, size_t count)
drivers/clk/clk-cdce925.c
519
struct device *dev = context;
drivers/clk/clk-cdce925.c
543
static int cdce925_regmap_i2c_read(void *context,
drivers/clk/clk-cdce925.c
546
struct device *dev = context;
drivers/clk/clk-fixed-rate_test.c
46
clk_hw_register_fixed_rate_kunit_init(struct kunit_resource *res, void *context)
drivers/clk/clk-fixed-rate_test.c
48
struct clk_hw_fixed_rate_kunit_params *params = context;
drivers/clk/clk-renesas-pcie.c
113
static int rs9_regmap_i2c_read(void *context,
drivers/clk/clk-renesas-pcie.c
116
struct i2c_client *i2c = context;
drivers/clk/clk-renesas-pcie.c
96
static int rs9_regmap_i2c_write(void *context,
drivers/clk/clk-renesas-pcie.c
99
struct i2c_client *i2c = context;
drivers/clk/clk-rpmi.c
148
static u32 rpmi_clk_get_num_clocks(struct rpmi_clk_context *context)
drivers/clk/clk-rpmi.c
157
ret = rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
170
struct rpmi_clk_context *context = rpmi_clk->context;
drivers/clk/clk-rpmi.c
181
ret = rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
207
struct rpmi_clk_context *context = rpmi_clk->context;
drivers/clk/clk-rpmi.c
224
kzalloc(context->max_msg_data_size, GFP_KERNEL);
drivers/clk/clk-rpmi.c
229
&tx, sizeof(tx), rx, context->max_msg_data_size);
drivers/clk/clk-rpmi.c
231
ret = rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
264
rx, context->max_msg_data_size);
drivers/clk/clk-rpmi.c
266
ret = rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
304
struct rpmi_clk_context *context = rpmi_clk->context;
drivers/clk/clk-rpmi.c
315
ret = rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
366
struct rpmi_clk_context *context = rpmi_clk->context;
drivers/clk/clk-rpmi.c
379
ret = rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
395
struct rpmi_clk_context *context = rpmi_clk->context;
drivers/clk/clk-rpmi.c
407
ret = rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
423
struct rpmi_clk_context *context = rpmi_clk->context;
drivers/clk/clk-rpmi.c
434
rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
445
static struct clk_hw *rpmi_clk_enumerate(struct rpmi_clk_context *context, u32 clkid)
drivers/clk/clk-rpmi.c
447
struct device *dev = context->dev;
drivers/clk/clk-rpmi.c
463
rpmi_clk->context = context;
drivers/clk/clk-rpmi.c
516
struct rpmi_clk_context *context;
drivers/clk/clk-rpmi.c
521
context = devm_kzalloc(dev, sizeof(*context), GFP_KERNEL);
drivers/clk/clk-rpmi.c
522
if (!context)
drivers/clk/clk-rpmi.c
524
context->dev = dev;
drivers/clk/clk-rpmi.c
525
platform_set_drvdata(pdev, context);
drivers/clk/clk-rpmi.c
527
context->client.dev = context->dev;
drivers/clk/clk-rpmi.c
528
context->client.rx_callback = NULL;
drivers/clk/clk-rpmi.c
529
context->client.tx_block = false;
drivers/clk/clk-rpmi.c
530
context->client.knows_txdone = true;
drivers/clk/clk-rpmi.c
531
context->client.tx_tout = 0;
drivers/clk/clk-rpmi.c
533
context->chan = mbox_request_channel(&context->client, 0);
drivers/clk/clk-rpmi.c
534
if (IS_ERR(context->chan))
drivers/clk/clk-rpmi.c
535
return PTR_ERR(context->chan);
drivers/clk/clk-rpmi.c
537
ret = devm_add_action_or_reset(dev, rpmi_clk_mbox_chan_release, context->chan);
drivers/clk/clk-rpmi.c
542
ret = rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
552
ret = rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
562
ret = rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
572
ret = rpmi_mbox_send_message(context->chan, &msg);
drivers/clk/clk-rpmi.c
576
context->max_msg_data_size = msg.attr.value;
drivers/clk/clk-rpmi.c
577
num_clocks = rpmi_clk_get_num_clocks(context);
drivers/clk/clk-rpmi.c
588
hw_ptr = rpmi_clk_enumerate(context, i);
drivers/clk/clk-rpmi.c
59
struct rpmi_clk_context *context;
drivers/clk/clk-si521xx.c
112
static int si521xx_regmap_i2c_read(void *context, unsigned int reg,
drivers/clk/clk-si521xx.c
115
struct i2c_client *i2c = context;
drivers/clk/clk-si521xx.c
95
static int si521xx_regmap_i2c_write(void *context, unsigned int reg,
drivers/clk/clk-si521xx.c
98
struct i2c_client *i2c = context;
drivers/clk/qcom/nsscc-qca8k.c
2074
static int qca8k_regmap_read(void *context, unsigned int regaddr, unsigned int *val)
drivers/clk/qcom/nsscc-qca8k.c
2076
struct mii_bus *bus = context;
drivers/clk/qcom/nsscc-qca8k.c
2095
static int qca8k_regmap_write(void *context, unsigned int regaddr, unsigned int val)
drivers/clk/qcom/nsscc-qca8k.c
2097
struct mii_bus *bus = context;
drivers/clk/qcom/nsscc-qca8k.c
2116
static int qca8k_regmap_update_bits(void *context, unsigned int regaddr,
drivers/clk/qcom/nsscc-qca8k.c
2119
struct mii_bus *bus = context;
drivers/clk/ti/clock.h
21
u32 context;
drivers/clk/ti/divider.c
280
divider->context = val & divider->mask;
drivers/clk/ti/divider.c
298
val |= divider->context << divider->shift;
drivers/clk/ti/dpll3xxx.c
888
clk->context = (v & dd->enable_mask) >> __ffs(dd->enable_mask);
drivers/clk/ti/dpll3xxx.c
890
if (clk->context == DPLL_LOCKED) {
drivers/clk/ti/dpll3xxx.c
916
if (clk->context == DPLL_LOCKED) {
drivers/clk/ti/dpll3xxx.c
929
_omap3_dpll_write_clken(clk, clk->context);
drivers/clk/ti/dpll3xxx.c
949
clk->context = (v & dd->enable_mask) >> __ffs(dd->enable_mask);
drivers/clk/ti/dpll3xxx.c
951
if (clk->context == DPLL_LOCKED) {
drivers/clk/ti/dpll3xxx.c
980
if (clk->context == ((ctrl & dd->enable_mask) >>
drivers/clk/ti/dpll3xxx.c
990
if (clk->context == DPLL_LOCKED)
drivers/clk/ti/dpll3xxx.c
993
_omap3_dpll_write_clken(clk, clk->context);
drivers/clocksource/timer-ti-dm.c
1093
timer->context.tcrr = value;
drivers/clocksource/timer-ti-dm.c
136
struct timer_regs context;
drivers/clocksource/timer-ti-dm.c
254
timer->context.tsicr = OMAP_TIMER_CTRL_POSTED;
drivers/clocksource/timer-ti-dm.c
302
dmtimer_write(timer, OMAP_TIMER_OCP_CFG_OFFSET, timer->context.ocp_cfg);
drivers/clocksource/timer-ti-dm.c
304
dmtimer_write(timer, OMAP_TIMER_WAKEUP_EN_REG, timer->context.twer);
drivers/clocksource/timer-ti-dm.c
305
dmtimer_write(timer, OMAP_TIMER_COUNTER_REG, timer->context.tcrr);
drivers/clocksource/timer-ti-dm.c
306
dmtimer_write(timer, OMAP_TIMER_LOAD_REG, timer->context.tldr);
drivers/clocksource/timer-ti-dm.c
307
dmtimer_write(timer, OMAP_TIMER_MATCH_REG, timer->context.tmar);
drivers/clocksource/timer-ti-dm.c
308
dmtimer_write(timer, OMAP_TIMER_IF_CTRL_REG, timer->context.tsicr);
drivers/clocksource/timer-ti-dm.c
309
dmtimer_write(timer, timer->irq_ena, timer->context.tier);
drivers/clocksource/timer-ti-dm.c
310
dmtimer_write(timer, OMAP_TIMER_CTRL_REG, timer->context.tclr);
drivers/clocksource/timer-ti-dm.c
315
timer->context.ocp_cfg = dmtimer_read(timer, OMAP_TIMER_OCP_CFG_OFFSET);
drivers/clocksource/timer-ti-dm.c
317
timer->context.tclr = dmtimer_read(timer, OMAP_TIMER_CTRL_REG);
drivers/clocksource/timer-ti-dm.c
318
timer->context.twer = dmtimer_read(timer, OMAP_TIMER_WAKEUP_EN_REG);
drivers/clocksource/timer-ti-dm.c
319
timer->context.tldr = dmtimer_read(timer, OMAP_TIMER_LOAD_REG);
drivers/clocksource/timer-ti-dm.c
320
timer->context.tmar = dmtimer_read(timer, OMAP_TIMER_MATCH_REG);
drivers/clocksource/timer-ti-dm.c
321
timer->context.tier = dmtimer_read(timer, timer->irq_ena);
drivers/clocksource/timer-ti-dm.c
322
timer->context.tsicr = dmtimer_read(timer, OMAP_TIMER_IF_CTRL_REG);
drivers/comedi/comedi_pci.c
141
unsigned long context)
drivers/comedi/comedi_pci.c
143
return comedi_auto_config(&pcidev->dev, driver, context);
drivers/comedi/comedi_usb.c
64
unsigned long context)
drivers/comedi/comedi_usb.c
66
return comedi_auto_config(&intf->dev, driver, context);
drivers/comedi/drivers.c
1114
struct comedi_driver *driver, unsigned long context)
drivers/comedi/drivers.c
1148
ret = driver->auto_attach(dev, context);
drivers/comedi/drivers.c
301
unsigned long context),
drivers/comedi/drivers.c
302
unsigned long context)
drivers/comedi/drivers.c
308
ret = cb(dev, s, insn, context);
drivers/comedi/drivers.c
916
unsigned long context),
drivers/comedi/drivers.c
917
unsigned long context)
drivers/comedi/drivers.c
927
ret = cb(dev, fw->data, fw->size, context);
drivers/comedi/drivers/8255_pci.c
197
unsigned long context)
drivers/comedi/drivers/8255_pci.c
205
if (context < ARRAY_SIZE(pci_8255_boards))
drivers/comedi/drivers/8255_pci.c
206
board = &pci_8255_boards[context];
drivers/comedi/drivers/addi_apci_1500.c
763
unsigned long context)
drivers/comedi/drivers/addi_apci_1516.c
108
unsigned long context)
drivers/comedi/drivers/addi_apci_1516.c
116
if (context < ARRAY_SIZE(apci1516_boardtypes))
drivers/comedi/drivers/addi_apci_1516.c
117
board = &apci1516_boardtypes[context];
drivers/comedi/drivers/addi_apci_16xx.c
88
unsigned long context)
drivers/comedi/drivers/addi_apci_16xx.c
98
if (context < ARRAY_SIZE(apci16xx_boardtypes))
drivers/comedi/drivers/addi_apci_16xx.c
99
board = &apci16xx_boardtypes[context];
drivers/comedi/drivers/addi_apci_3120.c
1008
context == BOARD_APCI3001)
drivers/comedi/drivers/addi_apci_3120.c
693
unsigned long context)
drivers/comedi/drivers/addi_apci_3120.c
750
unsigned long context)
drivers/comedi/drivers/addi_apci_3120.c
965
unsigned long context)
drivers/comedi/drivers/addi_apci_3120.c
974
if (context < ARRAY_SIZE(apci3120_boardtypes))
drivers/comedi/drivers/addi_apci_3120.c
975
board = &apci3120_boardtypes[context];
drivers/comedi/drivers/addi_apci_3xxx.c
421
unsigned long context)
drivers/comedi/drivers/addi_apci_3xxx.c
598
unsigned long context)
drivers/comedi/drivers/addi_apci_3xxx.c
753
unsigned long context)
drivers/comedi/drivers/addi_apci_3xxx.c
763
if (context < ARRAY_SIZE(apci3xxx_boardtypes))
drivers/comedi/drivers/addi_apci_3xxx.c
764
board = &apci3xxx_boardtypes[context];
drivers/comedi/drivers/adl_pci6208.c
44
unsigned long context)
drivers/comedi/drivers/adl_pci7x3x.c
342
unsigned long context)
drivers/comedi/drivers/adl_pci7x3x.c
353
if (context < ARRAY_SIZE(adl_pci7x3x_boards))
drivers/comedi/drivers/adl_pci7x3x.c
354
board = &adl_pci7x3x_boards[context];
drivers/comedi/drivers/adl_pci9111.c
518
unsigned long context)
drivers/comedi/drivers/adl_pci9118.c
1304
unsigned long context)
drivers/comedi/drivers/adl_pci9118.c
1666
unsigned long context)
drivers/comedi/drivers/adl_pci9118.c
1671
if (context < ARRAY_SIZE(pci9118_boards))
drivers/comedi/drivers/adl_pci9118.c
1672
board = &pci9118_boards[context];
drivers/comedi/drivers/adq12b.c
95
unsigned long context)
drivers/comedi/drivers/adv_pci1710.c
289
unsigned long context)
drivers/comedi/drivers/adv_pci1710.c
745
unsigned long context)
drivers/comedi/drivers/adv_pci1710.c
754
if (context < ARRAY_SIZE(boardtypes))
drivers/comedi/drivers/adv_pci1710.c
755
board = &boardtypes[context];
drivers/comedi/drivers/adv_pci1720.c
112
unsigned long context)
drivers/comedi/drivers/adv_pci1724.c
75
unsigned long context)
drivers/comedi/drivers/adv_pci1760.c
339
unsigned long context)
drivers/comedi/drivers/adv_pci_dio.c
548
unsigned long context)
drivers/comedi/drivers/adv_pci_dio.c
556
if (context < ARRAY_SIZE(boardtypes))
drivers/comedi/drivers/adv_pci_dio.c
557
board = &boardtypes[context];
drivers/comedi/drivers/adv_pci_dio.c
570
if (context == TYPE_PCI1736)
drivers/comedi/drivers/adv_pci_dio.c
575
dev_private->boardtype = context;
drivers/comedi/drivers/adv_pci_dio.c
576
pci_dio_reset(dev, context);
drivers/comedi/drivers/aio_aio12_8.c
101
unsigned long context)
drivers/comedi/drivers/amplc_dio200_common.c
206
offset = (void __iomem *)i8254->context - dev->mmio;
drivers/comedi/drivers/amplc_dio200_common.c
208
offset = i8254->context - dev->iobase;
drivers/comedi/drivers/amplc_pci230.c
709
unsigned long context)
drivers/comedi/drivers/c6xdigio.c
47
static int c6xdigio_chk_status(struct comedi_device *dev, unsigned long context)
drivers/comedi/drivers/c6xdigio.c
54
if ((status & 0x80) != context)
drivers/comedi/drivers/cb_das16_cs.c
127
unsigned long context)
drivers/comedi/drivers/cb_das16_cs.c
340
unsigned long context)
drivers/comedi/drivers/cb_pcidas.c
1247
unsigned long context)
drivers/comedi/drivers/cb_pcidas.c
1256
if (context < ARRAY_SIZE(cb_pcidas_boards))
drivers/comedi/drivers/cb_pcidas.c
1257
board = &cb_pcidas_boards[context];
drivers/comedi/drivers/cb_pcidas.c
312
unsigned long context)
drivers/comedi/drivers/cb_pcidas.c
470
unsigned long context)
drivers/comedi/drivers/cb_pcidas64.c
1723
unsigned long context)
drivers/comedi/drivers/cb_pcidas64.c
3962
unsigned long context)
drivers/comedi/drivers/cb_pcidas64.c
3970
if (context < ARRAY_SIZE(pcidas64_boards))
drivers/comedi/drivers/cb_pcidas64.c
3971
board = &pcidas64_boards[context];
drivers/comedi/drivers/cb_pcidda.c
326
unsigned long context)
drivers/comedi/drivers/cb_pcidda.c
335
if (context < ARRAY_SIZE(cb_pcidda_boards))
drivers/comedi/drivers/cb_pcidda.c
336
board = &cb_pcidda_boards[context];
drivers/comedi/drivers/cb_pcimdas.c
159
unsigned long context)
drivers/comedi/drivers/comedi_8254.c
130
unsigned long iobase = i8254->context;
drivers/comedi/drivers/comedi_8254.c
144
unsigned long iobase = i8254->context;
drivers/comedi/drivers/comedi_8254.c
158
unsigned long iobase = i8254->context;
drivers/comedi/drivers/comedi_8254.c
174
void __iomem *mmiobase = (void __iomem *)i8254->context;
drivers/comedi/drivers/comedi_8254.c
188
void __iomem *mmiobase = (void __iomem *)i8254->context;
drivers/comedi/drivers/comedi_8254.c
202
void __iomem *mmiobase = (void __iomem *)i8254->context;
drivers/comedi/drivers/comedi_8254.c
620
unsigned long context,
drivers/comedi/drivers/comedi_8254.c
641
i8254->context = context;
drivers/comedi/drivers/comedi_8255.c
101
unsigned long context = spriv->context;
drivers/comedi/drivers/comedi_8255.c
115
spriv->io(dev, 1, I8255_CTRL_REG, config, context);
drivers/comedi/drivers/comedi_8255.c
149
unsigned long context),
drivers/comedi/drivers/comedi_8255.c
150
unsigned long context)
drivers/comedi/drivers/comedi_8255.c
161
spriv->context = context;
drivers/comedi/drivers/comedi_8255.c
239
int data, unsigned long context),
drivers/comedi/drivers/comedi_8255.c
240
unsigned long context)
drivers/comedi/drivers/comedi_8255.c
242
return __subdev_8255_init(dev, s, io, context);
drivers/comedi/drivers/comedi_8255.c
258
return spriv->context;
drivers/comedi/drivers/comedi_8255.c
36
unsigned long context;
drivers/comedi/drivers/comedi_8255.c
38
unsigned long context);
drivers/comedi/drivers/comedi_8255.c
71
unsigned long context = spriv->context;
drivers/comedi/drivers/comedi_8255.c
79
s->state & 0xff, context);
drivers/comedi/drivers/comedi_8255.c
82
(s->state >> 8) & 0xff, context);
drivers/comedi/drivers/comedi_8255.c
85
(s->state >> 16) & 0xff, context);
drivers/comedi/drivers/comedi_8255.c
88
v = spriv->io(dev, 0, I8255_DATA_A_REG, 0, context);
drivers/comedi/drivers/comedi_8255.c
89
v |= (spriv->io(dev, 0, I8255_DATA_B_REG, 0, context) << 8);
drivers/comedi/drivers/comedi_8255.c
90
v |= (spriv->io(dev, 0, I8255_DATA_C_REG, 0, context) << 16);
drivers/comedi/drivers/daqboard2000.c
307
struct comedi_insn *insn, unsigned long context)
drivers/comedi/drivers/daqboard2000.c
312
if (status & context)
drivers/comedi/drivers/daqboard2000.c
383
struct comedi_insn *insn, unsigned long context)
drivers/comedi/drivers/daqboard2000.c
531
size_t len, unsigned long context)
drivers/comedi/drivers/daqboard2000.c
677
static int db2k_auto_attach(struct comedi_device *dev, unsigned long context)
drivers/comedi/drivers/daqboard2000.c
685
if (context >= ARRAY_SIZE(db2k_boardtypes))
drivers/comedi/drivers/daqboard2000.c
687
board = &db2k_boardtypes[context];
drivers/comedi/drivers/das08.c
157
unsigned long context)
drivers/comedi/drivers/das08_cs.c
50
unsigned long context)
drivers/comedi/drivers/das16.c
811
unsigned long context)
drivers/comedi/drivers/das16m1.c
311
unsigned long context)
drivers/comedi/drivers/das1800.c
928
unsigned long context)
drivers/comedi/drivers/das6402.c
366
unsigned long context)
drivers/comedi/drivers/das800.c
509
unsigned long context)
drivers/comedi/drivers/dmm32at.c
192
unsigned long context)
drivers/comedi/drivers/dmm32at.c
196
status = inb(dev->iobase + context);
drivers/comedi/drivers/dmm32at.c
457
unsigned long context)
drivers/comedi/drivers/dt2811.c
461
unsigned long context)
drivers/comedi/drivers/dt2814.c
52
unsigned long context)
drivers/comedi/drivers/dt2814.c
57
if (context)
drivers/comedi/drivers/dt2814.c
58
*(unsigned int *)context = status;
drivers/comedi/drivers/dt2814.c
89
unsigned long context)
drivers/comedi/drivers/dt2815.c
60
unsigned long context)
drivers/comedi/drivers/dt2815.c
65
if (status == context)
drivers/comedi/drivers/dt282x.c
559
unsigned long context)
drivers/comedi/drivers/dt282x.c
564
switch (context) {
drivers/comedi/drivers/dt3000.c
609
unsigned long context)
drivers/comedi/drivers/dt3000.c
617
if (context < ARRAY_SIZE(dt3k_boardtypes))
drivers/comedi/drivers/dt3000.c
618
board = &dt3k_boardtypes[context];
drivers/comedi/drivers/dt9812.c
809
unsigned long context)
drivers/comedi/drivers/dyna_pci10xx.c
51
unsigned long context)
drivers/comedi/drivers/icp_multi.c
143
unsigned long context)
drivers/comedi/drivers/icp_multi.c
90
unsigned long context)
drivers/comedi/drivers/ii_pci20kc.c
171
unsigned long context)
drivers/comedi/drivers/jr3_pci.c
408
unsigned long context)
drivers/comedi/drivers/jr3_pci.c
657
unsigned long context)
drivers/comedi/drivers/jr3_pci.c
670
if (context < ARRAY_SIZE(jr3_pci_boards))
drivers/comedi/drivers/jr3_pci.c
671
board = &jr3_pci_boards[context];
drivers/comedi/drivers/me4000.c
1103
unsigned long context)
drivers/comedi/drivers/me4000.c
1111
if (context < ARRAY_SIZE(me4000_boards))
drivers/comedi/drivers/me4000.c
1112
board = &me4000_boards[context];
drivers/comedi/drivers/me4000.c
309
unsigned long context)
drivers/comedi/drivers/me4000.c
457
unsigned long context)
drivers/comedi/drivers/me_daq.c
217
unsigned long context)
drivers/comedi/drivers/me_daq.c
340
unsigned long context)
drivers/comedi/drivers/me_daq.c
430
unsigned long context)
drivers/comedi/drivers/me_daq.c
438
if (context < ARRAY_SIZE(me_boards))
drivers/comedi/drivers/me_daq.c
439
board = &me_boards[context];
drivers/comedi/drivers/mf6x4.c
109
unsigned long context)
drivers/comedi/drivers/mf6x4.c
179
static int mf6x4_auto_attach(struct comedi_device *dev, unsigned long context)
drivers/comedi/drivers/mf6x4.c
187
if (context < ARRAY_SIZE(mf6x4_boards))
drivers/comedi/drivers/mf6x4.c
188
board = &mf6x4_boards[context];
drivers/comedi/drivers/mpc624.c
190
unsigned long context)
drivers/comedi/drivers/multiq3.c
88
unsigned long context)
drivers/comedi/drivers/multiq3.c
93
if (status & context)
drivers/comedi/drivers/ni_6527.c
372
unsigned long context)
drivers/comedi/drivers/ni_6527.c
380
if (context < ARRAY_SIZE(ni6527_boards))
drivers/comedi/drivers/ni_6527.c
381
board = &ni6527_boards[context];
drivers/comedi/drivers/ni_65xx.c
629
unsigned long context)
drivers/comedi/drivers/ni_65xx.c
637
if (context < ARRAY_SIZE(ni_65xx_boards))
drivers/comedi/drivers/ni_65xx.c
638
board = &ni_65xx_boards[context];
drivers/comedi/drivers/ni_660x.c
1010
if (context < ARRAY_SIZE(ni_660x_boards))
drivers/comedi/drivers/ni_660x.c
1011
board = &ni_660x_boards[context];
drivers/comedi/drivers/ni_660x.c
997
unsigned long context)
drivers/comedi/drivers/ni_670x.c
156
unsigned long context)
drivers/comedi/drivers/ni_670x.c
165
if (context < ARRAY_SIZE(ni_670x_boards))
drivers/comedi/drivers/ni_670x.c
166
board = &ni_670x_boards[context];
drivers/comedi/drivers/ni_at_a2150.c
566
unsigned long context)
drivers/comedi/drivers/ni_atmio16d.c
443
unsigned long context)
drivers/comedi/drivers/ni_daq_700.c
115
unsigned long context)
drivers/comedi/drivers/ni_daq_700.c
209
unsigned long context)
drivers/comedi/drivers/ni_daq_dio24.c
30
unsigned long context)
drivers/comedi/drivers/ni_labpc_common.c
1150
unsigned long context)
drivers/comedi/drivers/ni_labpc_common.c
219
unsigned long context)
drivers/comedi/drivers/ni_labpc_cs.c
55
unsigned long context)
drivers/comedi/drivers/ni_labpc_pci.c
67
unsigned long context)
drivers/comedi/drivers/ni_labpc_pci.c
73
if (context < ARRAY_SIZE(labpc_pci_boards))
drivers/comedi/drivers/ni_labpc_pci.c
74
board = &labpc_pci_boards[context];
drivers/comedi/drivers/ni_mio_cs.c
148
unsigned long context)
drivers/comedi/drivers/ni_pcidio.c
755
unsigned long context)
drivers/comedi/drivers/ni_pcidio.c
758
int fpga_index = context;
drivers/comedi/drivers/ni_pcidio.c
878
unsigned long context)
drivers/comedi/drivers/ni_pcidio.c
887
if (context < ARRAY_SIZE(nidio_boards))
drivers/comedi/drivers/ni_pcidio.c
888
board = &nidio_boards[context];
drivers/comedi/drivers/ni_pcimio.c
1300
unsigned long context)
drivers/comedi/drivers/ni_pcimio.c
1308
if (context < ARRAY_SIZE(ni_boards))
drivers/comedi/drivers/ni_pcimio.c
1309
board = &ni_boards[context];
drivers/comedi/drivers/ni_usb6501.c
517
unsigned long context)
drivers/comedi/drivers/pcl711.c
236
unsigned long context)
drivers/comedi/drivers/pcl812.c
613
unsigned long context)
drivers/comedi/drivers/pcl816.c
198
unsigned long context)
drivers/comedi/drivers/pcl818.c
412
unsigned long context)
drivers/comedi/drivers/pcmad.c
57
unsigned long context)
drivers/comedi/drivers/pcmmio.c
519
unsigned long context)
drivers/comedi/drivers/pcmmio.c
602
unsigned long context)
drivers/comedi/drivers/quatech_daqp_cs.c
284
unsigned long context)
drivers/comedi/drivers/quatech_daqp_cs.c
620
unsigned long context)
drivers/comedi/drivers/quatech_daqp_cs.c
698
unsigned long context)
drivers/comedi/drivers/rtd520.c
1207
unsigned long context)
drivers/comedi/drivers/rtd520.c
1215
if (context < ARRAY_SIZE(rtd520_boards))
drivers/comedi/drivers/rtd520.c
1216
board = &rtd520_boards[context];
drivers/comedi/drivers/rtd520.c
521
unsigned long context)
drivers/comedi/drivers/rtd520.c
995
unsigned long context)
drivers/comedi/drivers/rti800.c
136
unsigned long context)
drivers/comedi/drivers/s526.c
419
unsigned long context)
drivers/comedi/drivers/s526.c
424
if (status & context) {
drivers/comedi/drivers/s526.c
426
outw(context, dev->iobase + S526_INT_STATUS_REG);
drivers/comedi/drivers/s626.c
1474
unsigned long context)
drivers/comedi/drivers/s626.c
230
unsigned long context)
drivers/comedi/drivers/s626.c
320
unsigned long context)
drivers/comedi/drivers/s626.c
324
switch (context) {
drivers/comedi/drivers/usbdux.c
1151
struct comedi_device *dev = urb->context;
drivers/comedi/drivers/usbdux.c
1366
unsigned long context)
drivers/comedi/drivers/usbdux.c
1462
urb->context = dev;
drivers/comedi/drivers/usbdux.c
1484
urb->context = dev;
drivers/comedi/drivers/usbdux.c
289
struct comedi_device *dev = urb->context;
drivers/comedi/drivers/usbdux.c
436
struct comedi_device *dev = urb->context;
drivers/comedi/drivers/usbdux.c
494
urb->context = dev;
drivers/comedi/drivers/usbduxfast.c
260
struct comedi_device *dev = urb->context;
drivers/comedi/drivers/usbduxfast.c
843
unsigned long context)
drivers/comedi/drivers/usbduxsigma.c
1013
struct comedi_device *dev = urb->context;
drivers/comedi/drivers/usbduxsigma.c
1260
unsigned long context)
drivers/comedi/drivers/usbduxsigma.c
1354
urb->context = NULL;
drivers/comedi/drivers/usbduxsigma.c
1376
urb->context = NULL;
drivers/comedi/drivers/usbduxsigma.c
251
struct comedi_device *dev = urb->context;
drivers/comedi/drivers/usbduxsigma.c
390
struct comedi_device *dev = urb->context;
drivers/comedi/drivers/usbduxsigma.c
446
urb->context = dev;
drivers/comedi/drivers/vmk80xx.c
774
unsigned long context)
drivers/comedi/drivers/vmk80xx.c
781
if (context < ARRAY_SIZE(vmk80xx_boardinfo))
drivers/comedi/drivers/vmk80xx.c
782
board = &vmk80xx_boardinfo[context];
drivers/cpufreq/longhaul.c
669
void *context, void **return_value)
drivers/crypto/caam/blob_gen.c
44
static void caam_blob_job_done(struct device *dev, u32 *desc, u32 err, void *context)
drivers/crypto/caam/blob_gen.c
46
struct caam_blob_job_result *res = context;
drivers/crypto/caam/caamalg.c
1047
void *context)
drivers/crypto/caam/caamalg.c
1049
struct aead_request *req = context;
drivers/crypto/caam/caamalg.c
1086
void *context)
drivers/crypto/caam/caamalg.c
1088
struct skcipher_request *req = context;
drivers/crypto/caam/caamhash.c
126
void *context);
drivers/crypto/caam/caamhash.c
586
void *context, enum dma_data_direction dir)
drivers/crypto/caam/caamhash.c
588
struct ahash_request *req = context;
drivers/crypto/caam/caamhash.c
625
void *context)
drivers/crypto/caam/caamhash.c
627
ahash_done_cpy(jrdev, desc, err, context, DMA_FROM_DEVICE);
drivers/crypto/caam/caamhash.c
631
void *context)
drivers/crypto/caam/caamhash.c
633
ahash_done_cpy(jrdev, desc, err, context, DMA_BIDIRECTIONAL);
drivers/crypto/caam/caamhash.c
637
void *context, enum dma_data_direction dir)
drivers/crypto/caam/caamhash.c
639
struct ahash_request *req = context;
drivers/crypto/caam/caamhash.c
688
void *context)
drivers/crypto/caam/caamhash.c
690
ahash_done_switch(jrdev, desc, err, context, DMA_BIDIRECTIONAL);
drivers/crypto/caam/caamhash.c
694
void *context)
drivers/crypto/caam/caamhash.c
696
ahash_done_switch(jrdev, desc, err, context, DMA_FROM_DEVICE);
drivers/crypto/caam/caamhash.c
789
u32 err, void *context),
drivers/crypto/caam/caampkc.c
123
static void rsa_pub_done(struct device *dev, u32 *desc, u32 err, void *context)
drivers/crypto/caam/caampkc.c
125
struct akcipher_request *req = context;
drivers/crypto/caam/caampkc.c
153
void *context)
drivers/crypto/caam/caampkc.c
155
struct akcipher_request *req = context;
drivers/crypto/caam/caampkc.c
669
u32 err, void *context),
drivers/crypto/caam/caampkc.h
115
void *context);
drivers/crypto/caam/caamprng.c
39
void *context)
drivers/crypto/caam/caamprng.c
41
struct caam_prng_ctx *jctx = context;
drivers/crypto/caam/caamrng.c
58
void *context)
drivers/crypto/caam/caamrng.c
60
struct caam_rng_job_ctx *jctx = context;
drivers/crypto/caam/key_gen.c
15
void *context)
drivers/crypto/caam/key_gen.c
17
struct split_key_result *res = context;
drivers/crypto/caam/key_gen.h
44
void split_key_done(struct device *dev, u32 *desc, u32 err, void *context);
drivers/crypto/inside-secure/safexcel.h
907
dma_addr_t context,
drivers/crypto/inside-secure/safexcel_ring.c
184
dma_addr_t context,
drivers/crypto/inside-secure/safexcel_ring.c
216
cdesc->control_data.context_lo = lower_32_bits(context) |
drivers/crypto/inside-secure/safexcel_ring.c
218
cdesc->control_data.context_hi = upper_32_bits(context);
drivers/crypto/mxs-dcp.c
1034
static irqreturn_t mxs_dcp_irq(int irq, void *context)
drivers/crypto/mxs-dcp.c
1036
struct dcp *sdcp = context;
drivers/crypto/sahara.c
172
u8 context[SHA256_DIGEST_SIZE + 4];
drivers/crypto/sahara.c
948
memcpy(dev->context_base, rctx->context, rctx->context_size);
drivers/crypto/sahara.c
978
memcpy(rctx->context, dev->context_base, rctx->context_size);
drivers/crypto/sahara.c
981
memcpy(req->result, rctx->context, rctx->digest_size);
drivers/crypto/talitos.c
1023
struct talitos_desc *desc, void *context,
drivers/crypto/talitos.c
1026
struct aead_request *areq = context;
drivers/crypto/talitos.c
1044
void *context, int err)
drivers/crypto/talitos.c
1046
struct aead_request *req = context;
drivers/crypto/talitos.c
1071
void *context, int err)
drivers/crypto/talitos.c
1073
struct aead_request *req = context;
drivers/crypto/talitos.c
1197
void *context, int error))
drivers/crypto/talitos.c
1547
struct talitos_desc *desc, void *context,
drivers/crypto/talitos.c
1550
struct skcipher_request *areq = context;
drivers/crypto/talitos.c
1570
void *context, int error))
drivers/crypto/talitos.c
1745
struct talitos_desc *desc, void *context,
drivers/crypto/talitos.c
1748
struct ahash_request *areq = context;
drivers/crypto/talitos.c
1790
void *context, int error))
drivers/crypto/talitos.c
272
void *context, int error),
drivers/crypto/talitos.c
273
void *context)
drivers/crypto/talitos.c
304
request->context = context;
drivers/crypto/talitos.c
377
saved_req.context = request->context;
drivers/crypto/talitos.c
390
saved_req.callback(dev, saved_req.desc, saved_req.context,
drivers/crypto/talitos.h
84
void *context, int error);
drivers/crypto/talitos.h
85
void *context;
drivers/dma-buf/dma-fence-array.c
200
u64 context, unsigned seqno,
drivers/dma-buf/dma-fence-array.c
209
context, seqno);
drivers/dma-buf/dma-fence-array.c
254
u64 context, unsigned seqno,
drivers/dma-buf/dma-fence-array.c
264
context, seqno, signal_on_any);
drivers/dma-buf/dma-fence-array.c
279
bool dma_fence_match_context(struct dma_fence *fence, u64 context)
drivers/dma-buf/dma-fence-array.c
285
return fence->context == context;
drivers/dma-buf/dma-fence-array.c
288
if (array->fences[i]->context != context)
drivers/dma-buf/dma-fence-chain.c
102
if ((*pfence)->context != chain->base.context ||
drivers/dma-buf/dma-fence-chain.c
246
uint64_t context;
drivers/dma-buf/dma-fence-chain.c
255
context = prev->context;
drivers/dma-buf/dma-fence-chain.c
258
context = dma_fence_context_alloc(1);
drivers/dma-buf/dma-fence-chain.c
265
context, seqno);
drivers/dma-buf/dma-fence-unwrap.c
107
if (fences[i]->context == fences[j]->context)
drivers/dma-buf/dma-fence-unwrap.c
69
if (a->context < b->context)
drivers/dma-buf/dma-fence-unwrap.c
71
else if (a->context > b->context)
drivers/dma-buf/dma-fence.c
1037
fence->context, fence->seqno, timeline, driver,
drivers/dma-buf/dma-fence.c
1046
spinlock_t *lock, u64 context, u64 seqno, unsigned long flags)
drivers/dma-buf/dma-fence.c
1055
fence->context = context;
drivers/dma-buf/dma-fence.c
1080
spinlock_t *lock, u64 context, u64 seqno)
drivers/dma-buf/dma-fence.c
1082
__dma_fence_init(fence, ops, lock, context, seqno, 0UL);
drivers/dma-buf/dma-fence.c
1103
spinlock_t *lock, u64 context, u64 seqno)
drivers/dma-buf/dma-fence.c
1105
__dma_fence_init(fence, ops, lock, context, seqno,
drivers/dma-buf/dma-fence.c
581
fence->context, fence->seqno);
drivers/dma-buf/dma-resv.c
310
if ((old->context == fence->context && old_usage >= usage &&
drivers/dma-buf/dma-resv.c
343
void dma_resv_replace_fences(struct dma_resv *obj, uint64_t context,
drivers/dma-buf/dma-resv.c
357
if (old->context != context)
drivers/dma-buf/st-dma-fence-unwrap.c
31
static struct dma_fence *__mock_fence(u64 context, u64 seqno)
drivers/dma-buf/st-dma-fence-unwrap.c
40
dma_fence_init(&f->base, &mock_ops, &f->lock, context, seqno);
drivers/dma-buf/sw_sync.c
109
obj->context = dma_fence_context_alloc(1);
drivers/dma-buf/sw_sync.c
261
obj->context, value);
drivers/dma-buf/sync_debug.h
38
u64 context;
drivers/dma-buf/sync_file.c
147
fence->context,
drivers/dma/altera-msgdma.c
390
unsigned long flags, void *context)
drivers/dma/amba-pl08x.c
2044
unsigned long flags, void *context)
drivers/dma/amd/qdma/qdma.c
766
unsigned long flags, void *context)
drivers/dma/at_hdmac.c
1251
unsigned long flags, void *context)
drivers/dma/at_xdmac.c
761
unsigned long flags, void *context)
drivers/dma/bcm2835-dma.c
627
unsigned long flags, void *context)
drivers/dma/bestcomm/bestcomm.c
316
bcom_eng->tdt[task].context = ctx_pa;
drivers/dma/dma-axi-dmac.c
668
unsigned long flags, void *context)
drivers/dma/dma-jz4780.c
364
void *context)
drivers/dma/dw-axi-dmac/dw-axi-dmac-platform.c
844
unsigned long flags, void *context)
drivers/dma/dw-edma/dw-edma-core.c
538
unsigned long flags, void *context)
drivers/dma/dw/core.c
617
unsigned long flags, void *context)
drivers/dma/ep93xx_dma.c
1087
unsigned long flags, void *context)
drivers/dma/fsl-edma-common.c
679
unsigned long flags, void *context)
drivers/dma/fsl-edma-common.h
502
unsigned long flags, void *context);
drivers/dma/hsu/hsu.c
272
unsigned long flags, void *context)
drivers/dma/idma64.c
303
unsigned long flags, void *context)
drivers/dma/img-mdc-dma.c
449
unsigned long flags, void *context)
drivers/dma/imx-dma.c
796
unsigned long flags, void *context)
drivers/dma/imx-sdma.c
1115
struct sdma_context_data *context = sdma->context;
drivers/dma/imx-sdma.c
1141
memset(context, 0, sizeof(*context));
drivers/dma/imx-sdma.c
1142
context->channel_state.pc = load_address;
drivers/dma/imx-sdma.c
1148
context->gReg[4] = sdmac->per_addr;
drivers/dma/imx-sdma.c
1149
context->gReg[6] = sdmac->shp_addr;
drivers/dma/imx-sdma.c
1151
context->gReg[0] = sdmac->event_mask[1];
drivers/dma/imx-sdma.c
1152
context->gReg[1] = sdmac->event_mask[0];
drivers/dma/imx-sdma.c
1153
context->gReg[2] = sdmac->per_addr;
drivers/dma/imx-sdma.c
1154
context->gReg[6] = sdmac->shp_addr;
drivers/dma/imx-sdma.c
1155
context->gReg[7] = sdmac->watermark_level;
drivers/dma/imx-sdma.c
1160
bd0->mode.count = sizeof(*context) / 4;
drivers/dma/imx-sdma.c
1162
bd0->ext_buffer_addr = 2048 + (sizeof(*context) / 4) * channel;
drivers/dma/imx-sdma.c
1641
unsigned long flags, void *context)
drivers/dma/imx-sdma.c
1974
static void sdma_load_firmware(const struct firmware *fw, void *context)
drivers/dma/imx-sdma.c
1976
struct sdma_engine *sdma = context;
drivers/dma/imx-sdma.c
2149
sdma->context = (void *)sdma->channel_control +
drivers/dma/imx-sdma.c
528
struct sdma_context_data *context;
drivers/dma/k3dma.c
532
enum dma_transfer_direction dir, unsigned long flags, void *context)
drivers/dma/lgm/lgm-dma.c
1159
unsigned long flags, void *context)
drivers/dma/loongson1-apb-dma.c
298
unsigned long flags, void *context)
drivers/dma/loongson2-apb-dma.c
323
unsigned long flags, void *context)
drivers/dma/mediatek/mtk-uart-apdma.c
345
unsigned long tx_flags, void *context)
drivers/dma/milbeaut-hdmac.c
259
unsigned long flags, void *context)
drivers/dma/mmp_pdma.c
685
unsigned long flags, void *context)
drivers/dma/moxart-dma.c
264
unsigned long tx_flags, void *context)
drivers/dma/mpc512x_dma.c
686
unsigned long flags, void *context)
drivers/dma/mxs-dma.c
477
unsigned long flags, void *context)
drivers/dma/nbpfaxi.c
1027
enum dma_transfer_direction direction, unsigned long flags, void *context)
drivers/dma/owl-dma.c
920
unsigned long flags, void *context)
drivers/dma/pch_dma.c
564
void *context)
drivers/dma/pl330.c
2843
unsigned long flg, void *context)
drivers/dma/pxa_dma.c
963
unsigned long flags, void *context)
drivers/dma/qcom/bam_dma.c
648
void *context)
drivers/dma/qcom/gpi.c
1800
unsigned long flags, void *context)
drivers/dma/qcom/qcom_adm.c
351
void *context)
drivers/dma/sa11x0-dma.c
524
enum dma_transfer_direction dir, unsigned long flags, void *context)
drivers/dma/sh/rcar-dmac.c
1202
unsigned long flags, void *context)
drivers/dma/sh/rz-dmac.c
526
unsigned long flags, void *context)
drivers/dma/sh/shdma-base.c
675
enum dma_transfer_direction direction, unsigned long flags, void *context)
drivers/dma/sh/usb-dmac.c
418
unsigned long dma_flags, void *context)
drivers/dma/sprd-dma.c
952
unsigned long flags, void *context)
drivers/dma/sprd-dma.c
966
if (context) {
drivers/dma/sprd-dma.c
968
(struct sprd_dma_linklist *)context;
drivers/dma/st_fdma.c
510
unsigned long flags, void *context)
drivers/dma/ste_dma40.c
2514
unsigned long dma_flags, void *context)
drivers/dma/stm32/stm32-dma.c
1085
unsigned long flags, void *context)
drivers/dma/stm32/stm32-dma3.c
1297
unsigned long flags, void *context)
drivers/dma/stm32/stm32-mdma.c
787
unsigned long flags, void *context)
drivers/dma/sun4i-dma.c
880
unsigned long flags, void *context)
drivers/dma/sun6i-dma.c
718
unsigned long flags, void *context)
drivers/dma/tegra186-gpc-dma.c
1008
unsigned long flags, void *context)
drivers/dma/tegra20-apb-dma.c
1040
void *context)
drivers/dma/ti/cppi41.c
583
enum dma_transfer_direction dir, unsigned long tx_flags, void *context)
drivers/dma/ti/edma.c
1012
unsigned long tx_flags, void *context)
drivers/dma/ti/k3-udma.c
2885
unsigned long tx_flags, void *context)
drivers/dma/ti/k3-udma.c
2970
unsigned long tx_flags, void *context)
drivers/dma/ti/k3-udma.c
3235
unsigned long tx_flags, void *context)
drivers/dma/ti/k3-udma.c
3420
unsigned long tx_flags, void *context)
drivers/dma/ti/k3-udma.c
3455
context);
drivers/dma/ti/k3-udma.c
3458
context);
drivers/dma/ti/k3-udma.c
3461
tx_flags, context);
drivers/dma/ti/omap-dma.c
1577
od->context.irqenable_l0 = omap_dma_glbl_read(od, IRQENABLE_L0);
drivers/dma/ti/omap-dma.c
1578
od->context.irqenable_l1 = omap_dma_glbl_read(od, IRQENABLE_L1);
drivers/dma/ti/omap-dma.c
1579
od->context.ocp_sysconfig = omap_dma_glbl_read(od, OCP_SYSCONFIG);
drivers/dma/ti/omap-dma.c
1580
od->context.gcr = omap_dma_glbl_read(od, GCR);
drivers/dma/ti/omap-dma.c
1587
omap_dma_glbl_write(od, GCR, od->context.gcr);
drivers/dma/ti/omap-dma.c
1588
omap_dma_glbl_write(od, OCP_SYSCONFIG, od->context.ocp_sysconfig);
drivers/dma/ti/omap-dma.c
1589
omap_dma_glbl_write(od, IRQENABLE_L0, od->context.irqenable_l0);
drivers/dma/ti/omap-dma.c
1590
omap_dma_glbl_write(od, IRQENABLE_L1, od->context.irqenable_l1);
drivers/dma/ti/omap-dma.c
50
struct omap_dma_context context;
drivers/dma/ti/omap-dma.c
961
enum dma_transfer_direction dir, unsigned long tx_flags, void *context)
drivers/dma/timb_dma.c
492
void *context)
drivers/dma/txx9dmac.c
805
unsigned long flags, void *context)
drivers/dma/uniphier-mdmac.c
233
unsigned long flags, void *context)
drivers/dma/uniphier-xdmac.c
322
unsigned long flags, void *context)
drivers/dma/xilinx/xdma.c
603
unsigned long flags, void *context)
drivers/dma/xilinx/xilinx_dma.c
2318
void *context)
drivers/dma/xilinx/xilinx_dma.c
2323
u32 *app_w = (u32 *)context;
drivers/dma/xilinx/xilinx_dma.c
2528
unsigned long flags, void *context)
drivers/dma/xilinx/xilinx_dma.c
2533
u32 *app_w = (u32 *)context;
drivers/edac/aspeed_edac.c
41
static int regmap_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/edac/aspeed_edac.c
43
void __iomem *regs = (void __iomem *)context;
drivers/edac/aspeed_edac.c
57
static int regmap_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/edac/aspeed_edac.c
59
void __iomem *regs = (void __iomem *)context;
drivers/firewire/core-cdev.c
1003
static void iso_mc_callback(struct fw_iso_context *context,
drivers/firewire/core-cdev.c
1021
static enum dma_data_direction iso_dma_direction(struct fw_iso_context *context)
drivers/firewire/core-cdev.c
1023
if (context->type == FW_ISO_CONTEXT_TRANSMIT)
drivers/firewire/core-cdev.c
1032
struct fw_iso_context *context;
drivers/firewire/core-cdev.c
1060
context = fw_iso_mc_context_create(client->device->card, iso_mc_callback, client);
drivers/firewire/core-cdev.c
1062
context = fw_iso_context_create(client->device->card, a->type, a->channel, a->speed,
drivers/firewire/core-cdev.c
1064
if (IS_ERR(context))
drivers/firewire/core-cdev.c
1065
return PTR_ERR(context);
drivers/firewire/core-cdev.c
1067
context->flags |= FW_ISO_CONTEXT_FLAG_DROP_OVERFLOW_HEADERS;
drivers/firewire/core-cdev.c
1072
fw_iso_context_destroy(context);
drivers/firewire/core-cdev.c
1080
iso_dma_direction(context));
drivers/firewire/core-cdev.c
1082
fw_iso_context_destroy(context);
drivers/firewire/core-cdev.c
1088
client->iso_context = context;
drivers/firewire/core-cdev.c
984
static void iso_callback(struct fw_iso_context *context, u32 cycle,
drivers/firewire/net.c
1105
struct fw_iso_context *context;
drivers/firewire/net.c
1127
context = fw_iso_context_create(dev->card, FW_ISO_CONTEXT_RECEIVE,
drivers/firewire/net.c
1131
if (IS_ERR(context)) {
drivers/firewire/net.c
1132
retval = PTR_ERR(context);
drivers/firewire/net.c
1151
dev->broadcast_rcv_context = context;
drivers/firewire/net.c
1162
retval = fw_iso_context_queue(context, &packet,
drivers/firewire/net.c
1172
retval = fw_iso_context_start(context, -1, 0,
drivers/firewire/net.c
748
static void fwnet_receive_broadcast(struct fw_iso_context *context,
drivers/firewire/net.c
783
context->card->generation, true);
drivers/firewire/ohci.c
1005
static int context_add_buffer(struct context *ctx)
drivers/firewire/ohci.c
1039
static int context_init(struct context *ctx, struct fw_ohci *ohci,
drivers/firewire/ohci.c
1071
static void context_release(struct context *ctx)
drivers/firewire/ohci.c
108
struct context;
drivers/firewire/ohci.c
1083
static struct descriptor *context_get_descriptors(struct context *ctx,
drivers/firewire/ohci.c
110
typedef int (*descriptor_callback_t)(struct context *ctx,
drivers/firewire/ohci.c
1114
static void context_run(struct context *ctx, u32 extra)
drivers/firewire/ohci.c
1126
static void context_append(struct context *ctx,
drivers/firewire/ohci.c
1162
static void context_stop(struct context *ctx)
drivers/firewire/ohci.c
1194
struct context *context = &ctx->context;
drivers/firewire/ohci.c
1195
struct fw_ohci *ohci = context->ohci;
drivers/firewire/ohci.c
1202
d = context_get_descriptors(context, 4, &d_bus);
drivers/firewire/ohci.c
1321
context_append(context, d, z, 4 - z);
drivers/firewire/ohci.c
1323
if (context->running)
drivers/firewire/ohci.c
1324
reg_write(ohci, CONTROL_SET(context->regs), CONTEXT_WAKE);
drivers/firewire/ohci.c
1326
context_run(context, 0);
drivers/firewire/ohci.c
1354
static int handle_at_packet(struct context *context,
drivers/firewire/ohci.c
1358
struct at_context *ctx = container_of(context, struct at_context, context);
drivers/firewire/ohci.c
1359
struct fw_ohci *ohci = ctx->context.ohci;
drivers/firewire/ohci.c
1538
struct fw_ohci *ohci = ctx->context.ohci;
drivers/firewire/ohci.c
1575
struct fw_ohci *ohci = ctx->context.ohci;
drivers/firewire/ohci.c
163
struct context context;
drivers/firewire/ohci.c
170
struct context context;
drivers/firewire/ohci.c
2000
context_stop(&ohci->at_request_ctx.context);
drivers/firewire/ohci.c
2001
context_stop(&ohci->at_response_ctx.context);
drivers/firewire/ohci.c
2781
static int handle_ir_packet_per_buffer(struct context *context,
drivers/firewire/ohci.c
2786
container_of(context, struct iso_context, context);
drivers/firewire/ohci.c
2800
dma_sync_single_range_for_cpu(context->ohci->card.device,
drivers/firewire/ohci.c
2816
static int handle_ir_buffer_fill(struct context *context,
drivers/firewire/ohci.c
2821
container_of(context, struct iso_context, context);
drivers/firewire/ohci.c
2839
dma_sync_single_range_for_cpu(context->ohci->card.device,
drivers/firewire/ohci.c
2859
dma_sync_single_range_for_cpu(ctx->context.ohci->card.device,
drivers/firewire/ohci.c
2872
static inline void sync_it_packet_for_cpu(struct context *context,
drivers/firewire/ohci.c
2891
(context->current_bus & PAGE_MASK)) {
drivers/firewire/ohci.c
2899
dma_sync_single_range_for_cpu(context->ohci->card.device,
drivers/firewire/ohci.c
2909
static int handle_it_packet(struct context *context,
drivers/firewire/ohci.c
2914
container_of(context, struct iso_context, context);
drivers/firewire/ohci.c
2925
sync_it_packet_for_cpu(context, d);
drivers/firewire/ohci.c
3026
ret = context_init(&ctx->context, ohci, regs, callback);
drivers/firewire/ohci.c
3060
struct fw_ohci *ohci = ctx->context.ohci;
drivers/firewire/ohci.c
3065
if (ctx->context.last->branch_address == 0)
drivers/firewire/ohci.c
3078
context_run(&ctx->context, match);
drivers/firewire/ohci.c
3094
reg_write(ohci, CONTEXT_MATCH(ctx->context.regs), match);
drivers/firewire/ohci.c
3095
context_run(&ctx->context, control);
drivers/firewire/ohci.c
3125
context_stop(&ctx->context);
drivers/firewire/ohci.c
3137
context_release(&ctx->context);
drivers/firewire/ohci.c
3198
if (ctx->context.running)
drivers/firewire/ohci.c
3204
if (ctx->context.running)
drivers/firewire/ohci.c
3244
d = context_get_descriptors(&ctx->context, z + header_z, &d_bus);
drivers/firewire/ohci.c
3290
dma_sync_single_range_for_device(ctx->context.ohci->card.device,
drivers/firewire/ohci.c
3308
context_append(&ctx->context, d, z, header_z);
drivers/firewire/ohci.c
3318
struct device *device = ctx->context.ohci->card.device;
drivers/firewire/ohci.c
3341
d = context_get_descriptors(&ctx->context,
drivers/firewire/ohci.c
3388
context_append(&ctx->context, d, z, header_z);
drivers/firewire/ohci.c
3414
d = context_get_descriptors(&ctx->context, 1, &d_bus);
drivers/firewire/ohci.c
3436
dma_sync_single_range_for_device(ctx->context.ohci->card.device,
drivers/firewire/ohci.c
3444
context_append(&ctx->context, d, 1, 0);
drivers/firewire/ohci.c
3457
guard(spinlock_irqsave)(&ctx->context.ohci->lock);
drivers/firewire/ohci.c
3473
struct context *ctx =
drivers/firewire/ohci.c
3474
&container_of(base, struct iso_context, base)->context;
drivers/firewire/ohci.c
3655
err = context_init(&ohci->at_request_ctx.context, ohci,
drivers/firewire/ohci.c
3661
err = context_init(&ohci->at_response_ctx.context, ohci,
drivers/firewire/ohci.c
946
static void context_retire_descriptors(struct context *ctx)
drivers/firewire/ohci.c
990
context_retire_descriptors(&ctx->context);
drivers/firewire/ohci.c
998
context_retire_descriptors(&isoc_ctx->context);
drivers/firmware/cirrus/test/cs_dsp_mock_regmap.c
14
static int cs_dsp_mock_regmap_read(void *context, const void *reg_buf,
drivers/firmware/cirrus/test/cs_dsp_mock_regmap.c
18
struct cs_dsp_test *priv = context;
drivers/firmware/cirrus/test/cs_dsp_mock_regmap.c
26
static int cs_dsp_mock_regmap_gather_write(void *context,
drivers/firmware/cirrus/test/cs_dsp_mock_regmap.c
30
struct cs_dsp_test *priv = context;
drivers/firmware/cirrus/test/cs_dsp_mock_regmap.c
40
static int cs_dsp_mock_regmap_write(void *context, const void *val_buf, size_t val_size)
drivers/firmware/cirrus/test/cs_dsp_mock_regmap.c
42
struct cs_dsp_test *priv = context;
drivers/firmware/efi/runtime-wrappers.c
115
void *context;
drivers/firmware/efi/runtime-wrappers.c
300
args->ACPI_PRM_HANDLER.context);
drivers/firmware/efi/runtime-wrappers.c
587
u64 param_buffer_addr, void *context)
drivers/firmware/efi/runtime-wrappers.c
594
param_buffer_addr, context);
drivers/firmware/psci/psci.c
506
struct arm_cpuidle_irq_context context;
drivers/firmware/psci/psci.c
509
arm_cpuidle_save_irq_context(&context);
drivers/firmware/psci/psci.c
511
arm_cpuidle_restore_irq_context(&context);
drivers/fpga/dfl-n3000-nios.c
478
static int n3000_nios_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/fpga/dfl-n3000-nios.c
480
struct n3000_nios *nn = context;
drivers/fpga/dfl-n3000-nios.c
497
static int n3000_nios_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/fpga/dfl-n3000-nios.c
499
struct n3000_nios *nn = context;
drivers/gnss/usb.c
35
struct gnss_usb *gusb = urb->context;
drivers/gpib/agilent_82357a/agilent_82357a.c
1093
struct gpib_board *board = urb->context;
drivers/gpib/agilent_82357a/agilent_82357a.c
125
struct agilent_82357a_urb_ctx *context = &a_priv->context;
drivers/gpib/agilent_82357a/agilent_82357a.c
146
init_completion(&context->complete);
drivers/gpib/agilent_82357a/agilent_82357a.c
147
context->timed_out = 0;
drivers/gpib/agilent_82357a/agilent_82357a.c
149
&agilent_82357a_bulk_complete, context);
drivers/gpib/agilent_82357a/agilent_82357a.c
161
if (wait_for_completion_interruptible(&context->complete)) {
drivers/gpib/agilent_82357a/agilent_82357a.c
165
if (context->timed_out) {
drivers/gpib/agilent_82357a/agilent_82357a.c
35
struct agilent_82357a_urb_ctx *context = urb->context;
drivers/gpib/agilent_82357a/agilent_82357a.c
37
complete(&context->complete);
drivers/gpib/agilent_82357a/agilent_82357a.c
44
struct agilent_82357a_urb_ctx *context = &a_priv->context;
drivers/gpib/agilent_82357a/agilent_82357a.c
46
context->timed_out = 1;
drivers/gpib/agilent_82357a/agilent_82357a.c
47
complete(&context->complete);
drivers/gpib/agilent_82357a/agilent_82357a.c
57
struct agilent_82357a_urb_ctx *context = &a_priv->context;
drivers/gpib/agilent_82357a/agilent_82357a.c
78
init_completion(&context->complete);
drivers/gpib/agilent_82357a/agilent_82357a.c
79
context->timed_out = 0;
drivers/gpib/agilent_82357a/agilent_82357a.c
81
&agilent_82357a_bulk_complete, context);
drivers/gpib/agilent_82357a/agilent_82357a.c
93
if (wait_for_completion_interruptible(&context->complete)) {
drivers/gpib/agilent_82357a/agilent_82357a.c
97
if (context->timed_out) {
drivers/gpib/agilent_82357a/agilent_82357a.h
137
struct agilent_82357a_urb_ctx context;
drivers/gpib/lpvo_usb_gpib/lpvo_usb_gpib.c
1352
dev = urb->context;
drivers/gpib/lpvo_usb_gpib/lpvo_usb_gpib.c
1565
dev = urb->context;
drivers/gpib/ni_usb/ni_usb_gpib.c
100
context->timed_out = 1;
drivers/gpib/ni_usb/ni_usb_gpib.c
101
complete(&context->complete);
drivers/gpib/ni_usb/ni_usb_gpib.c
112
struct ni_usb_urb_ctx *context = &ni_priv->context;
drivers/gpib/ni_usb/ni_usb_gpib.c
131
init_completion(&context->complete);
drivers/gpib/ni_usb/ni_usb_gpib.c
132
context->timed_out = 0;
drivers/gpib/ni_usb/ni_usb_gpib.c
134
&ni_usb_bulk_complete, context);
drivers/gpib/ni_usb/ni_usb_gpib.c
150
wait_for_completion(&context->complete); // wait for ni_usb_bulk_complete
drivers/gpib/ni_usb/ni_usb_gpib.c
151
if (context->timed_out) {
drivers/gpib/ni_usb/ni_usb_gpib.c
1818
struct gpib_board *board = urb->context;
drivers/gpib/ni_usb/ni_usb_gpib.c
198
struct ni_usb_urb_ctx *context = &ni_priv->context;
drivers/gpib/ni_usb/ni_usb_gpib.c
217
init_completion(&context->complete);
drivers/gpib/ni_usb/ni_usb_gpib.c
218
context->timed_out = 0;
drivers/gpib/ni_usb/ni_usb_gpib.c
220
&ni_usb_bulk_complete, context);
drivers/gpib/ni_usb/ni_usb_gpib.c
236
if (wait_for_completion_interruptible(&context->complete)) {
drivers/gpib/ni_usb/ni_usb_gpib.c
250
wait_for_completion(&context->complete);
drivers/gpib/ni_usb/ni_usb_gpib.c
253
wait_for_completion(&context->complete);
drivers/gpib/ni_usb/ni_usb_gpib.c
255
if (context->timed_out) {
drivers/gpib/ni_usb/ni_usb_gpib.c
89
struct ni_usb_urb_ctx *context = urb->context;
drivers/gpib/ni_usb/ni_usb_gpib.c
91
complete(&context->complete);
drivers/gpib/ni_usb/ni_usb_gpib.c
98
struct ni_usb_urb_ctx *context = &ni_priv->context;
drivers/gpib/ni_usb/ni_usb_gpib.h
80
struct ni_usb_urb_ctx context;
drivers/gpio/gpio-davinci.c
598
struct davinci_gpio_regs *context;
drivers/gpio/gpio-davinci.c
607
context = &chips->context[bank];
drivers/gpio/gpio-davinci.c
608
context->dir = readl_relaxed(&g->dir);
drivers/gpio/gpio-davinci.c
609
context->set_data = readl_relaxed(&g->set_data);
drivers/gpio/gpio-davinci.c
610
context->set_rising = readl_relaxed(&g->set_rising);
drivers/gpio/gpio-davinci.c
611
context->set_falling = readl_relaxed(&g->set_falling);
drivers/gpio/gpio-davinci.c
622
struct davinci_gpio_regs *context;
drivers/gpio/gpio-davinci.c
63
struct davinci_gpio_regs context[MAX_REGS_BANKS];
drivers/gpio/gpio-davinci.c
633
context = &chips->context[bank];
drivers/gpio/gpio-davinci.c
634
if (readl_relaxed(&g->dir) != context->dir)
drivers/gpio/gpio-davinci.c
635
writel_relaxed(context->dir, &g->dir);
drivers/gpio/gpio-davinci.c
636
if (readl_relaxed(&g->set_data) != context->set_data)
drivers/gpio/gpio-davinci.c
637
writel_relaxed(context->set_data, &g->set_data);
drivers/gpio/gpio-davinci.c
638
if (readl_relaxed(&g->set_rising) != context->set_rising)
drivers/gpio/gpio-davinci.c
639
writel_relaxed(context->set_rising, &g->set_rising);
drivers/gpio/gpio-davinci.c
640
if (readl_relaxed(&g->set_falling) != context->set_falling)
drivers/gpio/gpio-davinci.c
641
writel_relaxed(context->set_falling, &g->set_falling);
drivers/gpio/gpio-ljca.c
284
static void ljca_gpio_event_cb(void *context, u8 cmd, const void *evt_data,
drivers/gpio/gpio-ljca.c
288
struct ljca_gpio_dev *ljca_gpio = context;
drivers/gpio/gpio-omap.c
1027
bank->context.oe = readl_relaxed(bank->base + bank->regs->direction);
drivers/gpio/gpio-omap.c
1101
p->context.sysconfig = readl_relaxed(base + regs->sysconfig);
drivers/gpio/gpio-omap.c
1102
p->context.ctrl = readl_relaxed(base + regs->ctrl);
drivers/gpio/gpio-omap.c
1103
p->context.oe = readl_relaxed(base + regs->direction);
drivers/gpio/gpio-omap.c
1104
p->context.wake_en = readl_relaxed(base + regs->wkup_en);
drivers/gpio/gpio-omap.c
1105
p->context.leveldetect0 = readl_relaxed(base + regs->leveldetect0);
drivers/gpio/gpio-omap.c
1106
p->context.leveldetect1 = readl_relaxed(base + regs->leveldetect1);
drivers/gpio/gpio-omap.c
1107
p->context.risingdetect = readl_relaxed(base + regs->risingdetect);
drivers/gpio/gpio-omap.c
1108
p->context.fallingdetect = readl_relaxed(base + regs->fallingdetect);
drivers/gpio/gpio-omap.c
1109
p->context.irqenable1 = readl_relaxed(base + regs->irqenable);
drivers/gpio/gpio-omap.c
1110
p->context.irqenable2 = readl_relaxed(base + regs->irqenable2);
drivers/gpio/gpio-omap.c
1111
p->context.dataout = readl_relaxed(base + regs->dataout);
drivers/gpio/gpio-omap.c
112
bank->context.oe = omap_gpio_rmw(bank->base + bank->regs->direction,
drivers/gpio/gpio-omap.c
1121
writel_relaxed(bank->context.sysconfig, base + regs->sysconfig);
drivers/gpio/gpio-omap.c
1122
writel_relaxed(bank->context.wake_en, base + regs->wkup_en);
drivers/gpio/gpio-omap.c
1123
writel_relaxed(bank->context.ctrl, base + regs->ctrl);
drivers/gpio/gpio-omap.c
1124
writel_relaxed(bank->context.leveldetect0, base + regs->leveldetect0);
drivers/gpio/gpio-omap.c
1125
writel_relaxed(bank->context.leveldetect1, base + regs->leveldetect1);
drivers/gpio/gpio-omap.c
1126
writel_relaxed(bank->context.risingdetect, base + regs->risingdetect);
drivers/gpio/gpio-omap.c
1127
writel_relaxed(bank->context.fallingdetect, base + regs->fallingdetect);
drivers/gpio/gpio-omap.c
1128
writel_relaxed(bank->context.dataout, base + regs->dataout);
drivers/gpio/gpio-omap.c
1129
writel_relaxed(bank->context.oe, base + regs->direction);
drivers/gpio/gpio-omap.c
1132
writel_relaxed(bank->context.debounce, base + regs->debounce);
drivers/gpio/gpio-omap.c
1133
writel_relaxed(bank->context.debounce_en,
drivers/gpio/gpio-omap.c
1137
writel_relaxed(bank->context.irqenable1, base + regs->irqenable);
drivers/gpio/gpio-omap.c
1138
writel_relaxed(bank->context.irqenable2, base + regs->irqenable2);
drivers/gpio/gpio-omap.c
1151
bank->context.sysconfig = readl_relaxed(base + bank->regs->sysconfig);
drivers/gpio/gpio-omap.c
1157
mask = bank->enabled_non_wakeup_gpios & bank->context.fallingdetect;
drivers/gpio/gpio-omap.c
1158
mask &= ~bank->context.risingdetect;
drivers/gpio/gpio-omap.c
1162
mask = bank->enabled_non_wakeup_gpios & bank->context.risingdetect;
drivers/gpio/gpio-omap.c
1163
mask &= ~bank->context.fallingdetect;
drivers/gpio/gpio-omap.c
1222
writel_relaxed(bank->context.fallingdetect,
drivers/gpio/gpio-omap.c
1224
writel_relaxed(bank->context.risingdetect,
drivers/gpio/gpio-omap.c
1243
gen0 = l & bank->context.fallingdetect;
drivers/gpio/gpio-omap.c
1246
gen1 = l & bank->context.risingdetect;
drivers/gpio/gpio-omap.c
1250
gen = l & (~(bank->context.fallingdetect) &
drivers/gpio/gpio-omap.c
1251
~(bank->context.risingdetect));
drivers/gpio/gpio-omap.c
126
bank->context.dataout |= l;
drivers/gpio/gpio-omap.c
129
bank->context.dataout &= ~l;
drivers/gpio/gpio-omap.c
139
bank->context.dataout = omap_gpio_rmw(bank->base + bank->regs->dataout,
drivers/gpio/gpio-omap.c
216
bank->context.debounce = debounce;
drivers/gpio/gpio-omap.c
217
bank->context.debounce_en = val;
drivers/gpio/gpio-omap.c
244
bank->context.debounce_en &= ~gpio_bit;
drivers/gpio/gpio-omap.c
245
writel_relaxed(bank->context.debounce_en,
drivers/gpio/gpio-omap.c
249
bank->context.debounce = 0;
drivers/gpio/gpio-omap.c
250
writel_relaxed(bank->context.debounce, bank->base +
drivers/gpio/gpio-omap.c
294
bank->context.leveldetect0 =
drivers/gpio/gpio-omap.c
296
bank->context.leveldetect1 =
drivers/gpio/gpio-omap.c
298
bank->context.risingdetect =
drivers/gpio/gpio-omap.c
300
bank->context.fallingdetect =
drivers/gpio/gpio-omap.c
303
bank->level_mask = bank->context.leveldetect0 |
drivers/gpio/gpio-omap.c
304
bank->context.leveldetect1;
drivers/gpio/gpio-omap.c
391
bank->context.ctrl = ctrl;
drivers/gpio/gpio-omap.c
405
bank->context.ctrl = ctrl;
drivers/gpio/gpio-omap.c
516
bank->context.irqenable1 |= gpio_mask;
drivers/gpio/gpio-omap.c
519
bank->context.irqenable1 &= ~gpio_mask;
drivers/gpio/gpio-omap.c
523
bank->context.irqenable1 =
drivers/gpio/gpio-omap.c
536
bank->context.wake_en =
drivers/gpio/gpio-omap.c
55
struct gpio_regs context;
drivers/gpio/gpio-omap.c
758
writel_relaxed(0xffff & ~bank->context.wake_en, mask_reg);
drivers/gpio/gpio-omap.c
772
writel_relaxed(bank->context.wake_en, mask_reg);
drivers/gpio/gpio-omap.c
983
bank->context.dataout = l;
drivers/gpio/gpio-sch.c
261
static u32 sch_gpio_gpe_handler(acpi_handle gpe_device, u32 gpe, void *context)
drivers/gpio/gpio-sch.c
263
struct sch_gpio *sch = context;
drivers/gpio/gpio-zynq.c
132
struct gpio_regs context;
drivers/gpio/gpio-zynq.c
681
gpio->context.datalsw[bank_num] =
drivers/gpio/gpio-zynq.c
684
gpio->context.datamsw[bank_num] =
drivers/gpio/gpio-zynq.c
687
gpio->context.dirm[bank_num] = readl_relaxed(gpio->base_addr +
drivers/gpio/gpio-zynq.c
689
gpio->context.int_en[bank_num] = readl_relaxed(gpio->base_addr +
drivers/gpio/gpio-zynq.c
691
gpio->context.int_type[bank_num] =
drivers/gpio/gpio-zynq.c
694
gpio->context.int_polarity[bank_num] =
drivers/gpio/gpio-zynq.c
697
gpio->context.int_any[bank_num] =
drivers/gpio/gpio-zynq.c
712
writel_relaxed(gpio->context.datalsw[bank_num],
drivers/gpio/gpio-zynq.c
715
writel_relaxed(gpio->context.datamsw[bank_num],
drivers/gpio/gpio-zynq.c
718
writel_relaxed(gpio->context.dirm[bank_num],
drivers/gpio/gpio-zynq.c
721
writel_relaxed(gpio->context.int_type[bank_num],
drivers/gpio/gpio-zynq.c
724
writel_relaxed(gpio->context.int_polarity[bank_num],
drivers/gpio/gpio-zynq.c
727
writel_relaxed(gpio->context.int_any[bank_num],
drivers/gpio/gpio-zynq.c
730
writel_relaxed(~(gpio->context.int_en[bank_num]),
drivers/gpio/gpiolib-acpi-core.c
344
void *context)
drivers/gpio/gpiolib-acpi-core.c
346
struct acpi_gpio_chip *acpi_gpio = context;
drivers/gpu/drm/amd/amdgpu/amdgpu_aca.c
928
struct aca_dump_context context = {
drivers/gpu/drm/amd/amdgpu/amdgpu_aca.c
933
return aca_banks_update(adev, type, handler_aca_bank_dump, NULL, (void *)&context);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h
193
struct amdgpu_amdkfd_fence *amdgpu_amdkfd_fence_create(u64 context,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_fence.c
63
struct amdgpu_amdkfd_fence *amdgpu_amdkfd_fence_create(u64 context,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_fence.c
82
context, atomic_inc_return(&fence_seq));
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
3083
process_info->eviction_fence->base.context,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
383
dma_resv_replace_fences(bo->tbo.base.resv, ef->base.context,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
411
dma_resv_replace_fences(resv, fence->context, stub,
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
128
adev->psp.xgmi_context.context.bin_desc.feature_version,
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
129
adev->psp.xgmi_context.context.bin_desc.fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
131
adev->psp.ras_context.context.bin_desc.feature_version,
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
132
adev->psp.ras_context.context.bin_desc.fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
134
adev->psp.hdcp_context.context.bin_desc.feature_version,
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
135
adev->psp.hdcp_context.context.bin_desc.fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
137
adev->psp.dtm_context.context.bin_desc.feature_version,
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
138
adev->psp.dtm_context.context.bin_desc.fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
140
adev->psp.rap_context.context.bin_desc.feature_version,
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
141
adev->psp.rap_context.context.bin_desc.fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
144
adev->psp.securedisplay_context.context.bin_desc.feature_version,
drivers/gpu/drm/amd/amdgpu/amdgpu_dev_coredump.c
145
adev->psp.securedisplay_context.context.bin_desc.fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
746
else if (fence->context == af->context)
drivers/gpu/drm/amd/amdgpu/amdgpu_fence.c
806
(!guilty_fence || (fence->context != guilty_fence->context))) {
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
152
job->base.s_fence->finished.context : 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
162
af->context = fence_ctx;
drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c
164
job->hw_vm_fence->context = fence_ctx;
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
274
((*id)->last_flush->context != fence_context &&
drivers/gpu/drm/amd/amdgpu/amdgpu_ids.c
347
((*id)->last_flush->context != fence_context &&
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
289
fw_info->ver = adev->psp.xgmi_context.context.bin_desc.fw_version;
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
290
fw_info->feature = adev->psp.xgmi_context.context
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
294
fw_info->ver = adev->psp.ras_context.context.bin_desc.fw_version;
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
295
fw_info->feature = adev->psp.ras_context.context
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
299
fw_info->ver = adev->psp.hdcp_context.context.bin_desc.fw_version;
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
300
fw_info->feature = adev->psp.hdcp_context.context
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
304
fw_info->ver = adev->psp.dtm_context.context.bin_desc.fw_version;
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
305
fw_info->feature = adev->psp.dtm_context.context
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
309
fw_info->ver = adev->psp.rap_context.context.bin_desc.fw_version;
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
310
fw_info->feature = adev->psp.rap_context.context
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
314
fw_info->ver = adev->psp.securedisplay_context.context.bin_desc.fw_version;
drivers/gpu/drm/amd/amdgpu/amdgpu_kms.c
316
adev->psp.securedisplay_context.context.bin_desc
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1258
int psp_ta_unload(struct psp_context *psp, struct ta_context *context)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1263
psp_prep_ta_unload_cmd_buf(cmd, context->session_id);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1267
context->resp_status = cmd->resp.status;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1322
struct ta_context *context)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1324
cmd->cmd_id = context->ta_load_type;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1327
cmd->cmd.cmd_load_ta.app_len = context->bin_desc.size_bytes;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1330
lower_32_bits(context->mem_context.shared_mc_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1332
upper_32_bits(context->mem_context.shared_mc_addr);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1333
cmd->cmd.cmd_load_ta.cmd_buf_len = context->mem_context.shared_mem_size;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1362
struct ta_context *context)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1367
psp_prep_ta_invoke_cmd_buf(cmd, ta_cmd_id, context->session_id);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1372
context->resp_status = cmd->resp.status;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1379
int psp_ta_load(struct psp_context *psp, struct ta_context *context)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1386
psp_copy_fw(psp, context->bin_desc.start_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1387
context->bin_desc.size_bytes);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1390
context->mem_context.shared_bo)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1391
context->mem_context.shared_mc_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1392
amdgpu_bo_fb_aper_addr(context->mem_context.shared_bo);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1394
psp_prep_ta_load_cmd_buf(cmd, psp->fw_pri_mc_addr, context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1399
context->resp_status = cmd->resp.status;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1402
context->session_id = cmd->resp.session_id;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1411
return psp_ta_invoke(psp, ta_cmd_id, &psp->xgmi_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1425
if (!psp->xgmi_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1428
ret = psp_ta_unload(psp, &psp->xgmi_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1430
psp->xgmi_context.context.initialized = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1441
!psp->xgmi_context.context.bin_desc.size_bytes ||
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1442
!psp->xgmi_context.context.bin_desc.start_addr)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1448
psp->xgmi_context.context.mem_context.shared_mem_size = PSP_XGMI_SHARED_MEM_SIZE;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1449
psp->xgmi_context.context.ta_load_type = GFX_CMD_ID_LOAD_TA;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1451
if (!psp->xgmi_context.context.mem_context.shared_buf) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1452
ret = psp_ta_init_shared_buf(psp, &psp->xgmi_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1458
ret = psp_ta_load(psp, &psp->xgmi_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1460
psp->xgmi_context.context.initialized = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1466
xgmi_cmd = (struct ta_xgmi_shared_memory *)(psp->xgmi_context.context.mem_context.shared_buf);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1483
xgmi_cmd = (struct ta_xgmi_shared_memory *)psp->xgmi_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1503
xgmi_cmd = (struct ta_xgmi_shared_memory *)psp->xgmi_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1522
psp->xgmi_context.context.bin_desc.fw_version >= 0x2000000b) ||
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1600
xgmi_cmd = (struct ta_xgmi_shared_memory *)psp->xgmi_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1718
xgmi_cmd = (struct ta_xgmi_shared_memory *)psp->xgmi_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1740
(struct ta_ras_shared_memory *)psp->ras_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1776
ras_cmd = (struct ta_ras_shared_memory *)psp->ras_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1830
ras_cmd = (struct ta_ras_shared_memory *)psp->ras_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1838
ret = psp_ta_invoke(psp, ta_cmd_id, &psp->ras_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1869
if (!psp->ras_context.context.initialized || !info)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1891
if (!psp->ras_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1894
ret = psp_ta_unload(psp, &psp->ras_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1896
psp->ras_context.context.initialized = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1916
if (!adev->psp.ras_context.context.bin_desc.size_bytes ||
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1917
!adev->psp.ras_context.context.bin_desc.start_addr) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1975
psp->ras_context.context.mem_context.shared_mem_size = PSP_RAS_SHARED_MEM_SIZE;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1976
psp->ras_context.context.ta_load_type = GFX_CMD_ID_LOAD_TA;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1978
if (!psp->ras_context.context.mem_context.shared_buf) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1979
ret = psp_ta_init_shared_buf(psp, &psp->ras_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
1984
ras_cmd = (struct ta_ras_shared_memory *)psp->ras_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2000
ret = psp_ta_load(psp, &psp->ras_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2003
psp->ras_context.context.initialized = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2010
psp->ras_context.context.initialized = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2024
if (!psp->ras_context.context.initialized || !info)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2073
if (!psp->ras_context.context.initialized ||
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2099
if (!psp->hdcp_context.context.bin_desc.size_bytes ||
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2100
!psp->hdcp_context.context.bin_desc.start_addr) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2105
psp->hdcp_context.context.mem_context.shared_mem_size = PSP_HDCP_SHARED_MEM_SIZE;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2106
psp->hdcp_context.context.ta_load_type = GFX_CMD_ID_LOAD_TA;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2108
if (!psp->hdcp_context.context.mem_context.shared_buf) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2109
ret = psp_ta_init_shared_buf(psp, &psp->hdcp_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2114
ret = psp_ta_load(psp, &psp->hdcp_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2116
psp->hdcp_context.context.initialized = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2131
if (!psp->hdcp_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2134
return psp_ta_invoke(psp, ta_cmd_id, &psp->hdcp_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2147
if (!psp->hdcp_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2150
ret = psp_ta_unload(psp, &psp->hdcp_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2152
psp->hdcp_context.context.initialized = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2173
if (!psp->dtm_context.context.bin_desc.size_bytes ||
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2174
!psp->dtm_context.context.bin_desc.start_addr) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2179
psp->dtm_context.context.mem_context.shared_mem_size = PSP_DTM_SHARED_MEM_SIZE;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2180
psp->dtm_context.context.ta_load_type = GFX_CMD_ID_LOAD_TA;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2182
if (!psp->dtm_context.context.mem_context.shared_buf) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2183
ret = psp_ta_init_shared_buf(psp, &psp->dtm_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2188
ret = psp_ta_load(psp, &psp->dtm_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2190
psp->dtm_context.context.initialized = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2205
if (!psp->dtm_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2208
return psp_ta_invoke(psp, ta_cmd_id, &psp->dtm_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2221
if (!psp->dtm_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2224
ret = psp_ta_unload(psp, &psp->dtm_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2226
psp->dtm_context.context.initialized = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2244
if (!psp->rap_context.context.bin_desc.size_bytes ||
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2245
!psp->rap_context.context.bin_desc.start_addr) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2250
psp->rap_context.context.mem_context.shared_mem_size = PSP_RAP_SHARED_MEM_SIZE;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2251
psp->rap_context.context.ta_load_type = GFX_CMD_ID_LOAD_TA;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2253
if (!psp->rap_context.context.mem_context.shared_buf) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2254
ret = psp_ta_init_shared_buf(psp, &psp->rap_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2259
ret = psp_ta_load(psp, &psp->rap_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2261
psp->rap_context.context.initialized = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2270
psp_ta_free_shared_buf(&psp->rap_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2285
if (!psp->rap_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2288
ret = psp_ta_unload(psp, &psp->rap_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2290
psp->rap_context.context.initialized = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2300
if (!psp->rap_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2310
psp->rap_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2316
ret = psp_ta_invoke(psp, rap_cmd->cmd_id, &psp->rap_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2346
if (!psp->securedisplay_context.context.bin_desc.size_bytes ||
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2347
!psp->securedisplay_context.context.bin_desc.start_addr) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2353
psp->securedisplay_context.context.mem_context.shared_mem_size =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2355
psp->securedisplay_context.context.ta_load_type = GFX_CMD_ID_LOAD_TA;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2357
if (!psp->securedisplay_context.context.initialized) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2359
&psp->securedisplay_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2364
ret = psp_ta_load(psp, &psp->securedisplay_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2365
if (!ret && !psp->securedisplay_context.context.resp_status) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2366
psp->securedisplay_context.context.initialized = true;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2370
psp->securedisplay_context.context.bin_desc.size_bytes = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2386
psp_ta_free_shared_buf(&psp->securedisplay_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2396
psp->securedisplay_context.context.bin_desc.size_bytes = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2412
if (!psp->securedisplay_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2415
ret = psp_ta_unload(psp, &psp->securedisplay_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2417
psp->securedisplay_context.context.initialized = false;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2426
if (!psp->securedisplay_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
2434
ret = psp_ta_invoke(psp, ta_cmd_id, &psp->securedisplay_context.context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
307
psp_ta_free_shared_buf(&psp->xgmi_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
310
psp_ta_free_shared_buf(&psp->ras_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
313
psp_ta_free_shared_buf(&psp->hdcp_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
316
psp_ta_free_shared_buf(&psp->dtm_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
319
psp_ta_free_shared_buf(&psp->rap_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
322
psp_ta_free_shared_buf(&psp->securedisplay_context.context.mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3267
psp->xgmi_context.context.initialized) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3844
psp->xgmi_context.context.bin_desc.fw_version = le32_to_cpu(desc->fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3845
psp->xgmi_context.context.bin_desc.size_bytes = le32_to_cpu(desc->size_bytes);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3846
psp->xgmi_context.context.bin_desc.start_addr = ucode_start_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3849
psp->ras_context.context.bin_desc.fw_version = le32_to_cpu(desc->fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3850
psp->ras_context.context.bin_desc.size_bytes = le32_to_cpu(desc->size_bytes);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3851
psp->ras_context.context.bin_desc.start_addr = ucode_start_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3854
psp->hdcp_context.context.bin_desc.fw_version = le32_to_cpu(desc->fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3855
psp->hdcp_context.context.bin_desc.size_bytes = le32_to_cpu(desc->size_bytes);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3856
psp->hdcp_context.context.bin_desc.start_addr = ucode_start_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3859
psp->dtm_context.context.bin_desc.fw_version = le32_to_cpu(desc->fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3860
psp->dtm_context.context.bin_desc.size_bytes = le32_to_cpu(desc->size_bytes);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3861
psp->dtm_context.context.bin_desc.start_addr = ucode_start_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3864
psp->rap_context.context.bin_desc.fw_version = le32_to_cpu(desc->fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3865
psp->rap_context.context.bin_desc.size_bytes = le32_to_cpu(desc->size_bytes);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3866
psp->rap_context.context.bin_desc.start_addr = ucode_start_addr;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3869
psp->securedisplay_context.context.bin_desc.fw_version =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3871
psp->securedisplay_context.context.bin_desc.size_bytes =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3873
psp->securedisplay_context.context.bin_desc.start_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3894
adev->psp.xgmi_context.context.bin_desc.fw_version =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3896
adev->psp.xgmi_context.context.bin_desc.size_bytes =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3898
adev->psp.xgmi_context.context.bin_desc.start_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3902
adev->psp.ras_context.context.bin_desc.fw_version =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3904
adev->psp.ras_context.context.bin_desc.size_bytes =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3906
adev->psp.ras_context.context.bin_desc.start_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3907
(uint8_t *)adev->psp.xgmi_context.context.bin_desc.start_addr +
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3910
adev->psp.hdcp_context.context.bin_desc.fw_version =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3912
adev->psp.hdcp_context.context.bin_desc.size_bytes =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3914
adev->psp.hdcp_context.context.bin_desc.start_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3918
adev->psp.dtm_context.context.bin_desc.fw_version =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3920
adev->psp.dtm_context.context.bin_desc.size_bytes =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3922
adev->psp.dtm_context.context.bin_desc.start_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3923
(uint8_t *)adev->psp.hdcp_context.context.bin_desc.start_addr +
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3926
adev->psp.securedisplay_context.context.bin_desc.fw_version =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3928
adev->psp.securedisplay_context.context.bin_desc.size_bytes =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3930
adev->psp.securedisplay_context.context.bin_desc.start_addr =
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.c
3931
(uint8_t *)adev->psp.hdcp_context.context.bin_desc.start_addr +
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.h
225
struct ta_context context;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.h
230
struct ta_context context;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.h
237
struct ta_context context;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.h
559
int psp_ta_unload(struct psp_context *psp, struct ta_context *context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.h
560
int psp_ta_load(struct psp_context *psp, struct ta_context *context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp.h
563
struct ta_context *context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
154
struct ta_context *context = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
179
set_ta_context_funcs(psp, ta_type, &context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
191
if (!context->mem_context.shared_buf) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
192
ret = psp_ta_init_shared_buf(psp, &context->mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
200
if (ret || context->resp_status) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
203
ret, context->resp_status);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
210
context->ta_type = ta_type;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
211
context->bin_desc.fw_version = get_bin_version(ta_bin);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
212
context->bin_desc.size_bytes = ta_bin_len;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
213
context->bin_desc.start_addr = ta_bin;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
222
if (ret || context->resp_status) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
224
ret, context->resp_status);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
230
if (copy_to_user((char *)buf, (void *)&context->session_id, sizeof(uint32_t)))
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
235
if (ret && context->mem_context.shared_buf)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
236
psp_ta_free_shared_buf(&context->mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
252
struct ta_context *context = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
267
set_ta_context_funcs(psp, ta_type, &context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
268
context->session_id = ta_id;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
276
if (ret || context->resp_status) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
278
ret, context->resp_status);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
283
if (context->mem_context.shared_buf)
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
284
psp_ta_free_shared_buf(&context->mem_context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
301
struct ta_context *context = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
330
set_ta_context_funcs(psp, ta_type, &context);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
332
if (!context || !context->initialized) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
344
context->session_id = ta_id;
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
347
ret = prep_ta_mem_context(&context->mem_context, shared_buf, shared_buf_len);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
352
if (ret || context->resp_status) {
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
354
ret, context->resp_status);
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
361
if (copy_to_user((char *)&buf[copy_pos], context->mem_context.shared_buf, shared_buf_len))
drivers/gpu/drm/amd/amdgpu/amdgpu_psp_ta.c
78
*pcontext = &psp->ras_context.context;
drivers/gpu/drm/amd/amdgpu/amdgpu_rap.c
120
if (!adev->psp.rap_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_rap.c
79
adev->psp.rap_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
62
struct amdgpu_reset_context *context);
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
64
struct amdgpu_reset_context *context);
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
66
struct amdgpu_reset_context *context);
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
68
struct amdgpu_reset_context *context);
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
70
struct amdgpu_reset_context *context);
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
85
struct amdgpu_reset_context *context);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
152
u64 context;
drivers/gpu/drm/amd/amdgpu/amdgpu_securedisplay.c
180
if (!adev->psp.securedisplay_context.context.initialized)
drivers/gpu/drm/amd/amdgpu/amdgpu_securedisplay.c
83
*cmd = (struct ta_securedisplay_cmd *)psp->securedisplay_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c
137
hash_for_each_possible(sync->fences, e, node, f->context) {
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c
144
if (likely(e->fence->context == f->context)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_sync.c
176
hash_add(sync->fences, &e->node, f->context);
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
171
__field(u64, context)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
180
__entry->context = job->base.s_fence->finished.context;
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
186
__get_str(timeline), __entry->context,
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
195
__field(u64, context)
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
203
__entry->context = job->base.s_fence->finished.context;
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
209
__get_str(timeline), __entry->context,
drivers/gpu/drm/amd/amdgpu/amdgpu_trace.h
558
__entry->ctx = fence->context;
drivers/gpu/drm/amd/amdgpu/amdgpu_ucode.c
792
FW_VERSION_ATTR(ta_ras_fw_version, 0444, psp.ras_context.context.bin_desc.fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_ucode.c
793
FW_VERSION_ATTR(ta_xgmi_fw_version, 0444, psp.xgmi_context.context.bin_desc.fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_userq.c
1359
f->context, f->seqno);
drivers/gpu/drm/amd/amdgpu/amdgpu_userq.c
440
f->context, f->seqno);
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.c
104
fence_drv->context = dma_fence_context_alloc(1);
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.c
256
fence_drv->context, seq);
drivers/gpu/drm/amd/amdgpu/amdgpu_userq_fence.h
50
u64 context;
drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c
582
adev->psp.ras_context.context.bin_desc.fw_version);
drivers/gpu/drm/amd/amdgpu/amdgpu_virt.c
584
adev->psp.xgmi_context.context.bin_desc.fw_version);
drivers/gpu/drm/amd/amdgpu/psp_v10_0.c
63
(psp->securedisplay_context.context.bin_desc.fw_version >=
drivers/gpu/drm/amd/amdgpu/psp_v10_0.c
65
adev->psp.securedisplay_context.context.bin_desc.size_bytes = 0;
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
108
adev->psp.securedisplay_context.context.bin_desc.size_bytes = 0;
drivers/gpu/drm/amd/amdgpu/psp_v11_0.c
120
adev->psp.securedisplay_context.context.bin_desc.size_bytes = 0;
drivers/gpu/drm/amd/amdgpu/psp_v12_0.c
64
adev->psp.securedisplay_context.context.bin_desc.size_bytes = 0;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
10889
if (dm_state && dm_state->context) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
10890
dc_state = dm_state->context;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
11582
dm_state->context,
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
11625
dm_state->context,
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
12043
dm_state->context)) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
12115
dm_state->context)) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
12870
ret = compute_mst_dsc_configs_for_state(state, dm_state->context, vars);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
12879
ret = dm_update_mst_vcpi_slots_for_dsc(state, dm_state->context, vars);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
12896
status = dc_validate_global_state(dc, dm_state->context, DC_VALIDATE_MODE_ONLY);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
3084
struct dc_state *context __free(state_release) = NULL;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
3092
context = dc_state_create_current_copy(dc);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
3093
if (context == NULL)
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
3097
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
3098
struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
3107
if (!dc_state_rem_all_planes_for_stream(dc, del_streams[i], context))
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
3110
res = dc_state_remove_stream(dc, context, del_streams[i]);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
3115
params.streams = context->streams;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
3116
params.stream_count = context->stream_count;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
3568
dc_state_release(dm_state->context);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
3569
dm_state->context = dc_state_create(dm->dc, NULL);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
4891
if (old_state && old_state->context)
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
4892
new_state->context = dc_state_create_copy(old_state->context);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
4894
if (!new_state->context) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
4907
if (dm_state && dm_state->context)
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
4908
dc_state_release(dm_state->context);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
4944
state->context = dc_state_create_current_copy(adev->dm.dc);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
4945
if (!state->context) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
4957
dc_state_release(state->context);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
4964
dc_state_release(state->context);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
4972
dc_state_release(state->context);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
6947
static void dm_enable_per_frame_crtc_master_sync(struct dc_state *context)
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
6952
if (context->stream_count < 2)
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
6954
for (i = 0; i < context->stream_count ; i++) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
6955
if (!context->streams[i])
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
6964
set_master_stream(context->streams, context->stream_count);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
6966
for (i = 0; i < context->stream_count ; i++) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
6967
stream = context->streams[i];
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.h
1011
struct dc_state *context;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_crc.c
365
if (!psp->securedisplay_context.context.initialized) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_hdcp.c
113
if (!psp->hdcp_context.context.initialized) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_hdcp.c
118
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_hdcp.c
138
if (!psp->hdcp_context.context.initialized) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_hdcp.c
143
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_hdcp.c
505
if (!psp->dtm_context.context.initialized) {
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_hdcp.c
511
dtm_cmd = (struct ta_dtm_shared_memory *)psp->dtm_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_mst_types.c
1686
memcpy(local_dc_state, dm_state->context, sizeof(struct dc_state));
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_mst_types.c
1689
struct dc_stream_state *stream = dm_state->context->streams[i];
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_mst_types.c
1738
struct dc_stream_state *stream = dm_state->context->streams[i];
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_mst_types.c
1757
struct dc_stream_state *stream = dm_state->context->streams[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c
55
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c
60
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c
61
const struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c
62
const struct dc_stream_status *stream_status = &context->stream_status[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c
67
if (dc_state_get_stream_subvp_type(context, stream) == SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c
79
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c
84
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/clk_mgr.c
85
const struct dc_stream_status stream_status = context->stream_status[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
165
uint32_t dce_get_max_pixel_clock_for_all_paths(struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
171
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
196
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
201
int max_pix_clk = dce_get_max_pixel_clock_for_all_paths(context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
208
if (context->bw_ctx.bw.dce.dispclk_khz >
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
218
< context->bw_ctx.bw.dce.dispclk_khz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
390
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
392
struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
394
dce110_fill_display_configs(context, pp_display_cfg);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
401
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
408
int patched_disp_clk = MIN(max_disp_clk, context->bw_ctx.bw.dce.dispclk_khz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
410
level_change_req.power_level = dce_get_required_clocks_state(clk_mgr_base, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.c
422
dce_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.h
37
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce100/dce_clk_mgr.h
39
uint32_t dce_get_max_pixel_clock_for_all_paths(struct dc_state *context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
120
const struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
123
struct dc *dc = context->clk_mgr->ctx->dc;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
127
pp_display_cfg->avail_mclk_switch_time_us = dce110_get_min_vblank_time_us(context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
132
for (j = 0; j < context->stream_count; j++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
135
const struct dc_stream_state *stream = context->streams[j];
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
141
if (stream == context->res_ctx.pipe_ctx[k].stream) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
142
pipe_ctx = &context->res_ctx.pipe_ctx[k];
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
179
&context->streams[0]->timing;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
197
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
199
struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
206
context->bw_ctx.bw.dce.all_displays_in_sync;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
208
context->bw_ctx.bw.dce.nbp_state_change_enable == false;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
210
context->bw_ctx.bw.dce.cpuc_state_change_enable == false;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
212
context->bw_ctx.bw.dce.cpup_state_change_enable == false;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
214
context->bw_ctx.bw.dce.blackout_recovery_time_us;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
222
ASICREV_IS_VEGA20_P(dc->ctx->asic_id.hw_internal_rev) && (context->stream_count >= 2)) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
228
pp_display_cfg->min_memory_clock_khz = context->bw_ctx.bw.dce.yclk_khz
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
234
context->bw_ctx.bw.dce.sclk_khz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
243
pp_display_cfg->min_dcfclock_khz = (context->stream_count > 4) ?
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
247
= context->bw_ctx.bw.dce.sclk_deep_sleep_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
249
dce110_fill_display_configs(context, pp_display_cfg);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
256
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
261
int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
267
level_change_req.power_level = dce_get_required_clocks_state(clk_mgr_base, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
276
context->bw_ctx.bw.dce.dispclk_khz = dce_set_clock(clk_mgr_base, patched_disp_clk);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
279
dce11_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
92
uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
97
for (j = 0; j < context->stream_count; j++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c
98
struct dc_stream_state *stream = context->streams[j];
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.h
34
const struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.h
40
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.h
42
uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c
192
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c
197
int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c
203
level_change_req.power_level = dce_get_required_clocks_state(clk_mgr_base, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce112/dce112_clk_mgr.c
215
dce11_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c
119
dce11_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c
85
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c
90
int max_pix_clk = dce_get_max_pixel_clock_for_all_paths(context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce120/dce120_clk_mgr.c
91
int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce60/dce60_clk_mgr.c
101
struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
drivers/gpu/drm/amd/display/dc/clk_mgr/dce60/dce60_clk_mgr.c
103
dce110_fill_display_configs(context, pp_display_cfg);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce60/dce60_clk_mgr.c
110
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dce60/dce60_clk_mgr.c
117
int patched_disp_clk = MIN(max_disp_clk, context->bw_ctx.bw.dce.dispclk_khz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce60/dce60_clk_mgr.c
119
level_change_req.power_level = dce_get_required_clocks_state(clk_mgr_base, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce60/dce60_clk_mgr.c
131
dce60_pplib_apply_display_requirements(clk_mgr_base->ctx->dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dce60/dce60_clk_mgr.c
99
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.c
188
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.c
194
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn10/rv1_clk_mgr.c
209
display_count = clk_mgr_helper_get_active_display_cnt(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
105
struct dc_state *context, bool safe_to_lower)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
117
dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
127
void dcn20_update_clocks_update_dentist(struct clk_mgr_internal *clk_mgr, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
153
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
184
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
217
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
221
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
246
display_count = clk_mgr_helper_get_active_display_cnt(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
281
total_plane_count = clk_mgr_helper_get_active_plane_cnt(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
324
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
325
dcn20_update_clocks_update_dentist(clk_mgr, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
329
dcn20_update_clocks_update_dentist(clk_mgr, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
331
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
344
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
349
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
450
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
456
clock_cfg->max_clock_khz = context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
459
clock_cfg->bw_requirequired_clock_khz = context->bw_ctx.bw.dcn.clk.bw_dispclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
462
clock_cfg->max_clock_khz = context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.c
465
clock_cfg->bw_requirequired_clock_khz = context->bw_ctx.bw.dcn.clk.bw_dppclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.h
30
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.h
34
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.h
37
struct dc_state *context, bool safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.h
49
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn20/dcn20_clk_mgr.h
54
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn201/dcn201_clk_mgr.c
133
total_plane_count = clk_mgr_helper_get_active_plane_cnt(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn201/dcn201_clk_mgr.c
160
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn201/dcn201_clk_mgr.c
161
dcn20_update_clocks_update_dentist(clk_mgr, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn201/dcn201_clk_mgr.c
165
dcn20_update_clocks_update_dentist(clk_mgr, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn201/dcn201_clk_mgr.c
167
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn201/dcn201_clk_mgr.c
85
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn201/dcn201_clk_mgr.c
89
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
107
struct dc_state *context, int ref_dpp_clk, bool safe_to_lower)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
120
dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
132
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
136
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
156
display_count = rn_get_active_display_cnt_wa(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
217
context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
227
context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
240
context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
55
static int rn_get_active_display_cnt_wa(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
61
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
62
const struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
91
struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c
95
display_count = rn_get_active_display_cnt_wa(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
194
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
198
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
223
display_count = clk_mgr_helper_get_active_display_cnt(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
304
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
305
dcn20_update_clocks_update_dentist(clk_mgr, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
309
dcn20_update_clocks_update_dentist(clk_mgr, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn30/dcn30_clk_mgr.c
313
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
100
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
118
display_count = vg_get_active_display_cnt_wa(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
173
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
180
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
64
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
70
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
71
const struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn301/vg_clk_mgr.c
96
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
114
static void dcn31_disable_otg_wa(struct clk_mgr *clk_mgr_base, struct dc_state *context, bool disable)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
127
reset_sync_context_for_pipe(dc, context, i);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
135
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
140
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
168
display_count = dcn31_get_active_display_cnt_wa(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
225
dcn31_disable_otg_wa(clk_mgr_base, context, true);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
229
dcn31_disable_otg_wa(clk_mgr_base, context, false);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
236
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
244
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
638
struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
641
display_count = dcn31_get_active_display_cnt_wa(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
78
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
84
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.c
85
const struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn31/dcn31_clk_mgr.h
46
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
112
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
118
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
119
const struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
148
static void dcn314_disable_otg_wa(struct clk_mgr *clk_mgr_base, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
156
? &context->res_ctx.pipe_ctx[i]
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
166
reset_sync_context_for_pipe(dc, context, i);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
206
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
211
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
221
display_count = dcn314_get_active_display_cnt_wa(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
300
dcn314_disable_otg_wa(clk_mgr_base, context, safe_to_lower, true);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
308
dcn314_disable_otg_wa(clk_mgr_base, context, safe_to_lower, false);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
315
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.c
323
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn314/dcn314_clk_mgr.h
58
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
100
static void dcn315_disable_otg_wa(struct clk_mgr *clk_mgr_base, struct dc_state *context, bool disable)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
117
reset_sync_context_for_pipe(dc, context, i);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
126
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
131
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
141
display_count = dcn315_get_active_display_cnt_wa(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
208
dcn315_disable_otg_wa(clk_mgr_base, context, true);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
216
dcn315_disable_otg_wa(clk_mgr_base, context, false);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
223
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
231
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
59
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
65
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn315/dcn315_clk_mgr.c
66
const struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
102
static void dcn316_disable_otg_wa(struct clk_mgr *clk_mgr_base, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
110
? &context->res_ctx.pipe_ctx[i]
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
121
reset_sync_context_for_pipe(dc, context, i);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
136
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
141
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
163
display_count = dcn316_get_active_display_cnt_wa(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
216
dcn316_disable_otg_wa(clk_mgr_base, context, safe_to_lower, true);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
224
dcn316_disable_otg_wa(clk_mgr_base, context, safe_to_lower, false);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
231
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
239
dcn20_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
71
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
77
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn316/dcn316_clk_mgr.c
78
const struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
267
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
275
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
315
struct dc_state *context, bool safe_to_lower)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
323
dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
325
if (context->res_ctx.pipe_ctx[i].plane_res.dpp)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
326
dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
327
else if (!context->res_ctx.pipe_ctx[i].plane_res.dpp && dppclk_khz == 0) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
332
} else if (!context->res_ctx.pipe_ctx[i].plane_res.dpp && dppclk_khz > 0) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
350
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
372
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
426
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
508
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
518
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
520
if (pipe_ctx->stream && dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
528
mall_ss_size_bytes = context->bw_ctx.bw.dcn.mall_ss_size_bytes;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
622
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
626
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
652
display_count = clk_mgr_helper_get_active_display_cnt(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
729
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
808
dcn32_update_clocks_update_dtb_dto(clk_mgr, context, clk_mgr_base->clks.ref_dtbclk_khz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
814
dcn32_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
815
dcn32_update_clocks_update_dentist(clk_mgr, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
829
dcn32_update_clocks_update_dentist(clk_mgr, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
834
dcn32_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.c
844
dcn32_auto_dpm_test_log(new_clocks, clk_mgr, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn32/dcn32_clk_mgr.h
36
struct dc_state *context, bool safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1194
struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1197
display_count = dcn35_get_active_display_cnt_wa(dc, context, NULL);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1228
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1232
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1291
dcn35_update_clocks_update_dpp_dto(clk_mgr_int, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
1293
dcn35_update_clocks_update_dtb_dto(clk_mgr_int, context, clk_mgr->clks.ref_dtbclk_khz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
158
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
164
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
165
const struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
189
void dcn35_disable_otg_wa(struct clk_mgr *clk_mgr_base, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
200
struct pipe_ctx *new_pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
204
? &context->res_ctx.pipe_ctx[i]
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
246
reset_sync_context_for_pipe(dc, context, i);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
255
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
263
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
281
struct dc_state *context, bool safe_to_lower)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
291
dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
293
if (context->res_ctx.pipe_ctx[i].plane_res.dpp)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
294
dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
295
else if (!context->res_ctx.pipe_ctx[i].plane_res.dpp && dppclk_khz == 0) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
300
} else if (!context->res_ctx.pipe_ctx[i].plane_res.dpp && dppclk_khz > 0) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
341
static void dcn35_notify_host_router_bw(struct clk_mgr *clk_mgr_base, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
344
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
348
for (i = 0; i < context->stream_count; ++i) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
349
const struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
378
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
383
struct dc_clocks *new_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
394
display_count = dcn35_get_active_display_cnt_wa(dc, context, &all_active_disps);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
433
dcn35_update_clocks_update_dtb_dto(clk_mgr, context, new_clocks->ref_dtbclk_khz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
483
dcn35_disable_otg_wa(clk_mgr_base, context, safe_to_lower, true);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
492
dcn35_disable_otg_wa(clk_mgr_base, context, safe_to_lower, false);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
501
dcn35_update_clocks_update_dtb_dto(clk_mgr, context, new_clocks->ref_dtbclk_khz);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
507
dcn35_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
513
dcn35_update_clocks_update_dpp_dto(clk_mgr, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.c
518
dcn35_notify_host_router_bw(clk_mgr_base, context, safe_to_lower);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.h
53
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn35/dcn35_clk_mgr.h
69
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1079
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1129
block_sequence[num_steps].params.update_dtbclk_dto_params.context = context;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1160
block_sequence[num_steps].params.update_dppclk_dto_params.context = context;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1166
block_sequence[num_steps].params.update_dentist_params.context = context;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1177
block_sequence[num_steps].params.update_dppclk_dto_params.context = context;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1194
block_sequence[num_steps].params.update_dentist_params.context = context;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1199
block_sequence[num_steps].params.update_dppclk_dto_params.context = context;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1219
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1228
context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1229
&context->bw_ctx.bw.dcn.clk,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1237
context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1238
&context->bw_ctx.bw.dcn.clk,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1245
dcn401_auto_dpm_test_log(&context->bw_ctx.bw.dcn.clk, TO_CLK_MGR_INTERNAL(clk_mgr_base), context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1331
struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1342
new_clocks.dramclk_khz = context->bw_ctx.bw.dcn.clk.dramclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1343
new_clocks.idle_dramclk_khz = context->bw_ctx.bw.dcn.clk.idle_dramclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1344
new_clocks.p_state_change_support = context->bw_ctx.bw.dcn.clk.p_state_change_support;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
1352
context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
413
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
422
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
424
if (pipe_ctx->stream && dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
432
mall_ss_size_bytes = context->bw_ctx.bw.dcn.mall_ss_size_bytes;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
525
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
534
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
536
&context->res_ctx, context->streams[i]);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
557
struct dc_state *context, bool safe_to_lower, int ref_dppclk_khz)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
565
dppclk_khz = context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
567
if (context->res_ctx.pipe_ctx[i].plane_res.dpp)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
568
dpp_inst = context->res_ctx.pipe_ctx[i].plane_res.dpp->inst;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
569
else if (!context->res_ctx.pipe_ctx[i].plane_res.dpp && dppclk_khz == 0) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
574
} else if (!context->res_ctx.pipe_ctx[i].plane_res.dpp && dppclk_khz > 0) {
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
615
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
736
params->update_dppclk_dto_params.context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
743
params->update_dtbclk_dto_params.context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
749
params->update_dentist_params.context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
766
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
787
int total_plane_count = clk_mgr_helper_get_active_plane_cnt(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.c
802
display_count = clk_mgr_helper_get_active_display_cnt(dc, context);
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.h
56
struct dc_state *context;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.h
62
struct dc_state *context;
drivers/gpu/drm/amd/display/dc/clk_mgr/dcn401/dcn401_clk_mgr.h
67
struct dc_state *context;
drivers/gpu/drm/amd/display/dc/core/dc.c
1207
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
1216
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc.c
1224
dc->hwss.interdependent_update_lock(dc, context, lock);
drivers/gpu/drm/amd/display/dc/core/dc.c
1227
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
1240
static void dc_update_visual_confirm_color(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx)
drivers/gpu/drm/amd/display/dc/core/dc.c
1274
get_fams2_visual_confirm_color(dc, context, pipe_ctx, &(pipe_ctx->visual_confirm_color));
drivers/gpu/drm/amd/display/dc/core/dc.c
1326
static void disable_dangling_plane(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
1343
if ((context->res_ctx.pipe_ctx[i].top_pipe) &&
drivers/gpu/drm/amd/display/dc/core/dc.c
1345
pipe_split_change = context->res_ctx.pipe_ctx[i].top_pipe->pipe_idx !=
drivers/gpu/drm/amd/display/dc/core/dc.c
1348
pipe_split_change = context->res_ctx.pipe_ctx[i].top_pipe !=
drivers/gpu/drm/amd/display/dc/core/dc.c
1351
for (j = 0; j < context->stream_count; j++) {
drivers/gpu/drm/amd/display/dc/core/dc.c
1352
if (old_stream == context->streams[j]) {
drivers/gpu/drm/amd/display/dc/core/dc.c
1358
dc->current_state->stream_count != context->stream_count)
drivers/gpu/drm/amd/display/dc/core/dc.c
1366
new_pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
1398
set_p_state_switch_method(dc, context, pipe);
drivers/gpu/drm/amd/display/dc/core/dc.c
1399
dc_update_visual_confirm_color(dc, context, pipe);
drivers/gpu/drm/amd/display/dc/core/dc.c
1436
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
1446
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
2001
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc.c
2009
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc.c
2018
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc.c
2028
if (context != NULL) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2029
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
2031
context = dc->current_state;
drivers/gpu/drm/amd/display/dc/core/dc.c
2036
if (should_update_pipe_for_stream(context, pipe, streams[j]) &&
drivers/gpu/drm/amd/display/dc/core/dc.c
2043
void dc_trigger_sync(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
2045
if (context->stream_count > 1 && !dc->debug.disable_timing_sync) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2048
enable_timing_multisync(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2049
program_timing_sync(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2053
static uint8_t get_stream_mask(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
2059
if (context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/core/dc.c
2086
static void determine_pipe_unlock_order(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
2097
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
2102
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2106
if (resource_calculate_det_for_stream(context, pipe) <
drivers/gpu/drm/amd/display/dc/core/dc.c
2124
static enum dc_status dc_commit_state_no_check(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
2146
for (i = 0; i < context->stream_count; i++)
drivers/gpu/drm/amd/display/dc/core/dc.c
2147
dc_streams[i] = context->streams[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
2150
disable_vbios_mode_if_required(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2151
dc->hwss.enable_accelerated_mode(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2159
disable_vbios_mode_if_required(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2164
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
2174
if (context->stream_count > get_seamless_boot_stream_count(context) ||
drivers/gpu/drm/amd/display/dc/core/dc.c
2175
context->stream_count == 0)
drivers/gpu/drm/amd/display/dc/core/dc.c
2176
dc->hwss.prepare_bandwidth(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2182
dc->hwss.subvp_pipe_control_lock(dc, context, true, true, NULL, subvp_prev_use);
drivers/gpu/drm/amd/display/dc/core/dc.c
2184
dc->hwss.dmub_hw_control_lock(dc, context, true);
drivers/gpu/drm/amd/display/dc/core/dc.c
2187
dc->hwss.update_dsc_pg(dc, context, false);
drivers/gpu/drm/amd/display/dc/core/dc.c
2189
disable_dangling_plane(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2194
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2195
if (context->streams[i]->mode_changed)
drivers/gpu/drm/amd/display/dc/core/dc.c
2197
apply_ctx_interdependent_lock(dc, context, context->streams[i], true);
drivers/gpu/drm/amd/display/dc/core/dc.c
2199
dc, context->streams[i],
drivers/gpu/drm/amd/display/dc/core/dc.c
2200
context->stream_status[i].plane_count,
drivers/gpu/drm/amd/display/dc/core/dc.c
2201
context); /* use new pipe config in new context */
drivers/gpu/drm/amd/display/dc/core/dc.c
2202
apply_ctx_interdependent_lock(dc, context, context->streams[i], false);
drivers/gpu/drm/amd/display/dc/core/dc.c
2203
dc->hwss.post_unlock_program_front_end(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2209
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
2216
result = dc->hwss.apply_ctx_to_hw(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2218
for (i = 0; i < context->stream_count; i++)
drivers/gpu/drm/amd/display/dc/core/dc.c
2219
dc_dmub_srv_control_cursor_offload(dc, context, context->streams[i], true);
drivers/gpu/drm/amd/display/dc/core/dc.c
2227
dc_trigger_sync(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2230
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2231
uint32_t prev_dsc_changed = context->streams[i]->update_flags.bits.dsc_changed;
drivers/gpu/drm/amd/display/dc/core/dc.c
2233
context->streams[i]->update_flags.raw = 0xFFFFFFFF;
drivers/gpu/drm/amd/display/dc/core/dc.c
2234
context->streams[i]->update_flags.bits.dsc_changed = prev_dsc_changed;
drivers/gpu/drm/amd/display/dc/core/dc.c
2237
determine_pipe_unlock_order(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2240
dc->res_pool->funcs->prepare_mcache_programming(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2242
dc->hwss.interdependent_update_lock(dc, context, true);
drivers/gpu/drm/amd/display/dc/core/dc.c
2243
dc->hwss.program_front_end_for_ctx(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2247
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
2252
dc->hwss.interdependent_update_lock(dc, context, false);
drivers/gpu/drm/amd/display/dc/core/dc.c
2253
dc->hwss.post_unlock_program_front_end(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2257
dc->hwss.commit_subvp_config(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2259
dc->hwss.subvp_pipe_control_lock(dc, context, false, true, NULL, subvp_prev_use);
drivers/gpu/drm/amd/display/dc/core/dc.c
2261
dc->hwss.dmub_hw_control_lock(dc, context, false);
drivers/gpu/drm/amd/display/dc/core/dc.c
2263
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2264
const struct dc_link *link = context->streams[i]->link;
drivers/gpu/drm/amd/display/dc/core/dc.c
2266
if (!context->streams[i]->mode_changed)
drivers/gpu/drm/amd/display/dc/core/dc.c
2270
apply_ctx_interdependent_lock(dc, context, context->streams[i], true);
drivers/gpu/drm/amd/display/dc/core/dc.c
2272
dc, context->streams[i],
drivers/gpu/drm/amd/display/dc/core/dc.c
2273
context->stream_status[i].plane_count,
drivers/gpu/drm/amd/display/dc/core/dc.c
2274
context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2275
apply_ctx_interdependent_lock(dc, context, context->streams[i], false);
drivers/gpu/drm/amd/display/dc/core/dc.c
2276
dc->hwss.post_unlock_program_front_end(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2284
pipe = &context->res_ctx.pipe_ctx[k];
drivers/gpu/drm/amd/display/dc/core/dc.c
2286
for (l = 0 ; pipe && l < context->stream_count; l++) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2287
if (context->streams[l] &&
drivers/gpu/drm/amd/display/dc/core/dc.c
2288
context->streams[l] == pipe->stream &&
drivers/gpu/drm/amd/display/dc/core/dc.c
2295
context->streams[i]->timing.h_addressable,
drivers/gpu/drm/amd/display/dc/core/dc.c
2296
context->streams[i]->timing.v_addressable,
drivers/gpu/drm/amd/display/dc/core/dc.c
2297
context->streams[i]->timing.h_total,
drivers/gpu/drm/amd/display/dc/core/dc.c
2298
context->streams[i]->timing.v_total,
drivers/gpu/drm/amd/display/dc/core/dc.c
2299
context->streams[i]->timing.pix_clk_100hz / 10);
drivers/gpu/drm/amd/display/dc/core/dc.c
2302
dc_enable_stereo(dc, context, dc_streams, context->stream_count);
drivers/gpu/drm/amd/display/dc/core/dc.c
2304
if (get_seamless_boot_stream_count(context) == 0 ||
drivers/gpu/drm/amd/display/dc/core/dc.c
2305
context->stream_count == 0) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2307
hwss_wait_for_no_pipes_pending(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2312
hwss_wait_for_odm_update_pending_complete(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2314
dc->hwss.optimize_bandwidth(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2318
dc_trigger_sync(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2322
dc->hwss.update_dsc_pg(dc, context, true);
drivers/gpu/drm/amd/display/dc/core/dc.c
2325
TRACE_DCN_CLOCK_STATE(&context->bw_ctx.bw.dcn.clk);
drivers/gpu/drm/amd/display/dc/core/dc.c
2327
TRACE_DCE_CLOCK_STATE(&context->bw_ctx.bw.dce);
drivers/gpu/drm/amd/display/dc/core/dc.c
2329
context->stream_mask = get_stream_mask(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2331
if (context->stream_mask != dc->current_state->stream_mask)
drivers/gpu/drm/amd/display/dc/core/dc.c
2332
dc_dmub_srv_notify_stream_mask(dc->ctx->dmub_srv, context->stream_mask);
drivers/gpu/drm/amd/display/dc/core/dc.c
2334
for (i = 0; i < context->stream_count; i++)
drivers/gpu/drm/amd/display/dc/core/dc.c
2335
context->streams[i]->mode_changed = false;
drivers/gpu/drm/amd/display/dc/core/dc.c
2338
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2339
context->streams[i]->update_flags.raw = 0x0;
drivers/gpu/drm/amd/display/dc/core/dc.c
2343
dc->current_state = context;
drivers/gpu/drm/amd/display/dc/core/dc.c
2370
struct dc_state *context;
drivers/gpu/drm/amd/display/dc/core/dc.c
2430
context = dc_state_create_current_copy(dc);
drivers/gpu/drm/amd/display/dc/core/dc.c
2431
if (!context)
drivers/gpu/drm/amd/display/dc/core/dc.c
2434
context->power_source = params->power_source;
drivers/gpu/drm/amd/display/dc/core/dc.c
2436
res = dc_validate_with_context(dc, set, params->stream_count, context, DC_VALIDATE_MODE_AND_PROGRAMMING);
drivers/gpu/drm/amd/display/dc/core/dc.c
2443
dc, context, context->streams, context->stream_count);
drivers/gpu/drm/amd/display/dc/core/dc.c
2454
!dc->hwss.is_pipe_topology_transition_seamless(dc, dc->current_state, context)) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2455
res = commit_minimal_transition_state(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2462
res = dc_commit_state_no_check(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2465
for (j = 0; j < context->stream_count; j++) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2466
if (params->streams[i]->stream_id == context->streams[j]->stream_id)
drivers/gpu/drm/amd/display/dc/core/dc.c
2467
params->streams[i]->out.otg_offset = context->stream_status[j].primary_otg_inst;
drivers/gpu/drm/amd/display/dc/core/dc.c
2470
struct dc_stream_status *status = dc_state_get_stream_status(context, params->streams[i]);
drivers/gpu/drm/amd/display/dc/core/dc.c
2476
status->is_abm_supported = dc->hwss.is_abm_supported(dc, context, params->streams[i]);
drivers/gpu/drm/amd/display/dc/core/dc.c
2484
dc_state_release(context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2529
static bool is_flip_pending_in_pipes(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
2535
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
2538
if (!pipe->plane_state || (dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM))
drivers/gpu/drm/amd/display/dc/core/dc.c
2572
struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/core/dc.c
2574
if ((!dc->optimized_required) || get_seamless_boot_stream_count(context) > 0)
drivers/gpu/drm/amd/display/dc/core/dc.c
2585
TRACE_DCE_CLOCK_STATE(&context->bw_ctx.bw.dce);
drivers/gpu/drm/amd/display/dc/core/dc.c
2587
TRACE_DCN_CLOCK_STATE(&context->bw_ctx.bw.dcn.clk);
drivers/gpu/drm/amd/display/dc/core/dc.c
2589
if (is_flip_pending_in_pipes(dc, context))
drivers/gpu/drm/amd/display/dc/core/dc.c
2593
if (context->res_ctx.pipe_ctx[i].stream == NULL ||
drivers/gpu/drm/amd/display/dc/core/dc.c
2594
context->res_ctx.pipe_ctx[i].plane_state == NULL) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2595
context->res_ctx.pipe_ctx[i].pipe_idx = i;
drivers/gpu/drm/amd/display/dc/core/dc.c
2596
dc->hwss.disable_plane(dc, context, &context->res_ctx.pipe_ctx[i]);
drivers/gpu/drm/amd/display/dc/core/dc.c
2601
dc->hwss.optimize_bandwidth(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
2604
dc->hwss.update_dsc_pg(dc, context, true);
drivers/gpu/drm/amd/display/dc/core/dc.c
2659
const struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc.c
2665
const struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
3233
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc.c
3427
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc.c
3431
if (get_seamless_boot_stream_count(context) > 0 && (surface_count > 0 || stream->dpms_off)) {
drivers/gpu/drm/amd/display/dc/core/dc.c
3441
if (get_seamless_boot_stream_count(context) == 0)
drivers/gpu/drm/amd/display/dc/core/dc.c
3462
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc.c
3475
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc.c
3476
policy->force_odm[i] = context->streams[i]->debug.force_odm_combine_segments;
drivers/gpu/drm/amd/display/dc/core/dc.c
3477
if (context->streams[i]->debug.allow_transition_for_forced_odm)
drivers/gpu/drm/amd/display/dc/core/dc.c
3478
context->streams[i]->debug.force_odm_combine_segments = 0;
drivers/gpu/drm/amd/display/dc/core/dc.c
3483
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc.c
3493
for (i = 0; i < context->stream_count; i++)
drivers/gpu/drm/amd/display/dc/core/dc.c
3494
context->streams[i]->debug.force_odm_combine_segments = policy->force_odm[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
3526
struct dc_state *context;
drivers/gpu/drm/amd/display/dc/core/dc.c
3541
context = dc->current_state;
drivers/gpu/drm/amd/display/dc/core/dc.c
3556
copy_stream_update_to_stream(dc, context, stream, stream_update);
drivers/gpu/drm/amd/display/dc/core/dc.c
3601
context = dc_state_create_copy(dc->current_state);
drivers/gpu/drm/amd/display/dc/core/dc.c
3602
if (context == NULL) {
drivers/gpu/drm/amd/display/dc/core/dc.c
3610
dc_state_remove_phantom_streams_and_planes(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
3611
dc_state_release_phantom_streams_and_planes(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
3614
if (!dc_state_rem_all_planes_for_stream(dc, stream, context)) {
drivers/gpu/drm/amd/display/dc/core/dc.c
3621
if (!dc_state_add_all_planes_for_stream(dc, stream, new_planes, surface_count, context)) {
drivers/gpu/drm/amd/display/dc/core/dc.c
3636
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
3660
backup_and_set_minimal_pipe_split_policy(dc, context, &policy);
drivers/gpu/drm/amd/display/dc/core/dc.c
3662
if (dc->res_pool->funcs->validate_bandwidth(dc, context, DC_VALIDATE_MODE_AND_PROGRAMMING) != DC_OK) {
drivers/gpu/drm/amd/display/dc/core/dc.c
3664
restore_minimal_pipe_split_policy(dc, context, &policy);
drivers/gpu/drm/amd/display/dc/core/dc.c
3670
restore_minimal_pipe_split_policy(dc, context, &policy);
drivers/gpu/drm/amd/display/dc/core/dc.c
3672
update_seamless_boot_flags(dc, context, surface_count, stream);
drivers/gpu/drm/amd/display/dc/core/dc.c
3674
*new_context = context;
drivers/gpu/drm/amd/display/dc/core/dc.c
3682
dc_state_release(context);
drivers/gpu/drm/amd/display/dc/core/dc.c
3692
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
3698
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
3791
resource_build_test_pattern_params(&context->res_ctx, pipe_ctx);
drivers/gpu/drm/amd/display/dc/core/dc.c
3804
if (get_seamless_boot_stream_count(context) == 0)
drivers/gpu/drm/amd/display/dc/core/dc.c
3858
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
3897
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
3917
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc.c
3958
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
4005
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc.c
4011
build_dmub_update_dirty_rect(dc, surface_count, stream, srf_updates, context, dc_dmub_cmd, dmub_cmd_count);
drivers/gpu/drm/amd/display/dc/core/dc.c
4018
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
4024
srf_updates, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4031
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
4033
if (!should_update_pipe_for_stream(context, pipe_ctx, stream))
drivers/gpu/drm/amd/display/dc/core/dc.c
4036
if (!should_update_pipe_for_plane(context, pipe_ctx, plane_state))
drivers/gpu/drm/amd/display/dc/core/dc.c
4047
context,
drivers/gpu/drm/amd/display/dc/core/dc.c
4059
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
4068
determine_pipe_unlock_order(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4072
dc_state_is_fams2_in_use(dc, context)) {
drivers/gpu/drm/amd/display/dc/core/dc.c
4095
&context->res_ctx,
drivers/gpu/drm/amd/display/dc/core/dc.c
4102
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
4106
set_p_state_switch_method(dc, context, pipe);
drivers/gpu/drm/amd/display/dc/core/dc.c
4109
dc_update_visual_confirm_color(dc, context, pipe);
drivers/gpu/drm/amd/display/dc/core/dc.c
4117
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
4121
if (!should_update_pipe_for_plane(context, pipe_ctx, plane_state))
drivers/gpu/drm/amd/display/dc/core/dc.c
4134
stream_status = dc_state_get_stream_status(context, stream);
drivers/gpu/drm/amd/display/dc/core/dc.c
4141
context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4147
context,
drivers/gpu/drm/amd/display/dc/core/dc.c
4148
context->dc_dmub_cmd,
drivers/gpu/drm/amd/display/dc/core/dc.c
4149
&(context->dmub_cmd_count));
drivers/gpu/drm/amd/display/dc/core/dc.c
4151
context->dc_dmub_cmd,
drivers/gpu/drm/amd/display/dc/core/dc.c
4152
context->dmub_cmd_count,
drivers/gpu/drm/amd/display/dc/core/dc.c
4153
context->block_sequence,
drivers/gpu/drm/amd/display/dc/core/dc.c
4154
&(context->block_sequence_steps),
drivers/gpu/drm/amd/display/dc/core/dc.c
4157
context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4159
context->block_sequence,
drivers/gpu/drm/amd/display/dc/core/dc.c
4160
context->block_sequence_steps);
drivers/gpu/drm/amd/display/dc/core/dc.c
4178
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
4188
determine_pipe_unlock_order(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4200
dc->res_pool->funcs->prepare_mcache_programming(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4203
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
4207
set_p_state_switch_method(dc, context, pipe);
drivers/gpu/drm/amd/display/dc/core/dc.c
4210
dc_update_visual_confirm_color(dc, context, pipe);
drivers/gpu/drm/amd/display/dc/core/dc.c
4217
if (get_seamless_boot_stream_count(context) == 0)
drivers/gpu/drm/amd/display/dc/core/dc.c
4218
dc->hwss.prepare_bandwidth(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4221
dc->hwss.update_dsc_pg(dc, context, false);
drivers/gpu/drm/amd/display/dc/core/dc.c
4223
context_clock_trace(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4230
&context->res_ctx,
drivers/gpu/drm/amd/display/dc/core/dc.c
4244
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
4246
if (dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/core/dc.c
4289
dc->hwss.subvp_pipe_control_lock(dc, context, true, should_lock_all_pipes, NULL, subvp_prev_use);
drivers/gpu/drm/amd/display/dc/core/dc.c
4292
dc->hwss.dmub_hw_control_lock(dc, context, true);
drivers/gpu/drm/amd/display/dc/core/dc.c
4294
dc->hwss.interdependent_update_lock(dc, context, true);
drivers/gpu/drm/amd/display/dc/core/dc.c
4297
dc->hwss.subvp_pipe_control_lock(dc, context, true, should_lock_all_pipes, top_pipe_to_program, subvp_prev_use);
drivers/gpu/drm/amd/display/dc/core/dc.c
4300
dc->hwss.dmub_hw_control_lock(dc, context, true);
drivers/gpu/drm/amd/display/dc/core/dc.c
4309
dc_dmub_update_dirty_rect(dc, surface_count, stream, srf_updates, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4313
commit_planes_do_stream_update(dc, stream, stream_update, update_type, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4321
dc->hwss.apply_ctx_for_surface(dc, stream, 0, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4323
dc->hwss.program_front_end_for_ctx(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4326
dc->hwss.interdependent_update_lock(dc, context, false);
drivers/gpu/drm/amd/display/dc/core/dc.c
4330
dc->hwss.post_unlock_program_front_end(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4334
dc->hwss.commit_subvp_config(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4340
dc->hwss.subvp_pipe_control_lock(dc, context, false, should_lock_all_pipes,
drivers/gpu/drm/amd/display/dc/core/dc.c
4344
dc->hwss.dmub_hw_control_lock(dc, context, false);
drivers/gpu/drm/amd/display/dc/core/dc.c
4350
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
4370
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
4373
if (!should_update_pipe_for_plane(context, pipe_ctx, plane_state))
drivers/gpu/drm/amd/display/dc/core/dc.c
4392
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
4396
should_update_pipe_for_stream(context, pipe_ctx, stream)) {
drivers/gpu/drm/amd/display/dc/core/dc.c
4407
stream_get_status(context, pipe_ctx->stream);
drivers/gpu/drm/amd/display/dc/core/dc.c
4411
dc, pipe_ctx->stream, stream_status->plane_count, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4416
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
4434
dc->hwss.program_front_end_for_ctx(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4439
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
4447
struct pipe_ctx *cur_pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
4453
&context->res_ctx.pipe_ctx[i].rq_regs,
drivers/gpu/drm/amd/display/dc/core/dc.c
4454
&context->res_ctx.pipe_ctx[i].dlg_regs,
drivers/gpu/drm/amd/display/dc/core/dc.c
4455
&context->res_ctx.pipe_ctx[i].ttu_regs);
drivers/gpu/drm/amd/display/dc/core/dc.c
4467
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
4469
if (!should_update_pipe_for_stream(context, pipe_ctx, stream))
drivers/gpu/drm/amd/display/dc/core/dc.c
4472
if (!should_update_pipe_for_plane(context, pipe_ctx, plane_state))
drivers/gpu/drm/amd/display/dc/core/dc.c
4486
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
4488
if (!should_update_pipe_for_stream(context, pipe_ctx, stream))
drivers/gpu/drm/amd/display/dc/core/dc.c
4491
if (!should_update_pipe_for_plane(context, pipe_ctx, plane_state))
drivers/gpu/drm/amd/display/dc/core/dc.c
4515
dc->hwss.interdependent_update_lock(dc, context, false);
drivers/gpu/drm/amd/display/dc/core/dc.c
4555
dc->hwss.enable_phantom_streams(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4560
dc->hwss.post_unlock_program_front_end(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4569
dc->hwss.disable_phantom_streams(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4574
dc->hwss.commit_subvp_config(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4580
dc->hwss.subvp_pipe_control_lock(dc, context, false, should_lock_all_pipes, NULL, subvp_prev_use);
drivers/gpu/drm/amd/display/dc/core/dc.c
4582
dc->hwss.dmub_hw_control_lock(dc, context, false);
drivers/gpu/drm/amd/display/dc/core/dc.c
4585
dc->hwss.subvp_pipe_control_lock(dc, context, false, should_lock_all_pipes, top_pipe_to_program, subvp_prev_use);
drivers/gpu/drm/amd/display/dc/core/dc.c
4587
dc->hwss.dmub_hw_control_lock(dc, context, false);
drivers/gpu/drm/amd/display/dc/core/dc.c
4592
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc.c
4598
!pipe_ctx->stream || !should_update_pipe_for_stream(context, pipe_ctx, stream) ||
drivers/gpu/drm/amd/display/dc/core/dc.c
4609
current_stream_mask = get_stream_mask(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc.c
4610
if (current_stream_mask != context->stream_mask) {
drivers/gpu/drm/amd/display/dc/core/dc.c
4611
context->stream_mask = current_stream_mask;
drivers/gpu/drm/amd/display/dc/core/dc.c
4719
static void force_vsync_flip_in_minimal_transition_context(struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
4725
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc.c
4726
stream_status = &context->stream_status[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
5117
const struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/core/dc.c
5120
if (!is_surface_in_context(context, srf_updates[i].surface))
drivers/gpu/drm/amd/display/dc/core/dc.c
5219
struct dc_state *context;
drivers/gpu/drm/amd/display/dc/core/dc.c
5253
&context))
drivers/gpu/drm/amd/display/dc/core/dc.c
5258
if (!commit_minimal_transition_state(dc, context)) {
drivers/gpu/drm/amd/display/dc/core/dc.c
5259
dc_state_release(context);
drivers/gpu/drm/amd/display/dc/core/dc.c
5267
dc, dc->current_state, context))
drivers/gpu/drm/amd/display/dc/core/dc.c
5268
commit_minimal_transition_state_in_dc_update(dc, context, stream,
drivers/gpu/drm/amd/display/dc/core/dc.c
5278
context);
drivers/gpu/drm/amd/display/dc/core/dc.c
5283
dc, dc->current_state, context)) {
drivers/gpu/drm/amd/display/dc/core/dc.c
5294
context);
drivers/gpu/drm/amd/display/dc/core/dc.c
5296
if (dc->current_state != context)
drivers/gpu/drm/amd/display/dc/core/dc.c
5297
swap_and_release_current_context(dc, context, stream);
drivers/gpu/drm/amd/display/dc/core/dc.c
5696
struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/core/dc.c
5730
if (dc->res_pool && context) {
drivers/gpu/drm/amd/display/dc/core/dc.c
5732
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
5733
subvp_pipe_type[i] = dc_state_get_pipe_subvp_type(context, pipe);
drivers/gpu/drm/amd/display/dc/core/dc.c
5788
struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/core/dc.c
5794
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
5814
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc.c
6481
struct dc_power_profile dc_get_power_profile_for_dc_state(const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
6485
profile.power_level = !context->bw_ctx.bw.dcn.clk.p_state_change_support;
drivers/gpu/drm/amd/display/dc/core/dc.c
6486
if (!context->clk_mgr || !context->clk_mgr->ctx || !context->clk_mgr->ctx->dc)
drivers/gpu/drm/amd/display/dc/core/dc.c
6488
struct dc *dc = context->clk_mgr->ctx->dc;
drivers/gpu/drm/amd/display/dc/core/dc.c
6491
profile.power_level = dc->res_pool->funcs->get_power_profile(context);
drivers/gpu/drm/amd/display/dc/core/dc.c
6504
unsigned int dc_get_det_buffer_size_from_state(const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc.c
6506
struct dc *dc = context->clk_mgr->ctx->dc;
drivers/gpu/drm/amd/display/dc/core/dc.c
6509
return dc->res_pool->funcs->get_det_buffer_size(context);
drivers/gpu/drm/amd/display/dc/core/dc.c
6595
struct dc_state *context;
drivers/gpu/drm/amd/display/dc/core/dc.c
6608
context = dc->current_state;
drivers/gpu/drm/amd/display/dc/core/dc.c
6609
res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/core/dc.c
6613
state->active_stream_count = context->stream_count;
drivers/gpu/drm/amd/display/dc/core/dc.c
6763
state->hubbub.compbuf_size = context->bw_ctx.bw.dcn.arb_regs.compbuf_size;
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
186
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
191
context->bw_ctx.bw.dcn.clk.dispclk_khz,
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
192
context->bw_ctx.bw.dcn.clk.dppclk_khz,
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
193
context->bw_ctx.bw.dcn.clk.dcfclk_khz,
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
194
context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz,
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
195
context->bw_ctx.bw.dcn.clk.fclk_khz,
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
196
context->bw_ctx.bw.dcn.clk.socclk_khz);
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
199
context->bw_ctx.bw.dcn.clk.dispclk_khz,
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
200
context->bw_ctx.bw.dcn.clk.dppclk_khz,
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
201
context->bw_ctx.bw.dcn.clk.dcfclk_khz,
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
202
context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz,
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
203
context->bw_ctx.bw.dcn.clk.fclk_khz,
drivers/gpu/drm/amd/display/dc/core/dc_debug.c
204
context->bw_ctx.bw.dcn.clk.socclk_khz);
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
1046
params->update_phantom_vp_position_params.context,
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
1614
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
1619
seq_state->steps[*seq_state->num_steps].params.update_phantom_vp_position_params.context = context;
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2086
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2096
opp_head = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2099
dc_state_get_pipe_subvp_type(context, opp_head) == SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2106
void hwss_wait_for_odm_update_pending_complete(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2113
otg_master = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2115
dc_state_get_pipe_subvp_type(context, otg_master) == SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2125
hwss_wait_for_all_blank_complete(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2128
void hwss_wait_for_no_pipes_pending(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2134
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2136
if (!pipe->plane_state || dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2619
struct dc_state *context = params->update_force_pstate_params.context;
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
2623
hwseq->funcs.update_force_pstate(dc, context);
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
4029
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
4034
seq_state->steps[*seq_state->num_steps].params.update_force_pstate_params.context = context;
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
639
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
642
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
656
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching) {
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
669
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
671
if (pipe->stream && dc_state_get_paired_subvp_stream(context, pipe->stream) &&
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
672
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_MAIN) {
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
683
if (enable_subvp && dc_state_get_pipe_subvp_type(context, pipe_ctx) == SUBVP_NONE) {
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
712
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
718
if (!dc->ctx || !dc->ctx->dmub_srv || !pipe_ctx || !context || !dc->debug.fams2_config.bits.enable)
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
722
if (!dc_state_is_fams2_in_use(dc, context)) {
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
737
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c
767
is_dmub_lock_required = dc_state_is_fams2_in_use(dc, context) ||
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1742
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1747
if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1748
context->res_ctx.pipe_ctx[i].stream != NULL)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1749
if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2555
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2563
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2571
resource_build_test_pattern_params(&context->res_ctx, otg_master);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2578
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2586
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2842
static void swap_dio_link_enc_to_muxable_ctx(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2847
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2848
int stream_count = context->stream_count;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2856
struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2857
struct pipe_ctx *pipe_ctx = resource_get_otg_master_for_stream(&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2865
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2870
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2892
swap_dio_link_enc_to_muxable_ctx(context, pool, new_enc_index, enc_index);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2923
static int get_num_of_free_pipes(const struct resource_pool *pool, const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2929
if (resource_is_pipe_type(&context->res_ctx.pipe_ctx[i], FREE_PIPE))
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2943
void resource_remove_otg_master_for_stream_output(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2948
&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2957
&context->res_ctx,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2964
&context->res_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2968
&context->res_ctx, otg_master, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2972
remove_dio_link_enc_from_ctx(&context->res_ctx, otg_master, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2976
&context->res_ctx,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2981
resource_unreference_clock_source(&context->res_ctx,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2987
stream->ctx->dc, context, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3018
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3142
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3149
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3289
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3306
pool->funcs->release_pipe(context, tail_pipe->bottom_pipe, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3310
pool->funcs->release_pipe(context, last_opp_head, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3411
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3429
pool->funcs->release_pipe(context, last_dpp_pipe, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3662
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3666
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3667
struct dc_stream_state *stream_has_pll = context->streams[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3957
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3974
&context->res_ctx,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3987
context, pool, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3989
pipe_ctx = resource_get_otg_master_for_stream(&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3996
&context->res_ctx, pool, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4002
&context->res_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4022
&context->res_ctx, pool, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4028
&context->res_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4031
if (!add_hpo_dp_link_enc_to_ctx(&context->res_ctx, pool, pipe_ctx, stream))
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4037
if (!add_dio_link_enc_to_ctx(dc, context, pool, pipe_ctx, stream))
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4047
&context->res_ctx, pool, pipe_ctx->stream_res.stream_enc->id, dc_ctx->dce_version);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4055
update_audio_usage(&context->res_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4067
for (i = 0; i < context->stream_count; i++)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4068
if (context->streams[i] == stream) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4069
context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4070
context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->stream_enc_inst;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4071
context->stream_status[i].audio_inst =
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4089
static bool planes_changed_for_existing_stream(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4097
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4098
if (context->streams[i] == stream) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4099
stream_status = &context->stream_status[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4172
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4178
int old_stream_count = context->stream_count;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4190
struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4210
if (stream == context->streams[j]) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4252
if (planes_changed_for_existing_stream(context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4259
context)) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4279
if (dc_state_get_stream_subvp_type(context, del_streams[i]) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4281
if (!dc_state_rem_all_phantom_planes_for_stream(dc, del_streams[i], context, true)) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4286
res = dc_state_remove_phantom_stream(dc, context, del_streams[i]);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4287
dc_state_release_phantom_stream(dc, context, del_streams[i]);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4289
if (!dc_state_rem_all_planes_for_stream(dc, del_streams[i], context)) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4294
res = dc_state_remove_stream(dc, context, del_streams[i]);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4319
res = dc_state_add_stream(dc, context, add_streams[i]);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4323
if (!add_all_planes_for_stream(dc, add_streams[i], set, set_count, context)) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4331
if (planes_changed_for_existing_stream(context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4335
if (!add_all_planes_for_stream(dc, unchanged_streams[i], set, set_count, context)) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4343
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4344
dc_state_set_stream_subvp_cursor_limit(context->streams[i], context, false);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4347
res = dc_validate_global_state(dc, context, validate_mode);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4352
dc->hwss.calculate_pix_rate_divider(dc, context, add_streams[i]);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4930
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4936
&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4949
&context->res_ctx,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4955
&context->res_ctx,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4963
&context->res_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5351
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5359
pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5369
pipe_ctx_syncd = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5379
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5385
pipe_ctx = &context->res_ctx.pipe_ctx[disabled_master_pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5392
pipe_ctx_check = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5413
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5421
pipe_ctx_reset = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5584
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5591
&context->res_ctx, dc->res_pool, pipe_ctx->stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5597
&context->res_ctx, dc->res_pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5603
if (!add_hpo_dp_link_enc_to_ctx(&context->res_ctx, dc->res_pool, pipe_ctx, pipe_ctx->stream))
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5609
&context->res_ctx, dc->res_pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5615
remove_hpo_dp_link_enc_from_ctx(&context->res_ctx, pipe_ctx, pipe_ctx->stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5619
if (!add_dio_link_enc_to_ctx(dc, context, dc->res_pool, pipe_ctx, pipe_ctx->stream))
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5630
static bool resource_allocate_mcache(struct dc_state *context, const struct dc_mcache_params *mcache_params)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5632
if (context->clk_mgr->ctx->dc->res_pool->funcs->program_mcache_pipe_config)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5633
context->clk_mgr->ctx->dc->res_pool->funcs->program_mcache_pipe_config(context, mcache_params);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5704
bool resource_is_hpo_acquired(struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5709
if (context->res_ctx.is_hpo_dp_stream_enc_acquired[i]) {
drivers/gpu/drm/amd/display/dc/dc.h
1950
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc.h
2821
struct dc_power_profile dc_get_power_profile_for_dc_state(const struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dc.h
2823
unsigned int dc_get_det_buffer_size_from_state(const struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dc.h
496
bool (*get_subvp_en)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1226
void dc_dmub_srv_control_cursor_offload(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1239
pipe_ctx = resource_get_otg_master_for_stream(&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1800
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1817
memcpy(&global_cmd->config.global, &context->bw_ctx.bw.dcn.fams2_global_config, sizeof(struct dmub_cmd_fams2_global_config));
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1825
for (i = 0; i < context->bw_ctx.bw.dcn.fams2_global_config.num_streams; i++) {
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1827
struct dmub_rb_cmd_fams2 *stream_sub_state_cmd = &cmd[i+1+context->bw_ctx.bw.dcn.fams2_global_config.num_streams].fams2_config;
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1842
&context->bw_ctx.bw.dcn.fams2_stream_base_params[i],
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1846
&context->bw_ctx.bw.dcn.fams2_stream_sub_params[i],
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1853
global_cmd->config.global.features.bits.enable = enable && context->bw_ctx.bw.dcn.fams2_global_config.features.bits.enable;
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1859
cmd[2 * context->bw_ctx.bw.dcn.fams2_global_config.num_streams].fams2_config.header.multi_cmd_pending = 0;
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1860
num_cmds += 2 * context->bw_ctx.bw.dcn.fams2_global_config.num_streams;
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1867
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1885
memcpy(&config->global, &context->bw_ctx.bw.dcn.fams2_global_config,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1894
for (i = 0; i < context->bw_ctx.bw.dcn.fams2_global_config.num_streams; i++) {
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1897
&context->bw_ctx.bw.dcn.fams2_stream_base_params[i],
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1902
&context->bw_ctx.bw.dcn.fams2_stream_sub_params_v2[i],
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1908
config->global.features.bits.enable = enable && context->bw_ctx.bw.dcn.fams2_global_config.features.bits.enable;
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1915
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1919
dc_dmub_srv_rb_based_fams2_update_config(dc, context, enable);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
1921
dc_dmub_srv_ib_based_fams2_update_config(dc, context, enable);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
429
static void dc_dmub_srv_populate_fams_pipe_info(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
438
struct pipe_ctx *split_pipe = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
447
bool dc_dmub_srv_p_state_delegate(struct dc *dc, bool should_manage_pstate, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
469
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
478
stream_status = dc_state_get_stream_status(context, pipe->stream);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
486
for (i = 0, k = 0; context && i < dc->res_pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
487
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
492
stream_status = dc_state_get_stream_status(context, pipe->stream);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
494
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
501
dc_dmub_srv_populate_fams_pipe_info(dc, context, pipe, &config_data->pipe_data[k]);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
583
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
588
struct dc_stream_state *phantom_stream = dc_state_get_paired_subvp_stream(context, subvp_pipe->stream);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
662
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
674
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
683
if (dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_MAIN)
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
700
populate_subvp_cmd_drr_info(dc, context, pipe, vblank_pipe, pipe_data);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
718
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
731
phantom_stream0 = dc_state_get_paired_subvp_stream(context, subvp_pipes[0]->stream);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
735
phantom_stream1 = dc_state_get_paired_subvp_stream(context, subvp_pipes[1]->stream);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
783
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
791
struct dc_stream_state *phantom_stream = dc_state_get_paired_subvp_stream(context, subvp_pipe->stream);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
850
struct pipe_ctx *phantom_pipe = &context->res_ctx.pipe_ctx[j];
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
853
phantom_pipe->stream == dc_state_get_paired_subvp_stream(context, subvp_pipe->stream)) {
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
878
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
897
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
903
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_MAIN)
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
910
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
911
pipe_mall_type = dc_state_get_pipe_subvp_type(context, pipe);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
923
populate_subvp_cmd_pipe_info(dc, context, &cmd, pipe, cmd_pipe_index++);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
929
populate_subvp_cmd_vblank_pipe_info(dc, context, &cmd, pipe, cmd_pipe_index++);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
934
update_subvp_prefetch_end_to_mall_start(dc, context, &cmd, subvp_pipes);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.c
941
wm_val_refclk = context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns *
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.h
100
void dc_dmub_setup_subvp_dmub_command(struct dc *dc, struct dc_state *context, bool enable);
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.h
198
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.h
344
void dc_dmub_srv_control_cursor_offload(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_dmub_srv.h
90
bool dc_dmub_srv_p_state_delegate(struct dc *dc, bool enable_pstate, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dc_stream.h
507
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dc_stream.h
512
void dc_trigger_sync(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dc_stream.h
640
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
184
static uint32_t get_max_pixel_clock_for_all_paths(struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
190
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
215
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
220
int max_pix_clk = get_max_pixel_clock_for_all_paths(context);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
227
if (context->bw_ctx.bw.dce.dispclk_khz >
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
237
< context->bw_ctx.bw.dce.dispclk_khz)
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
496
const struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
502
for (j = 0; j < context->stream_count; j++) {
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
505
const struct dc_stream_state *stream = context->streams[j];
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
511
if (stream == context->res_ctx.pipe_ctx[k].stream) {
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
512
pipe_ctx = &context->res_ctx.pipe_ctx[k];
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
548
static uint32_t dce110_get_min_vblank_time_us(const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
553
for (j = 0; j < context->stream_count; j++) {
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
554
struct dc_stream_state *stream = context->streams[j];
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
600
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
602
struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
604
pp_display_cfg->avail_mclk_switch_time_us = dce110_get_min_vblank_time_us(context);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
606
dce110_fill_display_configs(context, pp_display_cfg);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
614
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
616
struct dm_pp_display_configuration *pp_display_cfg = &context->pp_display_cfg;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
619
context->bw_ctx.bw.dce.all_displays_in_sync;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
621
context->bw_ctx.bw.dce.nbp_state_change_enable == false;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
623
context->bw_ctx.bw.dce.cpuc_state_change_enable == false;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
625
context->bw_ctx.bw.dce.cpup_state_change_enable == false;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
627
context->bw_ctx.bw.dce.blackout_recovery_time_us;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
629
pp_display_cfg->min_memory_clock_khz = context->bw_ctx.bw.dce.yclk_khz
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
634
context->bw_ctx.bw.dce.sclk_khz);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
643
pp_display_cfg->min_dcfclock_khz = (context->stream_count > 4) ?
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
647
= context->bw_ctx.bw.dce.sclk_deep_sleep_khz;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
650
dce110_get_min_vblank_time_us(context);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
656
dce110_fill_display_configs(context, pp_display_cfg);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
661
&context->streams[0]->timing;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
673
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
678
int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
684
level_change_req.power_level = dce_get_required_clocks_state(clk_mgr, context);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
696
dce_pplib_apply_display_requirements(clk_mgr->ctx->dc, context);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
700
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
705
int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
711
level_change_req.power_level = dce_get_required_clocks_state(clk_mgr, context);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
720
context->bw_ctx.bw.dce.dispclk_khz = dce_set_clock(clk_mgr, patched_disp_clk);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
723
dce11_pplib_apply_display_requirements(clk_mgr->ctx->dc, context);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
727
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
732
int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
738
level_change_req.power_level = dce_get_required_clocks_state(clk_mgr, context);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
750
dce11_pplib_apply_display_requirements(clk_mgr->ctx->dc, context);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
754
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
759
int max_pix_clk = get_max_pixel_clock_for_all_paths(context);
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
760
int patched_disp_clk = context->bw_ctx.bw.dce.dispclk_khz;
drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c
788
dce11_pplib_apply_display_requirements(clk_mgr->ctx->dc, context);
drivers/gpu/drm/amd/display/dc/dce/dmub_hw_lock_mgr.c
86
bool dmub_hw_lock_mgr_does_context_require_lock(const struct dc *dc, const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dce/dmub_hw_lock_mgr.c
88
if (!context)
drivers/gpu/drm/amd/display/dc/dce/dmub_hw_lock_mgr.c
90
for (int i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dce/dmub_hw_lock_mgr.c
91
const struct dc_link *link = context->streams[i]->link;
drivers/gpu/drm/amd/display/dc/dce/dmub_hw_lock_mgr.h
50
bool dmub_hw_lock_mgr_does_context_require_lock(const struct dc *dc, const struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1057
hack_bounding_box(v, &dc->debug, context);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1063
&& context->stream_count == 1
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1078
context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = v->sr_enter_plus_exit_time;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1079
context->bw_ctx.dml.soc.sr_exit_time_us = v->sr_exit_time;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1145
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1147
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1149
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1151
context->bw_ctx.bw.dcn.watermarks.a.pte_meta_urgent_ns = v->ptemeta_urgent_watermark * 1000;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1152
context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = v->urgent_watermark * 1000;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1153
context->bw_ctx.bw.dcn.watermarks.b = context->bw_ctx.bw.dcn.watermarks.a;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1154
context->bw_ctx.bw.dcn.watermarks.c = context->bw_ctx.bw.dcn.watermarks.a;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1155
context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1157
context->bw_ctx.bw.dcn.clk.fclk_khz = (int)(bw_consumed * 1000000 /
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1160
context->bw_ctx.bw.dcn.clk.fclk_khz = (int)(bw_consumed * 1000000 / 32);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1162
context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz = (int)(v->dcf_clk_deep_sleep * 1000);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1163
context->bw_ctx.bw.dcn.clk.dcfclk_khz = (int)(v->dcfclk * 1000);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1165
context->bw_ctx.bw.dcn.clk.dispclk_khz = (int)(v->dispclk * 1000);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1167
context->bw_ctx.bw.dcn.clk.dispclk_khz = (int)(dc->dcn_soc->max_dispclk_vmax0p9 * 1000);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1169
if (context->bw_ctx.bw.dcn.clk.dispclk_khz <
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1171
context->bw_ctx.bw.dcn.clk.dispclk_khz =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1175
context->bw_ctx.bw.dcn.clk.dppclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz /
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1177
context->bw_ctx.bw.dcn.clk.phyclk_khz = v->phyclk_per_state[v->voltage_level];
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1180
context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1184
context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1188
context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1192
context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1200
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1261
hsplit_pipe = resource_find_free_secondary_pipe_legacy(&context->res_ctx, pool, pipe);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1263
split_stream_across_pipes(&context->res_ctx, pool, pipe, hsplit_pipe);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1294
context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1296
context->bw_ctx.dml.soc.sr_exit_time_us = dc->dcn_soc->sr_exit_time;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
556
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
566
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_exit_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
568
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_enter_plus_exit_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
570
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.pstate_change_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
572
context->bw_ctx.bw.dcn.watermarks.b.pte_meta_urgent_ns = v->ptemeta_urgent_watermark * 1000;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
573
context->bw_ctx.bw.dcn.watermarks.b.urgent_ns = v->urgent_watermark * 1000;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
580
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_exit_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
582
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_enter_plus_exit_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
584
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.pstate_change_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
586
context->bw_ctx.bw.dcn.watermarks.c.pte_meta_urgent_ns = v->ptemeta_urgent_watermark * 1000;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
587
context->bw_ctx.bw.dcn.watermarks.c.urgent_ns = v->urgent_watermark * 1000;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
601
context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_exit_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
603
context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
605
context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.pstate_change_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
607
context->bw_ctx.bw.dcn.watermarks.d.pte_meta_urgent_ns = v->ptemeta_urgent_watermark * 1000;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
608
context->bw_ctx.bw.dcn.watermarks.d.urgent_ns = v->urgent_watermark * 1000;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
621
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
623
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
625
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns =
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
627
context->bw_ctx.bw.dcn.watermarks.a.pte_meta_urgent_ns = v->ptemeta_urgent_watermark * 1000;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
628
context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = v->urgent_watermark * 1000;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
630
context->bw_ctx.bw.dcn.watermarks.b = context->bw_ctx.bw.dcn.watermarks.a;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
631
context->bw_ctx.bw.dcn.watermarks.c = context->bw_ctx.bw.dcn.watermarks.a;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
634
context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
705
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
710
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
731
context->stream_count >= 2)
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
734
if (context->stream_count == 1 &&
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
736
hack_force_pipe_split(v, context->streams[0]->timing.pix_clk_100hz);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
750
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
760
struct dcn_bw_internal_vars *v = &context->dcn_bw_vars;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
894
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1030
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1039
wb_arb_params->cli_watermark[k] = get_wm_writeback_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1040
wb_arb_params->pstate_watermark[k] = get_wm_writeback_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1042
wb_arb_params->time_per_pixel = 16.0 * 1000 / (context->res_ctx.pipe_ctx[i].stream->phy_pix_clk / 1000); /* 4 bit fraction, ms */
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1045
static bool is_dtbclk_required(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1049
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1051
if (dc->link_srv->dp_is_128b_132b_signal(&context->res_ctx.pipe_ctx[i]))
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1057
static enum dcn_zstate_support_state decide_zstate_support(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1064
if (context->res_ctx.pipe_ctx[i].plane_state)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1081
else if (context->stream_count == 1 && context->streams[0]->signal == SIGNAL_TYPE_EDP) {
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1082
struct dc_link *link = context->streams[0]->sink->link;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1083
struct dc_stream_status *stream_status = &context->stream_status[0];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1085
bool allow_z8 = context->bw_ctx.dml.vba.StutterPeriod > (double)minmum_z8_residency;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1095
if (is_pwrseq0 && context->bw_ctx.dml.vba.StutterPeriod > 5000.0)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1141
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1151
dc->res_pool->funcs->set_mcif_arb_params(dc, context, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1153
context->bw_ctx.bw.dcn.clk.dispclk_khz = context->bw_ctx.dml.vba.DISPCLK * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1154
context->bw_ctx.bw.dcn.clk.dcfclk_khz = context->bw_ctx.dml.vba.DCFCLK * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1155
context->bw_ctx.bw.dcn.clk.socclk_khz = context->bw_ctx.dml.vba.SOCCLK * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1156
context->bw_ctx.bw.dcn.clk.dramclk_khz = context->bw_ctx.dml.vba.DRAMSpeed * 1000 / 16;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1158
if (dc->debug.min_dram_clk_khz > context->bw_ctx.bw.dcn.clk.dramclk_khz)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1159
context->bw_ctx.bw.dcn.clk.dramclk_khz = dc->debug.min_dram_clk_khz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1161
context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz = context->bw_ctx.dml.vba.DCFCLKDeepSleep * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1162
context->bw_ctx.bw.dcn.clk.fclk_khz = context->bw_ctx.dml.vba.FabricClock * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1163
context->bw_ctx.bw.dcn.clk.p_state_change_support =
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1164
context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb]
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1170
context->bw_ctx.bw.dcn.clk.p_state_change_support |= context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1172
context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1174
context->bw_ctx.bw.dcn.clk.dtbclk_en = is_dtbclk_required(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1176
if (context->bw_ctx.bw.dcn.clk.dispclk_khz < dc->debug.min_disp_clk_khz)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1177
context->bw_ctx.bw.dcn.clk.dispclk_khz = dc->debug.min_disp_clk_khz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1180
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1182
if (context->res_ctx.pipe_ctx[i].plane_state)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1184
pipes[pipe_idx].pipe.dest.vstartup_start = get_vstartup(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1185
pipes[pipe_idx].pipe.dest.vupdate_offset = get_vupdate_offset(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1186
pipes[pipe_idx].pipe.dest.vupdate_width = get_vupdate_width(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1187
pipes[pipe_idx].pipe.dest.vready_offset = get_vready_offset(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1189
if (dc_state_get_pipe_subvp_type(context, &context->res_ctx.pipe_ctx[i]) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1191
context->res_ctx.pipe_ctx[i].det_buffer_size_kb = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1192
context->res_ctx.pipe_ctx[i].unbounded_req = false;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1194
context->res_ctx.pipe_ctx[i].det_buffer_size_kb = context->bw_ctx.dml.ip.det_buffer_size_kbytes;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1195
context->res_ctx.pipe_ctx[i].unbounded_req = pipes[pipe_idx].pipe.src.unbounded_req_mode;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1198
if (context->bw_ctx.bw.dcn.clk.dppclk_khz < pipes[pipe_idx].clks_cfg.dppclk_mhz * 1000)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1199
context->bw_ctx.bw.dcn.clk.dppclk_khz = pipes[pipe_idx].clks_cfg.dppclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1200
context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz =
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1202
context->res_ctx.pipe_ctx[i].pipe_dlg_param = pipes[pipe_idx].pipe.dest;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1204
context->res_ctx.pipe_ctx[i].stream->adaptive_sync_infopacket.valid)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1206
&context->res_ctx.pipe_ctx[i].stream->timing,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1207
&context->res_ctx.pipe_ctx[i].pipe_dlg_param.vstartup_start);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1213
context->bw_ctx.bw.dcn.clk.p_state_change_support = true;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1216
context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1217
context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1218
context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = context->bw_ctx.dml.soc.clock_limits[vlevel].dppclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1219
context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz = context->bw_ctx.dml.soc.clock_limits[vlevel].dispclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1221
context->bw_ctx.bw.dcn.compbuf_size_kb = context->bw_ctx.dml.ip.config_return_buffer_size_in_kbytes
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1222
- context->bw_ctx.dml.ip.det_buffer_size_kbytes * pipe_idx;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1225
bool cstate_en = context->bw_ctx.dml.vba.PrefetchMode[vlevel][context->bw_ctx.dml.vba.maxMpcComb] != 2;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1227
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1234
context->bw_ctx.dml.funcs.rq_dlg_get_dlg_reg(&context->bw_ctx.dml,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1235
&context->res_ctx.pipe_ctx[i].dlg_regs,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1236
&context->res_ctx.pipe_ctx[i].ttu_regs,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1241
context->bw_ctx.bw.dcn.clk.p_state_change_support,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1244
context->bw_ctx.dml.funcs.rq_dlg_get_rq_reg(&context->bw_ctx.dml,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1245
&context->res_ctx.pipe_ctx[i].rq_regs,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1249
context->bw_ctx.bw.dcn.clk.zstate_support = decide_zstate_support(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1315
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1321
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1539
dc_state_get_pipe_subvp_type(context, &res_ctx->pipe_ctx[i]) == SUBVP_PHANTOM))
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1730
void dcn20_calculate_wm(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1742
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1746
pipes[pipe_cnt].clks_cfg.dispclk_mhz = context->bw_ctx.dml.vba.RequiredDISPCLK[vlevel][context->bw_ctx.dml.vba.maxMpcComb];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1750
context->bw_ctx.dml.vba.RequiredDPPCLK[vlevel][context->bw_ctx.dml.vba.maxMpcComb][pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1751
if (context->bw_ctx.dml.vba.BlendingAndTiming[pipe_idx] == pipe_idx)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1753
context->bw_ctx.dml.vba.ODMCombineEnabled[pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1759
context->bw_ctx.dml.vba.RequiredDPPCLK[vlevel][context->bw_ctx.dml.vba.maxMpcComb][pipe_split_from[i]];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1760
if (context->bw_ctx.dml.vba.BlendingAndTiming[pipe_split_from[i]] == pipe_split_from[i])
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1762
context->bw_ctx.dml.vba.ODMCombineEnabled[pipe_split_from[i]];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1768
pipes[pipe_cnt].clks_cfg.dispclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1769
pipes[pipe_cnt].clks_cfg.dppclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1782
context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1785
context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1791
pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1792
pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1797
pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[1].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1798
pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[1].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1800
context->bw_ctx.bw.dcn.watermarks.b.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1801
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1802
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1803
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1804
context->bw_ctx.bw.dcn.watermarks.b.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1805
context->bw_ctx.bw.dcn.watermarks.b.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1806
context->bw_ctx.bw.dcn.watermarks.b.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1807
context->bw_ctx.bw.dcn.watermarks.b.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1811
pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[2].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1812
pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[2].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1814
context->bw_ctx.bw.dcn.watermarks.c.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1815
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1816
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1817
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1818
context->bw_ctx.bw.dcn.watermarks.c.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1819
context->bw_ctx.bw.dcn.watermarks.c.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1820
context->bw_ctx.bw.dcn.watermarks.c.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1824
pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[2].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1825
pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[2].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1827
context->bw_ctx.bw.dcn.watermarks.d.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1828
context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1829
context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1830
context->bw_ctx.bw.dcn.watermarks.d.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1831
context->bw_ctx.bw.dcn.watermarks.d.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1832
context->bw_ctx.bw.dcn.watermarks.d.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1833
context->bw_ctx.bw.dcn.watermarks.d.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1836
pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1837
pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1838
context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1839
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1840
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1841
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1842
context->bw_ctx.bw.dcn.watermarks.a.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1843
context->bw_ctx.bw.dcn.watermarks.a.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
1844
context->bw_ctx.bw.dcn.watermarks.a.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2028
static bool dcn20_validate_bandwidth_internal(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2042
out = dcn20_fast_validate_bw(dc, context, pipes, &pipe_cnt, pipe_split_from, &vlevel, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2057
dcn20_calculate_wm(dc, context, pipes, &pipe_cnt, pipe_split_from, vlevel, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2058
dcn20_calculate_dlg_params(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2066
dml_get_status_message(context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states]));
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2078
bool dcn20_validate_bandwidth_fp(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2088
p_state_latency_us = context->bw_ctx.dml.soc.dram_clock_change_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2089
context->bw_ctx.dml.soc.disable_dram_clock_change_vactive_support =
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2091
context->bw_ctx.dml.soc.allow_dram_clock_one_display_vactive =
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2095
ASSERT(context != dc->current_state);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2098
return dcn20_validate_bandwidth_internal(dc, context, validate_mode, pipes);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2101
voltage_supported = dcn20_validate_bandwidth_internal(dc, context, DC_VALIDATE_MODE_AND_PROGRAMMING, pipes);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2102
full_pstate_supported = context->bw_ctx.bw.dcn.clk.p_state_change_support;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2104
if (context->bw_ctx.dml.soc.dummy_pstate_latency_us == 0 ||
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2106
context->bw_ctx.bw.dcn.clk.p_state_change_support = full_pstate_supported;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2111
context->bw_ctx.dml.soc.dram_clock_change_latency_us = context->bw_ctx.dml.soc.dummy_pstate_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2114
voltage_supported = dcn20_validate_bandwidth_internal(dc, context, DC_VALIDATE_MODE_AND_PROGRAMMING, pipes);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2115
dummy_pstate_supported = context->bw_ctx.bw.dcn.clk.p_state_change_support;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2117
if (voltage_supported && (dummy_pstate_supported || !(context->stream_count))) {
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2118
context->bw_ctx.bw.dcn.clk.p_state_change_support = false;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2126
context->bw_ctx.dml.soc.dram_clock_change_latency_us = p_state_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2155
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2164
pipe_cnt = dcn20_populate_dml_pipes_from_context(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2235
static void dcn21_calculate_wm(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2249
patch_bounding_box(dc, &context->bw_ctx.dml.soc);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2252
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2256
pipes[pipe_cnt].clks_cfg.dispclk_mhz = context->bw_ctx.dml.vba.RequiredDISPCLK[vlevel_req][context->bw_ctx.dml.vba.maxMpcComb];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2260
context->bw_ctx.dml.vba.RequiredDPPCLK[vlevel_req][context->bw_ctx.dml.vba.maxMpcComb][pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2261
if (context->bw_ctx.dml.vba.BlendingAndTiming[pipe_idx] == pipe_idx)
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2263
context->bw_ctx.dml.vba.ODMCombineEnablePerState[vlevel_req][pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2269
context->bw_ctx.dml.vba.RequiredDPPCLK[vlevel_req][context->bw_ctx.dml.vba.maxMpcComb][pipe_split_from[i]];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2270
if (context->bw_ctx.dml.vba.BlendingAndTiming[pipe_split_from[i]] == pipe_split_from[i])
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2272
context->bw_ctx.dml.vba.ODMCombineEnablePerState[vlevel_req][pipe_split_from[i]];
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2282
context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2285
context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2299
calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.d,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2300
&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2304
calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.c,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2305
&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2309
calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.b,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2310
&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2315
calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.a,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2316
&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2319
bool dcn21_validate_bandwidth_fp(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2336
ASSERT(context != dc->current_state);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2338
out = dcn21_fast_validate_bw(dc, context, pipes, &pipe_cnt, pipe_split_from, &vlevel, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2353
dcn21_calculate_wm(dc, context, pipes, &pipe_cnt, pipe_split_from, vlevel, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2354
dcn20_calculate_dlg_params(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2362
dml_get_status_message(context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states]));
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.h
36
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.h
40
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.h
45
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.h
49
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.h
64
bool dcn20_validate_bandwidth_fp(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.h
76
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.h
79
bool dcn21_validate_bandwidth_fp(struct dc *dc, struct dc_state *context, enum
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
289
void dcn30_fpu_update_soc_for_wm_a(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
295
if (!context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching ||
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
296
context->bw_ctx.dml.soc.dram_clock_change_latency_us == 0)
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
297
context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_A].dml_input.pstate_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
298
context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_A].dml_input.sr_enter_plus_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
299
context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_A].dml_input.sr_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
304
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
309
int maxMpcComb = context->bw_ctx.dml.vba.maxMpcComb;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
311
double dcfclk = context->bw_ctx.dml.vba.DCFCLKState[vlevel][maxMpcComb];
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
312
bool pstate_en = context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][maxMpcComb] != dm_dram_clock_change_unsupported;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
318
context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching = false;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
319
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
321
if (context->streams[i])
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
322
stream_status = dc_state_get_stream_status(context, context->streams[i]);
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
329
context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching =
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
330
dcn30_can_support_mclk_switch_using_fw_based_vblank_stretch(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
332
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching) {
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
334
context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
341
context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_A].dml_input.pstate_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
342
dcn30_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel,
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
344
maxMpcComb = context->bw_ctx.dml.vba.maxMpcComb;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
345
dcfclk = context->bw_ctx.dml.vba.DCFCLKState[vlevel][context->bw_ctx.dml.vba.maxMpcComb];
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
346
pstate_en = context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][maxMpcComb] != dm_dram_clock_change_unsupported;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
350
if (context->bw_ctx.dml.soc.min_dcfclk > dcfclk)
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
351
dcfclk = context->bw_ctx.dml.soc.min_dcfclk;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
355
pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
364
pipes[0].clks_cfg.dcfclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dcfclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
366
context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_B].dml_input.pstate_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
367
context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_B].dml_input.sr_enter_plus_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
368
context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_B].dml_input.sr_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
370
context->bw_ctx.bw.dcn.watermarks.b.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
371
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
372
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
373
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
374
context->bw_ctx.bw.dcn.watermarks.b.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
375
context->bw_ctx.bw.dcn.watermarks.b.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
376
context->bw_ctx.bw.dcn.watermarks.b.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
377
context->bw_ctx.bw.dcn.watermarks.b.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
409
unsigned int min_dram_speed_mts = context->bw_ctx.dml.vba.DRAMSpeed;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
412
context->bw_ctx.dml.soc.dram_clock_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
415
if (context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][maxMpcComb] ==
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
423
if (!context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching) {
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
433
context->bw_ctx.dml.soc.dram_clock_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
436
context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_C].dml_input.sr_enter_plus_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
437
context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_C].dml_input.sr_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
440
context->bw_ctx.bw.dcn.watermarks.c.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
441
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
442
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
443
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
444
context->bw_ctx.bw.dcn.watermarks.c.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
445
context->bw_ctx.bw.dcn.watermarks.c.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
446
context->bw_ctx.bw.dcn.watermarks.c.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
447
context->bw_ctx.bw.dcn.watermarks.c.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
453
context->bw_ctx.bw.dcn.watermarks.a = context->bw_ctx.bw.dcn.watermarks.c;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
454
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
462
dc->res_pool->funcs->update_soc_for_wm_a(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
463
context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
464
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
465
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
466
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
467
context->bw_ctx.bw.dcn.watermarks.a.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
468
context->bw_ctx.bw.dcn.watermarks.a.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
469
context->bw_ctx.bw.dcn.watermarks.a.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
470
context->bw_ctx.bw.dcn.watermarks.a.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
473
context->perf_params.stutter_period_us = context->bw_ctx.dml.vba.StutterPeriod;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
476
context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
479
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
482
pipes[pipe_idx].clks_cfg.dispclk_mhz = get_dispclk_calculated(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
483
pipes[pipe_idx].clks_cfg.dppclk_mhz = get_dppclk_calculated(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
486
pipes[pipe_idx].clks_cfg.dispclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
487
pipes[pipe_idx].clks_cfg.dppclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
498
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching &&
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
500
context->bw_ctx.dml.vba.DRAMSpeed <= 1700 &&
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
501
context->bw_ctx.dml.vba.DRAMSpeed >= 1500) {
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
505
context->bw_ctx.dml.vba.DRAMSpeed = dc->dml.soc.clock_limits[i].dram_speed_mts;
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
511
dcn20_calculate_dlg_params(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
515
context->bw_ctx.dml.soc.dram_clock_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
518
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching)
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
519
dcn30_setup_mclk_switch_using_fw_based_vblank_stretch(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
621
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
632
context->bw_ctx.dml.soc.dram_clock_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
634
dcn30_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel,
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.c
637
if (context->bw_ctx.dml.soc.allow_dram_self_refresh_or_dram_clock_change_in_vblank ==
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.h
41
void dcn30_fpu_update_soc_for_wm_a(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.h
44
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn30/dcn30_fpu.h
64
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
427
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
448
calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.d,
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
449
&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
453
calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.c,
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
454
&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
458
calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.b,
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
459
&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
464
calculate_wm_set_for_vlevel(vlevel, table_entry, &context->bw_ctx.bw.dcn.watermarks.a,
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
465
&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
468
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
471
pipes[pipe_idx].clks_cfg.dispclk_mhz = get_dispclk_calculated(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
472
pipes[pipe_idx].clks_cfg.dppclk_mhz = get_dppclk_calculated(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
475
pipes[pipe_idx].clks_cfg.dispclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
476
pipes[pipe_idx].clks_cfg.dppclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
485
dcn20_calculate_dlg_params(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.h
37
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
454
void dcn31_update_soc_for_wm_a(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
459
context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.entries[WM_A].pstate_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
460
context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.entries[WM_A].sr_enter_plus_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
461
context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.entries[WM_A].sr_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
465
void dcn315_update_soc_for_wm_a(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
471
if (context->bw_ctx.dml.vba.DRAMClockChangeSupport[context->bw_ctx.dml.vba.VoltageLevel][context->bw_ctx.dml.vba.maxMpcComb] != dm_dram_clock_change_vactive)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
472
context->bw_ctx.dml.soc.dram_clock_change_latency_us = context->bw_ctx.dml.soc.dummy_pstate_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
474
context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.entries[WM_A].pstate_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
475
context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us =
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
477
context->bw_ctx.dml.soc.sr_exit_time_us =
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
483
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
489
double dcfclk = context->bw_ctx.dml.vba.DCFCLKState[vlevel][context->bw_ctx.dml.vba.maxMpcComb];
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
494
if (context->bw_ctx.dml.soc.min_dcfclk > dcfclk)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
495
dcfclk = context->bw_ctx.dml.soc.min_dcfclk;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
502
context->bw_ctx.bw.dcn.clk.dcfclk_khz = dcfclk; // always should be vlevel 0
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
508
pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
511
get_wm_z8_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
513
if (get_stutter_period(&context->bw_ctx.dml, pipes, pipe_cnt) < dc->debug.minimum_z8_residency_time &&
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
522
dc->res_pool->funcs->update_soc_for_wm_a(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
523
context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
524
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
525
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
526
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
527
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_z8_ns = cstate_enter_plus_exit_z8_ns;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
528
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_z8_ns = get_wm_z8_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
529
context->bw_ctx.bw.dcn.watermarks.a.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
530
context->bw_ctx.bw.dcn.watermarks.a.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
531
context->bw_ctx.bw.dcn.watermarks.a.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
532
context->bw_ctx.bw.dcn.watermarks.a.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
533
context->bw_ctx.bw.dcn.watermarks.b = context->bw_ctx.bw.dcn.watermarks.a;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
534
context->bw_ctx.bw.dcn.watermarks.c = context->bw_ctx.bw.dcn.watermarks.a;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
535
context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
538
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
541
if (context->res_ctx.pipe_ctx[i].plane_state)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
544
pipes[pipe_idx].clks_cfg.dispclk_mhz = get_dispclk_calculated(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
545
pipes[pipe_idx].clks_cfg.dppclk_mhz = get_dppclk_calculated(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
548
pipes[pipe_idx].clks_cfg.dispclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
549
pipes[pipe_idx].clks_cfg.dppclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
559
dcn20_calculate_dlg_params(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
561
context->bw_ctx.bw.dcn.clk.p_state_change_support =
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
562
context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb] == dm_dram_clock_change_vactive;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
565
context->bw_ctx.bw.dcn.clk.socclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
566
context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
567
context->bw_ctx.bw.dcn.clk.dcfclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
568
context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
569
context->bw_ctx.bw.dcn.clk.dramclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
570
context->bw_ctx.bw.dcn.clk.fclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
571
context->bw_ctx.bw.dcn.clk.p_state_change_support = true;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
573
if (context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
574
context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
577
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
580
context->res_ctx.pipe_ctx[i].det_buffer_size_kb =
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
581
get_det_buffer_size_kbytes(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
582
if (context->res_ctx.pipe_ctx[i].det_buffer_size_kb > 384)
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
583
context->res_ctx.pipe_ctx[i].det_buffer_size_kb /= 2;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
584
total_det += context->res_ctx.pipe_ctx[i].det_buffer_size_kb;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.c
587
context->bw_ctx.bw.dcn.compbuf_size_kb = context->bw_ctx.dml.ip.config_return_buffer_size_in_kbytes - total_det;
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.h
38
void dcn31_update_soc_for_wm_a(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.h
39
void dcn315_update_soc_for_wm_a(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.h
42
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn31/dcn31_fpu.h
56
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
307
int dcn314_populate_dml_pipes_from_context_fpu(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
312
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
319
dcn31x_populate_dml_pipes_from_context(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
392
context->bw_ctx.dml.ip.det_buffer_size_kbytes = DCN3_14_DEFAULT_DET_SIZE;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
402
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 192;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
405
} else if (context->stream_count >= dc->debug.crb_alloc_policy_min_disp_count
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
407
context->bw_ctx.dml.ip.det_buffer_size_kbytes = dc->debug.crb_alloc_policy * 64;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
408
} else if (context->stream_count >= 3 && upscaled) {
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
409
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 192;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
413
context->bw_ctx.dml.ip.odm_combine_4to1_supported = true;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
416
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.c
425
context->bw_ctx.dml.vba.ODMCombinePolicy = dm_odm_combine_policy_2to1;
drivers/gpu/drm/amd/display/dc/dml/dcn314/dcn314_fpu.h
36
int dcn314_populate_dml_pipes_from_context_fpu(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1001
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_MAIN) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1038
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1042
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1049
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1050
enum mall_stream_type pipe_mall_type = dc_state_get_pipe_subvp_type(context, pipe);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1074
schedulable = subvp_subvp_admissable(dc, context) && subvp_subvp_schedulable(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1079
if (dcn32_subvp_drr_admissable(dc, context))
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1080
schedulable = subvp_drr_schedulable(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1081
else if (dcn32_subvp_vblank_admissable(dc, context, vlevel))
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1082
schedulable = subvp_vblank_schedulable(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1093
static void assign_subvp_index(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1099
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1102
dc_state_get_pipe_subvp_type(context, pipe_ctx) == SUBVP_MAIN) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1169
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1179
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1180
stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1182
&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1190
&context->res_ctx, dpp_pipes);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1201
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1232
pipe = &context->res_ctx.pipe_ctx[dc_pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1267
static void update_pipes_with_slice_table(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1273
resource_update_pipes_for_stream_with_slice_count(context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1279
resource_update_pipes_for_plane_with_slice_count(context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1285
static bool update_pipes_with_split_flags(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1292
init_pipe_slice_table_from_context(&slice_table, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1294
&slice_table, dc, context, vba,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1296
update_pipes_with_slice_table(dc, context, &slice_table);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1301
struct dc_state *context, struct vba_vars_st *v, int *split,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1304
struct dc_stream_state *stream = context->streams[0];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1324
if (context->stream_count != 1)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1359
init_pipe_slice_table_from_context(&slice_table, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1361
&slice_table, dc, context, v,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1379
if (context->stream_status[0].plane_count != 1)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1382
if (memcmp(&context->stream_status[0].plane_states[0]->clip_rect,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1395
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1411
new_vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1413
if (new_vlevel < context->bw_ctx.dml.soc.num_states) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1416
*vlevel = dcn20_validate_apply_pipe_split_flags(dc, context, new_vlevel, split, merge);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1417
context->bw_ctx.dml.vba.VoltageLevel = *vlevel;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1425
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1429
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1430
if (context->streams[i]->test_pattern.type != DP_TEST_PATTERN_VIDEO_MODE)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1438
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1446
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1461
context->bw_ctx.dml.soc.allow_for_pstate_or_stutter_in_vblank_final =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1464
context->bw_ctx.dml.soc.allow_for_pstate_or_stutter_in_vblank_final =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1467
*vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, *pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1469
if (*vlevel < context->bw_ctx.dml.soc.num_states) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1470
*vlevel = dcn20_validate_apply_pipe_split_flags(dc, context, *vlevel, split, merge);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1475
if (!dcn32_apply_merge_split_flags_helper(dc, context, repopulate_pipes, split, merge))
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1487
if (!dc->debug.force_disable_subvp && !dc->caps.dmub_caps.gecc_enable && dcn32_all_pipes_have_stream_and_plane(dc, context) &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1488
!dcn32_mpo_in_use(context) && !dcn32_any_surfaces_rotated(dc, context) && !is_test_pattern_enabled(context) &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1489
(*vlevel == context->bw_ctx.dml.soc.num_states || (vba->DRAMSpeedPerState[*vlevel] != vba->DRAMSpeedPerState[0] &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1496
while (!found_supported_config && dcn32_enough_pipes_for_subvp(dc, context) &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1497
dcn32_assign_subvp_pipe(dc, context, &dc_pipe_idx)) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1505
if (*vlevel == context->bw_ctx.dml.soc.num_states &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1506
context->bw_ctx.dml.soc.allow_for_pstate_or_stutter_in_vblank_final ==
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1508
context->bw_ctx.dml.soc.allow_for_pstate_or_stutter_in_vblank_final =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1515
*vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, *pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1518
dc->res_pool->funcs->add_phantom_pipes(dc, context, pipes, *pipe_cnt, dc_pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1520
*pipe_cnt = dc->res_pool->funcs->populate_dml_pipes(dc, context, pipes,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1524
pipes[0].clks_cfg.dppclk_mhz = get_dppclk_calculated(&context->bw_ctx.dml, pipes, *pipe_cnt, 0);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1525
*vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, *pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1530
for (i = *vlevel; i < context->bw_ctx.dml.soc.num_states; i++) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1537
if (*vlevel < context->bw_ctx.dml.soc.num_states
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1538
&& subvp_validate_static_schedulability(dc, context, *vlevel))
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1542
if (dcn32_subvp_drr_admissable(dc, context) && subvp_drr_schedulable(dc, context)) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1561
dc_state_remove_phantom_streams_and_planes(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1562
dc_state_release_phantom_streams_and_planes(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1564
*pipe_cnt = dc->res_pool->funcs->populate_dml_pipes(dc, context, pipes,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1567
*vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, *pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1569
if (*vlevel < context->bw_ctx.dml.soc.num_states) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1570
*vlevel = dcn20_validate_apply_pipe_split_flags(dc, context, *vlevel, split, merge);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1575
dcn32_helper_populate_phantom_dlg_params(dc, context, pipes, *pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1585
*vlevel = dcn20_validate_apply_pipe_split_flags(dc, context, *vlevel, split, merge);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1589
assign_subvp_index(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1593
if (should_apply_odm_power_optimization(dc, context, vba, split, merge))
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1595
dc, context, pipes, split, merge, vlevel, *pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1600
static bool is_dtbclk_required(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1605
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1607
if (dc->link_srv->dp_is_128b_132b_signal(&context->res_ctx.pipe_ctx[i]))
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1645
static void dcn32_calculate_dlg_params(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1652
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1657
dc->res_pool->funcs->set_mcif_arb_params(dc, context, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1659
context->bw_ctx.bw.dcn.clk.dispclk_khz = context->bw_ctx.dml.vba.DISPCLK * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1660
context->bw_ctx.bw.dcn.clk.dcfclk_khz = context->bw_ctx.dml.vba.DCFCLK * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1661
context->bw_ctx.bw.dcn.clk.socclk_khz = context->bw_ctx.dml.vba.SOCCLK * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1662
context->bw_ctx.bw.dcn.clk.dramclk_khz = context->bw_ctx.dml.vba.DRAMSpeed * 1000 / 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1663
context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz = context->bw_ctx.dml.vba.DCFCLKDeepSleep * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1664
context->bw_ctx.bw.dcn.clk.fclk_khz = context->bw_ctx.dml.vba.FabricClock * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1665
context->bw_ctx.bw.dcn.clk.p_state_change_support =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1666
context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb]
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1672
context->bw_ctx.bw.dcn.clk.p_state_change_support |= context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1674
context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1675
context->bw_ctx.bw.dcn.clk.dtbclk_en = is_dtbclk_required(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1676
context->bw_ctx.bw.dcn.clk.ref_dtbclk_khz = context->bw_ctx.dml.vba.DTBCLKPerState[vlevel] * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1677
if (context->bw_ctx.dml.vba.FCLKChangeSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb] == dm_fclock_change_unsupported)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1678
context->bw_ctx.bw.dcn.clk.fclk_p_state_change_support = false;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1680
context->bw_ctx.bw.dcn.clk.fclk_p_state_change_support = true;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1682
usr_retraining_support = context->bw_ctx.dml.vba.USRRetrainingSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1685
if (context->bw_ctx.bw.dcn.clk.dispclk_khz < dc->debug.min_disp_clk_khz)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1686
context->bw_ctx.bw.dcn.clk.dispclk_khz = dc->debug.min_disp_clk_khz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1688
unbounded_req_enabled = get_unbounded_request_enabled(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1696
context->bw_ctx.bw.dcn.mall_ss_size_bytes = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1697
context->bw_ctx.bw.dcn.mall_ss_psr_active_size_bytes = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1698
context->bw_ctx.bw.dcn.mall_subvp_size_bytes = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1701
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1703
if (context->res_ctx.pipe_ctx[i].plane_state)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1705
pipes[pipe_idx].pipe.dest.vstartup_start = get_vstartup(&context->bw_ctx.dml, pipes, pipe_cnt,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1707
pipes[pipe_idx].pipe.dest.vupdate_offset = get_vupdate_offset(&context->bw_ctx.dml, pipes, pipe_cnt,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1709
pipes[pipe_idx].pipe.dest.vupdate_width = get_vupdate_width(&context->bw_ctx.dml, pipes, pipe_cnt,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1711
pipes[pipe_idx].pipe.dest.vready_offset = get_vready_offset(&context->bw_ctx.dml, pipes, pipe_cnt,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1714
if (dc_state_get_pipe_subvp_type(context, &context->res_ctx.pipe_ctx[i]) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1716
context->res_ctx.pipe_ctx[i].det_buffer_size_kb = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1717
context->res_ctx.pipe_ctx[i].unbounded_req = false;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1719
context->res_ctx.pipe_ctx[i].det_buffer_size_kb = get_det_buffer_size_kbytes(&context->bw_ctx.dml, pipes, pipe_cnt,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1721
context->res_ctx.pipe_ctx[i].unbounded_req = unbounded_req_enabled;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1724
if (context->bw_ctx.bw.dcn.clk.dppclk_khz < pipes[pipe_idx].clks_cfg.dppclk_mhz * 1000)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1725
context->bw_ctx.bw.dcn.clk.dppclk_khz = pipes[pipe_idx].clks_cfg.dppclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1726
if (context->res_ctx.pipe_ctx[i].plane_state)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1727
context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz = pipes[pipe_idx].clks_cfg.dppclk_mhz * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1729
context->res_ctx.pipe_ctx[i].plane_res.bw.dppclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1730
context->res_ctx.pipe_ctx[i].pipe_dlg_param = pipes[pipe_idx].pipe.dest;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1732
context->res_ctx.pipe_ctx[i].surface_size_in_mall_bytes = get_surface_size_in_mall(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1735
context->res_ctx.pipe_ctx[i].has_vactive_margin = true;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1737
context->res_ctx.pipe_ctx[i].has_vactive_margin = false;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1741
if (context->res_ctx.pipe_ctx[i].stream && context->res_ctx.pipe_ctx[i].plane_state &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1742
(context->res_ctx.pipe_ctx[i].top_pipe == NULL ||
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1743
context->res_ctx.pipe_ctx[i].plane_state != context->res_ctx.pipe_ctx[i].top_pipe->plane_state) &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1744
context->res_ctx.pipe_ctx[i].prev_odm_pipe == NULL) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1746
if (dc_state_get_pipe_subvp_type(context, &context->res_ctx.pipe_ctx[i]) != SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1747
context->bw_ctx.bw.dcn.mall_ss_size_bytes += context->res_ctx.pipe_ctx[i].surface_size_in_mall_bytes;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1749
if (context->res_ctx.pipe_ctx[i].stream->link->psr_settings.psr_version == DC_PSR_VERSION_UNSUPPORTED) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1751
context->bw_ctx.bw.dcn.mall_ss_psr_active_size_bytes += context->res_ctx.pipe_ctx[i].surface_size_in_mall_bytes;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1755
context->bw_ctx.bw.dcn.mall_subvp_size_bytes += context->res_ctx.pipe_ctx[i].surface_size_in_mall_bytes;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1759
if (context->res_ctx.pipe_ctx[i].stream->adaptive_sync_infopacket.valid)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1761
&context->res_ctx.pipe_ctx[i].stream->timing,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1762
&context->res_ctx.pipe_ctx[i].pipe_dlg_param.vstartup_start);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1768
context->bw_ctx.bw.dcn.clk.socclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1769
context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1770
context->bw_ctx.bw.dcn.clk.dcfclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1771
context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1772
context->bw_ctx.bw.dcn.clk.dramclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1773
context->bw_ctx.bw.dcn.clk.fclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1774
context->bw_ctx.bw.dcn.clk.p_state_change_support = true;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1775
context->bw_ctx.bw.dcn.clk.fclk_p_state_change_support = true;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1778
context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1779
context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1780
context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = context->bw_ctx.dml.soc.clock_limits[vlevel].dppclk_mhz
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1782
context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz = context->bw_ctx.dml.soc.clock_limits[vlevel].dispclk_mhz
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1785
context->bw_ctx.bw.dcn.clk.num_ways = dcn32_helper_calculate_num_ways_for_subvp(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1787
context->bw_ctx.bw.dcn.compbuf_size_kb = context->bw_ctx.dml.ip.config_return_buffer_size_in_kbytes;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1790
if (context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1791
context->bw_ctx.bw.dcn.compbuf_size_kb -= context->res_ctx.pipe_ctx[i].det_buffer_size_kb;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1796
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1799
context->bw_ctx.dml.funcs.rq_dlg_get_dlg_reg_v2(&context->bw_ctx.dml,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1800
&context->res_ctx.pipe_ctx[i].dlg_regs, &context->res_ctx.pipe_ctx[i].ttu_regs, pipes,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1803
context->bw_ctx.dml.funcs.rq_dlg_get_rq_reg_v2(&context->res_ctx.pipe_ctx[i].rq_regs,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1804
&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1811
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1817
if (old_index >= 0 && context->res_ctx.pipe_ctx[old_index].stream == NULL) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1818
pipe = &context->res_ctx.pipe_ctx[old_index];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1826
if (context->res_ctx.pipe_ctx[i].stream == NULL) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1827
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
183
static bool dcn32_apply_merge_split_flags_helper(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1841
if (context->res_ctx.pipe_ctx[i].stream == NULL) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1842
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1939
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1946
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1950
dc, context, vba, split, merge))
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1959
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2006
dcn20_release_dsc(&context->res_ctx, dc->res_pool, &pipe->stream_res.dsc);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2033
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2062
hsplit_pipe = dcn32_find_split_pipe(dc, context, old_index);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2068
dc, &context->res_ctx,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2085
pipe_4to1 = dcn32_find_split_pipe(dc, context, old_index);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2090
dc, &context->res_ctx,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2104
pipe_4to1 = dcn32_find_split_pipe(dc, context, old_index);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2109
dc, &context->res_ctx,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2115
dcn20_build_mapped_resource(dc, context, pipe->stream);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2119
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2127
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2128
struct pipe_ctx *otg_master = resource_get_otg_master_for_stream(&context->res_ctx,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2129
context->streams[i]);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2132
resource_build_test_pattern_params(&context->res_ctx, otg_master);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2139
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2150
int vlevel = context->bw_ctx.dml.soc.num_states;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2151
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2160
dc_state_remove_phantom_streams_and_planes(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2161
dc_state_release_phantom_streams_and_planes(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2163
dc->res_pool->funcs->update_soc_for_wm_a(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2165
for (i = 0; i < context->stream_count; i++)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2166
resource_update_pipes_for_stream_with_slice_count(context, dc->current_state, dc->res_pool, context->streams[i], 1);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2167
pipe_cnt = dc->res_pool->funcs->populate_dml_pipes(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2174
dml_log_pipe_params(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2175
context->bw_ctx.dml.soc.max_vratio_pre = dcn32_determine_max_vratio_prefetch(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2178
if (!dcn32_full_validate_bw_helper(dc, context, pipes, &vlevel, split, merge,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2185
(vlevel == context->bw_ctx.dml.soc.num_states ||
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2197
context->bw_ctx.dml.soc.allow_for_pstate_or_stutter_in_vblank_final =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2200
context->bw_ctx.dml.validate_max_state = (validate_mode != DC_VALIDATE_MODE_AND_PROGRAMMING);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2201
vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2203
context->bw_ctx.dml.validate_max_state = false;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2205
if (vlevel < context->bw_ctx.dml.soc.num_states) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2208
vlevel = dcn20_validate_apply_pipe_split_flags(dc, context, vlevel, split, merge);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2214
dml_log_mode_support_params(&context->bw_ctx.dml);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2216
if (vlevel == context->bw_ctx.dml.soc.num_states)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2220
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2238
if (!dcn32_apply_merge_split_flags_helper(dc, context, &repopulate_pipes, split, merge))
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2242
if (!dcn20_validate_dsc(dc, context)) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2252
pipe_cnt = dc->res_pool->funcs->populate_dml_pipes(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2254
dcn32_update_dml_pipes_odm_policy_based_on_context(dc, context, pipes);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2261
context->bw_ctx.dml.soc.allow_for_pstate_or_stutter_in_vblank_final =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2264
vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2266
if (vlevel == context->bw_ctx.dml.soc.num_states) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2270
flag_max_mpc_comb != context->bw_ctx.dml.vba.maxMpcComb) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2273
for (i = flag_vlevel; i < context->bw_ctx.dml.soc.num_states; i++) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2302
void dcn32_calculate_wm_and_dlg_fpu(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2309
double dcfclk_from_validation = context->bw_ctx.dml.vba.DCFCLKState[vlevel][context->bw_ctx.dml.vba.maxMpcComb];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2310
double dram_speed_from_validation = context->bw_ctx.dml.vba.DRAMSpeed;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2312
bool pstate_en = context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb] !=
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2315
int maxMpcComb = context->bw_ctx.dml.vba.maxMpcComb;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2316
unsigned int min_dram_speed_mts = context->bw_ctx.dml.vba.DRAMSpeed;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2317
bool subvp_in_use = dcn32_subvp_in_use(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2330
context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][maxMpcComb] = dm_dram_clock_change_vblank_w_mall_sub_vp;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2331
context->bw_ctx.dml.soc.allow_for_pstate_or_stutter_in_vblank_final = dm_prefetch_support_fclk_and_stutter;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2336
context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2341
if (context->bw_ctx.dml.soc.fclk_change_latency_us < dc->clk_mgr->bw_params->dummy_pstate_table[dummy_latency_index].dummy_pstate_latency_us) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2343
context->bw_ctx.dml.soc.fclk_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2346
context->bw_ctx.dml.soc.dram_clock_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2348
dcn32_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel, DC_VALIDATE_MODE_AND_PROGRAMMING);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2349
maxMpcComb = context->bw_ctx.dml.vba.maxMpcComb;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2351
context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][maxMpcComb] = dm_dram_clock_change_vblank_w_mall_sub_vp;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2355
context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching = false;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2356
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2358
if (context->streams[i])
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2359
stream_status = dc_state_get_stream_status(context, context->streams[i]);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2367
fpo_candidate_stream = dcn32_can_support_mclk_switch_using_fw_based_vblank_stretch(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2369
stream_status = dc_state_get_stream_status(context, fpo_candidate_stream);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2372
context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching = true;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2375
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2377
context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2384
context->bw_ctx.dml.soc.dram_clock_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2389
if (context->bw_ctx.dml.soc.fclk_change_latency_us < dc->clk_mgr->bw_params->dummy_pstate_table[dummy_latency_index].dummy_pstate_latency_us) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2391
context->bw_ctx.dml.soc.fclk_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2394
dcn32_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel_temp,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2398
maxMpcComb = context->bw_ctx.dml.vba.maxMpcComb;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2399
dcfclk_from_fw_based_mclk_switching = context->bw_ctx.dml.vba.DCFCLKState[vlevel][context->bw_ctx.dml.vba.maxMpcComb];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2401
context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][maxMpcComb] = dm_dram_clock_change_vblank;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2407
context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching = false;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2408
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2410
if (context->streams[i])
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2411
stream_status = dc_state_get_stream_status(context, context->streams[i]);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2415
context->bw_ctx.dml.soc.fclk_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_A].dml_input.fclk_change_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2416
dcn32_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2437
pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel_temp].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2440
context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_B].dml_input.pstate_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2441
context->bw_ctx.dml.soc.fclk_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_B].dml_input.fclk_change_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2442
context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_B].dml_input.sr_enter_plus_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2443
context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_B].dml_input.sr_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2445
context->bw_ctx.bw.dcn.watermarks.b.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2446
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2447
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2448
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2449
context->bw_ctx.bw.dcn.watermarks.b.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2450
context->bw_ctx.bw.dcn.watermarks.b.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2451
context->bw_ctx.bw.dcn.watermarks.b.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2452
context->bw_ctx.bw.dcn.watermarks.b.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2453
context->bw_ctx.bw.dcn.watermarks.b.cstate_pstate.fclk_pstate_change_ns = get_fclk_watermark(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2454
context->bw_ctx.bw.dcn.watermarks.b.usr_retraining_ns = get_usr_retraining_watermark(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2502
pipes[0].clks_cfg.socclk_mhz = context->bw_ctx.dml.soc.clock_limits[vlevel].socclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2504
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2512
context->bw_ctx.dml.soc.dram_clock_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2515
if (context->bw_ctx.dml.vba.DRAMClockChangeSupport[vlevel][maxMpcComb] ==
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2523
if (!context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching && !subvp_in_use) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2533
context->bw_ctx.dml.soc.dram_clock_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2536
context->bw_ctx.dml.soc.fclk_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_C].dml_input.fclk_change_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2537
context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_C].dml_input.sr_enter_plus_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2538
context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_C].dml_input.sr_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2541
context->bw_ctx.bw.dcn.watermarks.c.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2542
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2543
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2544
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2545
context->bw_ctx.bw.dcn.watermarks.c.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2546
context->bw_ctx.bw.dcn.watermarks.c.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2547
context->bw_ctx.bw.dcn.watermarks.c.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2548
context->bw_ctx.bw.dcn.watermarks.c.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2553
context->bw_ctx.bw.dcn.watermarks.c.cstate_pstate.fclk_pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2554
context->bw_ctx.bw.dcn.watermarks.c.usr_retraining_ns = get_usr_retraining_watermark(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2561
context->bw_ctx.bw.dcn.watermarks.a = context->bw_ctx.bw.dcn.watermarks.c;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2562
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = 0;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2566
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.fclk_pstate_change_ns = get_fclk_watermark(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2576
context->bw_ctx.dml.soc.dram_clock_change_latency_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_A].dml_input.pstate_latency_us;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2577
context->bw_ctx.dml.soc.sr_enter_plus_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_A].dml_input.sr_enter_plus_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2578
context->bw_ctx.dml.soc.sr_exit_time_us = dc->clk_mgr->bw_params->wm_table.nv_entries[WM_A].dml_input.sr_exit_time_us;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2580
context->bw_ctx.bw.dcn.watermarks.a.urgent_ns = get_wm_urgent(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2581
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_ns = get_wm_stutter_enter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2582
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_exit_ns = get_wm_stutter_exit(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2583
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = get_wm_dram_clock_change(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2584
context->bw_ctx.bw.dcn.watermarks.a.pte_meta_urgent_ns = get_wm_memory_trip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2585
context->bw_ctx.bw.dcn.watermarks.a.frac_urg_bw_nom = get_fraction_of_urgent_bandwidth(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2586
context->bw_ctx.bw.dcn.watermarks.a.frac_urg_bw_flip = get_fraction_of_urgent_bandwidth_imm_flip(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2587
context->bw_ctx.bw.dcn.watermarks.a.urgent_latency_ns = get_urgent_latency(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2588
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.fclk_pstate_change_ns = get_fclk_watermark(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2589
context->bw_ctx.bw.dcn.watermarks.a.usr_retraining_ns = get_usr_retraining_watermark(&context->bw_ctx.dml, pipes, pipe_cnt) * 1000;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2593
context->bw_ctx.bw.dcn.watermarks.d = context->bw_ctx.bw.dcn.watermarks.a;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2596
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2599
pipes[pipe_idx].clks_cfg.dispclk_mhz = get_dispclk_calculated(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2600
pipes[pipe_idx].clks_cfg.dppclk_mhz = get_dppclk_calculated(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2603
pipes[pipe_idx].clks_cfg.dispclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dispclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2604
pipes[pipe_idx].clks_cfg.dppclk_mhz = context->bw_ctx.dml.soc.clock_limits[0].dppclk_mhz;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2614
context->perf_params.stutter_period_us = context->bw_ctx.dml.vba.StutterPeriod;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2618
context->bw_ctx.dml.soc.fclk_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2621
dcn32_calculate_dlg_params(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2625
context->bw_ctx.dml.soc.dram_clock_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
2630
context->bw_ctx.dml.soc.fclk_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
276
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
282
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
284
enum clock_change_support temp_clock_change_support = vba->DRAMClockChangeSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
290
vba->DRAMClockChangeSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb] = temp_clock_change_support;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
291
context->bw_ctx.dml.soc.dram_clock_change_latency_us =
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
293
dcn32_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel, DC_VALIDATE_MODE_AND_PROGRAMMING);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
297
dcn32_subvp_in_use(dc, context))
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
298
vba->DRAMClockChangeSupport[vlevel][context->bw_ctx.dml.vba.maxMpcComb] = temp_clock_change_support;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
300
if (vlevel < context->bw_ctx.dml.vba.soc.num_states &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
334
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
343
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3433
bool dcn32_allow_subvp_high_refresh_rate(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3447
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3470
if ((context->stream_count == 1 && !pipe->stream->allow_freesync) || context->stream_count > 1) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
348
if (pipe->plane_state && dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3489
double dcn32_determine_max_vratio_prefetch(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3497
if (context->stream_count == 1 && context->stream_status[0].plane_count > 1) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3498
for (i = 0; i < context->stream_status[0].plane_count; i++) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3499
if (context->stream_status[0].plane_states[i]->format == SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr ||
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
350
get_vstartup(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3500
context->stream_status[0].plane_states[i]->format == SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
352
get_vupdate_offset(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3523
void dcn32_assign_fpo_vactive_candidate(struct dc *dc, const struct dc_state *context, struct dc_stream_state **fpo_candidate_stream)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3526
const struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3529
const struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
354
get_vupdate_width(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
356
get_vready_offset(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3560
bool dcn32_find_vactive_pipe(struct dc *dc, const struct dc_state *context, struct dc_stream_state *fpo_candidate_stream, uint32_t vactive_margin_req_us)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3563
const struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3568
const struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3600
void dcn32_override_min_req_memclk(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3603
if ((context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching || dcn32_subvp_in_use(dc, context)) &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3607
if (context->bw_ctx.dml.vba.DRAMSpeed <= dc->clk_mgr->bw_params->clk_table.entries[0].memclk_mhz * 16 &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3609
context->bw_ctx.dml.vba.DRAMSpeed = dc->clk_mgr->bw_params->clk_table.entries[1].memclk_mhz * 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
3610
context->bw_ctx.bw.dcn.clk.dramclk_khz = context->bw_ctx.dml.vba.DRAMSpeed * 1000 / 16;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
467
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
478
unsigned int vlevel = context->bw_ctx.dml.vba.VoltageLevel;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
479
unsigned int dcfclk = context->bw_ctx.dml.vba.DCFCLKState[vlevel][context->bw_ctx.dml.vba.maxMpcComb];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
480
unsigned int socclk = context->bw_ctx.dml.vba.SOCCLKPerState[vlevel];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
481
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
488
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
515
phantom_vactive = get_subviewport_lines_needed_in_mall(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx) +
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
527
phantom_bp = get_vstartup(&context->bw_ctx.dml, pipes, pipe_cnt, pipe_idx);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
558
static unsigned int dcn32_get_num_free_pipes(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
565
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
600
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
606
unsigned int free_pipes = dcn32_get_num_free_pipes(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
607
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
610
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
630
!dc_state_get_stream_cursor_subvp_limit(pipe->stream, context) &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
632
(!dcn32_is_psr_capable(pipe) || (context->stream_count == 1 && dc->caps.dmub_caps.subvp_psr)) &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
633
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_NONE &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
634
(refresh_rate < 120 || dcn32_allow_subvp_high_refresh_rate(dc, context, pipe)) &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
644
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
680
static bool dcn32_enough_pipes_for_subvp(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
687
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
691
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_NONE) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
703
free_pipes = dcn32_get_num_free_pipes(dc, context);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
726
static bool subvp_subvp_schedulable(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
737
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
743
phantom = dc_state_get_paired_subvp_stream(context, pipe->stream);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
745
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_MAIN) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
797
static bool subvp_drr_schedulable(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
819
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
828
if (dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_MAIN) {
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
836
drr_pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
843
if (dc_state_get_pipe_subvp_type(context, drr_pipe) == SUBVP_NONE && drr_pipe->stream->ignore_msa_timing_param &&
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
850
phantom_stream = dc_state_get_paired_subvp_stream(context, pipe->stream);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
898
static bool subvp_vblank_schedulable(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
925
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
926
pipe_mall_type = dc_state_get_pipe_subvp_type(context, pipe);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
944
phantom_stream = dc_state_get_paired_subvp_stream(context, subvp_pipe->stream);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
947
vblank_timing = &context->res_ctx.pipe_ctx[vblank_index].stream->timing;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
986
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
995
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.h
35
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.h
40
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.h
48
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.h
54
void dcn32_calculate_wm_and_dlg_fpu(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.h
62
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.h
72
void dcn32_assign_fpo_vactive_candidate(struct dc *dc, const struct dc_state *context, struct dc_stream_state **fpo_candidate_stream);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.h
74
bool dcn32_find_vactive_pipe(struct dc *dc, const struct dc_state *context, struct dc_stream_state *fpo_candidate_stream, uint32_t vactive_margin_req);
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.h
76
void dcn32_override_min_req_memclk(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
438
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
443
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
450
dcn31_populate_dml_pipes_from_context(dc, context, pipes,
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
530
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 384;/*per guide*/
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
544
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 192;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
547
} else if (context->stream_count >=
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
550
context->bw_ctx.dml.ip.det_buffer_size_kbytes =
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
552
} else if (context->stream_count >= 3 && upscaled) {
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
553
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 192;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
557
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
568
context->bw_ctx.dml.vba.ODMCombinePolicy =
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
578
void dcn35_decide_zstate_support(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
587
if (context->res_ctx.pipe_ctx[i].plane_state)
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
591
if (context->stream_count == 0 || plane_count == 0) {
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
593
} else if (context->stream_count == 1 && context->streams[0]->signal == SIGNAL_TYPE_EDP) {
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
594
struct dc_link *link = context->streams[0]->sink->link;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
601
bool allow_z8 = context->bw_ctx.dml.vba.StutterPeriod > (double)minmum_z8_residency;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
604
bool allow_z10 = context->bw_ctx.dml.vba.StutterPeriod > (double)minmum_z10_residency;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
617
(int)context->bw_ctx.dml.vba.StutterPeriod);
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.c
619
context->bw_ctx.bw.dcn.clk.zstate_support = support;
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.h
38
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn35/dcn35_fpu.h
42
void dcn35_decide_zstate_support(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
471
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
476
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
483
dcn31_populate_dml_pipes_from_context(dc, context, pipes,
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
563
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 384;/*per guide*/
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
577
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 192;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
580
} else if (context->stream_count >=
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
583
context->bw_ctx.dml.ip.det_buffer_size_kbytes =
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
585
} else if (context->stream_count >= 3 && upscaled) {
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
586
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 192;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
590
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
601
context->bw_ctx.dml.vba.ODMCombinePolicy =
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
611
void dcn351_decide_zstate_support(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
617
if (context->res_ctx.pipe_ctx[i].plane_state)
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
622
if (context->stream_count == 0 || plane_count == 0) {
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
624
} else if (context->stream_count == 1 && context->streams[0]->signal == SIGNAL_TYPE_EDP) {
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
625
struct dc_link *link = context->streams[0]->sink->link;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
632
bool allow_z8 = context->bw_ctx.dml.vba.StutterPeriod > (double)minmum_z8_residency;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.c
638
context->bw_ctx.bw.dcn.clk.zstate_support = support;
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.h
13
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml/dcn351/dcn351_fpu.h
17
void dcn351_decide_zstate_support(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
469
const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
477
const struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
498
const struct dc_state *context, unsigned int stream_index)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
500
const struct scaler_data *scaler_data = get_scaler_data_for_plane(dml_ctx, plane_state, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
501
struct dc_stream_state *stream = context->streams[stream_index];
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
644
static bool dml21_wrapper_get_plane_id(const struct dc_state *context, unsigned int stream_id, const struct dc_plane_state *plane, unsigned int *plane_id)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
651
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
652
if (context->streams[i]->stream_id == stream_id) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
653
for (j = 0; j < context->stream_status[i].plane_count; j++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
654
if (context->stream_status[i].plane_states[j] == plane) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
681
const struct dc_plane_state *plane, const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
687
if (!dml21_wrapper_get_plane_id(context, stream_id, plane, &plane_id)) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
727
bool dml21_map_dc_state_into_dml_display_cfg(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
747
for (stream_index = 0; stream_index < context->stream_count; stream_index++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
748
disp_cfg_stream_location = map_stream_to_dml21_display_cfg(dml_ctx, context->streams[stream_index]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
754
populate_dml21_timing_config_from_stream_state(&dml_dispcfg->stream_descriptors[disp_cfg_stream_location].timing, context->streams[stream_index], &context->res_ctx.pipe_ctx[stream_index], dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
755
populate_dml21_output_config_from_stream_state(&dml_dispcfg->stream_descriptors[disp_cfg_stream_location].output, context->streams[stream_index], &context->res_ctx.pipe_ctx[stream_index]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
756
populate_dml21_stream_overrides_from_stream_state(&dml_dispcfg->stream_descriptors[disp_cfg_stream_location], context->streams[stream_index], &context->stream_status[stream_index]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
762
dml_ctx->v21.dml_to_dc_pipe_mapping.disp_cfg_to_stream_id[disp_cfg_stream_location] = context->streams[stream_index]->stream_id;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
765
if (context->stream_status[stream_index].plane_count == 0) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
767
populate_dml21_dummy_surface_cfg(&dml_dispcfg->plane_descriptors[disp_cfg_plane_location].surface, context->streams[stream_index]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
768
populate_dml21_dummy_plane_cfg(&dml_dispcfg->plane_descriptors[disp_cfg_plane_location], context->streams[stream_index]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
771
for (plane_index = 0; plane_index < context->stream_status[stream_index].plane_count; plane_index++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
772
disp_cfg_plane_location = map_plane_to_dml21_display_cfg(dml_ctx, context->streams[stream_index]->stream_id, context->stream_status[stream_index].plane_states[plane_index], context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
779
populate_dml21_surface_config_from_plane_state(in_dc, &dml_dispcfg->plane_descriptors[disp_cfg_plane_location].surface, context->stream_status[stream_index].plane_states[plane_index]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
780
populate_dml21_plane_config_from_plane_state(dml_ctx, &dml_dispcfg->plane_descriptors[disp_cfg_plane_location], context->stream_status[stream_index].plane_states[plane_index], context, stream_index);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
783
if (dml21_wrapper_get_plane_id(context, context->streams[stream_index]->stream_id, context->stream_status[stream_index].plane_states[plane_index], &dml_ctx->v21.dml_to_dc_pipe_mapping.disp_cfg_to_plane_id[disp_cfg_plane_location]))
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
804
void dml21_copy_clocks_to_dc_state(struct dml2_context *in_ctx, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
807
context->bw_ctx.bw.dcn.clk.dispclk_khz = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.dispclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
808
context->bw_ctx.bw.dcn.clk.dcfclk_khz = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.active.dcfclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
809
context->bw_ctx.bw.dcn.clk.dramclk_khz = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.active.uclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
810
context->bw_ctx.bw.dcn.clk.fclk_khz = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.active.fclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
811
context->bw_ctx.bw.dcn.clk.idle_dramclk_khz = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.idle.uclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
812
context->bw_ctx.bw.dcn.clk.idle_fclk_khz = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.idle.fclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
813
context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.deepsleep_dcfclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
814
context->bw_ctx.bw.dcn.clk.fclk_p_state_change_support = in_ctx->v21.mode_programming.programming->fclk_pstate_supported;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
815
context->bw_ctx.bw.dcn.clk.p_state_change_support = in_ctx->v21.mode_programming.programming->uclk_pstate_supported;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
816
context->bw_ctx.bw.dcn.clk.dtbclk_en = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.dtbrefclk_khz > 0;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
817
context->bw_ctx.bw.dcn.clk.ref_dtbclk_khz = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.dtbrefclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
818
context->bw_ctx.bw.dcn.clk.socclk_khz = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.socclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
819
context->bw_ctx.bw.dcn.clk.subvp_prefetch_dramclk_khz = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.svp_prefetch_no_throttle.uclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
820
context->bw_ctx.bw.dcn.clk.subvp_prefetch_fclk_khz = in_ctx->v21.mode_programming.programming->min_clocks.dcn4x.svp_prefetch_no_throttle.fclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
821
context->bw_ctx.bw.dcn.clk.stutter_efficiency.base_efficiency = in_ctx->v21.mode_programming.programming->stutter.base_percent_efficiency;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
822
context->bw_ctx.bw.dcn.clk.stutter_efficiency.low_power_efficiency = in_ctx->v21.mode_programming.programming->stutter.low_power_percent_efficiency;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.c
882
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.h
21
bool dml21_map_dc_state_into_dml_display_cfg(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.h
22
void dml21_copy_clocks_to_dc_state(struct dml2_context *in_ctx, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.h
25
void dml21_get_pipe_mcache_config(struct dc_state *context, struct pipe_ctx *pipe_ctx, struct dml2_per_plane_programming *pln_prog, struct dml2_pipe_configuration_descriptor *mcache_pipe_config);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_translation_helper.h
27
unsigned int map_plane_to_dml21_display_cfg(const struct dml2_context *dml_ctx, unsigned int stream_id, const struct dc_plane_state *plane, const struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
108
dc_main_stream = dml_ctx->config.callbacks.get_stream_from_id(context, main_stream_id);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
109
dc_main_stream_status = dml_ctx->config.callbacks.get_stream_status(context, dc_main_stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
118
num_pipes = dml_ctx->config.callbacks.get_dpp_pipes_for_plane(dc_main_plane, &context->res_ctx, dc_main_pipes);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
121
struct pipe_ctx *otg_master_pipe = dml_ctx->config.callbacks.get_otg_master_for_stream(&context->res_ctx, dc_main_stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
123
num_pipes = dml_ctx->config.callbacks.get_opp_heads_for_otg_master(otg_master_pipe, &context->res_ctx, dc_main_pipes);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
127
dc_phantom_stream = dml_ctx->config.svp_pstate.callbacks.get_paired_subvp_stream(context, dc_main_stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
129
dc_phantom_stream_status = dml_ctx->config.callbacks.get_stream_status(context, dc_phantom_stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
137
dml_ctx->config.callbacks.get_dpp_pipes_for_plane(dc_phantom_plane, &context->res_ctx, dc_phantom_pipes);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
146
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
152
if (dml_ctx->config.svp_pstate.callbacks.get_pipe_subvp_type(context, pipe_ctx) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
162
void dml21_populate_mall_allocation_size(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
175
if (in_ctx->config.svp_pstate.callbacks.get_pipe_subvp_type(context, dc_pipe) != SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
177
context->bw_ctx.bw.dcn.mall_ss_size_bytes += dc_pipe->surface_size_in_mall_bytes;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
181
context->bw_ctx.bw.dcn.mall_subvp_size_bytes += dc_pipe->surface_size_in_mall_bytes;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
196
static bool is_sub_vp_enabled(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
201
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
203
if (pipe_ctx->stream && dc_state_get_paired_subvp_stream(context, pipe_ctx->stream) &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
204
dc_state_get_pipe_subvp_type(context, pipe_ctx) == SUBVP_MAIN) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
212
void dml21_program_dc_pipe(struct dml2_context *dml_ctx, struct dc_state *context, struct pipe_ctx *pipe_ctx, struct dml2_per_plane_programming *pln_prog,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
217
dml21_pipe_populate_global_sync(dml_ctx, context, pipe_ctx, stream_prog);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
220
if (dml_ctx->config.svp_pstate.callbacks.get_pipe_subvp_type(context, pipe_ctx) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
231
if (context->bw_ctx.bw.dcn.clk.dppclk_khz < pipe_ctx->plane_res.bw.dppclk_khz)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
232
context->bw_ctx.bw.dcn.clk.dppclk_khz = pipe_ctx->plane_res.bw.dppclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
234
dml21_populate_mall_allocation_size(context, dml_ctx, pln_prog, pipe_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
236
bool sub_vp_enabled = is_sub_vp_enabled(pipe_ctx->stream->ctx->dc, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
243
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
250
phantom_stream = dml_ctx->config.svp_pstate.callbacks.create_phantom_stream(dc, context, main_stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
273
dml_ctx->config.svp_pstate.callbacks.add_phantom_stream(dc, context, phantom_stream, main_stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
280
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
287
phantom_plane = dml_ctx->config.svp_pstate.callbacks.create_phantom_plane(dc, context, main_plane);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
315
dml_ctx->config.svp_pstate.callbacks.add_phantom_plane(dc, phantom_stream, phantom_plane, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
320
void dml21_handle_phantom_streams_planes(const struct dc *dc, struct dc_state *context, struct dml2_context *dml_ctx)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
334
main_stream = dml_ctx->config.callbacks.get_stream_from_id(context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
337
main_stream_status = dml_ctx->config.callbacks.get_stream_status(context, main_stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
345
context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
362
context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
374
dml2_map_dc_pipes(dml_ctx, context, NULL, &dml_ctx->v21.dml_to_dc_pipe_mapping, dc->current_state);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
378
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
385
memset(&context->bw_ctx.bw.dcn.fams2_stream_base_params, 0, sizeof(union dmub_cmd_fams2_config) * DML2_MAX_PLANES);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
386
memset(&context->bw_ctx.bw.dcn.fams2_stream_sub_params, 0, sizeof(union dmub_cmd_fams2_config) * DML2_MAX_PLANES);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
387
memset(&context->bw_ctx.bw.dcn.fams2_stream_sub_params_v2, 0, sizeof(union dmub_fams2_stream_static_sub_state_v2) * DML2_MAX_PLANES);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
388
memset(&context->bw_ctx.bw.dcn.fams2_global_config, 0, sizeof(struct dmub_cmd_fams2_global_config));
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
392
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
398
union dmub_cmd_fams2_config *static_base_state = &context->bw_ctx.bw.dcn.fams2_stream_base_params[num_fams2_streams];
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
399
union dmub_cmd_fams2_config *static_sub_state = &context->bw_ctx.bw.dcn.fams2_stream_sub_params[num_fams2_streams];
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
401
struct dc_stream_state *stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
403
if (context->stream_status[i].plane_count == 0 ||
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
404
dml_ctx->config.svp_pstate.callbacks.get_stream_subvp_type(context, stream) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
421
memcpy(&context->bw_ctx.bw.dcn.fams2_stream_sub_params_v2[num_fams2_streams],
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
436
static_base_state->stream_v1.base.num_planes = context->stream_status[i].plane_count;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
437
static_base_state->stream_v1.base.otg_inst = context->stream_status[i].primary_otg_inst;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
440
for (j = 0; j < context->stream_status[i].plane_count; j++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
442
if (context->res_ctx.pipe_ctx[k].stream &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
443
context->res_ctx.pipe_ctx[k].stream->stream_id == stream->stream_id &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
444
context->res_ctx.pipe_ctx[k].plane_state == context->stream_status[i].plane_states[j]) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
460
phantom_stream = dml_ctx->config.svp_pstate.callbacks.get_paired_subvp_stream(context, stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
464
phantom_status = dml_ctx->config.callbacks.get_stream_status(context, phantom_stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
479
if (context->res_ctx.pipe_ctx[k].stream &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
480
context->res_ctx.pipe_ctx[k].stream->stream_id == phantom_stream->stream_id &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
481
context->res_ctx.pipe_ctx[k].plane_state == phantom_status->plane_states[j]) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
504
memcpy(&context->bw_ctx.bw.dcn.fams2_global_config,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
508
context->bw_ctx.bw.dcn.fams2_global_config.num_streams = num_fams2_streams;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
511
context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching = context->bw_ctx.bw.dcn.fams2_global_config.features.bits.enable;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.c
85
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.h
22
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.h
25
void dml21_populate_mall_allocation_size(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.h
34
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.h
40
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.h
44
void dml21_handle_phantom_streams_planes(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_utils.h
47
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
101
memcpy(&context->bw_ctx.bw.dcn.arb_regs, &in_ctx->v21.mode_programming.programming->global_regs.arb_regs, sizeof(struct dml2_display_arb_regs));
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
104
context->bw_ctx.bw.dcn.compbuf_size_kb = (int)in_ctx->v21.mode_programming.programming->global_regs.arb_regs.compbuf_size * 64;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
106
context->bw_ctx.bw.dcn.mall_ss_size_bytes = 0;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
107
context->bw_ctx.bw.dcn.mall_ss_psr_active_size_bytes = 0;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
108
context->bw_ctx.bw.dcn.mall_subvp_size_bytes = 0;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
125
num_pipes = dml21_find_dc_pipes_for_plane(dc, context, in_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
132
dml21_program_dc_pipe(in_ctx, context, dc_main_pipes[dc_pipe_index], pln_prog, stream_prog);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
135
dml21_program_dc_pipe(in_ctx, context, dc_phantom_pipes[dc_pipe_index], pln_prog, stream_prog);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
140
memcpy(&context->bw_ctx.bw.dcn.mcache_allocations[dml_prog_idx], &pln_prog->mcache_allocation, sizeof(struct dml2_mcache_surface_allocation));
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
142
memcpy(&context->bw_ctx.bw.dcn.mcache_allocations[dml_phantom_prog_idx],
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
151
context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
152
context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
154
context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz =
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
157
context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dispclk.clk_values_khz[0] * 1000;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
161
context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz =
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
164
context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = in_ctx->v21.dml_init.soc_bb.clk_table.dppclk.clk_values_khz[0] * 1000;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
169
context->bw_ctx.bw.dcn.clk.num_ways = dc->res_pool->funcs->calculate_mall_ways_from_bytes(dc, context->bw_ctx.bw.dcn.mall_subvp_size_bytes);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
171
context->bw_ctx.bw.dcn.clk.num_ways = 0;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
175
static void dml21_prepare_mcache_params(struct dml2_context *dml_ctx, struct dc_state *context, struct dc_mcache_params *mcache_params)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
181
for (stream_idx = 0; stream_idx < context->stream_count; stream_idx++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
182
for (plane_idx = 0; plane_idx < context->stream_status[stream_idx].plane_count; plane_idx++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
183
dml_prog_idx = map_plane_to_dml21_display_cfg(dml_ctx, context->streams[stream_idx]->stream_id, context->stream_status[stream_idx].plane_states[plane_idx], context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
204
static bool dml21_mode_check_and_programming(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
214
if (!context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
217
if (context->stream_count == 0) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
218
dml21_build_fams2_programming(in_dc, context, dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
223
dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
224
dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
227
result = dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
240
dml2_map_dc_pipes(dml_ctx, context, NULL, &dml_ctx->v21.dml_to_dc_pipe_mapping, in_dc->current_state);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
242
dml21_handle_phantom_streams_planes(in_dc, context, dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
246
dml21_prepare_mcache_params(dml_ctx, context, mcache_params);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
249
dml_ctx->config.callbacks.allocate_mcache(context, mcache_params);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
255
dml21_calculate_rq_and_dlg_params(in_dc, context, &context->res_ctx, dml_ctx, in_dc->res_pool->pipe_count);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
256
dml21_copy_clocks_to_dc_state(dml_ctx, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
257
dml21_extract_watermark_sets(in_dc, &context->bw_ctx.bw.dcn.watermarks, dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
258
dml21_build_fams2_programming(in_dc, context, dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
264
static bool dml21_check_mode_support(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
274
if (!context || context->stream_count == 0)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
278
dml_ctx->config.svp_pstate.callbacks.remove_phantom_streams_and_planes(in_dc, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
279
dml_ctx->config.svp_pstate.callbacks.release_phantom_streams_and_planes(in_dc, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
282
dml21_map_dc_state_into_dml_display_cfg(in_dc, context, dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
293
bool dml21_validate(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
300
out = dml21_check_mode_support(in_dc, context, dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
302
out = dml21_mode_check_and_programming(in_dc, context, dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
307
void dml21_prepare_mcache_programming(struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx)
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
318
if (context->stream_count == 0) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
335
mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_prog_idx];
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
343
num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
351
dml21_get_pipe_mcache_config(context, dc_main_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
362
mcache_config->mcache_allocation = &context->bw_ctx.bw.dcn.mcache_allocations[dml_phantom_prog_idx];
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
368
dml21_get_pipe_mcache_config(context, dc_phantom_pipes[dc_pipe_index], pln_prog, &mcache_config->pipe_configurations[dc_pipe_index]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
383
num_pipes = dml21_find_dc_pipes_for_plane(in_dc, context, dml_ctx, dc_main_pipes, dc_phantom_pipes, dml_prog_idx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
87
static void dml21_calculate_rq_and_dlg_params(const struct dc *dc, struct dc_state *context, struct resource_context *out_new_hw_state,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.c
98
context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.h
62
bool dml21_validate(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx,
drivers/gpu/drm/amd/display/dc/dml2_0/dml21/dml21_wrapper.h
66
void dml21_prepare_mcache_programming(struct dc *in_dc, struct dc_state *context, struct dml2_context *dml_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
102
static void merge_pipes_for_subvp(struct dml2_context *ctx, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
108
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
126
ctx->config.svp_pstate.callbacks.release_dsc(&context->res_ctx, ctx->config.svp_pstate.callbacks.dc->res_pool, &pipe->stream_res.dsc);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
147
static bool all_pipes_have_stream_and_plane(struct dml2_context *ctx, const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
152
const struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
163
static bool mpo_in_use(const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
167
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
168
if (context->stream_status[i].plane_count > 1)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
229
static bool assign_subvp_pipe(struct dml2_context *ctx, struct dc_state *context, unsigned int *index)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
236
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
239
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
257
ctx->config.svp_pstate.callbacks.get_pipe_subvp_type(context, pipe) == SUBVP_NONE && refresh_rate < 120 &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
264
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
33
unsigned int dml2_helper_calculate_num_ways_for_subvp(struct dml2_context *ctx, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
358
static bool subvp_subvp_schedulable(struct dml2_context *ctx, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
369
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
376
ctx->config.svp_pstate.callbacks.get_pipe_subvp_type(context, pipe) == SUBVP_MAIN) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
377
phantom = ctx->config.svp_pstate.callbacks.get_paired_subvp_stream(context, pipe->stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
432
bool dml2_svp_drr_schedulable(struct dml2_context *ctx, struct dc_state *context, struct dc_crtc_timing *drr_timing)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
450
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
458
if (ctx->config.svp_pstate.callbacks.get_pipe_subvp_type(context, pipe) == SUBVP_MAIN)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
462
phantom_stream = ctx->config.svp_pstate.callbacks.get_paired_subvp_stream(context, pipe->stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
508
static bool subvp_vblank_schedulable(struct dml2_context *ctx, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
51
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
535
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
536
pipe_mall_type = ctx->config.svp_pstate.callbacks.get_pipe_subvp_type(context, pipe);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
55
ctx->config.svp_pstate.callbacks.get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
553
if (found && context->res_ctx.pipe_ctx[vblank_index].stream->ignore_msa_timing_param) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
555
schedulable = dml2_svp_drr_schedulable(ctx, context, &context->res_ctx.pipe_ctx[vblank_index].stream->timing);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
557
phantom_stream = ctx->config.svp_pstate.callbacks.get_paired_subvp_stream(context, subvp_pipe->stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
560
vblank_timing = &context->res_ctx.pipe_ctx[vblank_index].stream->timing;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
602
bool dml2_svp_validate_static_schedulability(struct dml2_context *ctx, struct dc_state *context, enum dml_dram_clock_change_support pstate_change_type)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
605
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
611
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
612
enum mall_stream_type pipe_mall_type = ctx->config.svp_pstate.callbacks.get_pipe_subvp_type(context, pipe);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
633
schedulable = subvp_subvp_schedulable(ctx, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
640
schedulable = subvp_vblank_schedulable(ctx, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
811
static bool remove_all_phantom_planes_for_stream(struct dml2_context *ctx, struct dc_stream_state *stream, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
817
for (i = 0; i < context->stream_count; i++)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
818
if (context->streams[i] == stream) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
819
stream_status = &context->stream_status[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
833
if (!ctx->config.svp_pstate.callbacks.remove_phantom_plane(ctx->config.svp_pstate.callbacks.dc, stream, del_planes[i], context))
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.c
835
ctx->config.svp_pstate.callbacks.release_phantom_plane(ctx->config.svp_pstate.callbacks.dc, context, del_planes[i]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.h
42
unsigned int dml2_helper_calculate_num_ways_for_subvp(struct dml2_context *ctx, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.h
48
bool dml2_svp_validate_static_schedulability(struct dml2_context *ctx, struct dc_state *context, enum dml_dram_clock_change_support pstate_change_type);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_mall_phantom.h
50
bool dml2_svp_drr_schedulable(struct dml2_context *ctx, struct dc_state *context, struct dc_crtc_timing *drr_timing);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1063
const struct dc_plane_state *in, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1066
struct scaler_data *scaler_data = get_scaler_data_for_plane(in, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1149
static bool get_plane_id(struct dml2_context *dml2, const struct dc_state *context, const struct dc_plane_state *plane,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1158
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1159
if (context->streams[i]->stream_id == stream_id) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1160
for (j = 0; j < context->stream_status[i].plane_count; j++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1161
if (context->stream_status[i].plane_states[j] == plane &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1174
const struct dc_state *context, const struct dml_display_cfg_st *dml_dispcfg, unsigned int stream_id, int plane_index)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1180
if (!get_plane_id(context->bw_ctx.dml2, context, plane, stream_id, plane_index, &plane_id)) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1270
static void dml2_map_hpo_stream_encoder_to_hpo_link_encoder_index(struct dml2_context *dml2, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1282
current_pipe_context = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1293
void map_dc_state_into_dml_display_cfg(struct dml2_context *dml2, struct dc_state *context, struct dml_display_cfg_st *dml_dispcfg)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1315
dml2_populate_pipe_to_plane_index_mapping(dml2, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1316
dml2_map_hpo_stream_encoder_to_hpo_link_encoder_index(dml2, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1318
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1323
if (context->streams[i] == context->res_ctx.pipe_ctx[k].stream) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1324
current_pipe_context = &context->res_ctx.pipe_ctx[k];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1328
disp_cfg_stream_location = map_stream_to_dml_display_cfg(dml2, context->streams[i], dml_dispcfg);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1329
stream_mall_type = dc_state_get_stream_subvp_type(context, context->streams[i]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1336
populate_dml_timing_cfg_from_stream_state(&dml_dispcfg->timing, disp_cfg_stream_location, context->streams[i]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1337
populate_dml_output_cfg_from_stream_state(&dml_dispcfg->output, disp_cfg_stream_location, context->streams[i], current_pipe_context, dml2);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1340
disp_cfg_stream_location, context->streams[i]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1342
switch (context->streams[i]->debug.force_odm_combine_segments) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1353
dml2->v20.scratch.dml_to_dc_pipe_mapping.disp_cfg_to_stream_id[disp_cfg_stream_location] = context->streams[i]->stream_id;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1356
if (context->stream_status[i].plane_count == 0) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1359
populate_dummy_dml_surface_cfg(&dml_dispcfg->surface, disp_cfg_plane_location, context->streams[i]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1361
context->streams[i], &dml2->v20.dml_core_ctx.soc);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1367
for (j = 0; j < context->stream_status[i].plane_count; j++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1369
context->stream_status[i].plane_states[j], context, dml_dispcfg, context->streams[i]->stream_id, j);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1376
populate_dml_surface_cfg_from_plane_state(dml2->v20.dml_core_ctx.project, &dml_dispcfg->surface, disp_cfg_plane_location, context->stream_status[i].plane_states[j]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1379
context->stream_status[i].plane_states[j], context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1396
if (get_plane_id(dml2, context, context->stream_status[i].plane_states[j], context->streams[i]->stream_id, j,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1401
populate_dml_timing_cfg_from_stream_state(&dml_dispcfg->timing, disp_cfg_plane_location, context->streams[i]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1402
populate_dml_output_cfg_from_stream_state(&dml_dispcfg->output, disp_cfg_plane_location, context->streams[i], current_pipe_context, dml2);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1403
switch (context->streams[i]->debug.force_odm_combine_segments) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
1419
dml2->v20.scratch.dml_to_dc_pipe_mapping.disp_cfg_to_stream_id[disp_cfg_plane_location] = context->streams[i]->stream_id;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
982
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
985
struct pipe_ctx *temp_pipe = &context->res_ctx.temp_pipe;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.c
990
const struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_translation_helper.h
37
void map_dc_state_into_dml_display_cfg(struct dml2_context *dml2, struct dc_state *context, struct dml_display_cfg_st *dml_dispcfg);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
169
bool is_dtbclk_required(const struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
174
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
176
if (is_dp2p0_output_encoder(&context->res_ctx.pipe_ctx[i]))
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
182
void dml2_copy_clocks_to_dc_state(struct dml2_dcn_clocks *out_clks, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
184
context->bw_ctx.bw.dcn.clk.dispclk_khz = out_clks->dispclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
185
context->bw_ctx.bw.dcn.clk.dcfclk_khz = out_clks->dcfclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
186
context->bw_ctx.bw.dcn.clk.dramclk_khz = out_clks->uclk_mts / 16;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
187
context->bw_ctx.bw.dcn.clk.fclk_khz = out_clks->fclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
188
context->bw_ctx.bw.dcn.clk.phyclk_khz = out_clks->phyclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
189
context->bw_ctx.bw.dcn.clk.socclk_khz = out_clks->socclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
190
context->bw_ctx.bw.dcn.clk.ref_dtbclk_khz = out_clks->ref_dtbclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
191
context->bw_ctx.bw.dcn.clk.p_state_change_support = out_clks->p_state_supported;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
279
void dml2_calculate_rq_and_dlg_params(const struct dc *dc, struct dc_state *context, struct resource_context *out_new_hw_state, struct dml2_context *in_ctx, unsigned int pipe_cnt)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
285
context->bw_ctx.bw.dcn.clk.dcfclk_deep_sleep_khz = (unsigned int)in_ctx->v20.dml_core_ctx.mp.DCFCLKDeepSleep * 1000;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
286
context->bw_ctx.bw.dcn.clk.dppclk_khz = 0;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
289
context->bw_ctx.bw.dcn.clk.fclk_p_state_change_support = false;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
291
context->bw_ctx.bw.dcn.clk.fclk_p_state_change_support = true;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
293
if (context->bw_ctx.bw.dcn.clk.dispclk_khz < dc->debug.min_disp_clk_khz)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
294
context->bw_ctx.bw.dcn.clk.dispclk_khz = dc->debug.min_disp_clk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
296
context->bw_ctx.bw.dcn.compbuf_size_kb = in_ctx->v20.dml_core_ctx.ip.config_return_buffer_size_in_kbytes;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
299
if (!context->res_ctx.pipe_ctx[dc_pipe_ctx_index].stream)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
305
if (get_plane_id(in_ctx, context, context->res_ctx.pipe_ctx[dc_pipe_ctx_index].plane_state,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
306
context->res_ctx.pipe_ctx[dc_pipe_ctx_index].stream->stream_id,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
307
in_ctx->v20.scratch.dml_to_dc_pipe_mapping.dml_pipe_idx_to_plane_index[context->res_ctx.pipe_ctx[dc_pipe_ctx_index].pipe_idx], &plane_id)) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
310
dml_pipe_idx = dml2_helper_find_dml_pipe_idx_by_stream_id(in_ctx, context->res_ctx.pipe_ctx[dc_pipe_ctx_index].stream->stream_id);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
316
ASSERT(in_ctx->v20.scratch.dml_to_dc_pipe_mapping.dml_pipe_idx_to_stream_id[dml_pipe_idx] == context->res_ctx.pipe_ctx[dc_pipe_ctx_index].stream->stream_id);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
321
populate_pipe_ctx_dlg_params_from_dml(&context->res_ctx.pipe_ctx[dc_pipe_ctx_index], &context->bw_ctx.dml2->v20.dml_core_ctx, dml_pipe_idx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
323
pipe_mall_type = dc_state_get_pipe_subvp_type(context, &context->res_ctx.pipe_ctx[dc_pipe_ctx_index]);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
326
context->res_ctx.pipe_ctx[dc_pipe_ctx_index].det_buffer_size_kb = 0;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
327
context->res_ctx.pipe_ctx[dc_pipe_ctx_index].unbounded_req = false;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
329
context->res_ctx.pipe_ctx[dc_pipe_ctx_index].det_buffer_size_kb = dml_get_det_buffer_size_kbytes(&context->bw_ctx.dml2->v20.dml_core_ctx, dml_pipe_idx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
331
context->res_ctx.pipe_ctx[dc_pipe_ctx_index].unbounded_req = in_ctx->v20.dml_core_ctx.ms.UnboundedRequestEnabledThisState;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
334
context->bw_ctx.bw.dcn.compbuf_size_kb -= context->res_ctx.pipe_ctx[dc_pipe_ctx_index].det_buffer_size_kb;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
335
context->res_ctx.pipe_ctx[dc_pipe_ctx_index].plane_res.bw.dppclk_khz = dml_get_dppclk_calculated(&context->bw_ctx.dml2->v20.dml_core_ctx, dml_pipe_idx) * 1000;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
336
if (context->bw_ctx.bw.dcn.clk.dppclk_khz < context->res_ctx.pipe_ctx[dc_pipe_ctx_index].plane_res.bw.dppclk_khz)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
337
context->bw_ctx.bw.dcn.clk.dppclk_khz = context->res_ctx.pipe_ctx[dc_pipe_ctx_index].plane_res.bw.dppclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
343
context->res_ctx.pipe_ctx[dc_pipe_ctx_index].surface_size_in_mall_bytes = dml_get_surface_size_for_mall(&context->bw_ctx.dml2->v20.dml_core_ctx, dml_pipe_idx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
347
if (context->res_ctx.pipe_ctx[dc_pipe_ctx_index].stream && context->res_ctx.pipe_ctx[dc_pipe_ctx_index].plane_state &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
348
(context->res_ctx.pipe_ctx[dc_pipe_ctx_index].top_pipe == NULL ||
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
349
context->res_ctx.pipe_ctx[dc_pipe_ctx_index].plane_state != context->res_ctx.pipe_ctx[dc_pipe_ctx_index].top_pipe->plane_state) &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
350
context->res_ctx.pipe_ctx[dc_pipe_ctx_index].prev_odm_pipe == NULL) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
353
context->bw_ctx.bw.dcn.mall_ss_size_bytes += context->res_ctx.pipe_ctx[dc_pipe_ctx_index].surface_size_in_mall_bytes;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
356
context->bw_ctx.bw.dcn.mall_subvp_size_bytes += context->res_ctx.pipe_ctx[dc_pipe_ctx_index].surface_size_in_mall_bytes;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
361
context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.dppclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
362
context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.dispclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
364
context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz = in_ctx->v20.dml_core_ctx.states.state_array[in_ctx->v20.scratch.mode_support_params.out_lowest_state_idx].dppclk_mhz
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
366
context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz = in_ctx->v20.dml_core_ctx.states.state_array[in_ctx->v20.scratch.mode_support_params.out_lowest_state_idx].dispclk_mhz
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
370
context->bw_ctx.bw.dcn.clk.bw_dispclk_khz = context->bw_ctx.bw.dcn.clk.max_supported_dispclk_khz;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
371
context->bw_ctx.bw.dcn.clk.bw_dppclk_khz = context->bw_ctx.bw.dcn.clk.max_supported_dppclk_khz ;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
414
void dml2_extract_writeback_wm(struct dc_state *context, struct display_mode_lib_st *dml_core_ctx)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
421
if (context->stream_count != 0) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
422
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
423
if (context->streams[i]->num_wb_info != 0)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
430
bw_writeback = &context->bw_ctx.bw.dcn.bw_writeback;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
431
wb_arb_params = &context->bw_ctx.bw.dcn.bw_writeback.mcif_wb_arb[i];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
440
if (context->res_ctx.pipe_ctx[i].stream->phy_pix_clk != 0) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.c
443
(1000000 << 6) / context->res_ctx.pipe_ctx[i].stream->phy_pix_clk;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.h
122
void dml2_calculate_rq_and_dlg_params(const struct dc *dc, struct dc_state *context, struct resource_context *out_new_hw_state, struct dml2_context *in_ctx, unsigned int pipe_cnt);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.h
41
void dml2_copy_clocks_to_dc_state(struct dml2_dcn_clocks *out_clks, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.h
43
void dml2_extract_writeback_wm(struct dc_state *context, struct display_mode_lib_st *dml_core_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.h
45
bool is_dtbclk_required(const struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.h
71
void dml2_dc_construct_pipes(struct dc_state *context, struct dml_mode_support_info_st *dml_mode_support_st,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.h
84
bool dml2_predict_pipe_split(struct dc_state *context, display_pipe_params_st pipe, int index);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_utils.h
96
enum dc_status dml2_build_mapped_resource(const struct dc *dc, struct dc_state *context, struct dc_stream_state *stream);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.c
11
bool dml2_validate(const struct dc *in_dc, struct dc_state *context, struct dml2_context *dml2,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.c
22
out = dml21_validate(in_dc, context, dml2, validate_mode);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.c
30
out = dml2_validate_only(context, validate_mode);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.c
32
out = dml2_validate_and_build_resource(in_dc, context, validate_mode);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.h
111
bool (*allocate_mcache)(struct dc_state *context, const struct dc_mcache_params *mcache_params);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.h
127
bool (*add_phantom_plane)(const struct dc *dc, struct dc_stream_state *stream, struct dc_plane_state *plane_state, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.h
131
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.h
296
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.h
308
void dml2_prepare_mcache_programming(struct dc *in_dc, struct dc_state *context, struct dml2_context *dml2);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.h
311
bool dml2_validate_only(struct dc_state *context, enum dc_validate_mode validate_mode);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.h
312
bool dml2_validate_and_build_resource(const struct dc *in_dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.h
76
bool (*can_support_mclk_switch_using_fw_based_vblank_stretch)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
357
static bool call_dml_mode_support_and_programming(struct dc_state *context, enum dc_validate_mode validate_mode)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
364
if (!context)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
367
struct dml2_context *dml2 = context->bw_ctx.dml2;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
370
if (!context->streams[0]->sink->link->dc->caps.is_apu) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
371
min_state_for_g6_temp_read = calculate_lowest_supported_state_for_temp_read(dml2, context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
377
result = dml_mode_support_wrapper(dml2, context, validate_mode);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
382
if (!context->streams[0]->sink->link->dc->caps.is_apu) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
390
if (!context->streams[0]->sink->link->dc->caps.is_apu) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
399
bool dml2_validate_and_build_resource(const struct dc *in_dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
402
struct dml2_context *dml2 = context->bw_ctx.dml2;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
409
if (context->stream_count == 0) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
420
context->bw_ctx.bw.dcn.clk.dtbclk_en = false;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
421
dml2_copy_clocks_to_dc_state(&out_clks, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
436
result = call_dml_mode_support_and_programming(context, validate_mode);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
442
dml2_map_dc_pipes(dml2, context, &s->cur_display_config, &s->dml_to_dc_pipe_mapping, in_dc->current_state);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
450
need_recalculation = dml2_verify_det_buffer_configuration(dml2, context, &dml2->det_helper_scratch);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
453
call_dml_mode_support_and_programming(context, validate_mode);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
455
dml2_map_dc_pipes(dml2, context, &s->cur_display_config, &s->dml_to_dc_pipe_mapping, in_dc->current_state);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
457
need_recalculation = dml2_verify_det_buffer_configuration(dml2, context, &dml2->det_helper_scratch);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
478
context->bw_ctx.bw.dcn.clk.dtbclk_en = is_dtbclk_required(in_dc, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
482
dml2_calculate_rq_and_dlg_params(in_dc, context, &context->res_ctx, dml2, in_dc->res_pool->pipe_count);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
485
dml2_copy_clocks_to_dc_state(&out_clks, context);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
486
dml2_extract_watermark_set(&context->bw_ctx.bw.dcn.watermarks.a, &dml2->v20.dml_core_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
487
dml2_extract_watermark_set(&context->bw_ctx.bw.dcn.watermarks.b, &dml2->v20.dml_core_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
488
if (context->streams[0]->sink->link->dc->caps.is_apu)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
489
dml2_extract_watermark_set(&context->bw_ctx.bw.dcn.watermarks.c, &dml2->v20.dml_core_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
491
memcpy(&context->bw_ctx.bw.dcn.watermarks.c, &dml2->v20.g6_temp_read_watermark_set, sizeof(context->bw_ctx.bw.dcn.watermarks.c));
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
492
dml2_extract_watermark_set(&context->bw_ctx.bw.dcn.watermarks.d, &dml2->v20.dml_core_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
493
dml2_extract_writeback_wm(context, &dml2->v20.dml_core_ctx);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
495
context->bw_ctx.dml.vba.StutterPeriod = context->bw_ctx.dml2->v20.dml_core_ctx.mp.StutterPeriod;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
497
cstate_enter_plus_exit_z8_ns = context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_z8_ns;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
499
if (context->bw_ctx.dml.vba.StutterPeriod < in_dc->debug.minimum_z8_residency_time &&
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
503
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.cstate_enter_plus_exit_z8_ns = cstate_enter_plus_exit_z8_ns;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
509
bool dml2_validate_only(struct dc_state *context, enum dc_validate_mode validate_mode)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
514
if (!context || context->stream_count == 0)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
517
dml2 = context->bw_ctx.dml2;
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
527
map_dc_state_into_dml_display_cfg(dml2, context, &dml2->v20.scratch.cur_display_config);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
571
void dml2_prepare_mcache_programming(struct dc *in_dc, struct dc_state *context, struct dml2_context *dml2)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper_fpu.c
574
dml21_prepare_mcache_programming(in_dc, context, dml2);
drivers/gpu/drm/amd/display/dc/hwss/dce100/dce100_hwseq.c
110
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce100/dce100_hwseq.c
112
dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool);
drivers/gpu/drm/amd/display/dc/hwss/dce100/dce100_hwseq.c
116
context,
drivers/gpu/drm/amd/display/dc/hwss/dce100/dce100_hwseq.c
122
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce100/dce100_hwseq.c
124
dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool);
drivers/gpu/drm/amd/display/dc/hwss/dce100/dce100_hwseq.c
128
context,
drivers/gpu/drm/amd/display/dc/hwss/dce100/dce100_hwseq.h
39
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dce100/dce100_hwseq.h
43
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1505
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1570
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1590
build_audio_output(context, pipe_ctx, &audio_output);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1610
check_syncd_pipes_for_disabled_master_pipe(dc, context, pipe_ctx->pipe_idx);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1646
hws->funcs.enable_stream_timing(pipe_ctx, context, dc);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1688
dc->link_srv->set_dpms_on(context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1697
hws->funcs.enable_stream_timing(pipe_ctx, context, dc);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1706
if (dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1808
static void get_edp_streams(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1815
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1816
if (context->streams[i]->signal == SIGNAL_TYPE_EDP) {
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1817
edp_streams[*edp_stream_num] = context->streams[i];
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1895
void dce110_enable_accelerated_mode(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1920
hws->funcs.init_pipes(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1922
get_edp_streams(context, edp_streams, &edp_stream_num);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1950
pipe_ctx = resource_get_otg_master_for_stream(&context->res_ctx, edp_stream);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1975
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
1976
if (context->streams[i]->apply_seamless_boot_optimization) {
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2043
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2049
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2059
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[num_pipes],
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2060
context->bw_ctx.bw.dce.stutter_exit_wm_ns[num_pipes],
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2061
context->bw_ctx.bw.dce.stutter_entry_wm_ns[num_pipes],
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2062
context->bw_ctx.bw.dce.urgent_wm_ns[num_pipes],
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2068
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[num_pipes],
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2069
context->bw_ctx.bw.dce.stutter_exit_wm_ns[num_pipes],
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2070
context->bw_ctx.bw.dce.urgent_wm_ns[num_pipes],
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2194
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2199
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2210
if (context->stream_count != 1)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2260
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2264
if (should_enable_fbc(dc, context, &pipe_idx)) {
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2268
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2284
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2293
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2342
if (old_clk && 0 == resource_get_clock_source_reference(&context->res_ctx,
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2356
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2380
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2392
build_audio_output(context, pipe_ctx, &audio_output);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2418
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2432
build_audio_output(context, pipe_ctx, &audio_output);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2447
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2454
bool is_hpo_acquired = resource_is_hpo_acquired(context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2458
reset_syncd_pipes_from_disabled_pipes(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2462
hws->funcs.reset_hw_ctx_wrap(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2465
if (context->stream_count <= 0)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2475
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2495
dce110_setup_audio_dto(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2504
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2522
context,
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2530
hws->funcs.resync_fifo_dccg_dio(hws, dc, context, i);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2797
static void dce110_init_pipes(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2883
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2887
dce110_set_safe_displaymarks(&context->res_ctx, dc->res_pool);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2891
context,
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2897
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2901
dce110_set_displaymarks(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2906
context,
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
3024
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
3035
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
3046
context->stream_count);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
3057
enable_fbc(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
3062
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.h
123
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.h
40
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.h
44
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.h
62
void dce110_enable_accelerated_mode(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.h
72
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.h
76
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dce60/dce60_hwseq.c
116
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce60/dce60_hwseq.c
120
if (dce60_should_enable_fbc(dc, context, &pipe_idx)) {
drivers/gpu/drm/amd/display/dc/hwss/dce60/dce60_hwseq.c
124
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
drivers/gpu/drm/amd/display/dc/hwss/dce60/dce60_hwseq.c
385
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dce60/dce60_hwseq.c
396
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dce60/dce60_hwseq.c
407
context->stream_count);
drivers/gpu/drm/amd/display/dc/hwss/dce60/dce60_hwseq.c
418
dce60_enable_fbc(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dce60/dce60_hwseq.c
51
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dce60/dce60_hwseq.c
56
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/hwss/dce60/dce60_hwseq.c
67
if (context->stream_count != 1)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1181
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1285
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1570
void dcn10_init_pipes(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1578
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1579
if (context->streams[i]->apply_seamless_boot_optimization) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1587
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1613
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1630
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1644
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1685
hws->funcs.plane_atomic_disconnect(dc, context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1690
dc->hwss.disable_plane(dc, context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1949
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
1958
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
221
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
231
pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
242
dc_state_get_pipe_subvp_type(context, pipe_ctx) == SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
2733
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
2980
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3021
if (context->bw_ctx.bw.dcn.clk.dispclk_khz <
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3026
context->bw_ctx.bw.dcn.clk.dppclk_khz <=
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3207
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3232
dcn10_enable_plane(dc, pipe_ctx, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3234
dcn10_update_dchubp_dpp(dc, pipe_ctx, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3254
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3261
pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3287
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3292
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3299
if (context->stream_status[i].plane_count == 0)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3305
if (context->res_ctx.pipe_ctx[i].update_flags.bits.disable)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3309
if (context->res_ctx.pipe_ctx[i].update_flags.bits.disable) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3310
dc->hwss.optimize_bandwidth(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3318
static void dcn10_stereo_hw_frame_pack_wa(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3322
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3323
if (context->streams[i]->timing.timing_3d_format
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3336
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3345
if (context->stream_count == 0)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3346
context->bw_ctx.bw.dcn.clk.phyclk_khz = 0;
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3350
context,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3354
&context->bw_ctx.bw.dcn.watermarks,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3357
dcn10_stereo_hw_frame_pack_wa(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3374
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3383
if (context->stream_count == 0)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3384
context->bw_ctx.bw.dcn.clk.phyclk_khz = 0;
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3388
context,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3392
&context->bw_ctx.bw.dcn.watermarks,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
3396
dcn10_stereo_hw_frame_pack_wa(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
4063
struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
4065
struct dc_clocks *current_clocks = &context->bw_ctx.bw.dcn.clk;
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
4071
context, clock_type, &clock_cfg);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
4092
context, true);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
4101
struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
4104
dc->clk_mgr->funcs->get_clock(dc->clk_mgr, context, clock_type, clock_cfg);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
110
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
113
void dcn10_init_pipes(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
117
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
124
void dce110_enable_accelerated_mode(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
203
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
206
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
45
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
49
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
52
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
84
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
88
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.h
92
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1176
void dcn20_update_odm(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1298
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1667
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1675
enum mall_stream_type pipe_mall_type = dc_state_get_pipe_subvp_type(context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1771
(context == dc->current_state && plane_state->update_flags.bits.position_change) ||
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1772
(context == dc->current_state && plane_state->update_flags.bits.scaling_change) ||
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1773
(context == dc->current_state && pipe_ctx->stream->update_flags.bits.scaling)) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1894
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1905
if (dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1918
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1935
dcn20_program_tg(dc, pipe_ctx, context, hws);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1938
hws->funcs.update_odm(dc, context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1942
hws->funcs.enable_plane(dc, pipe_ctx, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1944
dcn20_enable_plane(dc, pipe_ctx, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
1963
dcn20_update_dchubp_dpp(dc, pipe_ctx, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2034
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2044
if (resource_is_pipe_topology_changed(dc->current_state, context))
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2045
resource_log_pipe_topology_update(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2049
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2063
if (context->res_ctx.pipe_ctx[i].plane_state)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2076
dcn20_detect_pipe_changes(dc->current_state, context, &dc->current_state->res_ctx.pipe_ctx[i],
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2077
&context->res_ctx.pipe_ctx[i]);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2087
if (context->res_ctx.pipe_ctx[i].update_flags.bits.disable && stream &&
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2101
if (context->res_ctx.pipe_ctx[i].update_flags.bits.disable
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2102
&& !context->res_ctx.pipe_ctx[i].top_pipe
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2103
&& !context->res_ctx.pipe_ctx[i].prev_odm_pipe
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2104
&& context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2105
hws->funcs.blank_pixel_data(dc, &context->res_ctx.pipe_ctx[i], true);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2109
if (context->res_ctx.pipe_ctx[i].update_flags.bits.disable
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2110
|| context->res_ctx.pipe_ctx[i].update_flags.bits.opp_changed) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2119
if ((context->res_ctx.pipe_ctx[i].update_flags.bits.disable ||
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2120
(context->res_ctx.pipe_ctx[i].plane_state &&
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2121
dc_state_get_pipe_subvp_type(context, &context->res_ctx.pipe_ctx[i])
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2137
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2142
hws->funcs.update_odm(dc, context, pipe);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2150
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2155
hws->funcs.program_pipe(dc, pipe, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2166
dc_state_get_pipe_subvp_type(context, pipe) != SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2167
dcn20_program_pipe(dc, pipe, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2175
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2181
hws->funcs.program_all_writeback_pipes_in_tree(dc, pipe->stream, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2189
context->stream_status[0].plane_count > 1) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2234
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2244
!resource_is_pipe_type(&context->res_ctx.pipe_ctx[i], OPP_HEAD))
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2249
if (context->res_ctx.pipe_ctx[i].update_flags.bits.disable)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2259
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2262
dc_state_get_pipe_subvp_type(context, pipe) != SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2273
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2282
dc_state_get_pipe_subvp_type(context, pipe) != SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2299
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2308
if (pipe->stream && dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2315
dc->hwss.update_phantom_vp_position(dc, context, pipe);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2316
dcn20_program_pipe(dc, pipe, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2334
dc->hwseq->funcs.update_force_pstate(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2340
hwseq->funcs.program_mall_pipe_config(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2350
context->stream_status[0].plane_count > 1) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2364
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2368
unsigned int cache_wm_a = context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns;
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2373
context,
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2377
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2380
if (pipe->stream && dc_state_get_pipe_subvp_type(context, pipe) != SUBVP_NONE) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2381
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = 4U * 1000U * 1000U * 1000U;
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2391
&context->bw_ctx.bw.dcn.watermarks,
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2397
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = cache_wm_a;
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2401
if (context->bw_ctx.dml.ip.min_comp_buffer_size_kbytes) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2402
compbuf_size_kb = context->bw_ctx.dml.ip.min_comp_buffer_size_kbytes;
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2405
compbuf_size_kb = context->bw_ctx.bw.dcn.compbuf_size_kb;
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2415
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2421
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2424
if (pipe->stream && dc_state_get_pipe_subvp_type(context, pipe) != SUBVP_NONE) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2425
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = 4U * 1000U * 1000U * 1000U;
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2432
&context->bw_ctx.bw.dcn.watermarks,
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2438
context->bw_ctx.bw.dcn.clk.dramclk_khz <= dc->clk_mgr->bw_params->dc_mode_softmax_memclk * 1000)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2443
hubbub->funcs->program_compbuf_size(hubbub, context->bw_ctx.bw.dcn.compbuf_size_kb, true);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2445
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2447
true, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2448
context->bw_ctx.bw.dcn.clk.p_state_change_support = true;
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2456
context,
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2458
if (context->bw_ctx.bw.dcn.clk.zstate_support == DCN_ZSTATE_SUPPORT_ALLOW &&
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2461
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2474
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2480
if (dc->res_pool->funcs->validate_bandwidth(dc, context, DC_VALIDATE_MODE_AND_PROGRAMMING) != DC_OK)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2484
dc->hwss.prepare_bandwidth(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2488
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2528
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2544
mcif_wb->funcs->config_mcif_arb(mcif_wb, &context->bw_ctx.bw.dcn.bw_writeback.mcif_wb_arb[wb_info->dwb_pipe_inst]);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2800
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2891
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
2900
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
3124
struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
3185
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
3207
hws->funcs.plane_atomic_disconnect(dc, context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
3222
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
3224
dc->hwss.disable_plane(dc, context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
818
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
854
opp_cnt = resource_get_opp_heads_for_otg_master(pipe_ctx, &context->res_ctx, opp_heads);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
905
dc->hwseq->funcs.PLAT_58856_wa(context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
978
if (dc_state_get_pipe_subvp_type(context, pipe_ctx) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
117
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
121
void dcn20_update_odm(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
165
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
169
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
39
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
42
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
72
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
75
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
78
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
81
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
84
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.h
92
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn201/dcn201_hwseq.c
232
struct dc_state *context = dc->current_state;
drivers/gpu/drm/amd/display/dc/hwss/dcn201/dcn201_hwseq.c
309
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn201/dcn201_hwseq.c
329
hws->funcs.plane_atomic_disconnect(dc, context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn201/dcn201_hwseq.c
344
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn201/dcn201_hwseq.c
346
dc->hwss.disable_plane(dc, context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.c
103
context,
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.c
109
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.c
113
context,
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.c
128
void dcn21_PLAT_58856_wa(struct dc_state *context, struct pipe_ctx *pipe_ctx)
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.c
134
pipe_ctx->stream->ctx->dc->link_srv->set_dpms_on(context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.c
288
struct dc_state *context, struct dc_stream_state *stream)
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.c
293
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.c
99
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.h
41
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.h
45
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.h
47
void dcn21_PLAT_58856_wa(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn21/dcn21_hwseq.h
57
struct dc_state *context, struct dc_stream_state *stream);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
1191
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
1193
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching && !dc->clk_mgr->clks.fw_based_mclk_switching) {
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
1195
context->bw_ctx.bw.dcn.clk.p_state_change_support = false;
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
1200
context->bw_ctx.bw.dcn.clk.dramclk_khz > dc->clk_mgr->bw_params->dc_mode_softmax_memclk * 1000)
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
1203
dcn20_prepare_bandwidth(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
1206
dc_dmub_srv_p_state_delegate(dc, false, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
433
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
450
mcif_wb->funcs->config_mcif_arb(mcif_wb, &context->bw_ctx.bw.dcn.bw_writeback.mcif_wb_arb[wb_info->dwb_pipe_inst]);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
456
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
464
dcn30_set_writeback(dc, wb_info, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
530
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
546
dcn30_set_writeback(dc, wb_info, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
578
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
587
for (i_stream = 0; i_stream < context->stream_count; i_stream++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
588
if (context->streams[i_stream] == stream) {
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
589
stream_status = &context->stream_status[i_stream];
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
606
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i_pipe];
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
629
dc->hwss.update_writeback(dc, &wb_info, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
632
dc->hwss.enable_writeback(dc, &wb_info, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.h
38
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.h
42
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.h
46
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.h
98
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn31/dcn31_hwseq.c
512
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn31/dcn31_hwseq.c
613
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn31/dcn31_hwseq.c
622
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn31/dcn31_hwseq.c
655
link_enc_cfg_set_transient_mode(dc, dc->current_state, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn31/dcn31_hwseq.h
53
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn31/dcn31_hwseq.h
57
struct dc_state *context, struct dc_stream_state *stream);
drivers/gpu/drm/amd/display/dc/hwss/dcn31/dcn31_hwseq.h
58
void dcn31_init_pipes(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn314/dcn314_hwseq.c
173
void dcn314_update_odm(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx)
drivers/gpu/drm/amd/display/dc/hwss/dcn314/dcn314_hwseq.c
366
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn314/dcn314_hwseq.c
374
pipe_ctx = resource_get_otg_master_for_stream(&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/hwss/dcn314/dcn314_hwseq.c
399
void dcn314_resync_fifo_dccg_dio(struct dce_hwseq *hws, struct dc *dc, struct dc_state *context, unsigned int current_pipe_idx)
drivers/gpu/drm/amd/display/dc/hwss/dcn314/dcn314_hwseq.c
407
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn314/dcn314_hwseq.c
421
reset_sync_context_for_pipe(dc, context, i);
drivers/gpu/drm/amd/display/dc/hwss/dcn314/dcn314_hwseq.c
430
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn314/dcn314_hwseq.h
34
void dcn314_update_odm(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn314/dcn314_hwseq.h
42
void dcn314_calculate_pix_rate_divider(struct dc *dc, struct dc_state *context, const struct dc_stream_state *stream);
drivers/gpu/drm/amd/display/dc/hwss/dcn314/dcn314_hwseq.h
44
void dcn314_resync_fifo_dccg_dio(struct dce_hwseq *hws, struct dc *dc, struct dc_state *context, unsigned int current_pipe_idx);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1139
void dcn32_update_odm(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1231
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1239
pipe_ctx = resource_get_otg_master_for_stream(&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1251
void dcn32_resync_fifo_dccg_dio(struct dce_hwseq *hws, struct dc *dc, struct dc_state *context, unsigned int current_pipe_idx)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1260
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1261
dc_state = context;
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1273
reset_sync_context_for_pipe(dc, context, i);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1282
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1437
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1444
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1446
if (pipe->stream && dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_MAIN &&
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1447
dc_state_get_paired_subvp_stream(context, pipe->stream) == phantom_pipe->stream) {
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1522
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1532
if (context->res_ctx.is_dsc_acquired[i]) {
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1544
void dcn32_disable_phantom_streams(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1552
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1564
(pipe_ctx->stream && dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM)) {
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1577
void dcn32_enable_phantom_streams(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1584
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1591
if (pipe->stream && dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM &&
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1604
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1609
if (dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1626
context,
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1633
hws->funcs.resync_fifo_dccg_dio(hws, dc, context, i);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1797
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1799
bool p_state_change_support = context->bw_ctx.bw.dcn.clk.p_state_change_support;
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1803
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching || dc->clk_mgr->clks.fw_based_mclk_switching) {
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1805
context->bw_ctx.bw.dcn.clk.p_state_change_support = false;
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1810
context->bw_ctx.bw.dcn.clk.dramclk_khz > dc->clk_mgr->bw_params->dc_mode_softmax_memclk * 1000)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1813
dcn20_prepare_bandwidth(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1815
if (!context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1816
dc_dmub_srv_p_state_delegate(dc, false, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1818
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching || dc->clk_mgr->clks.fw_based_mclk_switching) {
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1822
context->bw_ctx.bw.dcn.clk.p_state_change_support = p_state_change_support;
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1827
struct dc_state *context, bool lock)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1834
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1839
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1850
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
1856
hubbub->funcs->program_compbuf_size(hubbub, context->bw_ctx.bw.dcn.compbuf_size_kb, true);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
352
void dcn32_commit_subvp_config(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
361
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
363
if (pipe_ctx->stream && dc_state_get_pipe_subvp_type(context, pipe_ctx) == SUBVP_MAIN) {
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
369
dc_dmub_setup_subvp_dmub_command(dc, context, enable_subvp);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
379
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
392
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
393
pipe_mall_type = dc_state_get_pipe_subvp_type(context, pipe);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
402
if (dc_state_get_pipe_subvp_type(context, top_pipe_to_program) == SUBVP_MAIN &&
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
413
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
607
void dcn32_update_force_pstate(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
616
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
621
stream_status = dc_state_get_stream_status(context, pipe->stream);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
623
if (!pipe->stream || !(dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_MAIN ||
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
635
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
654
stream_status = dc_state_get_stream_status(context, pipe->stream);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
658
if (pipe->stream && (dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_MAIN ||
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
674
void dcn32_update_mall_sel(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
677
unsigned int num_ways = dcn32_calculate_cab_allocation(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
681
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
707
if (dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
728
void dcn32_program_mall_pipe_config(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
737
hws->funcs.update_mall_sel(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
741
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.c
750
if (dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_MAIN)
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
101
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
111
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
114
void dcn32_enable_phantom_streams(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
116
void dcn32_disable_phantom_streams(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
127
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
130
struct dc_state *context, bool lock);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
133
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
48
void dcn32_commit_subvp_config(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
66
void dcn32_program_mall_pipe_config(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
68
void dcn32_update_mall_sel(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
70
void dcn32_update_force_pstate(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
72
void dcn32_update_odm(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
78
void dcn32_resync_fifo_dccg_dio(struct dce_hwseq *hws, struct dc *dc, struct dc_state *context, unsigned int current_pipe_idx);
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
81
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn32/dcn32_hwseq.h
94
void dcn32_calculate_pix_rate_divider(struct dc *dc, struct dc_state *context, const struct dc_stream_state *stream);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1022
void dcn35_calc_blocks_to_ungate(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1033
struct pipe_ctx *new_pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1106
if (context->res_ctx.is_hpo_dp_stream_enc_acquired[i] &&
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1121
struct pipe_ctx *new_pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1356
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1361
dc->hwss.calc_blocks_to_ungate(dc, context, &pg_update_state);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1370
dcn20_prepare_bandwidth(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1377
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1383
dcn20_optimize_bandwidth(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1386
dc->hwss.calc_blocks_to_gate(dc, context, &pg_update_state);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1720
void dcn35_notify_cursor_offload_drr_update(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
1723
dc_dmub_srv_control_cursor_offload(dc, context, stream, true);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
429
void dcn35_update_odm(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx)
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
621
void dcn35_init_pipes(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
630
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
631
if (context->streams[i]->apply_seamless_boot_optimization) {
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
639
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
665
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
682
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
696
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
737
hws->funcs.plane_atomic_disconnect(dc, context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
742
dc->hwss.disable_plane(dc, context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
817
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
918
void dcn35_calc_blocks_to_gate(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
930
if (context->res_ctx.is_hpo_dp_stream_enc_acquired[i] &&
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.c
943
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.h
108
void dcn35_notify_cursor_offload_drr_update(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.h
34
void dcn35_update_odm(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.h
60
void dcn35_init_pipes(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.h
63
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.h
66
void dcn35_calc_blocks_to_gate(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.h
68
void dcn35_calc_blocks_to_ungate(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.h
79
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn35/dcn35_hwseq.h
83
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn351/dcn351_hwseq.c
38
void dcn351_calc_blocks_to_gate(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn351/dcn351_hwseq.c
43
dcn35_calc_blocks_to_gate(dc, context, update_state);
drivers/gpu/drm/amd/display/dc/hwss/dcn351/dcn351_hwseq.c
58
void dcn351_calc_blocks_to_ungate(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn351/dcn351_hwseq.c
63
dcn35_calc_blocks_to_ungate(dc, context, update_state);
drivers/gpu/drm/amd/display/dc/hwss/dcn351/dcn351_hwseq.h
32
void dcn351_calc_blocks_to_gate(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn351/dcn351_hwseq.h
34
void dcn351_calc_blocks_to_ungate(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1382
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1385
bool p_state_change_support = context->bw_ctx.bw.dcn.clk.p_state_change_support;
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1391
context->bw_ctx.bw.dcn.clk.p_state_change_support = false;
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1396
context->bw_ctx.bw.dcn.clk.dramclk_khz > dc->clk_mgr->bw_params->dc_mode_softmax_memclk * 1000)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1402
context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1410
&context->bw_ctx.bw.dcn.watermarks,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1415
dc->optimized_required |= hubbub->funcs->program_arbiter(hubbub, &context->bw_ctx.bw.dcn.arb_regs, false);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1420
compbuf_size = context->bw_ctx.bw.dcn.arb_regs.compbuf_size;
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1427
dcn401_dmub_hw_control_lock(dc, context, true);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1428
dcn401_fams2_update_config(dc, context, false);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1429
dcn401_dmub_hw_control_lock(dc, context, false);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1432
if (p_state_change_support != context->bw_ctx.bw.dcn.clk.p_state_change_support) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1435
context->bw_ctx.bw.dcn.clk.p_state_change_support = p_state_change_support;
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1441
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1448
dcn401_dmub_hw_control_lock(dc, context, true);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1449
dcn401_fams2_update_config(dc, context, true);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1450
dcn401_dmub_hw_control_lock(dc, context, false);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1455
&context->bw_ctx.bw.dcn.watermarks,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1460
hubbub->funcs->program_arbiter(hubbub, &context->bw_ctx.bw.dcn.arb_regs, true);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1465
context->bw_ctx.bw.dcn.clk.dramclk_khz <= dc->clk_mgr->bw_params->dc_mode_softmax_memclk * 1000)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1470
hubbub->funcs->program_compbuf_segments(hubbub, context->bw_ctx.bw.dcn.arb_regs.compbuf_size, true);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1474
context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1476
if (context->bw_ctx.bw.dcn.clk.zstate_support == DCN_ZSTATE_SUPPORT_ALLOW) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1478
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1490
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1525
void dcn401_fams2_update_config(struct dc *dc, struct dc_state *context, bool enable)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1532
fams2_info_required = context->bw_ctx.bw.dcn.fams2_global_config.features.bits.enable;
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1533
fams2_info_required |= context->bw_ctx.bw.dcn.fams2_global_config.features.bits.legacy_method_no_fams2;
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1535
dc_dmub_srv_fams2_update_config(dc, context, enable && fams2_info_required);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1538
static void update_dsc_for_odm_change(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1569
new_pipe = &context->res_ctx.pipe_ctx[old_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1577
void dcn401_update_odm(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1588
otg_master, &context->res_ctx, opp_heads);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1612
update_dsc_for_odm_change(dc, context, otg_master);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1622
static void dcn401_add_dsc_sequence_for_odm_change(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1703
new_pipe = &context->res_ctx.pipe_ctx[old_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1714
void dcn401_update_odm_sequence(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1725
otg_master, &context->res_ctx, opp_heads);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1749
dcn401_add_dsc_sequence_for_odm_change(dc, context, otg_master, seq_state);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1816
void dcn401_wait_for_det_buffer_update_under_otg_master(struct dc *dc, struct dc_state *context, struct pipe_ctx *otg_master)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1827
&context->res_ctx, opp_heads);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1833
&context->res_ctx,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1850
struct dc_state *context, bool lock)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1858
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1863
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1870
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1875
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1892
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1896
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1954
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1960
hubbub->funcs->program_compbuf_segments(hubbub, context->bw_ctx.bw.dcn.arb_regs.compbuf_size, true);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
1966
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2053
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2062
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2113
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2124
if (dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2137
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2154
dcn401_program_tg(dc, pipe_ctx, context, hws);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2157
hws->funcs.update_odm(dc, context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2161
hws->funcs.enable_plane(dc, pipe_ctx, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2163
dc->hwss.enable_plane(dc, pipe_ctx, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2181
dc->hwss.update_dchubp_dpp(dc, pipe_ctx, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2262
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2292
if (dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2305
hws->funcs.update_odm_sequence(dc, context, pipe_ctx, seq_state);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2310
dc->hwss.enable_plane_sequence(dc, pipe_ctx, context, seq_state);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2330
dc->hwss.update_dchubp_dpp_sequence(dc, pipe_ctx, context, seq_state);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2403
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2413
if (resource_is_pipe_topology_changed(dc->current_state, context))
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2414
resource_log_pipe_topology_update(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2418
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2434
if (context->res_ctx.pipe_ctx[i].plane_state)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2447
dc->hwss.detect_pipe_changes(dc->current_state, context, &dc->current_state->res_ctx.pipe_ctx[i],
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2448
&context->res_ctx.pipe_ctx[i]);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2458
if (context->res_ctx.pipe_ctx[i].update_flags.bits.disable && stream &&
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2472
if (context->res_ctx.pipe_ctx[i].update_flags.bits.disable
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2473
&& !context->res_ctx.pipe_ctx[i].top_pipe
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2474
&& !context->res_ctx.pipe_ctx[i].prev_odm_pipe
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2475
&& context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2476
hws->funcs.blank_pixel_data(dc, &context->res_ctx.pipe_ctx[i], true);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2480
if (context->res_ctx.pipe_ctx[i].update_flags.bits.disable
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2481
|| context->res_ctx.pipe_ctx[i].update_flags.bits.opp_changed) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2490
if ((context->res_ctx.pipe_ctx[i].update_flags.bits.disable ||
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2491
(context->res_ctx.pipe_ctx[i].plane_state &&
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2492
dc_state_get_pipe_subvp_type(context, &context->res_ctx.pipe_ctx[i]) ==
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2508
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2513
hws->funcs.update_odm(dc, context, pipe);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2521
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2526
hws->funcs.program_pipe(dc, pipe, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2537
dc_state_get_pipe_subvp_type(context, pipe) != SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2538
dcn401_program_pipe(dc, pipe, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2546
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2552
hws->funcs.program_all_writeback_pipes_in_tree(dc, pipe->stream, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2560
context->stream_status[0].plane_count > 1) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2568
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2580
!resource_is_pipe_type(&context->res_ctx.pipe_ctx[i], OPP_HEAD))
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2585
if (context->res_ctx.pipe_ctx[i].update_flags.bits.disable)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2595
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2598
dc_state_get_pipe_subvp_type(context, pipe) != SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2609
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2618
dc_state_get_pipe_subvp_type(context, pipe) != SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2636
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2645
if (pipe->stream && dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2652
dc->hwss.update_phantom_vp_position(dc, context, pipe);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2653
dcn401_program_pipe(dc, pipe, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2671
dc->hwseq->funcs.update_force_pstate(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2676
hwseq->funcs.program_mall_pipe_config(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2687
context->stream_status[0].plane_count > 1) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2702
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2708
if (dc->res_pool->funcs->validate_bandwidth(dc, context, DC_VALIDATE_MODE_AND_PROGRAMMING) != DC_OK)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2712
dc->hwss.prepare_bandwidth(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
2716
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3205
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3223
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i_pipe];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3247
dcn401_update_writeback_sequence(dc, wb_info, context, seq_state);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3250
dcn401_enable_writeback_sequence(dc, wb_info, context, mpcc_inst, seq_state);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3262
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3282
hwss_add_mcif_wb_config_arb(seq_state, mcif_wb, &context->bw_ctx.bw.dcn.bw_writeback.mcif_wb_arb[wb_info->dwb_pipe_inst]);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3321
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3526
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3587
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3596
enum mall_stream_type pipe_mall_type = dc_state_get_pipe_subvp_type(context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3679
(context == dc->current_state && plane_state->update_flags.bits.position_change) ||
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3680
(context == dc->current_state && plane_state->update_flags.bits.scaling_change) ||
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3681
(context == dc->current_state && pipe_ctx->stream->update_flags.bits.scaling)) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3929
void dcn401_program_mall_pipe_config_sequence(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3933
unsigned int num_ways = dcn401_calculate_cab_allocation(dc, context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3940
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3966
if (dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3981
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
3985
if (dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_MAIN)
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
727
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
743
*opp_cnt = resource_get_opp_heads_for_otg_master(pipe_ctx, &context->res_ctx, opp_heads);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
767
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
788
enable_stream_timing_calc(pipe_ctx, context, dc, &tmds_div, opp_inst,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
830
dc->hwseq->funcs.PLAT_58856_wa(context, pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.c
891
if (dc_state_get_pipe_subvp_type(context, pipe_ctx) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
102
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
106
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
109
void dcn401_program_front_end_for_ctx(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
110
void dcn401_post_unlock_program_front_end(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
111
bool dcn401_update_bandwidth(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
139
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
145
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
157
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
180
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
185
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
204
void dcn401_program_mall_pipe_config_sequence(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
50
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
71
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
75
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
78
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
80
void dcn401_fams2_update_config(struct dc *dc, struct dc_state *context, bool enable);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
84
void dcn401_update_odm(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
86
void dcn401_update_odm_sequence(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
89
void dcn401_wait_for_det_buffer_update_under_otg_master(struct dc *dc, struct dc_state *context, struct pipe_ctx *otg_master);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
90
void dcn401_interdependent_update_lock(struct dc *dc, struct dc_state *context, bool lock);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
91
void dcn401_program_outstanding_updates(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
95
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/dcn401/dcn401_hwseq.h
98
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1004
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1006
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1013
int num_planes, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1015
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1017
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1019
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1037
void (*update_dsc_pg)(struct dc *dc, struct dc_state *context, bool safe_to_disable);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1044
struct dc_state *context, bool lock);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1083
void (*prepare_bandwidth)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1084
bool (*update_bandwidth)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1085
void (*optimize_bandwidth)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1106
void (*notify_cursor_offload_drr_update)(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1129
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1132
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1143
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1145
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1147
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1203
void (*commit_subvp_config)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1204
void (*enable_phantom_streams)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1205
void (*disable_phantom_streams)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1207
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1217
struct dc_state *context, struct dc_stream_state *stream);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1234
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1238
void (*calc_blocks_to_gate)(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1240
void (*calc_blocks_to_ungate)(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1254
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1257
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1262
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1271
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1274
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1278
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1281
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1341
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1360
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1379
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1382
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1385
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1388
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
1659
struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe_ctx);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
193
struct dc_state *context;
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
2014
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
266
struct dc_state *context;
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
330
struct dc_state *context;
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer.h
457
struct dc_state *context;
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
113
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
158
void (*update_odm)(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
160
void (*update_odm_sequence)(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
164
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
168
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
179
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
183
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
193
void (*PLAT_58856_wa)(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
197
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
198
void (*program_mall_pipe_config)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
199
void (*program_mall_pipe_config_sequence)(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
201
void (*update_force_pstate)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
202
void (*update_mall_sel)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
207
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
211
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
216
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
79
void (*init_pipes)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/hwss/hw_sequencer_private.h
80
void (*reset_hw_ctx_wrap)(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/inc/core_types.h
102
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/inc/core_types.h
111
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
138
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/inc/core_types.h
152
void (*release_pipe)(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
185
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
211
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
221
int (*get_power_profile)(const struct dc_state *context);
drivers/gpu/drm/amd/display/dc/inc/core_types.h
222
unsigned int (*get_det_buffer_size)(const struct dc_state *context);
drivers/gpu/drm/amd/display/dc/inc/core_types.h
227
bool (*program_mcache_pipe_config)(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
87
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
90
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
95
struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/inc/dcn_calcs.h
624
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr.h
286
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr.h
303
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr_internal.h
466
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr_internal.h
470
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/inc/resource.h
111
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/resource.h
122
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/inc/resource.h
161
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/resource.h
324
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/resource.h
575
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/resource.h
580
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/resource.h
585
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/resource.h
608
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/inc/resource.h
611
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/resource.h
615
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/resource.h
638
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/inc/resource.h
657
bool resource_is_hpo_acquired(struct dc_state *context);
drivers/gpu/drm/amd/display/dc/link/link_validation.c
370
static const struct dc_tunnel_settings *get_dp_tunnel_settings(const struct dc_state *context,
drivers/gpu/drm/amd/display/dc/link/link_validation.c
377
if (context->res_ctx.pipe_ctx[i].stream && (context->res_ctx.pipe_ctx[i].stream == stream)) {
drivers/gpu/drm/amd/display/dc/link/link_validation.c
378
dp_tunnel_settings = &context->res_ctx.pipe_ctx[i].link_config.dp_tunnel_settings;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
849
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
852
struct pipe_ctx *pipe_ctx = resource_get_otg_master_for_stream(&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
866
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
875
stream = context->res_ctx.pipe_ctx[i].stream;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
886
context->bw_ctx.bw.dce.dispclk_khz = 681000;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
887
context->bw_ctx.bw.dce.yclk_khz = 250000 * MEMORY_TYPE_MULTIPLIER_CZ;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
895
context->bw_ctx.bw.dce.dispclk_khz = 352000;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
897
context->bw_ctx.bw.dce.dispclk_khz = 0;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
899
context->bw_ctx.bw.dce.yclk_khz = 0;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
906
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
910
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
911
if (context->stream_status[i].plane_count == 0)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
914
if (context->stream_status[i].plane_count > 1)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
917
if (context->stream_status[i].plane_states[0]->format
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
927
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
929
if (!dce100_validate_surface_sets(context))
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.h
46
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.h
50
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1011
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[0].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1012
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[0].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1013
context->bw_ctx.bw.dce.urgent_wm_ns[0].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1014
context->bw_ctx.bw.dce.urgent_wm_ns[0].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1015
context->bw_ctx.bw.dce.stutter_exit_wm_ns[0].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1016
context->bw_ctx.bw.dce.stutter_exit_wm_ns[0].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1017
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[1].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1018
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[1].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1019
context->bw_ctx.bw.dce.urgent_wm_ns[1].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1020
context->bw_ctx.bw.dce.urgent_wm_ns[1].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1021
context->bw_ctx.bw.dce.stutter_exit_wm_ns[1].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1022
context->bw_ctx.bw.dce.stutter_exit_wm_ns[1].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1023
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[2].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1024
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[2].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1025
context->bw_ctx.bw.dce.urgent_wm_ns[2].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1026
context->bw_ctx.bw.dce.urgent_wm_ns[2].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1027
context->bw_ctx.bw.dce.stutter_exit_wm_ns[2].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1028
context->bw_ctx.bw.dce.stutter_exit_wm_ns[2].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1029
context->bw_ctx.bw.dce.stutter_mode_enable,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1030
context->bw_ctx.bw.dce.cpuc_state_change_enable,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1031
context->bw_ctx.bw.dce.cpup_state_change_enable,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1032
context->bw_ctx.bw.dce.nbp_state_change_enable,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1033
context->bw_ctx.bw.dce.all_displays_in_sync,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1034
context->bw_ctx.bw.dce.dispclk_khz,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1035
context->bw_ctx.bw.dce.sclk_khz,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1036
context->bw_ctx.bw.dce.sclk_deep_sleep_khz,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1037
context->bw_ctx.bw.dce.yclk_khz,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1038
context->bw_ctx.bw.dce.blackout_recovery_time_us);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1054
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1058
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1059
if (context->stream_status[i].plane_count == 0)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1062
if (context->stream_status[i].plane_count > 2)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1065
for (j = 0; j < context->stream_status[i].plane_count; j++) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1067
context->stream_status[i].plane_states[j];
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1086
if (context->streams[i]->timing.pixel_encoding
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1100
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1102
if (!dce110_validate_surface_sets(context))
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
948
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
951
struct pipe_ctx *pipe_ctx = resource_get_otg_master_for_stream(&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
971
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
984
context->res_ctx.pipe_ctx,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
986
&context->bw_ctx.bw.dce))
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
992
context->streams[0]->timing.h_addressable,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
993
context->streams[0]->timing.v_addressable,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
994
context->streams[0]->timing.pix_clk_100hz / 10);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
997
&context->bw_ctx.bw.dce, sizeof(context->bw_ctx.bw.dce))) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1002
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1003
if (context->stream_status[i].plane_count == 0)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1006
if (context->stream_status[i].plane_count > 1)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1009
if (context->stream_status[i].plane_states[0]->format
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1038
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1040
if (!dce112_validate_surface_sets(context))
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
875
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
878
struct pipe_ctx *pipe_ctx = resource_get_otg_master_for_stream(&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
892
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
905
context->res_ctx.pipe_ctx,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
907
&context->bw_ctx.bw.dce))
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
916
&context->bw_ctx.bw.dce, sizeof(context->bw_ctx.bw.dce))) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
930
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[0].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
931
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[0].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
932
context->bw_ctx.bw.dce.urgent_wm_ns[0].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
933
context->bw_ctx.bw.dce.urgent_wm_ns[0].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
934
context->bw_ctx.bw.dce.stutter_exit_wm_ns[0].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
935
context->bw_ctx.bw.dce.stutter_exit_wm_ns[0].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
936
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[1].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
937
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[1].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
938
context->bw_ctx.bw.dce.urgent_wm_ns[1].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
939
context->bw_ctx.bw.dce.urgent_wm_ns[1].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
940
context->bw_ctx.bw.dce.stutter_exit_wm_ns[1].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
941
context->bw_ctx.bw.dce.stutter_exit_wm_ns[1].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
942
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[2].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
943
context->bw_ctx.bw.dce.nbp_state_change_wm_ns[2].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
944
context->bw_ctx.bw.dce.urgent_wm_ns[2].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
945
context->bw_ctx.bw.dce.urgent_wm_ns[2].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
946
context->bw_ctx.bw.dce.stutter_exit_wm_ns[2].b_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
947
context->bw_ctx.bw.dce.stutter_exit_wm_ns[2].a_mark,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
948
context->bw_ctx.bw.dce.stutter_mode_enable,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
949
context->bw_ctx.bw.dce.cpuc_state_change_enable,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
950
context->bw_ctx.bw.dce.cpup_state_change_enable,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
951
context->bw_ctx.bw.dce.nbp_state_change_enable,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
952
context->bw_ctx.bw.dce.all_displays_in_sync,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
953
context->bw_ctx.bw.dce.dispclk_khz,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
954
context->bw_ctx.bw.dce.sclk_khz,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
955
context->bw_ctx.bw.dce.sclk_deep_sleep_khz,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
956
context->bw_ctx.bw.dce.yclk_khz,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
957
context->bw_ctx.bw.dce.blackout_recovery_time_us);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
964
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
970
&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
982
&context->res_ctx, dc->res_pool,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
990
&context->res_ctx,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
998
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.h
42
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.h
47
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1082
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1085
struct pipe_ctx *pipe_ctx = resource_get_otg_master_for_stream(&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1165
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1171
voltage_supported = dcn_validate_bandwidth(dc, context, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1187
static enum dc_status dcn10_validate_global(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1196
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1197
if (context->stream_status[i].plane_count == 0)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1200
if (context->stream_status[i].plane_count > 2)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1203
if (context->stream_status[i].plane_count > 1)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1206
for (j = 0; j < context->stream_status[i].plane_count; j++) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1208
context->stream_status[i].plane_states[j];
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1230
if (context->stream_count > 1 && mpo_enabled)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1345
enum dc_status dcn20_build_mapped_resource(const struct dc *dc, struct dc_state *context, struct dc_stream_state *stream)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1348
struct pipe_ctx *pipe_ctx = resource_get_otg_master_for_stream(&context->res_ctx, stream);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1630
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1642
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1646
if (context->res_ctx.pipe_ctx[i].stream->writeback_info[j].wb_enabled == false)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1650
wb_arb_params = &context->bw_ctx.bw.dcn.bw_writeback.mcif_wb_arb[dwb_pipe];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1652
if (context->res_ctx.pipe_ctx[i].stream->writeback_info[j].dwb_params.out_format == dwb_scaler_mode_yuv420) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1653
if (context->res_ctx.pipe_ctx[i].stream->writeback_info[j].dwb_params.output_depth == DWB_OUTPUT_PIXEL_DEPTH_8BPC)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1661
dcn20_fpu_set_wb_arb_params(wb_arb_params, context, pipes, pipe_cnt, i);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1794
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1800
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1817
dcn20_release_dsc(&context->res_ctx, dc->res_pool, &odm_pipe->stream_res.dsc);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1829
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1853
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1862
struct vba_vars_st *v = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1865
if (context->stream_count > 1) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1872
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1896
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1914
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1917
for (vlevel_split = vlevel; vlevel <= context->bw_ctx.dml.soc.num_states; vlevel++)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1922
if (vlevel > context->bw_ctx.dml.soc.num_states)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1933
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1935
bool split4mpc = context->stream_count == 1 && plane_count == 1
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1938
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2041
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2057
dcn20_merge_pipes_for_validate(dc, context);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2059
pipe_cnt = dc->res_pool->funcs->populate_dml_pipes(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2068
vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2070
if (vlevel > context->bw_ctx.dml.soc.num_states)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2073
vlevel = dcn20_validate_apply_pipe_split_flags(dc, context, vlevel, split, merge);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2080
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2088
if (!pipe->top_pipe && !pipe->plane_state && context->bw_ctx.dml.vba.ODMCombineEnabled[pipe_idx]) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2089
hsplit_pipe = dcn20_find_secondary_pipe(dc, &context->res_ctx, dc->res_pool, pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2092
dc, &context->res_ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2096
dcn20_build_mapped_resource(dc, context, pipe->stream);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2107
&& context->bw_ctx.dml.vba.ODMCombineEnabled[pipe_idx])
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2113
hsplit_pipe = dcn20_find_secondary_pipe(dc, &context->res_ctx, dc->res_pool, pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2117
dcn20_fpu_adjust_dppclk(&context->bw_ctx.dml.vba, vlevel, context->bw_ctx.dml.vba.maxMpcComb, pipe_idx, true);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2121
if (context->bw_ctx.dml.vba.ODMCombineEnabled[pipe_idx]) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2123
dc, &context->res_ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2126
dcn20_build_mapped_resource(dc, context, pipe->stream);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2129
&context->res_ctx, dc->res_pool,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2143
if (!dcn20_validate_dsc(dc, context)) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2144
context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states] =
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2161
enum dc_status dcn20_validate_bandwidth(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2173
voltage_supported = dcn20_validate_bandwidth_fp(dc, context, validate_mode, pipes);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2245
void dcn20_release_pipe(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2250
dcn20_release_dsc(&context->res_ctx, pool, &pipe->stream_res.dsc);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
119
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
122
enum dc_status dcn20_validate_bandwidth(struct dc *dc, struct dc_state *context, enum dc_validate_mode validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
125
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
128
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
156
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
163
enum dc_status dcn20_build_mapped_resource(const struct dc *dc, struct dc_state *context, struct dc_stream_state *stream);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
66
void dcn20_release_pipe(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
796
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
812
dcn20_merge_pipes_for_validate(dc, context);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
814
pipe_cnt = dc->res_pool->funcs->populate_dml_pipes(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
827
context->bw_ctx.dml.soc.allow_dram_self_refresh_or_dram_clock_change_in_vblank =
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
829
vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
831
if (vlevel > context->bw_ctx.dml.soc.num_states) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
839
context->bw_ctx.dml.soc.allow_dram_self_refresh_or_dram_clock_change_in_vblank =
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
841
vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
842
if (vlevel > context->bw_ctx.dml.soc.num_states)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
846
vlevel = dcn20_validate_apply_pipe_split_flags(dc, context, vlevel, split, merge);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
849
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
851
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
873
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
881
if (!pipe->top_pipe && !pipe->plane_state && context->bw_ctx.dml.vba.ODMCombineEnabled[pipe_idx]) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
882
hsplit_pipe = dcn20_find_secondary_pipe(dc, &context->res_ctx, dc->res_pool, pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
885
dc, &context->res_ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
889
dcn20_build_mapped_resource(dc, context, pipe->stream);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
901
hsplit_pipe = dcn20_find_secondary_pipe(dc, &context->res_ctx, dc->res_pool, pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
905
dcn20_fpu_adjust_dppclk(&context->bw_ctx.dml.vba, vlevel, context->bw_ctx.dml.vba.maxMpcComb, pipe_idx, true);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
909
if (context->bw_ctx.dml.vba.ODMCombineEnabled[pipe_idx]) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
911
dc, &context->res_ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
914
dcn20_build_mapped_resource(dc, context, pipe->stream);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
917
&context->res_ctx, dc->res_pool,
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
930
if (!dcn20_validate_dsc(dc, context)) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
931
context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states] =
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
953
static enum dc_status dcn21_validate_bandwidth(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
965
voltage_supported = dcn21_validate_bandwidth_fp(dc, context, validate_mode, pipes);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.h
49
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1353
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1358
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1361
dcn20_populate_dml_pipes_from_context(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1408
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1413
struct display_mode_lib *dml = &context->bw_ctx.dml;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1421
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1425
struct dc_writeback_info *writeback_info = &context->res_ctx.pipe_ctx[i].stream->writeback_info[j];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1431
wb_arb_params = &context->bw_ctx.bw.dcn.bw_writeback.mcif_wb_arb[dwb_pipe];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1442
wb_arb_params->time_per_pixel = (1000000 << 6) / context->res_ctx.pipe_ctx[i].stream->phy_pix_clk; /* time_per_pixel should be in u6.6 format */
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1617
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1623
if (old_index >= 0 && context->res_ctx.pipe_ctx[old_index].stream == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1624
pipe = &context->res_ctx.pipe_ctx[old_index];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1632
if (context->res_ctx.pipe_ctx[i].stream == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1633
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1647
if (context->res_ctx.pipe_ctx[i].stream == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1648
pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1659
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1672
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1678
context->bw_ctx.dml.vba.maxMpcComb = 0;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1679
context->bw_ctx.dml.vba.VoltageLevel = 0;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1680
context->bw_ctx.dml.vba.DRAMClockChangeSupport[0][0] = dm_dram_clock_change_vactive;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1681
dc->res_pool->funcs->update_soc_for_wm_a(dc, context);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1682
pipe_cnt = dc->res_pool->funcs->populate_dml_pipes(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1689
dml_log_pipe_params(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1697
context->bw_ctx.dml.soc.allow_dram_self_refresh_or_dram_clock_change_in_vblank =
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1699
vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1701
if (vlevel < context->bw_ctx.dml.soc.num_states)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1702
vlevel = dcn20_validate_apply_pipe_split_flags(dc, context, vlevel, split, merge);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1705
(validate_mode != DC_VALIDATE_MODE_AND_PROGRAMMING || vlevel == context->bw_ctx.dml.soc.num_states ||
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1714
context->bw_ctx.dml.validate_max_state = (validate_mode != DC_VALIDATE_MODE_AND_PROGRAMMING);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1715
context->bw_ctx.dml.soc.allow_dram_self_refresh_or_dram_clock_change_in_vblank =
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1718
vlevel = dml_get_voltage_level(&context->bw_ctx.dml, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1719
if (vlevel < context->bw_ctx.dml.soc.num_states) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1722
vlevel = dcn20_validate_apply_pipe_split_flags(dc, context, vlevel, split, merge);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1724
context->bw_ctx.dml.validate_max_state = false;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1727
dml_log_mode_support_params(&context->bw_ctx.dml);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1729
if (vlevel == context->bw_ctx.dml.soc.num_states)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1734
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1755
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1775
dcn20_release_dsc(&context->res_ctx, dc->res_pool, &pipe->stream_res.dsc);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1800
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1829
hsplit_pipe = dcn30_find_split_pipe(dc, context, old_index);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1835
dc, &context->res_ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1852
pipe_4to1 = dcn30_find_split_pipe(dc, context, old_index);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1857
dc, &context->res_ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1871
pipe_4to1 = dcn30_find_split_pipe(dc, context, old_index);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1876
dc, &context->res_ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1882
dcn20_build_mapped_resource(dc, context, pipe->stream);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1886
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1895
if (!dcn20_validate_dsc(dc, context)) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1901
pipe_cnt = dc->res_pool->funcs->populate_dml_pipes(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1902
context->bw_ctx.dml.vba.VoltageLevel = vlevel;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1916
static int get_refresh_rate(struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1922
if (context == NULL || context->streams[0] == NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1926
timing = &context->streams[0]->timing;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1945
static int get_frame_rate_at_max_stretch_100hz(struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1956
if (context == NULL || context->streams[0] == NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1960
timing = &context->streams[0]->timing;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1975
static bool is_refresh_rate_support_mclk_switch_using_fw_based_vblank_stretch(struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1980
if (context == NULL || context->streams[0] == NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1983
refresh_rate_max_stretch_100hz = get_frame_rate_at_max_stretch_100hz(context);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1984
min_refresh_100hz = context->streams[0]->timing.min_refresh_in_uhz / 10000;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1992
bool dcn30_can_support_mclk_switch_using_fw_based_vblank_stretch(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1998
if (context == NULL || context->streams[0] == NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2001
if (context->streams[0]->sink->edid_caps.panel_patch.disable_fams)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2010
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching_shut_down)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2014
if (context->stream_count != 1)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2017
refresh_rate = get_refresh_rate(context);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2021
if (!is_refresh_rate_support_mclk_switch_using_fw_based_vblank_stretch(context))
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2024
if (!context->streams[0]->allow_freesync)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2027
if (context->streams[0]->vrr_active_variable && (dc->debug.disable_fams_gaming == INGAME_FAMS_DISABLE))
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2030
stream_status = dc_state_get_stream_status(context, context->streams[0]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2043
void dcn30_setup_mclk_switch_using_fw_based_vblank_stretch(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2045
ASSERT(dc != NULL && context != NULL);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2046
if (dc == NULL || context == NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2050
context->bw_ctx.bw.dcn.watermarks.a.cstate_pstate.pstate_change_ns = 4U * 1000U * 1000U * 1000U;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2053
void dcn30_update_soc_for_wm_a(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2056
dcn30_fpu_update_soc_for_wm_a(dc, context);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2061
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2067
dcn30_fpu_calculate_wm_and_dlg(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2072
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2091
out = dcn30_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel, validate_mode, true);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2109
dc->res_pool->funcs->calculate_wm_and_dlg(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2118
dml_get_status_message(context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states]));
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
103
bool dcn30_can_support_mclk_switch_using_fw_based_vblank_stretch(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
104
void dcn30_setup_mclk_switch_using_fw_based_vblank_stretch(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
105
int dcn30_find_dummy_latency_index_for_fw_based_mclk_switch(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
50
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
59
enum dc_status dcn30_validate_bandwidth(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
63
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
70
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
74
void dcn30_update_soc_for_wm_a(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
79
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1407
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1413
dcn301_fpu_calculate_wm_and_dlg(dc, context, pipes, pipe_cnt, vlevel_req);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1648
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1657
pipe_cnt = dcn20_populate_dml_pipes_from_context(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1673
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1678
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1683
dcn31x_populate_dml_pipes_from_context(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1733
context->bw_ctx.dml.ip.det_buffer_size_kbytes = DCN3_1_DEFAULT_DET_SIZE;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1741
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 192;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1744
} else if (context->stream_count >= dc->debug.crb_alloc_policy_min_disp_count
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1746
context->bw_ctx.dml.ip.det_buffer_size_kbytes = dc->debug.crb_alloc_policy * 64;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1747
} else if (context->stream_count >= 3 && upscaled) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1748
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 192;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1755
const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1757
return context->bw_ctx.dml.ip.det_buffer_size_kbytes;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1761
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1767
dcn31_calculate_wm_and_dlg_fp(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1783
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1788
dcn30_set_mcif_arb_params(dc, context, pipes, pipe_cnt);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1793
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1812
out = dcn30_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel, validate_mode, true);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1829
dc->res_pool->funcs->calculate_wm_and_dlg(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1837
dml_get_status_message(context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states]));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.h
41
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.h
44
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.h
49
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.h
58
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.h
67
const struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1701
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1708
pipe_cnt = dcn314_populate_dml_pipes_from_context_fpu(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1731
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1751
out = dcn30_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel, validate_mode, false);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1768
dc->res_pool->funcs->calculate_wm_and_dlg(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1776
dml_get_status_message(context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states]));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.h
43
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1666
static bool allow_pixel_rate_crb(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1669
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1672
if (context->stream_count != 2)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1674
if (context->streams[0]->signal != SIGNAL_TYPE_EDP && context->streams[1]->signal != SIGNAL_TYPE_EDP)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1697
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1702
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1704
const int max_usable_det = context->bw_ctx.dml.ip.config_return_buffer_size_in_kbytes - DCN3_15_MIN_COMPBUF_SIZE_KB;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1706
bool pixel_rate_crb = allow_pixel_rate_crb(dc, context);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1709
dcn31x_populate_dml_pipes_from_context(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1737
&context->bw_ctx.dml.soc, timing->pix_clk_100hz, bpp, DCN3_15_CRB_SEGMENT_SIZE_KB);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1815
context->bw_ctx.dml.ip.det_buffer_size_kbytes =
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1817
if (context->bw_ctx.dml.ip.det_buffer_size_kbytes > DCN3_15_MAX_DET_SIZE)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1818
context->bw_ctx.dml.ip.det_buffer_size_kbytes = DCN3_15_MAX_DET_SIZE;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1825
context->bw_ctx.dml.ip.det_buffer_size_kbytes =
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1831
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 192;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1844
static int dcn315_get_power_profile(const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1846
return !context->bw_ctx.bw.dcn.clk.p_state_change_support;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1642
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1647
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1649
const int max_usable_det = context->bw_ctx.dml.ip.config_return_buffer_size_in_kbytes - DCN3_16_MIN_COMPBUF_SIZE_KB;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1652
dcn31x_populate_dml_pipes_from_context(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1699
context->bw_ctx.dml.ip.det_buffer_size_kbytes =
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1701
if (context->bw_ctx.dml.ip.det_buffer_size_kbytes > DCN3_16_MAX_DET_SIZE)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1702
context->bw_ctx.dml.ip.det_buffer_size_kbytes = DCN3_16_MAX_DET_SIZE;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1703
ASSERT(context->bw_ctx.dml.ip.det_buffer_size_kbytes >= DCN3_16_DEFAULT_DET_SIZE);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1709
context->bw_ctx.dml.ip.det_buffer_size_kbytes =
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1712
context->bw_ctx.dml.ip.det_buffer_size_kbytes = 192;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1675
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1681
struct pipe_ctx *curr_pipe = &context->res_ctx.pipe_ctx[dc_pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1687
phantom_plane = dc_state_create_phantom_plane(dc, context, curr_pipe->plane_state);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1711
dc_state_add_phantom_plane(dc, phantom_stream, phantom_plane, context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1719
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1725
struct pipe_ctx *ref_pipe = &context->res_ctx.pipe_ctx[dc_pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1727
phantom_stream = dc_state_create_phantom_stream(dc, context, ref_pipe->stream);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1736
dcn32_set_phantom_stream_timing(dc, context, ref_pipe, phantom_stream, pipes, pipe_cnt, dc_pipe_idx);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1739
dc_state_add_phantom_stream(dc, context, phantom_stream, ref_pipe->stream);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1746
void dcn32_add_phantom_pipes(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1757
phantom_stream = dcn32_enable_phantom_stream(dc, context, pipes, pipe_cnt, index);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1761
dcn32_enable_phantom_plane(dc, context, phantom_stream, index);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1764
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1770
dc_state_get_pipe_subvp_type(context, pipe) == SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1780
static bool dml1_validate(struct dc *dc, struct dc_state *context, enum dc_validate_mode validate_mode)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1794
context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching = false;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1795
context->bw_ctx.dml.soc.dram_clock_change_requirement_final = true;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1805
out = dcn32_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1821
dc->res_pool->funcs->calculate_wm_and_dlg(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1824
dcn32_override_min_req_memclk(dc, context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1827
dcn32_override_min_req_dcfclk(dc, context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1835
dml_get_status_message(context->bw_ctx.dml.vba.ValidationStatus[context->bw_ctx.dml.vba.soc.num_states]));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1849
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1857
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1858
stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1860
if (dc_state_can_clear_stream_cursor_subvp_limit(stream, context)) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1861
dc_state_set_stream_cursor_subvp_limit(stream, context, false);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1866
status = dml2_validate(dc, context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1867
context->power_source == DC_POWER_SOURCE_DC ? context->bw_ctx.dml2_dc_power_source : context->bw_ctx.dml2,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1870
status = dml1_validate(dc, context, validate_mode) ? DC_OK : DC_FAIL_BANDWIDTH_VALIDATE;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1872
if (validate_mode == DC_VALIDATE_MODE_AND_PROGRAMMING && status == DC_OK && dc_state_is_subvp_in_use(context)) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1874
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1875
stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1877
if (dc_state_get_stream_subvp_type(context, stream) != SUBVP_PHANTOM &&
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1879
!dc_stream_check_cursor_attributes(stream, context, &stream->cursor_attributes)) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1881
dc_state_set_stream_cursor_subvp_limit(stream, context, true);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1890
status = dml2_validate(dc, context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1891
context->power_source == DC_POWER_SOURCE_DC ? context->bw_ctx.dml2_dc_power_source : context->bw_ctx.dml2,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1894
status = dml1_validate(dc, context, validate_mode) ? DC_OK : DC_FAIL_BANDWIDTH_VALIDATE;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1901
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1906
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1919
dcn20_populate_dml_pipes_from_context(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1924
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1925
stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1926
mall_type = dc_state_get_stream_subvp_type(context, stream);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1943
mall_type = dc_state_get_pipe_subvp_type(context, pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1969
mall_type = dc_state_get_pipe_subvp_type(context, pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2002
switch (dc_state_get_pipe_subvp_type(context, pipe)) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2048
dcn32_set_det_allocations(dc, context, pipes);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2053
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching || subvp_in_use)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2054
context->bw_ctx.dml.soc.dram_clock_change_requirement_final = false;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2056
context->bw_ctx.dml.soc.dram_clock_change_requirement_final = true;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2090
void dcn32_calculate_wm_and_dlg(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2096
dcn32_calculate_wm_and_dlg_fpu(dc, context, pipes, pipe_cnt, vlevel);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
102
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
106
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
111
struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
123
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
126
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
129
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
132
struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
134
bool dcn32_mpo_in_use(struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
136
bool dcn32_any_surfaces_rotated(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
158
void dcn32_release_pipe(struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
163
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
166
void dcn32_set_det_allocations(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
169
struct dc_stream_state *dcn32_can_support_mclk_switch_using_fw_based_vblank_stretch(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
173
bool dcn32_allow_subvp_high_refresh_rate(struct dc *dc, struct dc_state *context, struct pipe_ctx *pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
177
double dcn32_determine_max_vratio_prefetch(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
181
bool dcn32_subvp_drr_admissable(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
183
bool dcn32_subvp_vblank_admissable(struct dc *dc, struct dc_state *context, int vlevel);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
185
void dcn32_update_dml_pipes_odm_policy_based_on_context(struct dc *dc, struct dc_state *context, display_e2e_pipe_params_st *pipes);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
187
void dcn32_override_min_req_dcfclk(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
96
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
108
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
114
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
132
dcn20_release_dsc(&context->res_ctx, dc->res_pool, &pipe->stream_res.dsc);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
154
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
159
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
171
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
176
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
178
if (dc_state_get_pipe_subvp_type(context, pipe) != SUBVP_NONE)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
184
bool dcn32_mpo_in_use(struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
188
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
189
if (context->stream_status[i].plane_count > 1)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
196
bool dcn32_any_surfaces_rotated(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
201
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
243
static void override_det_for_subvp(struct dc *dc, struct dc_state *context, uint8_t pipe_segments[])
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
251
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
252
if (context->stream_status[i].plane_count > 1)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
255
if (dc_state_get_stream_subvp_type(context, context->streams[i]) != SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
260
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
262
if (pipe_ctx->stream && pipe_ctx->plane_state && dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
263
if (dcn32_allow_subvp_high_refresh_rate(dc, context, pipe_ctx)) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
275
struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
277
if (pipe_ctx->stream && pipe_ctx->plane_state && dc_state_get_pipe_subvp_type(context, pipe_ctx) != SUBVP_PHANTOM) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
312
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
322
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
324
if (dc_state_get_stream_subvp_type(context, context->streams[i]) != SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
330
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
331
if (dc_state_get_stream_subvp_type(context, context->streams[i]) == SUBVP_PHANTOM)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
334
if (context->stream_status[i].plane_count > 0)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
335
plane_segments = stream_segments / context->stream_status[i].plane_count;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
340
if (context->res_ctx.pipe_ctx[j].stream == context->streams[i] &&
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
348
current_plane = context->res_ctx.pipe_ctx[j].plane_state;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
350
if (k != j && context->res_ctx.pipe_ctx[k].stream == context->streams[i] &&
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
351
context->res_ctx.pipe_ctx[k].plane_state == current_plane) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
359
if (k != j && context->res_ctx.pipe_ctx[k].stream == context->streams[i] &&
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
360
context->res_ctx.pipe_ctx[k].plane_state == current_plane) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
368
override_det_for_subvp(dc, context, pipe_segments);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
370
if (!context->res_ctx.pipe_ctx[i].stream)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
381
void dcn32_set_det_allocations(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
385
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
414
dcn32_determine_det_override(dc, context, pipes);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
515
struct dc_stream_state *dcn32_can_support_mclk_switch_using_fw_based_vblank_stretch(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
524
if (context == NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
533
if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching_shut_down)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
539
if (context->stream_count > 2)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
541
else if (context->stream_count == 2) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
543
dcn32_assign_fpo_vactive_candidate(dc, context, &fpo_candidate_stream);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
546
fpo_stream_status = dc_state_get_stream_status(context, fpo_candidate_stream);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
548
is_fpo_vactive = dcn32_find_vactive_pipe(dc, context, fpo_candidate_stream, dc->debug.fpo_vactive_min_active_margin_us);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
553
fpo_candidate_stream = context->streams[0];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
555
fpo_stream_status = dc_state_get_stream_status(context, fpo_candidate_stream);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
585
(context->stream_count > 1 && !(dc->debug.disable_fams_gaming == INGAME_FAMS_MULTI_DISP_ENABLE))))
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
644
bool dcn32_subvp_drr_admissable(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
656
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
657
enum mall_stream_type pipe_mall_type = dc_state_get_pipe_subvp_type(context, pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
704
bool dcn32_subvp_vblank_admissable(struct dc *dc, struct dc_state *context, int vlevel)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
711
struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
717
struct pipe_ctx *pipe = &context->res_ctx.pipe_ctx[i];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
718
enum mall_stream_type pipe_mall_type = dc_state_get_pipe_subvp_type(context, pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
750
void dcn32_update_dml_pipes_odm_policy_based_on_context(struct dc *dc, struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
754
struct resource_context *res_ctx = &context->res_ctx;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
776
void dcn32_override_min_req_dcfclk(struct dc *dc, struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
778
if (dcn32_subvp_in_use(dc, context) && context->bw_ctx.bw.dcn.clk.dcfclk_khz <= MIN_SUBVP_DCFCLK_KHZ)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
779
context->bw_ctx.bw.dcn.clk.dcfclk_khz = MIN_SUBVP_DCFCLK_KHZ;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
92
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
94
if (context->bw_ctx.bw.dcn.mall_subvp_size_bytes > 0) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource_helpers.c
98
return dc->res_pool->funcs->calculate_mall_ways_from_bytes(dc, context->bw_ctx.bw.dcn.mall_subvp_size_bytes);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1771
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1776
out = dml2_validate(dc, context,
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1777
context->power_source == DC_POWER_SOURCE_DC ? context->bw_ctx.dml2_dc_power_source : context->bw_ctx.dml2,
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1784
dcn35_decide_zstate_support(dc, context);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1799
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1806
ret = dcn35_populate_dml_pipes_from_context_fpu(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1751
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1756
out = dml2_validate(dc, context,
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1757
context->power_source == DC_POWER_SOURCE_DC ? context->bw_ctx.dml2_dc_power_source : context->bw_ctx.dml2,
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1764
dcn35_decide_zstate_support(dc, context);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1771
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1778
ret = dcn351_populate_dml_pipes_from_context_fpu(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1758
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1763
out = dml2_validate(dc, context,
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1764
context->power_source == DC_POWER_SOURCE_DC ? context->bw_ctx.dml2_dc_power_source : context->bw_ctx.dml2,
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1771
dcn35_decide_zstate_support(dc, context);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1779
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1786
ret = dcn35_populate_dml_pipes_from_context_fpu(dc, context, pipes, validate_mode);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1675
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1683
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1684
stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1686
if (dc_state_can_clear_stream_cursor_subvp_limit(stream, context)) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1687
dc_state_set_stream_cursor_subvp_limit(stream, context, false);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1692
status = dml2_validate(dc, context,
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1693
context->power_source == DC_POWER_SOURCE_DC ? context->bw_ctx.dml2_dc_power_source : context->bw_ctx.dml2,
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1696
if (validate_mode == DC_VALIDATE_MODE_AND_PROGRAMMING && status == DC_OK && dc_state_is_subvp_in_use(context)) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1698
for (i = 0; i < context->stream_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1699
stream = context->streams[i];
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1701
if (dc_state_get_stream_subvp_type(context, stream) != SUBVP_PHANTOM &&
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1703
!dc_stream_check_cursor_attributes(stream, context, &stream->cursor_attributes)) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1705
dc_state_set_stream_cursor_subvp_limit(stream, context, true);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1714
status = dml2_validate(dc, context,
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1715
context->power_source == DC_POWER_SOURCE_DC ? context->bw_ctx.dml2_dc_power_source : context->bw_ctx.dml2,
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1723
struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1726
dml2_prepare_mcache_programming(dc, context,
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1727
context->power_source == DC_POWER_SOURCE_DC ? context->bw_ctx.dml2_dc_power_source : context->bw_ctx.dml2);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1793
static int dcn401_get_power_profile(const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1795
int uclk_mhz = context->bw_ctx.bw.dcn.clk.dramclk_khz / 1000;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1798
for (int i = 0; i < context->clk_mgr->bw_params->clk_table.num_entries_per_clk.num_memclk_levels; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1799
if (context->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz == 0 ||
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1800
uclk_mhz < context->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1802
if (uclk_mhz > context->clk_mgr->bw_params->clk_table.entries[i].memclk_mhz)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.h
26
struct dc_state *context,
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.h
29
void dcn401_prepare_mcache_programming(struct dc *dc, struct dc_state *context);
drivers/gpu/drm/amd/display/include/logger_interface.h
52
struct dc_state *context);
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
1005
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
130
if (!psp->dtm_context.context.initialized) {
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
136
dtm_cmd = (struct ta_dtm_shared_memory *)psp->dtm_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
177
if (!psp->dtm_context.context.initialized) {
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
183
dtm_cmd = (struct ta_dtm_shared_memory *)psp->dtm_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
258
if (!psp->hdcp_context.context.initialized) {
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
266
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
301
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
333
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
378
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
404
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
447
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
482
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
509
if (!psp->hdcp_context.context.initialized) {
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
519
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
556
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
57
dtm_cmd = (struct ta_dtm_shared_memory *)psp->dtm_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
590
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
622
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
682
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
728
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
761
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
796
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
844
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
873
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
925
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
93
dtm_cmd = (struct ta_dtm_shared_memory *)psp->dtm_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/hdcp/hdcp_psp.c
969
hdcp_cmd = (struct ta_hdcp_shared_memory *)psp->hdcp_context.context.mem_context.shared_buf;
drivers/gpu/drm/amd/display/modules/power/power_helpers.c
938
bool mod_power_only_edp(const struct dc_state *context, const struct dc_stream_state *stream)
drivers/gpu/drm/amd/display/modules/power/power_helpers.c
940
return context && context->stream_count == 1 && dc_is_embedded_signal(stream->signal);
drivers/gpu/drm/amd/display/modules/power/power_helpers.h
78
bool mod_power_only_edp(const struct dc_state *context,
drivers/gpu/drm/amd/pm/powerplay/inc/ppinterrupt.h
41
void *context; /* Pointer to callback function context */
drivers/gpu/drm/amd/ras/ras_mgr/amdgpu_ras_mgr.c
194
struct ta_context *context = &adev->psp.ras_context.context;
drivers/gpu/drm/amd/ras/ras_mgr/amdgpu_ras_mgr.c
196
status->initialized = context->initialized;
drivers/gpu/drm/amd/ras/ras_mgr/amdgpu_ras_mgr.c
197
status->session_id = context->session_id;
drivers/gpu/drm/amd/ras/ras_mgr/amdgpu_ras_sys.c
244
mem_ctx = &psp->ras_context.context.mem_context;
drivers/gpu/drm/amd/ras/rascore/ras_process.c
206
static int ras_process_thread(void *context)
drivers/gpu/drm/amd/ras/rascore/ras_process.c
208
struct ras_core_context *ras_core = (struct ras_core_context *)context;
drivers/gpu/drm/bridge/cadence/cdns-mhdp8546-core.c
711
static void cdns_mhdp_fw_cb(const struct firmware *fw, void *context)
drivers/gpu/drm/bridge/cadence/cdns-mhdp8546-core.c
713
struct cdns_mhdp_device *mhdp = context;
drivers/gpu/drm/bridge/chipone-icn6211.c
204
static int chipone_dsi_read(void *context,
drivers/gpu/drm/bridge/chipone-icn6211.c
208
struct mipi_dsi_device *dsi = context;
drivers/gpu/drm/bridge/chipone-icn6211.c
217
static int chipone_dsi_write(void *context, const void *data, size_t count)
drivers/gpu/drm/bridge/chipone-icn6211.c
219
struct mipi_dsi_device *dsi = context;
drivers/gpu/drm/bridge/ite-it66121.c
517
static int it66121_get_edid_block(void *context, u8 *buf,
drivers/gpu/drm/bridge/ite-it66121.c
520
struct it66121_ctx *ctx = context;
drivers/gpu/drm/bridge/megachips-stdpxxxx-ge-b850v3-fw.c
68
static int stdp2690_read_block(void *context, u8 *buf, unsigned int block, size_t len)
drivers/gpu/drm/bridge/megachips-stdpxxxx-ge-b850v3-fw.c
70
struct i2c_client *client = context;
drivers/gpu/drm/drm_edid.c
2331
typedef int read_block_fn(void *context, u8 *buf, unsigned int block, size_t len);
drivers/gpu/drm/drm_edid.c
2335
void *context)
drivers/gpu/drm/drm_edid.c
2342
if (read_block(context, block, block_num, EDID_LENGTH))
drivers/gpu/drm/drm_edid.c
2367
read_block_fn read_block, void *context,
drivers/gpu/drm/drm_edid.c
2390
status = edid_block_read(edid, 0, read_block, context);
drivers/gpu/drm/drm_edid.c
2424
status = edid_block_read(block, i, read_block, context);
drivers/gpu/drm/drm_edid.c
2646
void *context)
drivers/gpu/drm/drm_edid.c
2652
edid = _drm_do_get_edid(connector, read_block, context, &size);
drivers/gpu/drm/drm_privacy_screen_x86.c
22
void *context, void **return_value)
drivers/gpu/drm/drm_suballoc.c
415
idx = fence->context & (DRM_SUBALLOC_MAX_QUEUES - 1);
drivers/gpu/drm/drm_suballoc.c
449
(unsigned long long)i->fence->context);
drivers/gpu/drm/drm_syncobj.c
1693
if (iter->context != fence->context) {
drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.c
62
struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.c
66
return etnaviv_iommu_get_suballoc_va(context, mapping, memory_base,
drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.c
70
void etnaviv_cmdbuf_suballoc_unmap(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.c
73
etnaviv_iommu_put_suballoc_va(context, mapping);
drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.h
31
struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.h
34
void etnaviv_cmdbuf_suballoc_unmap(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_gem.c
217
struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_gem.c
222
if (mapping->context == context)
drivers/gpu/drm/etnaviv/etnaviv_gem.c
261
if (mapping->context == mmu_context)
drivers/gpu/drm/etnaviv/etnaviv_gem.c
516
struct etnaviv_iommu_context *context = mapping->context;
drivers/gpu/drm/etnaviv/etnaviv_gem.c
520
if (context)
drivers/gpu/drm/etnaviv/etnaviv_gem.c
521
etnaviv_iommu_unmap_gem(context, mapping);
drivers/gpu/drm/etnaviv/etnaviv_gem.h
28
struct etnaviv_iommu_context *context;
drivers/gpu/drm/etnaviv/etnaviv_gpu.c
711
struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_gpu.c
719
etnaviv_iommu_restore(gpu, context);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
100
gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_RA, context->global->memory_base);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
101
gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_FE, context->global->memory_base);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
102
gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_TX, context->global->memory_base);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
103
gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_PEZ, context->global->memory_base);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
104
gpu_write(gpu, VIVS_MC_MEMORY_BASE_ADDR_PE, context->global->memory_base);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
130
struct etnaviv_iommu_context *context;
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
140
context = global->v1.shared_context;
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
141
etnaviv_iommu_context_get(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
143
return context;
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
160
context = &v1_context->base;
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
161
context->global = global;
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
162
kref_init(&context->refcount);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
163
mutex_init(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
164
INIT_LIST_HEAD(&context->mappings);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
165
drm_mm_init(&context->mm, GPU_MEM_START, PT_ENTRIES * SZ_4K);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
166
context->global->v1.shared_context = context;
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
170
return context;
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
28
to_v1_context(struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
30
return container_of(context, struct etnaviv_iommuv1_context, base);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
33
static void etnaviv_iommuv1_free(struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
35
struct etnaviv_iommuv1_context *v1_context = to_v1_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
37
drm_mm_takedown(&context->mm);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
39
dma_free_wc(context->global->dev, PT_SIZE, v1_context->pgtable_cpu,
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
42
context->global->v1.shared_context = NULL;
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
47
static int etnaviv_iommuv1_map(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
51
struct etnaviv_iommuv1_context *v1_context = to_v1_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
62
static size_t etnaviv_iommuv1_unmap(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
65
struct etnaviv_iommuv1_context *v1_context = to_v1_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
71
v1_context->pgtable_cpu[index] = context->global->bad_page_dma;
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
76
static size_t etnaviv_iommuv1_dump_size(struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
81
static void etnaviv_iommuv1_dump(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
84
struct etnaviv_iommuv1_context *v1_context = to_v1_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
90
struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
92
struct etnaviv_iommuv1_context *v1_context = to_v1_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu.c
97
gpu->mmu_context = etnaviv_iommu_context_get(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
121
static size_t etnaviv_iommuv2_unmap(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
124
struct etnaviv_iommuv2_context *etnaviv_domain = to_v2_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
138
static size_t etnaviv_iommuv2_dump_size(struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
140
struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
151
static void etnaviv_iommuv2_dump(struct etnaviv_iommu_context *context, void *buf)
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
153
struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
166
struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
168
struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
177
gpu->mmu_context = etnaviv_iommu_context_get(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
181
(u32)context->global->bad_page_dma);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
190
struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
192
struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
201
gpu->mmu_context = etnaviv_iommu_context_get(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
204
lower_32_bits(context->global->v2.pta_dma));
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
206
upper_32_bits(context->global->v2.pta_dma));
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
210
lower_32_bits(context->global->bad_page_dma));
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
212
lower_32_bits(context->global->bad_page_dma));
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
215
upper_32_bits(context->global->bad_page_dma)) |
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
217
upper_32_bits(context->global->bad_page_dma)));
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
219
context->global->v2.pta_cpu[v2_context->id] = v2_context->mtlb_dma |
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
231
u32 etnaviv_iommuv2_get_mtlb_addr(struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
233
struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
238
unsigned short etnaviv_iommuv2_get_pta_id(struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
240
struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
245
struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
249
etnaviv_iommuv2_restore_nonsec(gpu, context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
252
etnaviv_iommuv2_restore_sec(gpu, context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
273
struct etnaviv_iommu_context *context;
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
300
context = &v2_context->base;
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
301
context->global = global;
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
302
kref_init(&context->refcount);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
303
mutex_init(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
304
INIT_LIST_HEAD(&context->mappings);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
305
drm_mm_init(&context->mm, SZ_4K, (u64)SZ_1G * 4 - SZ_4K);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
307
return context;
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
42
to_v2_context(struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
44
return container_of(context, struct etnaviv_iommuv2_context, base);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
47
static void etnaviv_iommuv2_free(struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
49
struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
52
drm_mm_takedown(&context->mm);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
56
dma_free_wc(context->global->dev, SZ_4K,
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
61
dma_free_wc(context->global->dev, SZ_4K, v2_context->mtlb_cpu,
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
64
clear_bit(v2_context->id, context->global->v2.pta_alloc);
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
92
static int etnaviv_iommuv2_map(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_iommu_v2.c
96
struct etnaviv_iommuv2_context *v2_context = to_v2_context(context);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
102
etnaviv_context_unmap(context, iova, da - iova);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
106
static void etnaviv_iommu_unmap(struct etnaviv_iommu_context *context, u32 iova,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
109
etnaviv_context_unmap(context, iova, len);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
111
context->flush_seq++;
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
114
static void etnaviv_iommu_remove_mapping(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
119
lockdep_assert_held(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
121
etnaviv_iommu_unmap(context, mapping->vram_node.start,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
128
struct etnaviv_iommu_context *context = mapping->context;
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
130
lockdep_assert_held(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
133
etnaviv_iommu_remove_mapping(context, mapping);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
134
etnaviv_iommu_context_put(mapping->context);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
135
mapping->context = NULL;
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
139
static int etnaviv_iommu_find_iova(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
146
lockdep_assert_held(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
154
ret = drm_mm_insert_node_in_range(&context->mm, node,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
160
drm_mm_scan_init(&scan, &context->mm, size, 0, 0, mode);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
164
list_for_each_entry(free, &context->mappings, mmu_node) {
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
18
static void etnaviv_context_unmap(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
221
static int etnaviv_iommu_insert_exact(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
229
lockdep_assert_held(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
231
ret = drm_mm_insert_node_in_range(&context->mm, node, size, 0, 0, va,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
247
drm_mm_for_each_node_in_range(scan_node, &context->mm, va, va + size) {
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
25
unmapped_page = context->global->ops->unmap(context, iova,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
262
return drm_mm_insert_node_in_range(&context->mm, node, size, 0, 0, va,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
266
int etnaviv_iommu_map_gem(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
276
mutex_lock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
279
if (context->global->version == ETNAVIV_IOMMU_V1 &&
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
286
mapping->context = etnaviv_iommu_context_get(context);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
287
list_add_tail(&mapping->mmu_node, &context->mappings);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
296
ret = etnaviv_iommu_insert_exact(context, node, etnaviv_obj->size, va);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
298
ret = etnaviv_iommu_find_iova(context, node, etnaviv_obj->size);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
303
ret = etnaviv_iommu_map(context, node->start, etnaviv_obj->size, sgt,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
311
mapping->context = etnaviv_iommu_context_get(context);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
312
list_add_tail(&mapping->mmu_node, &context->mappings);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
314
mutex_unlock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
319
void etnaviv_iommu_unmap_gem(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
324
mutex_lock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
327
if (!mapping->context) {
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
328
mutex_unlock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
333
if (mapping->vram_node.mm == &context->mm)
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
334
etnaviv_iommu_remove_mapping(context, mapping);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
337
mutex_unlock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
338
etnaviv_iommu_context_put(context);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
343
struct etnaviv_iommu_context *context =
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
346
etnaviv_cmdbuf_suballoc_unmap(context, &context->cmdbuf_mapping);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
347
mutex_destroy(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
348
context->global->ops->free(context);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
35
static int etnaviv_context_map(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
350
void etnaviv_iommu_context_put(struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
352
kref_put(&context->refcount, etnaviv_iommu_context_free);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
392
struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
394
context->global->ops->restore(gpu, context);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
397
int etnaviv_iommu_get_suballoc_va(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
402
mutex_lock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
406
mutex_unlock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
416
if (context->global->version == ETNAVIV_IOMMU_V1) {
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
422
ret = etnaviv_iommu_find_iova(context, node, size);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
424
mutex_unlock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
429
ret = etnaviv_context_map(context, node->start, paddr, size,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
433
mutex_unlock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
437
context->flush_seq++;
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
440
list_add_tail(&mapping->mmu_node, &context->mappings);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
443
mutex_unlock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
448
void etnaviv_iommu_put_suballoc_va(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
45
ret = context->global->ops->map(context, iova, paddr, pgsize,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
453
mutex_lock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
456
if (mapping->use > 0 || context->global->version == ETNAVIV_IOMMU_V1) {
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
457
mutex_unlock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
461
etnaviv_context_unmap(context, node->start, node->size);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
463
mutex_unlock(&context->lock);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
466
size_t etnaviv_iommu_dump_size(struct etnaviv_iommu_context *context)
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
468
return context->global->ops->dump_size(context);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
471
void etnaviv_iommu_dump(struct etnaviv_iommu_context *context, void *buf)
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
473
context->global->ops->dump(context, buf);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
57
etnaviv_context_unmap(context, orig_iova, orig_size - size);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
62
static int etnaviv_iommu_map(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
71
if (!context || !sgt)
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
82
dev_err(context->global->dev,
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
89
ret = etnaviv_context_map(context, da, pa, bytes, prot);
drivers/gpu/drm/etnaviv/etnaviv_mmu.c
97
context->flush_seq++;
drivers/gpu/drm/etnaviv/etnaviv_mmu.h
123
u32 etnaviv_iommuv2_get_mtlb_addr(struct etnaviv_iommu_context *context);
drivers/gpu/drm/etnaviv/etnaviv_mmu.h
124
unsigned short etnaviv_iommuv2_get_pta_id(struct etnaviv_iommu_context *context);
drivers/gpu/drm/etnaviv/etnaviv_mmu.h
25
int (*map)(struct etnaviv_iommu_context *context, unsigned long iova,
drivers/gpu/drm/etnaviv/etnaviv_mmu.h
27
size_t (*unmap)(struct etnaviv_iommu_context *context, unsigned long iova,
drivers/gpu/drm/etnaviv/etnaviv_mmu.h
88
int etnaviv_iommu_map_gem(struct etnaviv_iommu_context *context,
drivers/gpu/drm/etnaviv/etnaviv_mmu.h
91
void etnaviv_iommu_unmap_gem(struct etnaviv_iommu_context *context,
drivers/gpu/drm/exynos/exynos_drm_crtc.h
21
void *context);
drivers/gpu/drm/i915/display/intel_cdclk.c
2572
const char *context)
drivers/gpu/drm/i915/display/intel_cdclk.c
2575
context, cdclk_config->cdclk, cdclk_config->vco,
drivers/gpu/drm/i915/display/intel_cdclk.c
2614
enum pipe pipe, const char *context)
drivers/gpu/drm/i915/display/intel_cdclk.c
2624
intel_cdclk_dump_config(display, cdclk_config, context);
drivers/gpu/drm/i915/display/intel_cdclk.h
40
const char *context);
drivers/gpu/drm/i915/display/intel_crtc_state_dump.c
178
const char *context)
drivers/gpu/drm/i915/display/intel_crtc_state_dump.c
195
str_yes_no(pipe_config->hw.enable), context);
drivers/gpu/drm/i915/display/intel_crtc_state_dump.h
15
const char *context);
drivers/gpu/drm/i915/display/intel_display_reset.c
26
modeset_stuck_fn modeset_stuck, void *context)
drivers/gpu/drm/i915/display/intel_display_reset.c
38
modeset_stuck(context);
drivers/gpu/drm/i915/display/intel_display_reset.h
13
typedef void modeset_stuck_fn(void *context);
drivers/gpu/drm/i915/display/intel_display_reset.h
17
modeset_stuck_fn modeset_stuck, void *context);
drivers/gpu/drm/i915/display/intel_overlay.c
1417
overlay->context = engine->kernel_context;
drivers/gpu/drm/i915/display/intel_overlay.c
192
struct intel_context *context;
drivers/gpu/drm/i915/display/intel_overlay.c
244
rq = i915_request_create(overlay->context);
drivers/gpu/drm/i915/gem/i915_gem_context.c
1908
struct intel_sseu *context)
drivers/gpu/drm/i915/gem/i915_gem_context.c
1927
if (overflows_type(user->slice_mask, context->slice_mask) ||
drivers/gpu/drm/i915/gem/i915_gem_context.c
1928
overflows_type(user->subslice_mask, context->subslice_mask) ||
drivers/gpu/drm/i915/gem/i915_gem_context.c
1930
context->min_eus_per_subslice) ||
drivers/gpu/drm/i915/gem/i915_gem_context.c
1932
context->max_eus_per_subslice))
drivers/gpu/drm/i915/gem/i915_gem_context.c
1945
context->slice_mask = user->slice_mask;
drivers/gpu/drm/i915/gem/i915_gem_context.c
1946
context->subslice_mask = user->subslice_mask;
drivers/gpu/drm/i915/gem/i915_gem_context.c
1947
context->min_eus_per_subslice = user->min_eus_per_subslice;
drivers/gpu/drm/i915/gem/i915_gem_context.c
1948
context->max_eus_per_subslice = user->max_eus_per_subslice;
drivers/gpu/drm/i915/gem/i915_gem_context.c
1954
unsigned int req_s = hweight8(context->slice_mask);
drivers/gpu/drm/i915/gem/i915_gem_context.c
1955
unsigned int req_ss = hweight8(context->subslice_mask);
drivers/gpu/drm/i915/gem/i915_gem_context.h
245
struct intel_sseu *context);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2281
if (intel_context_is_parallel(eb->context))
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2290
if (!eb->context->vm->has_read_only) {
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2313
shadow = shadow_batch_pin(eb, pool->obj, eb->context->vm, PIN_USER);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2344
err = intel_engine_cmd_parser(eb->context->engine,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2361
if (intel_context_is_parallel(eb->context))
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2378
if (intel_context_nopreempt(rq->context))
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2393
if (rq->context->engine->emit_init_breadcrumb) {
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2394
err = rq->context->engine->emit_init_breadcrumb(rq);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2399
err = rq->context->engine->emit_bb_start(rq,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2408
GEM_BUG_ON(intel_context_is_parallel(rq->context));
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2410
err = rq->context->engine->emit_bb_start(rq,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
253
struct intel_context *context; /* logical state for the request */
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2554
struct intel_context *ce = eb->context, *child;
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2605
struct intel_context *ce = eb->context, *child;
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2737
eb->context = ce;
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2763
i915_vm_put(eb->context->vm);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2770
intel_gt_pm_put(eb->context->engine->gt, eb->wakeref);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2771
for_each_child(eb->context, child)
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2773
intel_context_put(eb->context);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3085
if (likely(!intel_context_is_closed(eb->context))) {
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3094
if (intel_context_is_parallel(eb->context)) {
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3193
GEM_BUG_ON(!intel_context_is_parent(eb->context));
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3207
eb->context->parallel.fence_context,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3208
eb->context->parallel.seqno++,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3264
if (intel_context_is_parallel(eb->context)) {
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
327
return intel_engine_requires_cmd_parser(eb->context->engine) ||
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
328
(intel_engine_using_cmd_parser(eb->context->engine) &&
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3283
return eb->context;
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3285
for_each_child(eb->context, child)
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
3331
GEM_BUG_ON(intel_context_is_parallel(eb->context));
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
782
err = mutex_lock_interruptible(&eb->context->vm->mutex);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
784
err = i915_gem_evict_vm(eb->context->vm, &eb->ww, NULL);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
785
mutex_unlock(&eb->context->vm->mutex);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
793
err = mutex_lock_interruptible(&eb->context->vm->mutex);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
797
err = i915_gem_evict_vm(eb->context->vm, &eb->ww, &busy_bo);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
798
mutex_unlock(&eb->context->vm->mutex);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
890
struct i915_address_space *vm = eb->context->vm;
drivers/gpu/drm/i915/gem/i915_gem_ttm_move.c
202
if (!to_gt(i915)->migrate.context || intel_gt_is_wedged(to_gt(i915)))
drivers/gpu/drm/i915/gem/i915_gem_ttm_move.c
215
intel_engine_pm_get(to_gt(i915)->migrate.context->engine);
drivers/gpu/drm/i915/gem/i915_gem_ttm_move.c
216
ret = intel_context_migrate_clear(to_gt(i915)->migrate.context, deps,
drivers/gpu/drm/i915/gem/i915_gem_ttm_move.c
229
intel_engine_pm_get(to_gt(i915)->migrate.context->engine);
drivers/gpu/drm/i915/gem/i915_gem_ttm_move.c
230
ret = intel_context_migrate_copy(to_gt(i915)->migrate.context,
drivers/gpu/drm/i915/gem/i915_gem_ttm_move.c
242
intel_engine_pm_put(to_gt(i915)->migrate.context->engine);
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
116
static int gtt_get(struct context *ctx, unsigned long offset, u32 *v)
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
150
static int wc_set(struct context *ctx, unsigned long offset, u32 v)
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
173
static int wc_get(struct context *ctx, unsigned long offset, u32 *v)
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
194
static int gpu_set(struct context *ctx, unsigned long offset, u32 v)
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
23
static int cpu_set(struct context *ctx, unsigned long offset, u32 v)
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
252
static bool always_valid(struct context *ctx)
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
257
static bool needs_fence_registers(struct context *ctx)
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
267
static bool needs_mi_store_dword(struct context *ctx)
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
277
int (*set)(struct context *ctx, unsigned long offset, u32 v);
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
278
int (*get)(struct context *ctx, unsigned long offset, u32 *v);
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
279
bool (*valid)(struct context *ctx);
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
314
struct context ctx;
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
54
static int cpu_get(struct context *ctx, unsigned long offset, u32 *v)
drivers/gpu/drm/i915/gem/selftests/i915_gem_coherency.c
82
static int gtt_set(struct context *ctx, unsigned long offset, u32 v)
drivers/gpu/drm/i915/gem/selftests/i915_gem_mman.c
1219
err = intel_context_migrate_clear(to_gt(i915)->migrate.context, NULL,
drivers/gpu/drm/i915/gt/gen8_engine_cs.c
488
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/gen8_engine_cs.c
750
return i915_ggtt_offset(rq->context->state) +
drivers/gpu/drm/i915/gt/intel_breadcrumbs.c
115
if (rq->context != ce)
drivers/gpu/drm/i915/gt/intel_breadcrumbs.c
353
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/intel_breadcrumbs.c
414
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/intel_breadcrumbs.c
440
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/intel_breadcrumbs.c
505
rq->fence.context, rq->fence.seqno,
drivers/gpu/drm/i915/gt/intel_context.c
489
GEM_BUG_ON(rq->context == ce);
drivers/gpu/drm/i915/gt/intel_context.c
564
if (rq->context != ce)
drivers/gpu/drm/i915/gt/intel_engine_cs.c
1320
frame->rq.context = ce;
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2025
DIV_ROUND_CLOSEST_ULL(intel_context_get_total_runtime_ns(rq->context),
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2183
rq->context->lrc.ccid,
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2184
intel_context_is_closed(rq->context) ? "!" : "",
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2185
intel_context_is_banned(rq->context) ? "*" : "");
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2197
rq->context->lrc.ccid,
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2198
intel_context_is_closed(rq->context) ? "!" : "",
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2199
intel_context_is_banned(rq->context) ? "*" : "");
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2310
if (rq->context->lrc_reg_state) {
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2312
hexdump(m, rq->context->lrc_reg_state, PAGE_SIZE);
drivers/gpu/drm/i915/gt/intel_engine_cs.c
2489
struct intel_timeline *tl = request->context->timeline;
drivers/gpu/drm/i915/gt/intel_engine_heartbeat.c
113
rq->fence.context,
drivers/gpu/drm/i915/gt/intel_engine_pm.c
123
GEM_BUG_ON(rq->context->active_count != 1);
drivers/gpu/drm/i915/gt/intel_engine_pm.c
125
rq->context->wakeref = intel_wakeref_track(&engine->gt->wakeref);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1014
inflight = intel_context_inflight(&ve->context);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1054
GEM_BUG_ON(READ_ONCE(ve->context.inflight));
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1056
lrc_update_offsets(&ve->context, engine);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1148
return rq->context->lrc.ccid == READ_ONCE(el->yield);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1246
if (unlikely(intel_context_is_banned(rq->context) || bad_request(rq)))
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1322
last->fence.context,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1349
last->fence.context, last->fence.seqno,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1420
GEM_BUG_ON(rq->context != &ve->context);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1435
rq->fence.context,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1520
if (last->context == rq->context)
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1542
if (ctx_single_port_submission(last->context) ||
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1543
ctx_single_port_submission(rq->context))
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1556
!can_merge_ctx(last->context,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1557
rq->context));
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
178
struct intel_context context;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1967
rq->context->lrc_reg_state;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
1982
rq->fence.context,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
2031
prev_ce = (*prev)->context;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
2033
active_ce = (*execlists->active)->context;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
2299
if (rq->context->lrc.ccid == ccid) {
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
2308
if (rq->context->lrc.ccid == ccid) {
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
2345
cap->rq = active_request(cap->rq->context->timeline, cap->rq);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
2714
struct i915_ppgtt * const ppgtt = i915_vm_to_ppgtt(rq->context->vm);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
2772
GEM_BUG_ON(!intel_context_is_pinned(request->context));
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
2789
if (!i915_vm_is_4lvl(request->context->vm)) {
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3039
ce = rq->context;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3342
if (inflight->context == rq->context)
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3348
rq->fence.context, rq->fence.seqno,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3349
inflight->fence.context, inflight->fence.seqno,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3604
GEM_BUG_ON(ve->context.inflight);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3650
lrc_fini(&ve->context);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3651
intel_context_fini(&ve->context);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3665
container_of(kref, typeof(*ve), context.ref);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3667
GEM_BUG_ON(!list_empty(&ve->context.signals));
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3680
queue_rcu_work(ve->context.engine->i915->unordered_wq, &ve->rcu);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3707
struct virtual_engine *ve = container_of(ce, typeof(*ve), context);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3716
struct virtual_engine *ve = container_of(ce, typeof(*ve), context);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3724
struct virtual_engine *ve = container_of(ce, typeof(*ve), context);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3731
struct virtual_engine *ve = container_of(ce, typeof(*ve), context);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3742
struct virtual_engine *ve = container_of(ce, typeof(*ve), context);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3800
rq->fence.context, rq->fence.seqno,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3886
if (intel_context_inflight(&ve->context))
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3897
rq->fence.context,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
3987
intel_context_init(&ve->context, &ve->base);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
406
rq->context->lrc.desc |= CTX_DESC_FORCE_RESTORE;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
4066
return &ve->context;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
4069
intel_context_put(&ve->context);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
431
struct intel_context * const ce = rq->context;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
450
rq->fence.context, rq->fence.seqno);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
475
struct intel_context * const ce = rq->context;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
529
struct intel_context * const ce = rq->context;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
559
struct virtual_engine *ve = container_of(ce, typeof(*ve), context);
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
650
struct intel_context * const ce = rq->context;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
674
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
742
rq->context->lrc.ccid,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
743
rq->fence.context, rq->fence.seqno,
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
810
if (ce == rq->context) {
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
817
ce = rq->context;
drivers/gpu/drm/i915/gt/intel_execlists_submission.c
986
if (!can_merge_ctx(prev->context, next->context))
drivers/gpu/drm/i915/gt/intel_gt.c
593
GEM_BUG_ON(!test_bit(CONTEXT_ALLOC_BIT, &rq->context->flags));
drivers/gpu/drm/i915/gt/intel_gt.c
594
if (!rq->context->state)
drivers/gpu/drm/i915/gt/intel_gt.c
598
state = shmem_create_from_object(rq->context->state->obj);
drivers/gpu/drm/i915/gt/intel_gt.c
623
ce = rq->context;
drivers/gpu/drm/i915/gt/intel_migrate.c
1093
if (!m->context)
drivers/gpu/drm/i915/gt/intel_migrate.c
1098
ce = intel_context_get(m->context);
drivers/gpu/drm/i915/gt/intel_migrate.c
1130
if (!m->context)
drivers/gpu/drm/i915/gt/intel_migrate.c
1135
ce = intel_context_get(m->context);
drivers/gpu/drm/i915/gt/intel_migrate.c
1155
ce = fetch_and_zero(&m->context);
drivers/gpu/drm/i915/gt/intel_migrate.c
277
m->context = ce;
drivers/gpu/drm/i915/gt/intel_migrate.c
314
ce = __migrate_engines(m->context->engine->gt);
drivers/gpu/drm/i915/gt/intel_migrate.c
322
ce->vm = i915_vm_get(m->context->vm);
drivers/gpu/drm/i915/gt/intel_migrate.c
370
const u64 encode = rq->context->vm->pte_encode(0, pat_index,
drivers/gpu/drm/i915/gt/intel_migrate.c
699
GEM_BUG_ON(ce->vm != ce->engine->gt->migrate.context->vm);
drivers/gpu/drm/i915/gt/intel_migrate.c
998
GEM_BUG_ON(ce->vm != ce->engine->gt->migrate.context->vm);
drivers/gpu/drm/i915/gt/intel_migrate_types.h
12
struct intel_context *context;
drivers/gpu/drm/i915/gt/intel_reset.c
119
ctx = rcu_dereference(rq->context->gem_context);
drivers/gpu/drm/i915/gt/intel_reset.c
144
intel_context_ban(rq->context, rq);
drivers/gpu/drm/i915/gt/intel_reset.c
70
if (intel_context_is_closed(rq->context))
drivers/gpu/drm/i915/gt/intel_reset.c
74
ctx = rcu_dereference(rq->context->gem_context);
drivers/gpu/drm/i915/gt/intel_reset.c
79
return intel_context_is_banned(rq->context);
drivers/gpu/drm/i915/gt/intel_ring_submission.c
1021
GEM_BUG_ON(!intel_context_is_pinned(request->context));
drivers/gpu/drm/i915/gt/intel_ring_submission.c
655
if (rq->context == ce) {
drivers/gpu/drm/i915/gt/intel_ring_submission.c
955
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/intel_rps.c
1028
if (test_bit(CONTEXT_LOW_LATENCY, &rq->context->flags))
drivers/gpu/drm/i915/gt/intel_rps.c
1052
rq->fence.context, rq->fence.seqno);
drivers/gpu/drm/i915/gt/intel_rps.c
1067
rq->fence.context, rq->fence.seqno);
drivers/gpu/drm/i915/gt/intel_timeline.h
47
u64 context, u32 seqno)
drivers/gpu/drm/i915/gt/intel_timeline.h
49
return i915_syncmap_set(&tl->sync, context, seqno);
drivers/gpu/drm/i915/gt/intel_timeline.h
55
return __intel_timeline_sync_set(tl, fence->context, fence->seqno);
drivers/gpu/drm/i915/gt/intel_timeline.h
59
u64 context, u32 seqno)
drivers/gpu/drm/i915/gt/intel_timeline.h
61
return i915_syncmap_is_later(&tl->sync, context, seqno);
drivers/gpu/drm/i915/gt/intel_timeline.h
67
return __intel_timeline_sync_is_later(tl, fence->context, fence->seqno);
drivers/gpu/drm/i915/gt/selftest_execlists.c
104
rq->fence.context,
drivers/gpu/drm/i915/gt/selftest_execlists.c
2077
clear_bit(CONTEXT_BANNED, &rq->context->flags);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2085
intel_context_ban(rq->context, rq);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2121
clear_bit(CONTEXT_BANNED, &rq[0]->context->flags);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2137
clear_bit(CONTEXT_BANNED, &rq[1]->context->flags);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2144
intel_context_ban(rq[1]->context, rq[1]);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2192
clear_bit(CONTEXT_BANNED, &rq[0]->context->flags);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2206
clear_bit(CONTEXT_BANNED, &rq[1]->context->flags);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2227
intel_context_ban(rq[2]->context, rq[2]);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2288
clear_bit(CONTEXT_BANNED, &rq->context->flags);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2296
intel_context_ban(rq->context, rq);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2344
clear_bit(CONTEXT_BANNED, &rq->context->flags);
drivers/gpu/drm/i915/gt/selftest_execlists.c
2352
intel_context_set_banned(rq->context);
drivers/gpu/drm/i915/gt/selftest_execlists.c
3792
request[nc]->fence.context,
drivers/gpu/drm/i915/gt/selftest_execlists.c
3797
request[nc]->fence.context,
drivers/gpu/drm/i915/gt/selftest_execlists.c
3959
request[n]->fence.context,
drivers/gpu/drm/i915/gt/selftest_execlists.c
3964
request[n]->fence.context,
drivers/gpu/drm/i915/gt/selftest_execlists.c
94
rq->fence.context,
drivers/gpu/drm/i915/gt/selftest_hangcheck.c
100
offset_in_page(sizeof(u32) * rq->fence.context);
drivers/gpu/drm/i915/gt/selftest_hangcheck.c
1101
engine->name, rq->fence.context,
drivers/gpu/drm/i915/gt/selftest_hangcheck.c
1102
rq->fence.seqno, rq->context->guc_id.id, err);
drivers/gpu/drm/i915/gt/selftest_hangcheck.c
1111
rq->fence.context,
drivers/gpu/drm/i915/gt/selftest_hangcheck.c
1112
rq->fence.seqno, rq->context->guc_id.id);
drivers/gpu/drm/i915/gt/selftest_hangcheck.c
1128
rq->fence.context,
drivers/gpu/drm/i915/gt/selftest_hangcheck.c
255
return READ_ONCE(h->seqno[rq->fence.context % (PAGE_SIZE/sizeof(u32))]);
drivers/gpu/drm/i915/gt/selftest_hangcheck.c
781
engine->name, rq->fence.context,
drivers/gpu/drm/i915/gt/selftest_hangcheck.c
782
rq->fence.seqno, rq->context->guc_id.id, err);
drivers/gpu/drm/i915/gt/selftest_hangcheck.c
878
rq->fence.context,
drivers/gpu/drm/i915/gt/selftest_migrate.c
1001
err = __perf_copy_blt(gt->migrate.context,
drivers/gpu/drm/i915/gt/selftest_migrate.c
1035
if (!gt->migrate.context)
drivers/gpu/drm/i915/gt/selftest_migrate.c
151
GEM_BUG_ON(ce->vm != ce->engine->gt->migrate.context->vm);
drivers/gpu/drm/i915/gt/selftest_migrate.c
234
if (!m->context)
drivers/gpu/drm/i915/gt/selftest_migrate.c
239
ce = intel_context_get(m->context);
drivers/gpu/drm/i915/gt/selftest_migrate.c
263
struct drm_i915_private *i915 = migrate->context->engine->i915;
drivers/gpu/drm/i915/gt/selftest_migrate.c
430
return intel_context_migrate_copy(migrate->context, NULL,
drivers/gpu/drm/i915/gt/selftest_migrate.c
44
struct drm_i915_private *i915 = migrate->context->engine->i915;
drivers/gpu/drm/i915/gt/selftest_migrate.c
469
return intel_context_migrate_clear(migrate->context, NULL,
drivers/gpu/drm/i915/gt/selftest_migrate.c
492
struct drm_i915_private *i915 = migrate->context->engine->i915;
drivers/gpu/drm/i915/gt/selftest_migrate.c
514
struct drm_i915_private *i915 = migrate->context->engine->i915;
drivers/gpu/drm/i915/gt/selftest_migrate.c
550
struct drm_i915_private *i915 = migrate->context->engine->i915;
drivers/gpu/drm/i915/gt/selftest_migrate.c
720
msleep((intel_vm_no_concurrent_access_wa(migrate->context->vm->i915) ? 100 : 10) * n_cpus);
drivers/gpu/drm/i915/gt/selftest_migrate.c
811
if (!gt->migrate.context)
drivers/gpu/drm/i915/gt/selftest_migrate.c
910
err = __perf_clear_blt(gt->migrate.context,
drivers/gpu/drm/i915/gt/selftest_timeline.c
859
lockdep_unpin_lock(&from->context->timeline->mutex, from->cookie);
drivers/gpu/drm/i915/gt/selftest_timeline.c
860
mutex_unlock(&from->context->timeline->mutex);
drivers/gpu/drm/i915/gt/selftest_timeline.c
864
mutex_lock(&to->context->timeline->mutex);
drivers/gpu/drm/i915/gt/selftest_timeline.c
865
to->cookie = lockdep_pin_lock(&to->context->timeline->mutex);
drivers/gpu/drm/i915/gt/selftest_timeline.c
954
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/selftests/mock_timeline.c
11
void mock_timeline_init(struct intel_timeline *timeline, u64 context)
drivers/gpu/drm/i915/gt/selftests/mock_timeline.c
14
timeline->fence_context = context;
drivers/gpu/drm/i915/gt/selftests/mock_timeline.h
14
void mock_timeline_init(struct intel_timeline *timeline, u64 context);
drivers/gpu/drm/i915/gt/uc/intel_gsc_uc_heci_cmd_submit.c
177
engine = rq->context->engine;
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
139
struct intel_context context;
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
3486
container_of(ce, typeof(*ve), context);
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
386
return intel_context_to_parent(rq->context);
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
3884
GEM_BUG_ON(!intel_context_is_pinned(rq->context));
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5517
rq->context->guc_id.id,
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5640
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5687
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5731
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5781
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5825
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5857
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5939
intel_context_init(&ve->context, &ve->base);
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5991
return &ve->context;
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
5994
intel_context_put(&ve->context);
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
793
rq->context->lrc_reg_state[CTX_RING_TAIL] =
drivers/gpu/drm/i915/gt/uc/intel_guc_submission.c
804
return intel_context_is_parallel(rq->context);
drivers/gpu/drm/i915/gvt/scheduler.c
139
struct intel_context *ctx = workload->req->context;
drivers/gpu/drm/i915/gvt/scheduler.c
272
return intel_context_force_single_submission(rq->context);
drivers/gpu/drm/i915/gvt/scheduler.c
377
if (GRAPHICS_VER(req->engine->i915) == 9 && is_inhibit_context(req->context))
drivers/gpu/drm/i915/gvt/scheduler.c
605
(struct execlist_ring_context *)rq->context->lrc_reg_state;
drivers/gpu/drm/i915/gvt/scheduler.c
74
struct intel_context *ctx = workload->req->context;
drivers/gpu/drm/i915/gvt/scheduler.c
950
struct intel_context *ctx = workload->req->context;
drivers/gpu/drm/i915/i915_active.c
974
GEM_BUG_ON(!intel_context_is_barrier(rq->context));
drivers/gpu/drm/i915/i915_config.c
11
unsigned long i915_fence_context_timeout(u64 context)
drivers/gpu/drm/i915/i915_config.c
13
if (CONFIG_DRM_I915_FENCE_TIMEOUT && context)
drivers/gpu/drm/i915/i915_config.h
12
unsigned long i915_fence_context_timeout(u64 context);
drivers/gpu/drm/i915/i915_deps.c
196
if (!entry->context || entry->context != fence->context)
drivers/gpu/drm/i915/i915_gpu_error.c
1420
erq->context = request->fence.context;
drivers/gpu/drm/i915/i915_gpu_error.c
1428
if (!intel_context_is_closed(request->context)) {
drivers/gpu/drm/i915/i915_gpu_error.c
1431
ctx = rcu_dereference(request->context->gem_context);
drivers/gpu/drm/i915/i915_gpu_error.c
1622
ee->simulated |= record_context(&ee->context, ce);
drivers/gpu/drm/i915/i915_gpu_error.c
1644
vma = engine_coredump_add_context(ee, rq->context, gfp);
drivers/gpu/drm/i915/i915_gpu_error.c
1716
engine->name, rq->fence.context, rq->fence.seqno, ce->guc_id.id);
drivers/gpu/drm/i915/i915_gpu_error.c
1720
engine->name, rq->fence.context, rq->fence.seqno);
drivers/gpu/drm/i915/i915_gpu_error.c
2051
if (first && first->context.pid) {
drivers/gpu/drm/i915/i915_gpu_error.c
2056
first->context.comm, first->context.pid);
drivers/gpu/drm/i915/i915_gpu_error.c
492
prefix, erq->pid, erq->context, erq->seqno,
drivers/gpu/drm/i915/i915_gpu_error.c
886
error_print_context(m, " Active context: ", &ee->context);
drivers/gpu/drm/i915/i915_gpu_error.c
921
ee->context.comm,
drivers/gpu/drm/i915/i915_gpu_error.c
922
ee->context.pid);
drivers/gpu/drm/i915/i915_gpu_error.h
112
} context;
drivers/gpu/drm/i915/i915_gpu_error.h
50
u32 context;
drivers/gpu/drm/i915/i915_request.c
1212
if (!intel_context_use_semaphores(to->context))
drivers/gpu/drm/i915/i915_request.c
1254
fence->context,
drivers/gpu/drm/i915/i915_request.c
1261
return __intel_timeline_sync_set(tl, fence->context, fence->seqno - 1);
drivers/gpu/drm/i915/i915_request.c
1270
GEM_BUG_ON(intel_context_is_barrier(from->context));
drivers/gpu/drm/i915/i915_request.c
1354
i915_fence_context_timeout(fence->context),
drivers/gpu/drm/i915/i915_request.c
1386
return intel_context_is_parallel(rq->context);
drivers/gpu/drm/i915/i915_request.c
1391
return intel_context_to_parent(rq->context);
drivers/gpu/drm/i915/i915_request.c
1426
if (fence->context == rq->fence.context)
drivers/gpu/drm/i915/i915_request.c
1533
if (fence->context == rq->fence.context)
drivers/gpu/drm/i915/i915_request.c
1537
if (fence->context &&
drivers/gpu/drm/i915/i915_request.c
1553
if (fence->context)
drivers/gpu/drm/i915/i915_request.c
1623
struct intel_huc *huc = &rq->context->engine->gt->uc.huc;
drivers/gpu/drm/i915/i915_request.c
1626
if (!rcu_access_pointer(rq->context->gem_context))
drivers/gpu/drm/i915/i915_request.c
1685
bool same_context = prev->context == rq->context;
drivers/gpu/drm/i915/i915_request.c
1859
ctx = rcu_dereference(rq->context->gem_context);
drivers/gpu/drm/i915/i915_request.c
2229
rq->fence.context, rq->fence.seqno,
drivers/gpu/drm/i915/i915_request.c
304
struct intel_context *ce = rq->context;
drivers/gpu/drm/i915/i915_request.c
411
intel_context_exit(rq->context);
drivers/gpu/drm/i915/i915_request.c
412
intel_context_unpin(rq->context);
drivers/gpu/drm/i915/i915_request.c
484
if (!intel_context_inflight(signal->context))
drivers/gpu/drm/i915/i915_request.c
491
if (rq->context == signal->context) {
drivers/gpu/drm/i915/i915_request.c
637
if (unlikely(!intel_context_is_schedulable(request->context)))
drivers/gpu/drm/i915/i915_request.c
770
intel_context_cancel_request(rq->context, rq);
drivers/gpu/drm/i915/i915_request.c
947
rq->context = ce;
drivers/gpu/drm/i915/i915_request.h
213
struct intel_context *context;
drivers/gpu/drm/i915/i915_request.h
671
test_bit(CONTEXT_IS_PARKING, &rq->context->flags));
drivers/gpu/drm/i915/i915_request.h
678
return rcu_dereference_protected(rq->context->gem_context, true);
drivers/gpu/drm/i915/i915_request.h
68
rq__->fence.context, rq__->fence.seqno, \
drivers/gpu/drm/i915/i915_trace.h
283
__entry->ctx = rq->fence.context;
drivers/gpu/drm/i915/i915_trace.h
310
__entry->ctx = rq->fence.context;
drivers/gpu/drm/i915/i915_trace.h
359
__entry->ctx = rq->fence.context;
drivers/gpu/drm/i915/i915_trace.h
388
__entry->ctx = rq->fence.context;
drivers/gpu/drm/i915/i915_trace.h
629
__entry->ctx = rq->fence.context;
drivers/gpu/drm/i915/selftests/i915_request.c
1234
GEM_BUG_ON(request[idx]->context->vm != batch->vm);
drivers/gpu/drm/i915/selftests/i915_request.c
1364
GEM_BUG_ON(request[idx]->context->vm != batch->vm);
drivers/gpu/drm/i915/selftests/i915_request.c
411
rq->fence.context, rq->fence.seqno,
drivers/gpu/drm/i915/selftests/i915_request.c
427
rq->fence.context, rq->fence.seqno);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
163
static int check_one(struct i915_syncmap **sync, u64 context, u32 seqno)
drivers/gpu/drm/i915/selftests/i915_syncmap.c
167
err = i915_syncmap_set(sync, context, seqno);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
173
context, (*sync)->height, (*sync)->prefix);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
179
context);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
193
if (!i915_syncmap_is_later(sync, context, seqno)) {
drivers/gpu/drm/i915/selftests/i915_syncmap.c
195
context, seqno);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
217
u64 context = i915_prandom_u64_state(&prng);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
225
err = check_one(&sync, context,
drivers/gpu/drm/i915/selftests/i915_syncmap.c
238
static int check_leaf(struct i915_syncmap **sync, u64 context, u32 seqno)
drivers/gpu/drm/i915/selftests/i915_syncmap.c
242
err = i915_syncmap_set(sync, context, seqno);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
248
context, (*sync)->height, (*sync)->prefix);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
254
context, (*sync)->bitmap, hweight32((*sync)->bitmap));
drivers/gpu/drm/i915/selftests/i915_syncmap.c
262
if (!i915_syncmap_is_later(sync, context, seqno)) {
drivers/gpu/drm/i915/selftests/i915_syncmap.c
264
context, seqno);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
297
u64 context = BIT_ULL(order);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
300
err = check_leaf(&sync, context, 0);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
321
if (__sync_child(join)[__sync_branch_idx(join, context)] != sync) {
drivers/gpu/drm/i915/selftests/i915_syncmap.c
346
u64 context = step * BIT_ULL(order);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
348
err = i915_syncmap_set(&sync, context, 0);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
354
context, order, step, sync->height, sync->prefix);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
363
u64 context = step * BIT_ULL(order);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
365
if (!i915_syncmap_is_later(&sync, context, 0)) {
drivers/gpu/drm/i915/selftests/i915_syncmap.c
367
context, order, step);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
373
if (i915_syncmap_is_later(&sync, context + idx, 0)) {
drivers/gpu/drm/i915/selftests/i915_syncmap.c
375
context + idx, order, step);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
385
u64 context = step * BIT_ULL(order);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
387
if (!i915_syncmap_is_later(&sync, context, 0)) {
drivers/gpu/drm/i915/selftests/i915_syncmap.c
389
context, order, step);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
414
u64 context = i915_prandom_u64_state(&prng) & ~MASK;
drivers/gpu/drm/i915/selftests/i915_syncmap.c
417
if (i915_syncmap_is_later(&sync, context, 0)) /* Skip repeats */
drivers/gpu/drm/i915/selftests/i915_syncmap.c
421
err = i915_syncmap_set(&sync, context + idx, 0);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
427
context, sync->height, sync->prefix);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
434
context, idx,
drivers/gpu/drm/i915/selftests/i915_syncmap.c
469
u64 context = idx * BIT_ULL(order) + idx;
drivers/gpu/drm/i915/selftests/i915_syncmap.c
471
err = i915_syncmap_set(&sync, context, 0);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
477
context, order, idx,
drivers/gpu/drm/i915/selftests/i915_syncmap.c
562
u64 context = i915_prandom_u64_state(&prng);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
564
err = i915_syncmap_set(&sync, context, 0);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
582
u64 context = i915_prandom_u64_state(&ctx);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
584
if (i915_syncmap_is_later(&sync, context, seqno) != expect) {
drivers/gpu/drm/i915/selftests/i915_syncmap.c
586
context, last_seqno, seqno, expect);
drivers/gpu/drm/i915/selftests/i915_syncmap.c
591
err = i915_syncmap_set(&sync, context, seqno);
drivers/gpu/drm/i915/selftests/igt_spinner.c
120
return i915_vma_offset(hws) + seqno_offset(rq->fence.context);
drivers/gpu/drm/i915/selftests/igt_spinner.c
223
u32 *seqno = spin->seqno + seqno_offset(rq->fence.context);
drivers/gpu/drm/i915/selftests/intel_memory_region.c
1064
err = intel_context_migrate_clear(engine->gt->migrate.context, NULL,
drivers/gpu/drm/lima/lima_ctx.c
22
err = lima_sched_context_init(dev->pipe + i, ctx->context + i);
drivers/gpu/drm/lima/lima_ctx.c
38
lima_sched_context_fini(dev->pipe + i, ctx->context + i);
drivers/gpu/drm/lima/lima_ctx.c
49
lima_sched_context_fini(ctx->dev->pipe + i, ctx->context + i);
drivers/gpu/drm/lima/lima_ctx.h
15
struct lima_sched_context context[lima_pipe_num];
drivers/gpu/drm/lima/lima_gem.c
343
submit->task, submit->ctx->context + submit->pipe,
drivers/gpu/drm/lima/lima_sched.c
116
struct lima_sched_context *context,
drivers/gpu/drm/lima/lima_sched.c
130
err = drm_sched_job_init(&task->base, &context->base, 1, vm,
drivers/gpu/drm/lima/lima_sched.c
161
struct lima_sched_context *context)
drivers/gpu/drm/lima/lima_sched.c
165
return drm_sched_entity_init(&context->base, DRM_SCHED_PRIORITY_NORMAL,
drivers/gpu/drm/lima/lima_sched.c
170
struct lima_sched_context *context)
drivers/gpu/drm/lima/lima_sched.c
172
drm_sched_entity_destroy(&context->base);
drivers/gpu/drm/lima/lima_sched.c
284
container_of(sched_ctx, struct lima_ctx, context[pipe_id]);
drivers/gpu/drm/lima/lima_sched.h
89
struct lima_sched_context *context,
drivers/gpu/drm/lima/lima_sched.h
96
struct lima_sched_context *context);
drivers/gpu/drm/lima/lima_sched.h
98
struct lima_sched_context *context);
drivers/gpu/drm/lima/lima_trace.h
17
__field(unsigned int, context)
drivers/gpu/drm/lima/lima_trace.h
23
__entry->context = task->base.s_fence->finished.context;
drivers/gpu/drm/lima/lima_trace.h
29
__entry->context, __entry->seqno,
drivers/gpu/drm/meson/meson_dw_hdmi.c
541
static int meson_dw_hdmi_reg_read(void *context, unsigned int reg,
drivers/gpu/drm/meson/meson_dw_hdmi.c
544
struct meson_dw_hdmi *dw_hdmi = context;
drivers/gpu/drm/meson/meson_dw_hdmi.c
552
static int meson_dw_hdmi_reg_write(void *context, unsigned int reg,
drivers/gpu/drm/meson/meson_dw_hdmi.c
555
struct meson_dw_hdmi *dw_hdmi = context;
drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c
1909
int context = (cluster->context_id == STATE_FORCE_CTXT_1) ? 1 : 0;
drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c
1913
drm_printf(p, " - context: %d\n", context);
drivers/gpu/drm/msm/msm_fence.c
200
fctx->context, ++fctx->last_fence);
drivers/gpu/drm/msm/msm_fence.c
55
fctx->context = dma_fence_context_alloc(1);
drivers/gpu/drm/msm/msm_fence.h
24
unsigned context;
drivers/gpu/drm/nouveau/include/nvkm/core/ramht.h
26
int chid, int addr, u32 handle, u32 context);
drivers/gpu/drm/nouveau/nouveau_fence.c
183
fctx->context = drm->runl[chan->runlist].context_base + chan->chid;
drivers/gpu/drm/nouveau/nouveau_fence.c
218
&fctx->lock, fctx->context, ++fctx->sequence);
drivers/gpu/drm/nouveau/nouveau_fence.c
221
&fctx->lock, fctx->context, ++fctx->sequence);
drivers/gpu/drm/nouveau/nouveau_fence.h
50
u32 context;
drivers/gpu/drm/nouveau/nvkm/core/ramht.c
108
int chid, int addr, u32 handle, u32 context)
drivers/gpu/drm/nouveau/nvkm/core/ramht.c
119
addr, handle, context);
drivers/gpu/drm/nouveau/nvkm/core/ramht.c
61
int chid, int addr, u32 handle, u32 context)
drivers/gpu/drm/nouveau/nvkm/core/ramht.c
88
if (addr < 0) context |= inst << -addr;
drivers/gpu/drm/nouveau/nvkm/core/ramht.c
89
else context |= inst >> addr;
drivers/gpu/drm/nouveau/nvkm/core/ramht.c
94
nvkm_wo32(ramht->gpuobj, (co << 3) + 4, context);
drivers/gpu/drm/nouveau/nvkm/engine/fifo/nv04.c
196
u32 context = 0x80000000 | chan->id << 24 | engn->id << 16;
drivers/gpu/drm/nouveau/nvkm/engine/fifo/nv04.c
200
hash = nvkm_ramht_insert(imem->ramht, eobj, chan->id, 4, eobj->handle, context);
drivers/gpu/drm/nouveau/nvkm/engine/fifo/nv40.c
115
u32 context = chan->id << 23 | engn->id << 20;
drivers/gpu/drm/nouveau/nvkm/engine/fifo/nv40.c
119
hash = nvkm_ramht_insert(imem->ramht, eobj, chan->id, 4, eobj->handle, context);
drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c
39
struct context *ctx = info;
drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c
53
struct context *ctx = info;
drivers/gpu/drm/nouveau/nvkm/subdev/mxm/nv50.c
98
struct context ctx = { .outp = (u32 *)(bios->data + pdcb) };
drivers/gpu/drm/panel/panel-ilitek-ili9322.c
290
static int ili9322_regmap_spi_write(void *context, const void *data,
drivers/gpu/drm/panel/panel-ilitek-ili9322.c
293
struct device *dev = context;
drivers/gpu/drm/panel/panel-ilitek-ili9322.c
305
static int ili9322_regmap_spi_read(void *context, const void *reg,
drivers/gpu/drm/panel/panel-ilitek-ili9322.c
308
struct device *dev = context;
drivers/gpu/drm/scheduler/gpu_scheduler_trace.h
105
__entry->fence_context = fence->finished.context;
drivers/gpu/drm/scheduler/gpu_scheduler_trace.h
123
__entry->fence_context = sched_job->s_fence->finished.context;
drivers/gpu/drm/scheduler/gpu_scheduler_trace.h
125
__entry->ctx = fence->context;
drivers/gpu/drm/scheduler/gpu_scheduler_trace.h
144
__entry->fence_context = sched_job->s_fence->finished.context;
drivers/gpu/drm/scheduler/gpu_scheduler_trace.h
146
__entry->ctx = fence->context;
drivers/gpu/drm/scheduler/gpu_scheduler_trace.h
76
__entry->fence_context = sched_job->s_fence->finished.context;
drivers/gpu/drm/scheduler/sched_entity.c
406
if (fence->context == entity->fence_context ||
drivers/gpu/drm/scheduler/sched_entity.c
407
fence->context == entity->fence_context + 1) {
drivers/gpu/drm/scheduler/sched_main.c
1514
if (bad->s_fence->scheduled.context ==
drivers/gpu/drm/scheduler/sched_main.c
749
guilty_context = s_job->s_fence->scheduled.context;
drivers/gpu/drm/scheduler/sched_main.c
752
if (found_guilty && s_job->s_fence->scheduled.context == guilty_context)
drivers/gpu/drm/scheduler/sched_main.c
900
if (entry->context != fence->context)
drivers/gpu/drm/scheduler/tests/mock_scheduler.c
173
sched->hw_timeline.context,
drivers/gpu/drm/scheduler/tests/mock_scheduler.c
309
sched->hw_timeline.context = dma_fence_context_alloc(1);
drivers/gpu/drm/scheduler/tests/sched_tests.h
52
u64 context;
drivers/gpu/drm/sitronix/st7571-i2c.c
41
static int st7571_i2c_regmap_write(void *context, const void *data, size_t count)
drivers/gpu/drm/sitronix/st7571-i2c.c
43
struct st7571_i2c_transport *t = context;
drivers/gpu/drm/sitronix/st7571-i2c.c
68
static int st7571_i2c_regmap_read(void *context, const void *reg_buf,
drivers/gpu/drm/solomon/ssd130x-spi.c
31
static int ssd130x_spi_write(void *context, const void *data, size_t count)
drivers/gpu/drm/solomon/ssd130x-spi.c
33
struct ssd130x_spi_transport *t = context;
drivers/gpu/drm/solomon/ssd130x-spi.c
48
static int ssd130x_spi_read(void *context, const void *reg, size_t reg_size,
drivers/gpu/drm/sprd/sprd_dsi.c
163
static int regmap_tst_io_write(void *context, u32 reg, u32 val)
drivers/gpu/drm/sprd/sprd_dsi.c
165
struct sprd_dsi *dsi = context;
drivers/gpu/drm/sprd/sprd_dsi.c
185
static int regmap_tst_io_read(void *context, u32 reg, u32 *val)
drivers/gpu/drm/sprd/sprd_dsi.c
187
struct sprd_dsi *dsi = context;
drivers/gpu/drm/tegra/drm.c
120
static void tegra_drm_context_free(struct tegra_drm_context *context)
drivers/gpu/drm/tegra/drm.c
122
context->client->ops->close_channel(context);
drivers/gpu/drm/tegra/drm.c
123
pm_runtime_put(context->client->base.dev);
drivers/gpu/drm/tegra/drm.c
124
kfree(context);
drivers/gpu/drm/tegra/drm.c
168
int tegra_drm_submit(struct tegra_drm_context *context,
drivers/gpu/drm/tegra/drm.c
172
struct host1x_client *client = &context->client->base;
drivers/gpu/drm/tegra/drm.c
198
job = host1x_job_alloc(context->channel, args->num_cmdbufs,
drivers/gpu/drm/tegra/drm.c
316
job->is_addr_reg = context->client->ops->is_addr_reg;
drivers/gpu/drm/tegra/drm.c
317
job->is_valid_class = context->client->ops->is_valid_class;
drivers/gpu/drm/tegra/drm.c
325
err = host1x_job_pin(job, context->client->base.dev);
drivers/gpu/drm/tegra/drm.c
431
struct tegra_drm_context *context)
drivers/gpu/drm/tegra/drm.c
439
err = client->ops->open_channel(client, context);
drivers/gpu/drm/tegra/drm.c
445
err = idr_alloc(&fpriv->legacy_contexts, context, 1, 0, GFP_KERNEL);
drivers/gpu/drm/tegra/drm.c
447
client->ops->close_channel(context);
drivers/gpu/drm/tegra/drm.c
452
context->client = client;
drivers/gpu/drm/tegra/drm.c
453
context->id = err;
drivers/gpu/drm/tegra/drm.c
464
struct tegra_drm_context *context;
drivers/gpu/drm/tegra/drm.c
468
context = kzalloc_obj(*context);
drivers/gpu/drm/tegra/drm.c
469
if (!context)
drivers/gpu/drm/tegra/drm.c
476
err = tegra_client_open(fpriv, client, context);
drivers/gpu/drm/tegra/drm.c
480
args->context = context->id;
drivers/gpu/drm/tegra/drm.c
485
kfree(context);
drivers/gpu/drm/tegra/drm.c
496
struct tegra_drm_context *context;
drivers/gpu/drm/tegra/drm.c
501
context = idr_find(&fpriv->legacy_contexts, args->context);
drivers/gpu/drm/tegra/drm.c
502
if (!context) {
drivers/gpu/drm/tegra/drm.c
507
idr_remove(&fpriv->legacy_contexts, context->id);
drivers/gpu/drm/tegra/drm.c
508
tegra_drm_context_free(context);
drivers/gpu/drm/tegra/drm.c
520
struct tegra_drm_context *context;
drivers/gpu/drm/tegra/drm.c
526
context = idr_find(&fpriv->legacy_contexts, args->context);
drivers/gpu/drm/tegra/drm.c
527
if (!context) {
drivers/gpu/drm/tegra/drm.c
532
if (args->index >= context->client->base.num_syncpts) {
drivers/gpu/drm/tegra/drm.c
537
syncpt = context->client->base.syncpts[args->index];
drivers/gpu/drm/tegra/drm.c
550
struct tegra_drm_context *context;
drivers/gpu/drm/tegra/drm.c
555
context = idr_find(&fpriv->legacy_contexts, args->context);
drivers/gpu/drm/tegra/drm.c
556
if (!context) {
drivers/gpu/drm/tegra/drm.c
561
err = context->client->ops->submit(context, args, drm, file);
drivers/gpu/drm/tegra/drm.c
573
struct tegra_drm_context *context;
drivers/gpu/drm/tegra/drm.c
580
context = idr_find(&fpriv->legacy_contexts, args->context);
drivers/gpu/drm/tegra/drm.c
581
if (!context) {
drivers/gpu/drm/tegra/drm.c
586
if (args->syncpt >= context->client->base.num_syncpts) {
drivers/gpu/drm/tegra/drm.c
591
syncpt = context->client->base.syncpts[args->syncpt];
drivers/gpu/drm/tegra/drm.c
810
struct tegra_drm_context *context = p;
drivers/gpu/drm/tegra/drm.c
812
tegra_drm_context_free(context);
drivers/gpu/drm/tegra/drm.h
79
struct tegra_drm_context *context);
drivers/gpu/drm/tegra/drm.h
80
void (*close_channel)(struct tegra_drm_context *context);
drivers/gpu/drm/tegra/drm.h
83
int (*submit)(struct tegra_drm_context *context,
drivers/gpu/drm/tegra/drm.h
90
int tegra_drm_submit(struct tegra_drm_context *context,
drivers/gpu/drm/tegra/gr2d.c
121
struct tegra_drm_context *context)
drivers/gpu/drm/tegra/gr2d.c
125
context->channel = host1x_channel_get(gr2d->channel);
drivers/gpu/drm/tegra/gr2d.c
126
if (!context->channel)
drivers/gpu/drm/tegra/gr2d.c
132
static void gr2d_close_channel(struct tegra_drm_context *context)
drivers/gpu/drm/tegra/gr2d.c
134
host1x_channel_put(context->channel);
drivers/gpu/drm/tegra/gr3d.c
130
struct tegra_drm_context *context)
drivers/gpu/drm/tegra/gr3d.c
134
context->channel = host1x_channel_get(gr3d->channel);
drivers/gpu/drm/tegra/gr3d.c
135
if (!context->channel)
drivers/gpu/drm/tegra/gr3d.c
141
static void gr3d_close_channel(struct tegra_drm_context *context)
drivers/gpu/drm/tegra/gr3d.c
143
host1x_channel_put(context->channel);
drivers/gpu/drm/tegra/nvdec.c
351
struct tegra_drm_context *context)
drivers/gpu/drm/tegra/nvdec.c
355
context->channel = host1x_channel_get(nvdec->channel);
drivers/gpu/drm/tegra/nvdec.c
356
if (!context->channel)
drivers/gpu/drm/tegra/nvdec.c
362
static void nvdec_close_channel(struct tegra_drm_context *context)
drivers/gpu/drm/tegra/nvdec.c
364
host1x_channel_put(context->channel);
drivers/gpu/drm/tegra/submit.c
146
tegra_drm_mapping_get(struct tegra_drm_context *context, u32 id)
drivers/gpu/drm/tegra/submit.c
150
xa_lock(&context->mappings);
drivers/gpu/drm/tegra/submit.c
152
mapping = xa_load(&context->mappings, id);
drivers/gpu/drm/tegra/submit.c
156
xa_unlock(&context->mappings);
drivers/gpu/drm/tegra/submit.c
180
struct tegra_drm_context *context,
drivers/gpu/drm/tegra/submit.c
187
SUBMIT_ERR(context, "gather_data_words cannot be zero");
drivers/gpu/drm/tegra/submit.c
192
SUBMIT_ERR(context, "gather_data_words is too large");
drivers/gpu/drm/tegra/submit.c
198
SUBMIT_ERR(context, "failed to allocate memory for bo info");
drivers/gpu/drm/tegra/submit.c
209
SUBMIT_ERR(context, "failed to allocate memory for gather data");
drivers/gpu/drm/tegra/submit.c
215
SUBMIT_ERR(context, "failed to copy gather data from userspace");
drivers/gpu/drm/tegra/submit.c
228
static int submit_write_reloc(struct tegra_drm_context *context, struct gather_bo *bo,
drivers/gpu/drm/tegra/submit.c
243
SUBMIT_ERR(context,
drivers/gpu/drm/tegra/submit.c
257
static int submit_process_bufs(struct tegra_drm_context *context, struct gather_bo *bo,
drivers/gpu/drm/tegra/submit.c
26
#define SUBMIT_ERR(context, fmt, ...) \
drivers/gpu/drm/tegra/submit.c
269
SUBMIT_ERR(context, "failed to copy bufs array from userspace");
drivers/gpu/drm/tegra/submit.c
27
dev_err_ratelimited(context->client->base.dev, \
drivers/gpu/drm/tegra/submit.c
275
SUBMIT_ERR(context, "failed to allocate memory for mapping info");
drivers/gpu/drm/tegra/submit.c
285
SUBMIT_ERR(context, "invalid flag specified for buffer");
drivers/gpu/drm/tegra/submit.c
290
mapping = tegra_drm_mapping_get(context, buf->mapping);
drivers/gpu/drm/tegra/submit.c
292
SUBMIT_ERR(context, "invalid mapping ID '%u' for buffer", buf->mapping);
drivers/gpu/drm/tegra/submit.c
297
err = submit_write_reloc(context, bo, buf, mapping);
drivers/gpu/drm/tegra/submit.c
327
static int submit_get_syncpt(struct tegra_drm_context *context, struct host1x_job *job,
drivers/gpu/drm/tegra/submit.c
333
SUBMIT_ERR(context, "invalid flag specified for syncpt");
drivers/gpu/drm/tegra/submit.c
340
SUBMIT_ERR(context, "syncpoint specified in syncpt was not allocated");
drivers/gpu/drm/tegra/submit.c
350
static int submit_job_add_gather(struct host1x_job *job, struct tegra_drm_context *context,
drivers/gpu/drm/tegra/submit.c
359
SUBMIT_ERR(context, "non-zero reserved field in GATHER_UPTR command");
drivers/gpu/drm/tegra/submit.c
365
SUBMIT_ERR(context, "too many words in GATHER_UPTR command");
drivers/gpu/drm/tegra/submit.c
370
SUBMIT_ERR(context, "too many total words in job");
drivers/gpu/drm/tegra/submit.c
375
SUBMIT_ERR(context, "GATHER_UPTR command overflows gather data");
drivers/gpu/drm/tegra/submit.c
379
if (tegra_drm_fw_validate(context->client, bo->gather_data, *offset,
drivers/gpu/drm/tegra/submit.c
381
SUBMIT_ERR(context, "job was rejected by firewall");
drivers/gpu/drm/tegra/submit.c
393
submit_create_job(struct tegra_drm_context *context, struct gather_bo *bo,
drivers/gpu/drm/tegra/submit.c
403
class = context->client->base.class;
drivers/gpu/drm/tegra/submit.c
408
SUBMIT_ERR(context, "failed to copy cmds array from userspace");
drivers/gpu/drm/tegra/submit.c
412
job = host1x_job_alloc(context->channel, args->num_cmds, 0, true);
drivers/gpu/drm/tegra/submit.c
414
SUBMIT_ERR(context, "failed to allocate memory for job");
drivers/gpu/drm/tegra/submit.c
419
err = submit_get_syncpt(context, job, syncpoints, args);
drivers/gpu/drm/tegra/submit.c
423
job->client = &context->client->base;
drivers/gpu/drm/tegra/submit.c
424
job->class = context->client->base.class;
drivers/gpu/drm/tegra/submit.c
431
SUBMIT_ERR(context, "unknown flags given for cmd");
drivers/gpu/drm/tegra/submit.c
437
err = submit_job_add_gather(job, context, &cmd->gather_uptr, bo,
drivers/gpu/drm/tegra/submit.c
443
SUBMIT_ERR(context, "non-zero reserved value");
drivers/gpu/drm/tegra/submit.c
452
SUBMIT_ERR(context, "non-zero reserved value");
drivers/gpu/drm/tegra/submit.c
458
SUBMIT_ERR(context, "syncpoint ID in CMD_WAIT_SYNCPT_RELATIVE is not used by the job");
drivers/gpu/drm/tegra/submit.c
466
SUBMIT_ERR(context, "unknown cmd type");
drivers/gpu/drm/tegra/submit.c
473
SUBMIT_ERR(context, "job must have at least one gather");
drivers/gpu/drm/tegra/submit.c
516
struct tegra_drm_context *context;
drivers/gpu/drm/tegra/submit.c
524
context = xa_load(&fpriv->contexts, args->context);
drivers/gpu/drm/tegra/submit.c
525
if (!context) {
drivers/gpu/drm/tegra/submit.c
528
current->comm, args->context);
drivers/gpu/drm/tegra/submit.c
537
SUBMIT_ERR(context, "invalid syncobj_in '%#x'", args->syncobj_in);
drivers/gpu/drm/tegra/submit.c
544
SUBMIT_ERR(context, "wait for syncobj_in timed out");
drivers/gpu/drm/tegra/submit.c
552
SUBMIT_ERR(context, "invalid syncobj_out '%#x'", args->syncobj_out);
drivers/gpu/drm/tegra/submit.c
559
err = submit_copy_gather_data(&bo, drm->dev, context, args);
drivers/gpu/drm/tegra/submit.c
565
SUBMIT_ERR(context, "failed to allocate memory for job data");
drivers/gpu/drm/tegra/submit.c
571
err = submit_process_bufs(context, bo, args, job_data);
drivers/gpu/drm/tegra/submit.c
576
job = submit_create_job(context, bo, args, job_data, &fpriv->syncpoints);
drivers/gpu/drm/tegra/submit.c
583
err = host1x_job_pin(job, context->client->base.dev);
drivers/gpu/drm/tegra/submit.c
585
SUBMIT_ERR(context, "failed to pin job: %d", err);
drivers/gpu/drm/tegra/submit.c
589
if (context->client->ops->get_streamid_offset) {
drivers/gpu/drm/tegra/submit.c
590
err = context->client->ops->get_streamid_offset(
drivers/gpu/drm/tegra/submit.c
591
context->client, &job->engine_streamid_offset);
drivers/gpu/drm/tegra/submit.c
593
SUBMIT_ERR(context, "failed to get streamid offset: %d", err);
drivers/gpu/drm/tegra/submit.c
598
if (context->memory_context && context->client->ops->can_use_memory_ctx) {
drivers/gpu/drm/tegra/submit.c
601
err = context->client->ops->can_use_memory_ctx(context->client, &supported);
drivers/gpu/drm/tegra/submit.c
603
SUBMIT_ERR(context, "failed to detect if engine can use memory context: %d", err);
drivers/gpu/drm/tegra/submit.c
608
job->memory_context = context->memory_context;
drivers/gpu/drm/tegra/submit.c
611
} else if (context->client->ops->get_streamid_offset) {
drivers/gpu/drm/tegra/submit.c
616
if (!tegra_dev_iommu_get_stream_id(context->client->base.dev,
drivers/gpu/drm/tegra/submit.c
622
err = pm_runtime_resume_and_get(context->client->base.dev);
drivers/gpu/drm/tegra/submit.c
624
SUBMIT_ERR(context, "could not power up engine: %d", err);
drivers/gpu/drm/tegra/submit.c
641
SUBMIT_ERR(context, "host1x job submission failed: %d", err);
drivers/gpu/drm/tegra/submit.c
652
SUBMIT_ERR(context, "failed to create postfence: %d", err);
drivers/gpu/drm/tegra/uapi.c
100
context->channel = host1x_channel_get(client->shared_channel);
drivers/gpu/drm/tegra/uapi.c
102
context->channel = host1x_channel_request(&client->base);
drivers/gpu/drm/tegra/uapi.c
103
if (!context->channel) {
drivers/gpu/drm/tegra/uapi.c
119
context->memory_context = host1x_memory_context_alloc(
drivers/gpu/drm/tegra/uapi.c
124
if (IS_ERR(context->memory_context)) {
drivers/gpu/drm/tegra/uapi.c
125
if (PTR_ERR(context->memory_context) != -EOPNOTSUPP) {
drivers/gpu/drm/tegra/uapi.c
126
err = PTR_ERR(context->memory_context);
drivers/gpu/drm/tegra/uapi.c
133
context->memory_context = NULL;
drivers/gpu/drm/tegra/uapi.c
138
err = xa_alloc(&fpriv->contexts, &args->context, context, XA_LIMIT(1, U32_MAX),
drivers/gpu/drm/tegra/uapi.c
143
context->client = client;
drivers/gpu/drm/tegra/uapi.c
144
xa_init_flags(&context->mappings, XA_FLAGS_ALLOC1);
drivers/gpu/drm/tegra/uapi.c
155
if (context->memory_context)
drivers/gpu/drm/tegra/uapi.c
156
host1x_memory_context_put(context->memory_context);
drivers/gpu/drm/tegra/uapi.c
158
host1x_channel_put(context->channel);
drivers/gpu/drm/tegra/uapi.c
160
kfree(context);
drivers/gpu/drm/tegra/uapi.c
169
struct tegra_drm_context *context;
drivers/gpu/drm/tegra/uapi.c
173
context = xa_load(&fpriv->contexts, args->context);
drivers/gpu/drm/tegra/uapi.c
174
if (!context) {
drivers/gpu/drm/tegra/uapi.c
179
xa_erase(&fpriv->contexts, args->context);
drivers/gpu/drm/tegra/uapi.c
183
tegra_drm_channel_context_close(context);
drivers/gpu/drm/tegra/uapi.c
193
struct tegra_drm_context *context;
drivers/gpu/drm/tegra/uapi.c
203
context = xa_load(&fpriv->contexts, args->context);
drivers/gpu/drm/tegra/uapi.c
204
if (!context) {
drivers/gpu/drm/tegra/uapi.c
217
if (context->memory_context)
drivers/gpu/drm/tegra/uapi.c
218
mapping_dev = &context->memory_context->dev;
drivers/gpu/drm/tegra/uapi.c
220
mapping_dev = context->client->base.dev;
drivers/gpu/drm/tegra/uapi.c
255
err = xa_alloc(&context->mappings, &args->mapping, mapping, XA_LIMIT(1, U32_MAX),
drivers/gpu/drm/tegra/uapi.c
280
struct tegra_drm_context *context;
drivers/gpu/drm/tegra/uapi.c
284
context = xa_load(&fpriv->contexts, args->context);
drivers/gpu/drm/tegra/uapi.c
285
if (!context) {
drivers/gpu/drm/tegra/uapi.c
290
mapping = xa_erase(&context->mappings, args->mapping);
drivers/gpu/drm/tegra/uapi.c
31
static void tegra_drm_channel_context_close(struct tegra_drm_context *context)
drivers/gpu/drm/tegra/uapi.c
36
if (context->memory_context)
drivers/gpu/drm/tegra/uapi.c
37
host1x_memory_context_put(context->memory_context);
drivers/gpu/drm/tegra/uapi.c
39
xa_for_each(&context->mappings, id, mapping)
drivers/gpu/drm/tegra/uapi.c
42
xa_destroy(&context->mappings);
drivers/gpu/drm/tegra/uapi.c
44
host1x_channel_put(context->channel);
drivers/gpu/drm/tegra/uapi.c
46
kfree(context);
drivers/gpu/drm/tegra/uapi.c
51
struct tegra_drm_context *context;
drivers/gpu/drm/tegra/uapi.c
55
xa_for_each(&file->contexts, id, context)
drivers/gpu/drm/tegra/uapi.c
56
tegra_drm_channel_context_close(context);
drivers/gpu/drm/tegra/uapi.c
83
struct tegra_drm_context *context;
drivers/gpu/drm/tegra/uapi.c
89
context = kzalloc_obj(*context);
drivers/gpu/drm/tegra/uapi.c
90
if (!context)
drivers/gpu/drm/tegra/vic.c
359
struct tegra_drm_context *context)
drivers/gpu/drm/tegra/vic.c
363
context->channel = host1x_channel_get(vic->channel);
drivers/gpu/drm/tegra/vic.c
364
if (!context->channel)
drivers/gpu/drm/tegra/vic.c
370
static void vic_close_channel(struct tegra_drm_context *context)
drivers/gpu/drm/tegra/vic.c
372
host1x_channel_put(context->channel);
drivers/gpu/drm/ttm/ttm_bo_util.c
661
if (fence->context != tmp->context)
drivers/gpu/drm/udl/udl_main.c
152
struct urb_node *unode = urb->context;
drivers/gpu/drm/udl/udl_main.c
192
unode = urb->context;
drivers/gpu/drm/virtio/virtgpu_drv.h
138
uint64_t context;
drivers/gpu/drm/virtio/virtgpu_fence.c
131
if (signaled->f.context != curr->f.context)
drivers/gpu/drm/virtio/virtgpu_fence.c
71
fence->emit_fence_info = !(base_fence_ctx == drv->context);
drivers/gpu/drm/virtio/virtgpu_ioctl.c
172
fence = virtio_gpu_fence_alloc(vgdev, vgdev->fence_drv.context, 0);
drivers/gpu/drm/virtio/virtgpu_ioctl.c
259
fence = virtio_gpu_fence_alloc(vgdev, vgdev->fence_drv.context, 0);
drivers/gpu/drm/virtio/virtgpu_ioctl.c
319
fence = virtio_gpu_fence_alloc(vgdev, vgdev->fence_drv.context,
drivers/gpu/drm/virtio/virtgpu_kms.c
150
vgdev->fence_drv.context = dma_fence_context_alloc(1);
drivers/gpu/drm/virtio/virtgpu_object.c
113
fence = virtio_gpu_fence_alloc(vgdev, vgdev->fence_drv.context, 0);
drivers/gpu/drm/virtio/virtgpu_plane.c
372
vgdev->fence_drv.context,
drivers/gpu/drm/virtio/virtgpu_submit.c
478
u64 fence_ctx = vgdev->fence_drv.context;
drivers/gpu/drm/virtio/virtgpu_submit.c
51
u64 context = submit->fence_ctx + submit->ring_idx;
drivers/gpu/drm/virtio/virtgpu_submit.c
53
if (dma_fence_match_context(in_fence, context))
drivers/gpu/drm/vmwgfx/device_include/svga_reg.h
326
SVGACBContext context;
drivers/gpu/drm/vmwgfx/device_include/svga_reg.h
330
SVGACBContext context;
drivers/gpu/drm/vmwgfx/device_include/svga_reg.h
335
SVGACBContext context;
drivers/gpu/drm/vmwgfx/device_include/svga_reg.h
339
SVGACBContext context;
drivers/gpu/drm/vmwgfx/device_include/svga_reg.h
343
SVGACBContext context;
drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c
1172
static int vmw_cmdbuf_preempt(struct vmw_cmdbuf_man *man, u32 context)
drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c
1180
cmd.body.context = SVGA_CB_CONTEXT_0 + context;
drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c
1197
static int vmw_cmdbuf_startstop(struct vmw_cmdbuf_man *man, u32 context,
drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c
1207
cmd.body.context = SVGA_CB_CONTEXT_0 + context;
drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c
198
static int vmw_cmdbuf_startstop(struct vmw_cmdbuf_man *man, u32 context,
drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c
200
static int vmw_cmdbuf_preempt(struct vmw_cmdbuf_man *man, u32 context);
drivers/gpu/drm/vmwgfx/vmwgfx_drv.h
436
struct vmw_resource *context;
drivers/gpu/drm/xe/xe_bo.c
1571
fence->context,
drivers/gpu/drm/xe/xe_exec_queue.c
1220
q->lr.context = dma_fence_context_alloc(1);
drivers/gpu/drm/xe/xe_exec_queue_types.h
189
u64 context;
drivers/gpu/drm/xe/xe_i2c.c
245
static int xe_i2c_read(void *context, unsigned int reg, unsigned int *val)
drivers/gpu/drm/xe/xe_i2c.c
247
struct xe_i2c *i2c = context;
drivers/gpu/drm/xe/xe_i2c.c
254
static int xe_i2c_write(void *context, unsigned int reg, unsigned int val)
drivers/gpu/drm/xe/xe_i2c.c
256
struct xe_i2c *i2c = context;
drivers/gpu/drm/xe/xe_preempt_fence.c
144
u64 context, u32 seqno)
drivers/gpu/drm/xe/xe_preempt_fence.c
150
&pfence->lock, context, seqno);
drivers/gpu/drm/xe/xe_preempt_fence.c
170
u64 context, u32 seqno)
drivers/gpu/drm/xe/xe_preempt_fence.c
178
return xe_preempt_fence_arm(pfence, q, context, seqno);
drivers/gpu/drm/xe/xe_preempt_fence.h
15
u64 context, u32 seqno);
drivers/gpu/drm/xe/xe_preempt_fence.h
23
u64 context, u32 seqno);
drivers/gpu/drm/xe/xe_trace.h
351
__entry->ctx = fence->dma.context;
drivers/gpu/drm/xe/xe_vm.c
167
q, q->lr.context,
drivers/gpu/drm/xe/xe_vm.c
234
pfence = xe_preempt_fence_create(q, q->lr.context,
drivers/greybus/es2.c
1026
struct es2_ap_dev *es2 = urb->context;
drivers/greybus/es2.c
173
struct usb_ctrlrequest *dr = urb->context;
drivers/greybus/es2.c
815
struct gb_host_device *hd = urb->context;
drivers/greybus/es2.c
858
struct gb_message *message = urb->context;
drivers/hid/hid-hyperv.c
308
static void mousevsc_on_channel_callback(void *context)
drivers/hid/hid-hyperv.c
310
struct hv_device *device = context;
drivers/hid/hid-sony.c
563
struct sony_sc *sc = urb->context;
drivers/hid/hid-thrustmaster.c
204
struct hid_device *hdev = urb->context;
drivers/hid/hid-thrustmaster.c
222
struct hid_device *hdev = urb->context;
drivers/hid/hid-u2fzero.c
119
struct u2fzero_transfer_context *ctx = urb->context;
drivers/hid/hid-u2fzero.c
137
dev->urb->context = &ctx;
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dev.c
12
static int thc_regmap_read(void *context, unsigned int reg,
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dev.c
15
struct thc_device *thc_ctx = context;
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dev.c
22
static int thc_regmap_write(void *context, unsigned int reg,
drivers/hid/intel-thc-hid/intel-thc/intel-thc-dev.c
25
struct thc_device *thc_ctx = context;
drivers/hid/usbhid/hid-core.c
275
struct hid_device *hid = urb->context;
drivers/hid/usbhid/hid-core.c
286
hid_input_report(urb->context, HID_INPUT_REPORT,
drivers/hid/usbhid/hid-core.c
429
struct hid_device *hid = urb->context;
drivers/hid/usbhid/hid-core.c
477
struct hid_device *hid = urb->context;
drivers/hid/usbhid/hid-core.c
485
hid_input_report(urb->context,
drivers/hid/usbhid/usbkbd.c
102
struct usb_kbd *kbd = urb->context;
drivers/hid/usbhid/usbkbd.c
193
struct usb_kbd *kbd = urb->context;
drivers/hid/usbhid/usbmouse.c
51
struct usb_mouse *mouse = urb->context;
drivers/hsi/clients/cmt_speech.c
211
struct cs_hsi_iface *hi = msg->context;
drivers/hsi/clients/cmt_speech.c
218
struct cs_hsi_iface *hi = msg->context;
drivers/hsi/clients/cmt_speech.c
283
msg->context = hi;
drivers/hsi/clients/cmt_speech.c
296
struct cs_hsi_iface *hi = msg->context;
drivers/hsi/clients/cmt_speech.c
330
rxmsg->context = hi;
drivers/hsi/clients/cmt_speech.c
339
txmsg->context = hi;
drivers/hsi/clients/cmt_speech.c
428
struct cs_hsi_iface *hi = msg->context;
drivers/hsi/clients/cmt_speech.c
460
struct cs_hsi_iface *hi = msg->context;
drivers/hsi/clients/cmt_speech.c
511
struct cs_hsi_iface *hi = msg->context;
drivers/hsi/clients/cmt_speech.c
575
struct cs_hsi_iface *hi = msg->context;
drivers/hsi/clients/cmt_speech.c
602
struct cs_hsi_iface *hi = msg->context;
drivers/hsi/clients/cmt_speech.c
678
struct cs_hsi_iface *hi = msg->context;
drivers/hsi/clients/ssi_protocol.c
191
skb = msg->context;
drivers/hsi/clients/ssi_protocol.c
192
pr_debug("free data: msg %p context %p skb %p\n", msg, msg->context,
drivers/hsi/clients/ssi_protocol.c
210
msg->context = skb;
drivers/hsi/clients/ssi_protocol.c
554
ssip_free_data(msg->context);
drivers/hsi/clients/ssi_protocol.c
564
data = msg->context;
drivers/hsi/clients/ssi_protocol.c
589
skb = dmsg->context;
drivers/hsi/clients/ssi_protocol.c
590
msg->context = dmsg;
drivers/hsi/clients/ssi_protocol.c
652
skb = msg->context;
drivers/hsi/controllers/omap_ssi_port.c
438
msg->ttype, msg->context);
drivers/hv/channel.c
654
void (*onchannelcallback)(void *context), void *context)
drivers/hv/channel.c
680
newchannel->channel_callback_context = context;
drivers/hv/channel.c
795
void (*onchannelcallback)(void *context), void *context)
drivers/hv/channel.c
797
return __vmbus_open(newchannel, NULL, 0, onchannelcallback, context);
drivers/hv/channel.c
807
void (*onchannelcallback)(void *context), void *context)
drivers/hv/channel.c
817
onchannelcallback, context);
drivers/hv/connection.c
410
void (*callback_fn)(void *context);
drivers/hv/hv_balloon.c
1373
static void balloon_onchannelcallback(void *context);
drivers/hv/hv_balloon.c
1476
static void balloon_onchannelcallback(void *context)
drivers/hv/hv_balloon.c
1478
struct hv_device *dev = context;
drivers/hv/hv_kvp.c
635
void hv_kvp_onchannelcallback(void *context)
drivers/hv/hv_kvp.c
637
struct vmbus_channel *channel = context;
drivers/hv/hv_snapshot.c
292
void hv_vss_onchannelcallback(void *context)
drivers/hv/hv_snapshot.c
294
struct vmbus_channel *channel = context;
drivers/hv/hv_util.c
118
static void shutdown_onchannelcallback(void *context);
drivers/hv/hv_util.c
128
static void timesync_onchannelcallback(void *context);
drivers/hv/hv_util.c
136
static void heartbeat_onchannelcallback(void *context);
drivers/hv/hv_util.c
179
static void shutdown_onchannelcallback(void *context)
drivers/hv/hv_util.c
181
struct vmbus_channel *channel = context;
drivers/hv/hv_util.c
417
static void timesync_onchannelcallback(void *context)
drivers/hv/hv_util.c
419
struct vmbus_channel *channel = context;
drivers/hv/hv_util.c
510
static void heartbeat_onchannelcallback(void *context)
drivers/hv/hv_util.c
512
struct vmbus_channel *channel = context;
drivers/hv/hyperv_vmbus.h
435
void hv_kvp_onchannelcallback(void *context);
drivers/hv/hyperv_vmbus.h
442
void hv_vss_onchannelcallback(void *context);
drivers/hv/vmbus_drv.c
1273
void (*callback_fn)(void *context);
drivers/hwmon/adt7310.c
105
static int adt7310_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/adt7310.c
107
struct spi_device *spi = context;
drivers/hwmon/adt7310.c
83
static int adt7310_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/adt7310.c
85
struct spi_device *spi = context;
drivers/hwmon/adt7410.c
28
static int adt7410_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/adt7410.c
30
struct i2c_client *client = context;
drivers/hwmon/adt7410.c
50
static int adt7410_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/adt7410.c
52
struct i2c_client *client = context;
drivers/hwmon/aspeed-pwm-tacho.c
349
static int regmap_aspeed_pwm_tacho_reg_write(void *context, unsigned int reg,
drivers/hwmon/aspeed-pwm-tacho.c
352
void __iomem *regs = (void __iomem *)context;
drivers/hwmon/aspeed-pwm-tacho.c
358
static int regmap_aspeed_pwm_tacho_reg_read(void *context, unsigned int reg,
drivers/hwmon/aspeed-pwm-tacho.c
361
void __iomem *regs = (void __iomem *)context;
drivers/hwmon/asus_wmi_sensors.c
614
static int asus_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/hwmon/dell-smm-hwmon.c
1699
static int dell_smm_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/hwmon/hp-wmi-sensors.c
1600
static void hp_wmi_notify(union acpi_object *wobj, void *context)
drivers/hwmon/hp-wmi-sensors.c
1603
struct hp_wmi_sensors *state = context;
drivers/hwmon/hp-wmi-sensors.c
2045
static int hp_wmi_sensors_probe(struct wmi_device *wdev, const void *context)
drivers/hwmon/lm75.c
582
static int lm75_i2c_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/lm75.c
584
struct i2c_client *client = context;
drivers/hwmon/lm75.c
602
static int lm75_i2c_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/lm75.c
604
struct i2c_client *client = context;
drivers/hwmon/lm75.c
620
static int lm75_i3c_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/lm75.c
622
struct i3c_device *i3cdev = context;
drivers/hwmon/lm75.c
657
static int lm75_i3c_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/lm75.c
659
struct i3c_device *i3cdev = context;
drivers/hwmon/lm83.c
109
static int lm83_regmap_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/lm83.c
111
struct i2c_client *client = context;
drivers/hwmon/lm83.c
128
static int lm83_regmap_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/lm83.c
130
struct i2c_client *client = context;
drivers/hwmon/lm92.c
327
static int lm92_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/lm92.c
332
ret = i2c_smbus_read_byte_data(context, reg);
drivers/hwmon/lm92.c
334
ret = i2c_smbus_read_word_swapped(context, reg);
drivers/hwmon/lm92.c
342
static int lm92_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/lm92.c
345
return i2c_smbus_write_byte_data(context, LM92_REG_CONFIG, val);
drivers/hwmon/lm92.c
347
return i2c_smbus_write_word_swapped(context, reg, val);
drivers/hwmon/max1619.c
304
static int max1619_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/max1619.c
308
ret = i2c_smbus_read_byte_data(context, reg);
drivers/hwmon/max1619.c
316
static int max1619_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/max1619.c
320
return i2c_smbus_write_byte_data(context, reg + offset, val);
drivers/hwmon/max1668.c
229
static int max1668_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/max1668.c
233
ret = i2c_smbus_read_byte_data(context, reg);
drivers/hwmon/max1668.c
241
static int max1668_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/max1668.c
243
return i2c_smbus_write_byte_data(context, reg + 11, val);
drivers/hwmon/powerz.c
88
struct powerz_priv *priv = urb->context;
drivers/hwmon/powerz.c
95
struct powerz_priv *priv = urb->context;
drivers/hwmon/sch56xx-common.c
282
static int sch56xx_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/sch56xx-common.c
284
struct sch56xx_bus_context *bus = context;
drivers/hwmon/sch56xx-common.c
294
static int sch56xx_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/sch56xx-common.c
296
struct sch56xx_bus_context *bus = context;
drivers/hwmon/sch56xx-common.c
311
static void sch56xx_free_context(void *context)
drivers/hwmon/sch56xx-common.c
313
kfree(context);
drivers/hwmon/sch56xx-common.c
327
struct sch56xx_bus_context *context;
drivers/hwmon/sch56xx-common.c
333
context = kzalloc_obj(*context);
drivers/hwmon/sch56xx-common.c
334
if (!context)
drivers/hwmon/sch56xx-common.c
337
context->lock = lock;
drivers/hwmon/sch56xx-common.c
338
context->addr = addr;
drivers/hwmon/sch56xx-common.c
340
map = devm_regmap_init(dev, &sch56xx_bus, context, config);
drivers/hwmon/sch56xx-common.c
342
kfree(context);
drivers/hwmon/tmp108.c
330
static int tmp108_i2c_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/tmp108.c
332
struct i2c_client *client = context;
drivers/hwmon/tmp108.c
351
static int tmp108_i2c_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/tmp108.c
353
struct i2c_client *client = context;
drivers/hwmon/tmp108.c
366
static int tmp108_i3c_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/tmp108.c
368
struct i3c_device *i3cdev = context;
drivers/hwmon/tmp108.c
401
static int tmp108_i3c_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/tmp108.c
403
struct i3c_device *i3cdev = context;
drivers/hwmon/tmp401.c
141
static int tmp401_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/hwmon/tmp401.c
143
struct tmp401_data *data = context;
drivers/hwmon/tmp401.c
226
static int tmp401_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/hwmon/tmp401.c
228
struct tmp401_data *data = context;
drivers/hwtracing/ptt/hisi_ptt.c
299
static irqreturn_t hisi_ptt_isr(int irq, void *context)
drivers/hwtracing/ptt/hisi_ptt.c
301
struct hisi_ptt *hisi_ptt = context;
drivers/i2c/busses/i2c-designware-common.c
100
static int dw_reg_write_swab(void *context, unsigned int reg, unsigned int val)
drivers/i2c/busses/i2c-designware-common.c
102
struct dw_i2c_dev *dev = context;
drivers/i2c/busses/i2c-designware-common.c
109
static int dw_reg_read_word(void *context, unsigned int reg, unsigned int *val)
drivers/i2c/busses/i2c-designware-common.c
111
struct dw_i2c_dev *dev = context;
drivers/i2c/busses/i2c-designware-common.c
119
static int dw_reg_write_word(void *context, unsigned int reg, unsigned int val)
drivers/i2c/busses/i2c-designware-common.c
121
struct dw_i2c_dev *dev = context;
drivers/i2c/busses/i2c-designware-common.c
73
static int dw_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/i2c/busses/i2c-designware-common.c
75
struct dw_i2c_dev *dev = context;
drivers/i2c/busses/i2c-designware-common.c
82
static int dw_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/i2c/busses/i2c-designware-common.c
84
struct dw_i2c_dev *dev = context;
drivers/i2c/busses/i2c-designware-common.c
91
static int dw_reg_read_swab(void *context, unsigned int reg, unsigned int *val)
drivers/i2c/busses/i2c-designware-common.c
93
struct dw_i2c_dev *dev = context;
drivers/i2c/busses/i2c-hisi.c
338
static irqreturn_t hisi_i2c_irq(int irq, void *context)
drivers/i2c/busses/i2c-hisi.c
340
struct hisi_i2c_controller *ctlr = context;
drivers/i2c/busses/i2c-scmi.c
341
void *context, void **return_value)
drivers/i2c/busses/i2c-scmi.c
345
struct acpi_smbus_cmi *smbus_cmi = context;
drivers/iio/accel/adxl367.c
1431
void *context, struct regmap *regmap, int irq)
drivers/iio/accel/adxl367.c
1445
st->context = context;
drivers/iio/accel/adxl367.c
158
void *context;
drivers/iio/accel/adxl367.c
793
ret = st->ops->read_fifo(st->context, st->fifo_buf, fifo_entries);
drivers/iio/accel/adxl367.h
16
int (*read_fifo)(void *context, __be16 *fifo_buf,
drivers/iio/accel/adxl367.h
21
void *context, struct regmap *regmap, int irq);
drivers/iio/accel/adxl367_i2c.c
25
static int adxl367_i2c_read_fifo(void *context, __be16 *fifo_buf,
drivers/iio/accel/adxl367_i2c.c
28
struct adxl367_i2c_state *st = context;
drivers/iio/accel/adxl367_spi.c
41
static int adxl367_read_fifo(void *context, __be16 *fifo_buf,
drivers/iio/accel/adxl367_spi.c
44
struct adxl367_spi_state *st = context;
drivers/iio/accel/adxl367_spi.c
52
static int adxl367_read(void *context, const void *reg_buf, size_t reg_size,
drivers/iio/accel/adxl367_spi.c
55
struct adxl367_spi_state *st = context;
drivers/iio/accel/adxl367_spi.c
65
static int adxl367_write(void *context, const void *val_buf, size_t val_size)
drivers/iio/accel/adxl367_spi.c
67
struct adxl367_spi_state *st = context;
drivers/iio/accel/bma400_spi.c
20
static int bma400_regmap_spi_read(void *context,
drivers/iio/accel/bma400_spi.c
24
struct device *dev = context;
drivers/iio/accel/bma400_spi.c
47
static int bma400_regmap_spi_write(void *context, const void *data,
drivers/iio/accel/bma400_spi.c
50
struct device *dev = context;
drivers/iio/accel/bmi088-accel-spi.c
18
static int bmi088_regmap_spi_write(void *context, const void *data, size_t count)
drivers/iio/accel/bmi088-accel-spi.c
20
struct spi_device *spi = context;
drivers/iio/accel/bmi088-accel-spi.c
26
static int bmi088_regmap_spi_read(void *context, const void *reg,
drivers/iio/accel/bmi088-accel-spi.c
29
struct spi_device *spi = context;
drivers/iio/adc/ad4030.c
269
static int ad4030_spi_read(void *context, const void *reg, size_t reg_size,
drivers/iio/adc/ad4030.c
273
struct ad4030_state *st = context;
drivers/iio/adc/ad4030.c
301
static int ad4030_spi_write(void *context, const void *data, size_t count)
drivers/iio/adc/ad4030.c
304
struct ad4030_state *st = context;
drivers/iio/adc/ad4130.c
422
static int ad4130_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/iio/adc/ad4130.c
424
struct ad4130_state *st = context;
drivers/iio/adc/ad4130.c
451
static int ad4130_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/iio/adc/ad4130.c
453
struct ad4130_state *st = context;
drivers/iio/adc/ad4134.c
150
static int ad4134_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/iio/adc/ad4134.c
152
struct ad4134_state *st = context;
drivers/iio/adc/ad4134.c
233
static int ad4134_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/iio/adc/ad4134.c
235
struct ad4134_state *st = context;
drivers/iio/adc/ad4170-4.c
521
static int ad4170_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/iio/adc/ad4170-4.c
523
struct ad4170_state *st = context;
drivers/iio/adc/ad4170-4.c
555
static int ad4170_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/iio/adc/ad4170-4.c
557
struct ad4170_state *st = context;
drivers/iio/adc/ad4695.c
247
static int ad4695_regmap_bus_reg_write(void *context, const void *data,
drivers/iio/adc/ad4695.c
250
struct ad4695_state *st = context;
drivers/iio/adc/ad4695.c
265
static int ad4695_regmap_bus_reg_read(void *context, const void *reg,
drivers/iio/adc/ad4695.c
269
struct ad4695_state *st = context;
drivers/iio/adc/ad7091r8.c
114
static int ad7091r_regmap_bus_reg_read(void *context, unsigned int reg,
drivers/iio/adc/ad7091r8.c
117
struct ad7091r_state *st = context;
drivers/iio/adc/ad7091r8.c
145
static int ad7091r_regmap_bus_reg_write(void *context, unsigned int reg,
drivers/iio/adc/ad7091r8.c
148
struct ad7091r_state *st = context;
drivers/iio/adc/ad7380.c
919
static int ad7380_regmap_reg_write(void *context, unsigned int reg,
drivers/iio/adc/ad7380.c
922
struct ad7380_state *st = context;
drivers/iio/adc/ad7380.c
937
static int ad7380_regmap_reg_read(void *context, unsigned int reg,
drivers/iio/adc/ad7380.c
940
struct ad7380_state *st = context;
drivers/iio/adc/ade9000.c
652
static int ade9000_spi_write_reg(void *context, unsigned int reg,
drivers/iio/adc/ade9000.c
655
struct ade9000_state *st = context;
drivers/iio/adc/ade9000.c
680
static int ade9000_spi_read_reg(void *context, unsigned int reg,
drivers/iio/adc/ade9000.c
683
struct ade9000_state *st = context;
drivers/iio/adc/max14001.c
154
static int max14001_write_single_reg(void *context, unsigned int reg, unsigned int val)
drivers/iio/adc/max14001.c
156
struct max14001_state *st = context;
drivers/iio/adc/max14001.c
84
static int max14001_read(void *context, unsigned int reg, unsigned int *val)
drivers/iio/adc/max14001.c
86
struct max14001_state *st = context;
drivers/iio/adc/rohm-bd79112.c
116
static int bd79112_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/iio/adc/rohm-bd79112.c
118
struct bd79112_data *data = context;
drivers/iio/adc/rohm-bd79112.c
144
static int bd79112_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/iio/adc/rohm-bd79112.c
146
struct bd79112_data *data = context;
drivers/iio/adc/ti-ads1298.c
365
static int ads1298_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/iio/adc/ti-ads1298.c
367
struct ads1298_private *priv = context;
drivers/iio/adc/ti-ads1298.c
386
static int ads1298_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/iio/adc/ti-ads1298.c
388
struct ads1298_private *priv = context;
drivers/iio/adc/ti-ads1298.c
486
static void ads1298_rdata_complete(void *context)
drivers/iio/adc/ti-ads1298.c
488
struct iio_dev *indio_dev = context;
drivers/iio/adc/ti-ads1298.c
732
priv->rdata_msg.context = indio_dev;
drivers/iio/addac/ad74115.c
424
static int ad74115_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/iio/addac/ad74115.c
426
struct ad74115_state *st = context;
drivers/iio/addac/ad74115.c
447
static int ad74115_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/iio/addac/ad74115.c
449
struct ad74115_state *st = context;
drivers/iio/addac/ad74413r.c
173
static int ad74413r_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/iio/addac/ad74413r.c
175
struct ad74413r_state *st = context;
drivers/iio/addac/ad74413r.c
195
static int ad74413r_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/iio/addac/ad74413r.c
197
struct ad74413r_state *st = context;
drivers/iio/chemical/bme680_spi.c
63
static int bme680_regmap_spi_write(void *context, const void *data,
drivers/iio/chemical/bme680_spi.c
66
struct bme680_spi_bus_context *ctx = context;
drivers/iio/chemical/bme680_spi.c
86
static int bme680_regmap_spi_read(void *context, const void *reg,
drivers/iio/chemical/bme680_spi.c
89
struct bme680_spi_bus_context *ctx = context;
drivers/iio/chemical/sunrise_co2.c
49
static int sunrise_regmap_read(void *context, const void *reg_buf,
drivers/iio/chemical/sunrise_co2.c
52
struct i2c_client *client = context;
drivers/iio/chemical/sunrise_co2.c
86
static int sunrise_regmap_write(void *context, const void *val_buf, size_t count)
drivers/iio/chemical/sunrise_co2.c
88
struct i2c_client *client = context;
drivers/iio/dac/ltc2688.c
101
static int ltc2688_spi_read(void *context, const void *reg, size_t reg_size,
drivers/iio/dac/ltc2688.c
104
struct ltc2688_state *st = context;
drivers/iio/dac/ltc2688.c
129
static int ltc2688_spi_write(void *context, const void *data, size_t count)
drivers/iio/dac/ltc2688.c
131
struct ltc2688_state *st = context;
drivers/iio/dac/max22007.c
127
static int max22007_spi_write(void *context, const void *data, size_t count)
drivers/iio/dac/max22007.c
129
struct max22007_state *st = context;
drivers/iio/dac/max22007.c
89
static int max22007_spi_read(void *context, const void *reg, size_t reg_size,
drivers/iio/dac/max22007.c
92
struct max22007_state *st = context;
drivers/iio/imu/bmi323/bmi323_i2c.c
25
static int bmi323_regmap_i2c_read(void *context, const void *reg_buf,
drivers/iio/imu/bmi323/bmi323_i2c.c
29
struct bmi323_i2c_priv *priv = context;
drivers/iio/imu/bmi323/bmi323_i2c.c
52
static int bmi323_regmap_i2c_write(void *context, const void *data,
drivers/iio/imu/bmi323/bmi323_i2c.c
55
struct bmi323_i2c_priv *priv = context;
drivers/iio/imu/bmi323/bmi323_spi.c
20
static int bmi323_regmap_spi_read(void *context, const void *reg_buf,
drivers/iio/imu/bmi323/bmi323_spi.c
24
struct spi_device *spi = context;
drivers/iio/imu/bmi323/bmi323_spi.c
29
static int bmi323_regmap_spi_write(void *context, const void *data,
drivers/iio/imu/bmi323/bmi323_spi.c
32
struct spi_device *spi = context;
drivers/iio/imu/bno055/bno055_ser_core.c
285
static int bno055_ser_write_reg(void *context, const void *_data, size_t count)
drivers/iio/imu/bno055/bno055_ser_core.c
288
struct bno055_ser_priv *priv = context;
drivers/iio/imu/bno055/bno055_ser_core.c
299
static int bno055_ser_read_reg(void *context,
drivers/iio/imu/bno055/bno055_ser_core.c
306
struct bno055_ser_priv *priv = context;
drivers/iio/imu/inv_icm45600/inv_icm45600_core.c
103
static int inv_icm45600_read(void *context, const void *reg_buf, size_t reg_size,
drivers/iio/imu/inv_icm45600/inv_icm45600_core.c
107
struct regmap *map = context;
drivers/iio/imu/inv_icm45600/inv_icm45600_core.c
116
static int inv_icm45600_write(void *context, const void *data, size_t count)
drivers/iio/imu/inv_icm45600/inv_icm45600_core.c
120
struct regmap *map = context;
drivers/iio/imu/smi240.c
182
static int smi240_regmap_spi_read(void *context, const void *reg_buf,
drivers/iio/imu/smi240.c
189
struct spi_device *spi = context;
drivers/iio/imu/smi240.c
227
static int smi240_regmap_spi_write(void *context, const void *data,
drivers/iio/imu/smi240.c
234
struct spi_device *spi = context;
drivers/iio/imu/smi330/smi330_i2c.c
21
static int smi330_regmap_i2c_read(void *context, const void *reg_buf,
drivers/iio/imu/smi330/smi330_i2c.c
25
struct smi330_i2c_priv *priv = context;
drivers/iio/imu/smi330/smi330_i2c.c
62
static int smi330_regmap_i2c_write(void *context, const void *data,
drivers/iio/imu/smi330/smi330_i2c.c
65
struct smi330_i2c_priv *priv = context;
drivers/iio/imu/smi330/smi330_spi.c
12
static int smi330_regmap_spi_read(void *context, const void *reg_buf,
drivers/iio/imu/smi330/smi330_spi.c
16
struct spi_device *spi = context;
drivers/iio/imu/smi330/smi330_spi.c
30
static int smi330_regmap_spi_write(void *context, const void *data,
drivers/iio/imu/smi330/smi330_spi.c
33
struct spi_device *spi = context;
drivers/iio/industrialio-buffer.c
1684
priv->context = dma_fence_context_alloc(1);
drivers/iio/industrialio-buffer.c
1883
&priv->lock, priv->context, seqno);
drivers/iio/industrialio-buffer.c
48
u64 context;
drivers/iio/light/gp2ap002.c
396
static int gp2ap002_regmap_i2c_read(void *context, unsigned int reg,
drivers/iio/light/gp2ap002.c
399
struct device *dev = context;
drivers/iio/light/gp2ap002.c
412
static int gp2ap002_regmap_i2c_write(void *context, unsigned int reg,
drivers/iio/light/gp2ap002.c
415
struct device *dev = context;
drivers/iio/light/iqs621-als.c
192
static void iqs621_als_notifier_unregister(void *context)
drivers/iio/light/iqs621-als.c
194
struct iqs621_als_private *iqs621_als = context;
drivers/iio/light/iqs621-als.c
94
unsigned long event_flags, void *context)
drivers/iio/light/iqs621-als.c
96
struct iqs62x_event_data *event_data = context;
drivers/iio/position/iqs624-pos.c
101
struct iqs624_pos_private *iqs624_pos = context;
drivers/iio/position/iqs624-pos.c
51
unsigned long event_flags, void *context)
drivers/iio/position/iqs624-pos.c
53
struct iqs62x_event_data *event_data = context;
drivers/iio/position/iqs624-pos.c
99
static void iqs624_pos_notifier_unregister(void *context)
drivers/iio/pressure/bmp280-spi.c
15
static int bmp280_regmap_spi_write(void *context, const void *data,
drivers/iio/pressure/bmp280-spi.c
18
struct spi_device *spi = to_spi_device(context);
drivers/iio/pressure/bmp280-spi.c
31
static int bmp280_regmap_spi_read(void *context, const void *reg,
drivers/iio/pressure/bmp280-spi.c
34
struct spi_device *spi = to_spi_device(context);
drivers/iio/pressure/bmp280-spi.c
39
static int bmp380_regmap_spi_read(void *context, const void *reg,
drivers/iio/pressure/bmp280-spi.c
42
struct spi_device *spi = to_spi_device(context);
drivers/iio/proximity/hx9023s.c
1050
static void hx9023s_cfg_update(const struct firmware *fw, void *context)
drivers/iio/proximity/hx9023s.c
1052
struct hx9023s_data *data = context;
drivers/iio/resolver/ad2s1210.c
195
static int ad2s1210_regmap_reg_write(void *context, unsigned int reg,
drivers/iio/resolver/ad2s1210.c
198
struct ad2s1210_state *st = context;
drivers/iio/resolver/ad2s1210.c
241
static int ad2s1210_regmap_reg_read(void *context, unsigned int reg,
drivers/iio/resolver/ad2s1210.c
244
struct ad2s1210_state *st = context;
drivers/infiniband/core/addr.c
60
void *context;
drivers/infiniband/core/addr.c
62
struct rdma_dev_addr *addr, void *context);
drivers/infiniband/core/addr.c
623
req->addr, req->context);
drivers/infiniband/core/addr.c
642
struct rdma_dev_addr *addr, void *context),
drivers/infiniband/core/addr.c
643
bool resolve_by_gid_attr, void *context)
drivers/infiniband/core/addr.c
670
req->context = context;
drivers/infiniband/core/addr.c
783
struct rdma_dev_addr *addr, void *context)
drivers/infiniband/core/addr.c
785
((struct resolve_cb_context *)context)->status = status;
drivers/infiniband/core/addr.c
786
complete(&((struct resolve_cb_context *)context)->comp);
drivers/infiniband/core/cache.c
1000
return container_of(attr, struct ib_gid_table_entry, attr)->context;
drivers/infiniband/core/cache.c
361
ret = attr->device->ops.add_gid(attr, &entry->context);
drivers/infiniband/core/cache.c
403
ib_dev->ops.del_gid(&entry->attr, &entry->context);
drivers/infiniband/core/cache.c
739
void *context)
drivers/infiniband/core/cache.c
761
if (filter(gid, &entry->attr, context)) {
drivers/infiniband/core/cache.c
89
void *context;
drivers/infiniband/core/cm.c
2223
cm_id_priv->id.context = listen_cm_id_priv->id.context;
drivers/infiniband/core/cm.c
3228
cm_id_priv->msg->context[1] = (void *) (unsigned long)
drivers/infiniband/core/cm.c
346
msg->context[0] = cm_id_priv;
drivers/infiniband/core/cm.c
347
msg->context[1] = (void *) (unsigned long) state;
drivers/infiniband/core/cm.c
3597
cm_id_priv->id.context = listen_cm_id_priv->id.context;
drivers/infiniband/core/cm.c
363
struct cm_id_private *cm_id_priv = msg->context[0];
drivers/infiniband/core/cm.c
3756
enum ib_cm_state state = (unsigned long) msg->context[1];
drivers/infiniband/core/cm.c
3817
port = mad_agent->context;
drivers/infiniband/core/cm.c
3821
if (msg->context[0] == CM_DIRECT_RETRY_CTX) {
drivers/infiniband/core/cm.c
3825
cm_id_priv = msg->context[0];
drivers/infiniband/core/cm.c
387
m->context[0] = direct_retry ? CM_DIRECT_RETRY_CTX : NULL;
drivers/infiniband/core/cm.c
3999
struct cm_port *port = mad_agent->context;
drivers/infiniband/core/cm.c
660
cur_cm_id_priv->id.context ||
drivers/infiniband/core/cm.c
824
void *context)
drivers/infiniband/core/cm.c
837
cm_id_priv->id.context = context;
drivers/infiniband/core/cm.c
873
void *context)
drivers/infiniband/core/cm.c
877
cm_id_priv = cm_alloc_id_priv(device, cm_handler, context);
drivers/infiniband/core/cma.c
1011
void *context, enum rdma_ucm_port_space ps,
drivers/infiniband/core/cma.c
1022
id_priv->id.context = context;
drivers/infiniband/core/cma.c
1053
void *context, enum rdma_ucm_port_space ps,
drivers/infiniband/core/cma.c
1058
ret = __rdma_create_id(net, event_handler, context, ps, qp_type, NULL);
drivers/infiniband/core/cma.c
1068
void *context,
drivers/infiniband/core/cma.c
1074
ret = __rdma_create_id(current->nsproxy->net_ns, event_handler, context,
drivers/infiniband/core/cma.c
2081
cma_id_put(id_priv->id.context);
drivers/infiniband/core/cma.c
2189
struct rdma_id_private *id_priv = cm_id->context;
drivers/infiniband/core/cma.c
2288
listen_id->event_handler, listen_id->context,
drivers/infiniband/core/cma.c
2349
listen_id->context, listen_id->ps, IB_QPT_UD,
drivers/infiniband/core/cma.c
2458
cm_id->context = conn_id;
drivers/infiniband/core/cma.c
2472
trace_cm_prepare_mra(cm_id->context);
drivers/infiniband/core/cma.c
2524
struct rdma_id_private *id_priv = iw_id->context;
drivers/infiniband/core/cma.c
2601
listen_id = cm_id->context;
drivers/infiniband/core/cma.c
2610
listen_id->id.context, RDMA_PS_TCP,
drivers/infiniband/core/cma.c
2634
cm_id->context = conn_id;
drivers/infiniband/core/cma.c
2707
struct rdma_id_private *id_priv = id->context;
drivers/infiniband/core/cma.c
2713
id->context = id_priv->id.context;
drivers/infiniband/core/cma.c
2911
unsigned int num_prs, void *context)
drivers/infiniband/core/cma.c
2913
struct cma_work *work = context;
drivers/infiniband/core/cma.c
3509
struct rdma_dev_addr *dev_addr, void *context)
drivers/infiniband/core/cma.c
3511
struct rdma_id_private *id_priv = context;
drivers/infiniband/core/cma.c
352
void *context;
drivers/infiniband/core/cma.c
4253
struct rdma_id_private *id_priv = cm_id->context;
drivers/infiniband/core/cma.c
4869
event->param.ud.private_data = mc->context;
drivers/infiniband/core/cma.c
4900
struct cma_multicast *mc = multicast->context;
drivers/infiniband/core/cma.c
5085
u8 join_state, void *context)
drivers/infiniband/core/cma.c
5109
mc->context = context;
drivers/infiniband/core/cma.c
5553
unsigned int num_recs, void *context)
drivers/infiniband/core/cma.c
5555
struct cma_work *work = context;
drivers/infiniband/core/iwcm.c
241
void *context)
drivers/infiniband/core/iwcm.c
252
cm_id_priv->id.context = context;
drivers/infiniband/core/iwcm.c
779
listen_id_priv->id.context);
drivers/infiniband/core/mad.c
248
void *context,
drivers/infiniband/core/mad.c
410
mad_agent_priv->agent.context = context;
drivers/infiniband/core/mad_rmpp.c
172
msg->context[0] = ah;
drivers/infiniband/core/mad_rmpp.c
206
if (mad_send_wc->send_buf->context[0] == mad_send_wc->send_buf->ah)
drivers/infiniband/core/multicast.c
125
void *context);
drivers/infiniband/core/multicast.c
127
void *context);
drivers/infiniband/core/multicast.c
512
void *context)
drivers/infiniband/core/multicast.c
514
struct mcast_group *group = context;
drivers/infiniband/core/multicast.c
546
void *context)
drivers/infiniband/core/multicast.c
548
struct mcast_group *group = context;
drivers/infiniband/core/multicast.c
613
void *context)
drivers/infiniband/core/multicast.c
633
member->multicast.context = context;
drivers/infiniband/core/nldev.c
657
cq->uobject->uevent.uobject.context->res.id))
drivers/infiniband/core/nldev.c
744
pd->uobject->context->res.id))
drivers/infiniband/core/rdma_core.c
136
WARN_ON(!uobj->context);
drivers/infiniband/core/rdma_core.c
148
uobj->context = NULL;
drivers/infiniband/core/rdma_core.c
266
if (!attrs->context) {
drivers/infiniband/core/rdma_core.c
272
attrs->context = ucontext;
drivers/infiniband/core/rdma_core.c
283
uobj->context = attrs->context;
drivers/infiniband/core/rdma_core.c
415
attrs->context = uobj->context;
drivers/infiniband/core/rdma_core.c
439
ret = ib_rdmacg_try_charge(&uobj->cg_obj, uobj->context->device,
drivers/infiniband/core/rdma_core.c
528
ib_rdmacg_uncharge(&uobj->cg_obj, uobj->context->device,
drivers/infiniband/core/rdma_core.c
549
ib_rdmacg_uncharge(&uobj->cg_obj, uobj->context->device,
drivers/infiniband/core/rdma_core.c
820
.context = uobj->context,
drivers/infiniband/core/rdma_core.c
914
attrs.context = obj->context;
drivers/infiniband/core/sa.h
58
void *context),
drivers/infiniband/core/sa.h
59
void *context, struct ib_sa_query **sa_query);
drivers/infiniband/core/sa_query.c
1000
path_query->context);
drivers/infiniband/core/sa_query.c
131
unsigned int num_paths, void *context);
drivers/infiniband/core/sa_query.c
132
void *context;
drivers/infiniband/core/sa_query.c
139
void *context;
drivers/infiniband/core/sa_query.c
1417
query->mad_buf->context[0] = query;
drivers/infiniband/core/sa_query.c
145
void *context;
drivers/infiniband/core/sa_query.c
151
void *context;
drivers/infiniband/core/sa_query.c
1531
query->callback(status, NULL, 0, query->context);
drivers/infiniband/core/sa_query.c
1539
query->callback(status, &rec, 1, query->context);
drivers/infiniband/core/sa_query.c
1553
query->callback(status, &opa, 1, query->context);
drivers/infiniband/core/sa_query.c
1555
query->callback(status, &rec, 1, query->context);
drivers/infiniband/core/sa_query.c
157
unsigned int num_services, void *context);
drivers/infiniband/core/sa_query.c
158
void *context;
drivers/infiniband/core/sa_query.c
1600
query->callback(status, NULL, 0, query->context);
drivers/infiniband/core/sa_query.c
1606
query->callback(-ENODATA, NULL, 0, query->context);
drivers/infiniband/core/sa_query.c
1612
query->callback(-ENOMEM, NULL, 0, query->context);
drivers/infiniband/core/sa_query.c
1617
query->callback(status, rec, num_services, query->context);
drivers/infiniband/core/sa_query.c
1670
unsigned int num_paths, void *context),
drivers/infiniband/core/sa_query.c
1671
void *context,
drivers/infiniband/core/sa_query.c
1720
query->context = context;
drivers/infiniband/core/sa_query.c
1746
query->sa_query.mad_buf->context[1] = (query->conv_pr) ?
drivers/infiniband/core/sa_query.c
1800
void *context),
drivers/infiniband/core/sa_query.c
1801
void *context, struct ib_sa_query **sa_query)
drivers/infiniband/core/sa_query.c
1829
query->context = context;
drivers/infiniband/core/sa_query.c
1844
query->sa_query.mad_buf->context[1] = rec;
drivers/infiniband/core/sa_query.c
1873
query->callback(status, &rec, query->context);
drivers/infiniband/core/sa_query.c
1875
query->callback(status, NULL, query->context);
drivers/infiniband/core/sa_query.c
1891
void *context),
drivers/infiniband/core/sa_query.c
1892
void *context,
drivers/infiniband/core/sa_query.c
1920
query->context = context;
drivers/infiniband/core/sa_query.c
1964
query->callback(status, &rec, query->context);
drivers/infiniband/core/sa_query.c
1966
query->callback(status, NULL, query->context);
drivers/infiniband/core/sa_query.c
1981
void *context),
drivers/infiniband/core/sa_query.c
1982
void *context,
drivers/infiniband/core/sa_query.c
2016
query->context = context;
drivers/infiniband/core/sa_query.c
2055
static void ib_classportinfo_cb(void *context)
drivers/infiniband/core/sa_query.c
2057
struct ib_classport_info_context *cb_ctx = context;
drivers/infiniband/core/sa_query.c
2110
query->callback(query->context);
drivers/infiniband/core/sa_query.c
2121
void (*callback)(void *context),
drivers/infiniband/core/sa_query.c
2122
void *context,
drivers/infiniband/core/sa_query.c
2146
query->context = context;
drivers/infiniband/core/sa_query.c
2229
struct ib_sa_query *query = mad_send_wc->send_buf->context[0];
drivers/infiniband/core/sa_query.c
2275
query = send_buf->context[0];
drivers/infiniband/core/sa_query.c
763
struct sa_path_rec *sa_rec = query->mad_buf->context[1];
drivers/infiniband/core/sa_query.c
770
query->mad_buf->context[1] = NULL;
drivers/infiniband/core/ucma.c
305
struct ucma_context *listen_ctx = cm_id->context;
drivers/infiniband/core/ucma.c
322
ctx->cm_id->context = ctx;
drivers/infiniband/core/ucma.c
343
struct ucma_context *ctx = cm_id->context;
drivers/infiniband/core/user_mad.c
224
struct ib_umad_file *file = agent->context;
drivers/infiniband/core/user_mad.c
225
struct ib_umad_packet *packet = send_wc->send_buf->context[0];
drivers/infiniband/core/user_mad.c
244
struct ib_umad_file *file = agent->context;
drivers/infiniband/core/user_mad.c
609
packet->msg->context[0] = packet;
drivers/infiniband/core/uverbs_cmd.c
228
attrs->context = ucontext;
drivers/infiniband/core/uverbs_cmd.c
234
struct ib_ucontext *ucontext = attrs->context;
drivers/infiniband/core/uverbs_cmd.c
3279
if (!rdma_is_port_valid(uobj->context->device, cmd.flow_attr.port)) {
drivers/infiniband/core/uverbs_cmd.c
337
rdma_restrack_put(&attrs->context->res);
drivers/infiniband/core/uverbs_cmd.c
338
kfree(attrs->context);
drivers/infiniband/core/uverbs_cmd.c
339
attrs->context = NULL;
drivers/infiniband/core/uverbs_ioctl.c
601
pbundle->bundle.context = NULL; /* only valid if bundle has uobject */
drivers/infiniband/core/uverbs_main.c
494
struct ib_device *ib_dev = async_file->uobj.context->device;
drivers/infiniband/core/uverbs_main.c
616
bundle.context = NULL; /* only valid if bundle has uobject */
drivers/infiniband/core/uverbs_std_types_counters.c
60
struct ib_device *ib_dev = attrs->context->device;
drivers/infiniband/core/uverbs_std_types_cq.c
68
struct ib_device *ib_dev = attrs->context->device;
drivers/infiniband/core/uverbs_std_types_device.c
267
kfree(attrs->context);
drivers/infiniband/core/uverbs_std_types_device.c
268
attrs->context = NULL;
drivers/infiniband/core/uverbs_std_types_dm.c
56
struct ib_device *ib_dev = attrs->context->device;
drivers/infiniband/core/uverbs_std_types_dm.c
73
dm = ib_dev->ops.alloc_dm(ib_dev, attrs->context, &attr, attrs);
drivers/infiniband/core/uverbs_std_types_dmabuf.c
106
mmap_entry = ib_dev->ops.pgoff_to_mmap_entry(attrs->context, pg_off);
drivers/infiniband/core/uverbs_std_types_dmabuf.c
73
if (!priv->uobj.context)
drivers/infiniband/core/uverbs_std_types_dmabuf.c
96
struct ib_device *ib_dev = attrs->context->device;
drivers/infiniband/core/uverbs_std_types_dmah.c
36
struct ib_device *ib_dev = attrs->context->device;
drivers/infiniband/core/verbs.c
1159
static void __ib_qp_event_handler(struct ib_event *event, void *context)
drivers/infiniband/core/verbs.c
1169
static void __ib_shared_qp_event_handler(struct ib_event *event, void *context)
drivers/infiniband/core/verbs.c
1171
struct ib_qp *qp = context;
drivers/infiniband/core/verbs.c
741
void *context)
drivers/infiniband/core/verbs.c
743
struct find_gid_index_context *ctx = context;
drivers/infiniband/core/verbs.c
762
struct find_gid_index_context context = {.vlan_id = vlan_id,
drivers/infiniband/core/verbs.c
766
&context);
drivers/infiniband/hw/bnxt_re/ib_verbs.c
383
int bnxt_re_del_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/bnxt_re/ib_verbs.c
393
ctx = *context;
drivers/infiniband/hw/bnxt_re/ib_verbs.c
436
int bnxt_re_add_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/bnxt_re/ib_verbs.c
455
*context = ctx_tbl[tbl_idx];
drivers/infiniband/hw/bnxt_re/ib_verbs.c
471
*context = ctx;
drivers/infiniband/hw/bnxt_re/ib_verbs.h
215
int bnxt_re_del_gid(const struct ib_gid_attr *attr, void **context);
drivers/infiniband/hw/bnxt_re/ib_verbs.h
216
int bnxt_re_add_gid(const struct ib_gid_attr *attr, void **context);
drivers/infiniband/hw/bnxt_re/ib_verbs.h
274
void bnxt_re_dealloc_ucontext(struct ib_ucontext *context);
drivers/infiniband/hw/bnxt_re/ib_verbs.h
280
int bnxt_re_mmap(struct ib_ucontext *context, struct vm_area_struct *vma);
drivers/infiniband/hw/cxgb4/provider.c
103
spin_lock(&context->mmap_lock);
drivers/infiniband/hw/cxgb4/provider.c
104
uresp.status_page_key = context->key;
drivers/infiniband/hw/cxgb4/provider.c
105
context->key += PAGE_SIZE;
drivers/infiniband/hw/cxgb4/provider.c
106
spin_unlock(&context->mmap_lock);
drivers/infiniband/hw/cxgb4/provider.c
119
insert_mmap(context, mm);
drivers/infiniband/hw/cxgb4/provider.c
128
static int c4iw_mmap(struct ib_ucontext *context, struct vm_area_struct *vma)
drivers/infiniband/hw/cxgb4/provider.c
149
rdev = &(to_c4iw_dev(context->device)->rdev);
drivers/infiniband/hw/cxgb4/provider.c
150
ucontext = to_c4iw_ucontext(context);
drivers/infiniband/hw/cxgb4/provider.c
62
static void c4iw_dealloc_ucontext(struct ib_ucontext *context)
drivers/infiniband/hw/cxgb4/provider.c
64
struct c4iw_ucontext *ucontext = to_c4iw_ucontext(context);
drivers/infiniband/hw/cxgb4/provider.c
68
pr_debug("context %p\n", context);
drivers/infiniband/hw/cxgb4/provider.c
80
struct c4iw_ucontext *context = to_c4iw_ucontext(ucontext);
drivers/infiniband/hw/cxgb4/provider.c
87
c4iw_init_dev_ucontext(&rhp->rdev, &context->uctx);
drivers/infiniband/hw/cxgb4/provider.c
88
INIT_LIST_HEAD(&context->mmaps);
drivers/infiniband/hw/cxgb4/provider.c
89
spin_lock_init(&context->mmap_lock);
drivers/infiniband/hw/erdma/erdma_verbs.c
2176
int erdma_add_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/erdma/erdma_verbs.c
2189
int erdma_del_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/erdma/erdma_verbs.h
482
int erdma_add_gid(const struct ib_gid_attr *attr, void **context);
drivers/infiniband/hw/erdma/erdma_verbs.h
483
int erdma_del_gid(const struct ib_gid_attr *attr, void **context);
drivers/infiniband/hw/hfi1/chip.c
1020
unsigned int context, u64 err_status);
drivers/infiniband/hw/hfi1/chip.c
1023
unsigned int context, u64 err_status);
drivers/infiniband/hw/hfi1/chip.c
1025
unsigned int context, u64 err_status);
drivers/infiniband/hw/hfi1/chip.c
1178
u64 (*rw_cntr)(const struct cntr_entry *, void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
12341
u64 *psval, void *context, int vl)
drivers/infiniband/hw/hfi1/chip.c
12353
val = entry->rw_cntr(entry, context, vl, CNTR_MODE_R, 0);
drivers/infiniband/hw/hfi1/chip.c
12393
u64 *psval, void *context, int vl, u64 data)
drivers/infiniband/hw/hfi1/chip.c
12407
val = entry->rw_cntr(entry, context, vl, CNTR_MODE_W,
drivers/infiniband/hw/hfi1/chip.c
12411
val = entry->rw_cntr(entry, context, vl, CNTR_MODE_W,
drivers/infiniband/hw/hfi1/chip.c
12415
val = entry->rw_cntr(entry, context, vl, CNTR_MODE_W, data);
drivers/infiniband/hw/hfi1/chip.c
1371
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1373
struct hfi1_devdata *dd = context;
drivers/infiniband/hw/hfi1/chip.c
1388
void *context, int idx, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1390
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1398
void *context, int idx, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1400
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1408
void *context, int idx, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1410
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1418
void *context, int idx, int mode,
drivers/infiniband/hw/hfi1/chip.c
1421
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1428
static u64 dev_access_u64_csr(const struct cntr_entry *entry, void *context,
drivers/infiniband/hw/hfi1/chip.c
1431
struct hfi1_devdata *dd = context;
drivers/infiniband/hw/hfi1/chip.c
1449
static u64 dc_access_lcb_cntr(const struct cntr_entry *entry, void *context,
drivers/infiniband/hw/hfi1/chip.c
1452
struct hfi1_devdata *dd = context;
drivers/infiniband/hw/hfi1/chip.c
1474
static u64 port_access_u32_csr(const struct cntr_entry *entry, void *context,
drivers/infiniband/hw/hfi1/chip.c
1477
struct hfi1_pportdata *ppd = context;
drivers/infiniband/hw/hfi1/chip.c
1485
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1487
struct hfi1_pportdata *ppd = context;
drivers/infiniband/hw/hfi1/chip.c
1524
static u64 access_sw_link_dn_cnt(const struct cntr_entry *entry, void *context,
drivers/infiniband/hw/hfi1/chip.c
1527
struct hfi1_pportdata *ppd = context;
drivers/infiniband/hw/hfi1/chip.c
1534
static u64 access_sw_link_up_cnt(const struct cntr_entry *entry, void *context,
drivers/infiniband/hw/hfi1/chip.c
1537
struct hfi1_pportdata *ppd = context;
drivers/infiniband/hw/hfi1/chip.c
1545
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1548
struct hfi1_pportdata *ppd = (struct hfi1_pportdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1556
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1558
struct hfi1_pportdata *ppd = (struct hfi1_pportdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1573
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1576
struct hfi1_pportdata *ppd = context;
drivers/infiniband/hw/hfi1/chip.c
1586
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1588
struct hfi1_pportdata *ppd = context;
drivers/infiniband/hw/hfi1/chip.c
1633
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1635
struct hfi1_devdata *dd = context;
drivers/infiniband/hw/hfi1/chip.c
1642
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1644
struct hfi1_devdata *dd = context;
drivers/infiniband/hw/hfi1/chip.c
1651
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1653
struct hfi1_devdata *dd = context;
drivers/infiniband/hw/hfi1/chip.c
1659
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1661
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1667
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1669
struct hfi1_devdata *dd = context;
drivers/infiniband/hw/hfi1/chip.c
1675
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1677
struct hfi1_devdata *dd = context;
drivers/infiniband/hw/hfi1/chip.c
1683
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1685
struct hfi1_devdata *dd = context;
drivers/infiniband/hw/hfi1/chip.c
1691
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1693
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1701
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1704
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1710
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1713
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1719
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1722
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1728
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
1731
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1737
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1740
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1747
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1749
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1755
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
1758
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1764
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1767
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1773
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1776
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1782
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
1785
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1792
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1794
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1800
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
1803
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1809
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1812
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1823
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1825
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1835
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1838
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1844
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1847
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1853
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1856
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1862
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1865
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1871
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1874
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1881
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1883
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1890
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1892
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1898
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
1901
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1907
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1910
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1916
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1918
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1924
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1927
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1933
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1936
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1943
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1945
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1952
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
1954
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1960
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1963
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1969
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1972
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1978
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1981
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1987
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
1990
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
1996
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
1999
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2005
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2008
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2015
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2017
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2023
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2026
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2032
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2035
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2041
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2044
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2050
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2053
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2059
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2062
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2068
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2071
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2077
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2080
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2087
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2089
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2096
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2098
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2105
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2107
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2114
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2116
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2123
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2125
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2131
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2134
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2141
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2143
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2149
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2152
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2158
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2161
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2168
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2170
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2176
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2179
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2185
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2188
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2194
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2197
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2207
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2210
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2216
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2219
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2225
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2228
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2234
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2237
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2243
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2246
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2252
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2255
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2261
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2264
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2270
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2273
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2279
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2282
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2289
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2291
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2298
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2300
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2306
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2309
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2315
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2318
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2324
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2327
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2333
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2336
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2342
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2345
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2351
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2354
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2360
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2363
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2370
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2372
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2379
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2381
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2388
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2390
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2397
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2399
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2406
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2408
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2415
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2417
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2424
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2426
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2433
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2435
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2442
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2444
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2451
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2453
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2460
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2462
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2469
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2471
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2478
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2480
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2486
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2489
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2495
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2498
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2504
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2507
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2513
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2516
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2522
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2525
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2532
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2534
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2541
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2543
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2550
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2552
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2559
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2561
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2568
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2570
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2577
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2579
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2586
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2588
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2595
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2597
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2604
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2606
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2612
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2615
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2621
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2624
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2631
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2633
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2640
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2642
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2648
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2651
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2657
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2660
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2666
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2669
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2675
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2678
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2684
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2687
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2693
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2696
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2702
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2705
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2712
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2714
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2721
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2723
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2729
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2732
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2738
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2741
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2747
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2750
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2756
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2759
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2765
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2768
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2774
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2777
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2788
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2790
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2797
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2799
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2806
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2808
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2815
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2817
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2823
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2826
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2832
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2835
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2841
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2844
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2851
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2853
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2859
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2862
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2868
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2871
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2877
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2880
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2887
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2889
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2896
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2898
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2904
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2907
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2913
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2916
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2922
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2925
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2931
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
2934
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2941
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2943
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2949
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2952
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2958
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
2961
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2968
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2970
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2977
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2979
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2986
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2988
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
2995
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
2997
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3004
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3006
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3013
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3015
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3022
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3024
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3031
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3033
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3040
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3042
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3048
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3051
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3057
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3060
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3066
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3069
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3075
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3078
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3084
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3087
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3093
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
3096
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3102
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3105
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3116
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3118
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3125
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3127
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3133
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3136
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3142
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3145
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3156
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3158
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3165
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3167
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3173
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3176
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3182
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
3185
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3192
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3194
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3200
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3203
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3209
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3212
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3218
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3221
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3227
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3230
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3236
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3239
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3245
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3248
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3254
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3257
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3263
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3266
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3272
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3275
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3281
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3284
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3290
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3293
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3299
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3302
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3308
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3311
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3317
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3320
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3326
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
3329
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3336
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3338
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3344
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3347
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3354
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3356
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3363
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3365
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3372
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3374
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3381
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3383
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3390
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3392
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3399
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3401
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3408
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3410
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3417
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3419
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3426
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3428
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3435
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3437
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3444
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3446
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3453
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3455
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3462
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3464
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3471
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3473
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3480
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3482
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3489
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3491
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3498
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3500
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3507
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3509
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3516
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3518
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3525
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3527
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3534
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3536
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3543
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3545
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3552
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3554
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3561
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3563
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3570
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3572
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3579
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3581
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3587
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3590
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3596
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
3599
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3605
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3608
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3614
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3617
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3624
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3626
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3632
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3635
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3641
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3644
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3651
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3653
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3660
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3662
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3668
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3671
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3678
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3680
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3686
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3689
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3696
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3698
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3704
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3707
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3714
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3716
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3723
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3725
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3736
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3738
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3744
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
3747
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3753
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3756
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3767
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3769
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3775
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3778
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3785
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3787
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3793
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
3796
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3802
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3805
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3816
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3818
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3825
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3827
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3834
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3836
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3842
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3845
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3851
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3854
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3861
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3863
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3870
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3872
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3879
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3881
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3887
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3890
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3896
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3899
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3905
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3908
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3914
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3917
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3923
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3926
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3932
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3935
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3941
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3944
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3951
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3953
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3959
void *context, int vl,
drivers/infiniband/hw/hfi1/chip.c
3962
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3968
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3970
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3976
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3979
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3985
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
3988
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
3995
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/chip.c
3997
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
4003
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
4006
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
4012
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
4015
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
4021
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
4024
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
4030
void *context, int vl, int mode,
drivers/infiniband/hw/hfi1/chip.c
4033
struct hfi1_devdata *dd = (struct hfi1_devdata *)context;
drivers/infiniband/hw/hfi1/chip.c
4053
void *context, int vl, int mode, u64 data) \
drivers/infiniband/hw/hfi1/chip.c
4055
struct hfi1_pportdata *ppd = (struct hfi1_pportdata *)context; \
drivers/infiniband/hw/hfi1/chip.c
4067
void *context, int vl, int mode, u64 data) \
drivers/infiniband/hw/hfi1/chip.c
4069
struct hfi1_pportdata *ppd = (struct hfi1_pportdata *)context; \
drivers/infiniband/hw/hfi1/chip.c
5882
u32 context,
drivers/infiniband/hw/hfi1/chip.c
5891
reg = read_kctxt_csr(dd, context, eri->status);
drivers/infiniband/hw/hfi1/chip.c
5894
write_kctxt_csr(dd, context, eri->clear, reg);
drivers/infiniband/hw/hfi1/chip.c
5896
eri->handler(dd, context, reg);
drivers/infiniband/hw/hfi1/chip.c
5907
mask = read_kctxt_csr(dd, context, eri->mask);
drivers/infiniband/hw/hfi1/chip.c
5909
write_kctxt_csr(dd, context, eri->mask, mask);
drivers/infiniband/hw/hfi1/init.c
305
struct hfi1_ctxtdata **context)
drivers/infiniband/hw/hfi1/init.c
324
*context = NULL;
drivers/infiniband/hw/hfi1/init.c
451
*context = rcd;
drivers/infiniband/hw/hfi1/init.c
456
*context = NULL;
drivers/infiniband/hw/hfi1/pio.c
402
int ret, i, j, context;
drivers/infiniband/hw/hfi1/pio.c
427
context = 0;
drivers/infiniband/hw/hfi1/pio.c
434
&dd->send_contexts[context];
drivers/infiniband/hw/hfi1/pio.c
439
context++;
drivers/infiniband/hw/hfi1/pio.c
457
u32 context;
drivers/infiniband/hw/hfi1/pio.c
464
context = chip_send_contexts(dd) - index - 1;
drivers/infiniband/hw/hfi1/pio.c
465
dd->hw_to_sw[context] = index;
drivers/infiniband/hw/hfi1/pio.c
467
*hw_context = context;
drivers/infiniband/hw/hfi1/pio.c
494
static inline u32 group_context(u32 context, u32 group)
drivers/infiniband/hw/hfi1/pio.c
496
return (context >> group) << group;
drivers/infiniband/hw/hfi1/tid_rdma.c
1674
void *context, int vl, int mode, u64 data)
drivers/infiniband/hw/hfi1/tid_rdma.c
1676
struct hfi1_devdata *dd = context;
drivers/infiniband/hw/hfi1/tid_rdma.h
242
void *context, int vl, int mode, u64 data);
drivers/infiniband/hw/hns/hns_roce_cmd.c
103
context->result = (status == HNS_ROCE_CMD_SUCCESS) ? 0 : (-EIO);
drivers/infiniband/hw/hns/hns_roce_cmd.c
104
context->out_param = out_param;
drivers/infiniband/hw/hns/hns_roce_cmd.c
105
complete(&context->done);
drivers/infiniband/hw/hns/hns_roce_cmd.c
113
struct hns_roce_cmd_context *context;
drivers/infiniband/hw/hns/hns_roce_cmd.c
120
context = &cmd->context[cmd->free_head];
drivers/infiniband/hw/hns/hns_roce_cmd.c
121
cmd->free_head = context->next;
drivers/infiniband/hw/hns/hns_roce_cmd.c
122
} while (context->busy);
drivers/infiniband/hw/hns/hns_roce_cmd.c
124
context->busy = 1;
drivers/infiniband/hw/hns/hns_roce_cmd.c
125
context->token += cmd->max_cmds;
drivers/infiniband/hw/hns/hns_roce_cmd.c
129
reinit_completion(&context->done);
drivers/infiniband/hw/hns/hns_roce_cmd.c
131
mbox_msg->token = context->token;
drivers/infiniband/hw/hns/hns_roce_cmd.c
140
if (!wait_for_completion_timeout(&context->done,
drivers/infiniband/hw/hns/hns_roce_cmd.c
143
context->token, mbox_msg->cmd);
drivers/infiniband/hw/hns/hns_roce_cmd.c
148
ret = context->result;
drivers/infiniband/hw/hns/hns_roce_cmd.c
151
context->token, mbox_msg->cmd, ret);
drivers/infiniband/hw/hns/hns_roce_cmd.c
154
context->busy = 0;
drivers/infiniband/hw/hns/hns_roce_cmd.c
221
hr_cmd->context =
drivers/infiniband/hw/hns/hns_roce_cmd.c
222
kzalloc_objs(*hr_cmd->context, hr_cmd->max_cmds);
drivers/infiniband/hw/hns/hns_roce_cmd.c
223
if (!hr_cmd->context) {
drivers/infiniband/hw/hns/hns_roce_cmd.c
229
hr_cmd->context[i].token = i;
drivers/infiniband/hw/hns/hns_roce_cmd.c
230
hr_cmd->context[i].next = i + 1;
drivers/infiniband/hw/hns/hns_roce_cmd.c
231
init_completion(&hr_cmd->context[i].done);
drivers/infiniband/hw/hns/hns_roce_cmd.c
233
hr_cmd->context[hr_cmd->max_cmds - 1].next = 0;
drivers/infiniband/hw/hns/hns_roce_cmd.c
248
kfree(hr_cmd->context);
drivers/infiniband/hw/hns/hns_roce_cmd.c
93
struct hns_roce_cmd_context *context =
drivers/infiniband/hw/hns/hns_roce_cmd.c
94
&hr_dev->cmd.context[token % hr_dev->cmd.max_cmds];
drivers/infiniband/hw/hns/hns_roce_cmd.c
96
if (unlikely(token != context->token)) {
drivers/infiniband/hw/hns/hns_roce_cmd.c
99
token, context->token);
drivers/infiniband/hw/hns/hns_roce_db.c
10
int hns_roce_db_map_user(struct hns_roce_ucontext *context, unsigned long virt,
drivers/infiniband/hw/hns/hns_roce_db.c
18
mutex_lock(&context->page_mutex);
drivers/infiniband/hw/hns/hns_roce_db.c
20
list_for_each_entry(page, &context->page_list, list)
drivers/infiniband/hw/hns/hns_roce_db.c
32
page->umem = ib_umem_get(context->ibucontext.device, page_addr,
drivers/infiniband/hw/hns/hns_roce_db.c
40
list_add(&page->list, &context->page_list);
drivers/infiniband/hw/hns/hns_roce_db.c
50
mutex_unlock(&context->page_mutex);
drivers/infiniband/hw/hns/hns_roce_db.c
55
void hns_roce_db_unmap_user(struct hns_roce_ucontext *context,
drivers/infiniband/hw/hns/hns_roce_db.c
58
mutex_lock(&context->page_mutex);
drivers/infiniband/hw/hns/hns_roce_db.c
67
mutex_unlock(&context->page_mutex);
drivers/infiniband/hw/hns/hns_roce_device.h
1300
int hns_roce_db_map_user(struct hns_roce_ucontext *context, unsigned long virt,
drivers/infiniband/hw/hns/hns_roce_device.h
1302
void hns_roce_db_unmap_user(struct hns_roce_ucontext *context,
drivers/infiniband/hw/hns/hns_roce_device.h
562
struct hns_roce_cmd_context *context;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4491
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4505
memcpy(mailbox->buf, context, qpc_size);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4517
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4533
hr_reg_write_bool(context, QPC_RRE,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4537
hr_reg_write_bool(context, QPC_RWE,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4541
hr_reg_write_bool(context, QPC_ATE,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4544
hr_reg_write_bool(context, QPC_EXT_ATE,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4550
struct hns_roce_v2_qp_context *context)
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4552
hr_reg_write(context, QPC_SGE_SHIFT,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4556
hr_reg_write(context, QPC_SQ_SHIFT, ilog2(hr_qp->sq.wqe_cnt));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4558
hr_reg_write(context, QPC_RQ_SHIFT, ilog2(hr_qp->rq.wqe_cnt));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4572
struct hns_roce_v2_qp_context *context)
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4583
hr_reg_write(context, QPC_TST, to_hr_qp_type(ibqp->qp_type));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4585
hr_reg_write(context, QPC_PD, get_pdn(ibqp->pd));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4587
hr_reg_write(context, QPC_RQWS, ilog2(hr_qp->rq.max_gs));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4589
set_qpc_wqe_cnt(hr_qp, context);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4592
hr_reg_write(context, QPC_VLAN_ID, 0xfff);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4595
context->qkey_xrcd = cpu_to_le32(hr_qp->xrcdn);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4597
hr_reg_enable(context, QPC_XRC_QP_TYPE);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4601
hr_reg_enable(context, QPC_RQ_RECORD_EN);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4604
hr_reg_enable(context, QPC_OWNER_MODE);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4606
hr_reg_write(context, QPC_RQ_DB_RECORD_ADDR_L,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4608
hr_reg_write(context, QPC_RQ_DB_RECORD_ADDR_H,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4611
hr_reg_write(context, QPC_RX_CQN, get_cqn(ibqp->recv_cq));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4614
hr_reg_enable(context, QPC_SRQ_EN);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4615
hr_reg_write(context, QPC_SRQN, to_hr_srq(ibqp->srq)->srqn);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4618
hr_reg_enable(context, QPC_FRE);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4620
hr_reg_write(context, QPC_TX_CQN, get_cqn(ibqp->send_cq));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4626
hr_reg_enable(&context->ext, QPCEX_STASH);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4630
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4639
hr_reg_write(context, QPC_TST, to_hr_qp_type(ibqp->qp_type));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4642
hr_reg_write(context, QPC_PD, get_pdn(ibqp->pd));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4645
hr_reg_write(context, QPC_RX_CQN, get_cqn(ibqp->recv_cq));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4648
hr_reg_write(context, QPC_TX_CQN, get_cqn(ibqp->send_cq));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4652
hr_reg_enable(context, QPC_SRQ_EN);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4654
hr_reg_write(context, QPC_SRQN, to_hr_srq(ibqp->srq)->srqn);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4661
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4680
context->wqe_sge_ba = cpu_to_le32(wqe_sge_ba >> 3);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4689
hr_reg_write(context, QPC_WQE_SGE_BA_H, wqe_sge_ba >> (32 + 3));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4692
hr_reg_write(context, QPC_SQ_HOP_NUM,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4697
hr_reg_write(context, QPC_SGE_HOP_NUM,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4702
hr_reg_write(context, QPC_RQ_HOP_NUM,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4708
hr_reg_write(context, QPC_WQE_SGE_BA_PG_SZ,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4712
hr_reg_write(context, QPC_WQE_SGE_BUF_PG_SZ,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4716
context->rq_cur_blk_addr = cpu_to_le32(to_hr_hw_page_addr(mtts[0]));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4719
hr_reg_write(context, QPC_RQ_CUR_BLK_ADDR_H,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4724
context->rq_nxt_blk_addr =
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4727
hr_reg_write(context, QPC_RQ_NXT_BLK_ADDR_H,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4737
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4769
hr_reg_write(context, QPC_SQ_CUR_BLK_ADDR_L,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4771
hr_reg_write(context, QPC_SQ_CUR_BLK_ADDR_H,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4776
hr_reg_write(context, QPC_SQ_CUR_SGE_BLK_ADDR_L,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4778
hr_reg_write(context, QPC_SQ_CUR_SGE_BLK_ADDR_H,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4783
hr_reg_write(context, QPC_RX_SQ_CUR_BLK_ADDR_L,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4785
hr_reg_write(context, QPC_RX_SQ_CUR_BLK_ADDR_H,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4804
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4826
ret = config_qp_rq_buf(hr_dev, hr_qp, context, qpc_mask);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4854
hr_reg_write(context, QPC_TRRL_BA_L, trrl_ba >> QPC_TRRL_BA_L_S);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4856
context->trrl_ba = cpu_to_le32(trrl_ba >> QPC_TRRL_BA_M_S);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4858
hr_reg_write(context, QPC_TRRL_BA_H, trrl_ba >> QPC_TRRL_BA_H_S);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4861
context->irrl_ba = cpu_to_le32(irrl_ba >> QPC_IRRL_BA_L_S);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4863
hr_reg_write(context, QPC_IRRL_BA_H, irrl_ba >> QPC_IRRL_BA_H_S);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4866
hr_reg_enable(context, QPC_RMT_E2E);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4869
hr_reg_write(context, QPC_SIG_TYPE, hr_qp->sq_signal_bits);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4879
hr_reg_write(context, QPC_LBI, hr_dev->loop_idc);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4884
hr_reg_write(context, QPC_DQPN, attr->dest_qp_num);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4888
memcpy(&context->dmac, dmac, sizeof(u32));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4889
hr_reg_write(context, QPC_DMAC_H, *((u16 *)(&dmac[4])));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4905
hr_reg_write(context, QPC_MTU, ib_mtu);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4909
hr_reg_write(context, QPC_LP_PKTN_INI, lp_pktn_ini);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4927
hr_reg_write(context, QPC_ACK_REQ_FREQ, ack_req_freq);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4934
context->rq_rnr_timer = 0;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4942
hr_reg_write(context, QPC_LP_SGEN_INI, MAX_LP_SGEN);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4947
hr_reg_write_bool(context, QPC_RQIE,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4956
hr_reg_write_bool(context, QPC_CQEIE,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4961
hr_reg_write(context, QPC_CQEIS, 0);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4969
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
4983
ret = config_qp_sq_buf(hr_dev, hr_qp, context, qpc_mask);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5155
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5174
hr_reg_write(context, QPC_CONG_ALGO_TMPL_ID, hr_dev->cong_algo_tmpl_id +
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5177
hr_reg_write(&context->ext, QPCEX_CONG_ALG_SEL, cong_field.alg_sel);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5179
hr_reg_write(&context->ext, QPCEX_CONG_ALG_SUB_SEL,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5182
hr_reg_write(&context->ext, QPCEX_DIP_CTX_IDX_VLD, cong_field.dip_vld);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5184
hr_reg_write(&context->ext, QPCEX_SQ_RQ_NOT_FORBID_EN,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5198
hr_reg_write(&context->ext, QPCEX_DIP_CTX_IDX, dip_idx);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5234
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5263
hr_reg_write(context, QPC_SL, hr_qp->sl);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5272
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5296
hr_reg_write(context, QPC_SL, sl);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5319
hr_reg_enable(context, QPC_RQ_VLAN_EN);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5321
hr_reg_enable(context, QPC_SQ_VLAN_EN);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5325
hr_reg_write(context, QPC_VLAN_ID, vlan_id);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5339
hr_reg_write(context, QPC_UDPSPN,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5346
hr_reg_write(context, QPC_GMV_IDX, grh->sgid_index);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5350
hr_reg_write(context, QPC_HOPLIMIT, grh->hop_limit);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5353
ret = fill_cong_field(ibqp, attr, context, qpc_mask);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5357
hr_reg_write(context, QPC_TC, get_tclass(&attr->ah_attr.grh));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5360
hr_reg_write(context, QPC_FL, grh->flow_label);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5362
memcpy(context->dgid, grh->dgid.raw, sizeof(grh->dgid.raw));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5365
return hns_roce_set_sl(ibqp, attr, context, qpc_mask);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5398
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5410
modify_qp_reset_to_init(ibqp, context);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5412
modify_qp_init_to_init(ibqp, context, qpc_mask);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5414
ret = modify_qp_init_to_rtr(ibqp, attr, attr_mask, context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5417
ret = modify_qp_rtr_to_rts(ibqp, attr_mask, context, qpc_mask);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5449
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5458
ret = hns_roce_v2_set_path(ibqp, attr, attr_mask, context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5467
hr_reg_write(context, QPC_AT, timeout);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5473
hr_reg_write(context, QPC_RETRY_NUM_INIT, attr->retry_cnt);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5476
hr_reg_write(context, QPC_RETRY_CNT, attr->retry_cnt);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5481
hr_reg_write(context, QPC_RNR_NUM_INIT, attr->rnr_retry);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5484
hr_reg_write(context, QPC_RNR_CNT, attr->rnr_retry);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5489
hr_reg_write(context, QPC_SQ_CUR_PSN, attr->sq_psn);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5492
hr_reg_write(context, QPC_SQ_MAX_PSN, attr->sq_psn);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5495
hr_reg_write(context, QPC_RETRY_MSG_PSN_L, attr->sq_psn);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5498
hr_reg_write(context, QPC_RETRY_MSG_PSN_H,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5502
hr_reg_write(context, QPC_RETRY_MSG_FPKT_PSN, attr->sq_psn);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5505
hr_reg_write(context, QPC_RX_ACK_EPSN, attr->sq_psn);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5511
hr_reg_write(context, QPC_RR_MAX,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5517
hr_reg_write(context, QPC_SR_MAX, fls(attr->max_rd_atomic - 1));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5522
set_access_flags(hr_qp, context, qpc_mask, attr, attr_mask);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5525
hr_reg_write(context, QPC_MIN_RNR_TIME,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5532
hr_reg_write(context, QPC_RX_REQ_EPSN, attr->rq_psn);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5535
hr_reg_write(context, QPC_RAQ_PSN, attr->rq_psn - 1);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5540
context->qkey_xrcd = cpu_to_le32(attr->qkey);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5590
struct hns_roce_v2_qp_context *context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5602
hr_reg_write(context, QPC_SQ_PRODUCER_IDX, hr_qp->sq.head);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5612
hr_reg_write(context, QPC_RQ_PRODUCER_IDX, hr_qp->rq.head);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5624
struct hns_roce_v2_qp_context *context;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5638
context = kvzalloc_obj(*context);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5640
if (!context || !qpc_mask)
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5646
new_state, context, qpc_mask, udata);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5652
v2_set_flushed_fields(ibqp, context, qpc_mask);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5655
ret = hns_roce_v2_set_opt_fields(ibqp, attr, attr_mask, context,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5660
hr_reg_write_bool(context, QPC_INV_CREDIT,
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5666
hr_reg_write(context, QPC_QP_ST, new_state);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5670
ret = hns_roce_v2_qp_modify(hr_dev, context, qpc_mask, hr_qp);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5685
kvfree(context);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5730
struct hns_roce_srq_context *context;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5738
context = mailbox->buf;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5744
memcpy(buffer, context, sizeof(*context));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5754
struct hns_roce_v2_scc_context *context;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5767
context = mailbox->buf;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5768
memcpy(buffer, context, sizeof(*context));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5776
struct hns_roce_v2_qp_context *context)
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5780
timeout = (u8)hr_reg_read(context, QPC_AT);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5793
struct hns_roce_v2_qp_context context = {};
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5810
ret = hns_roce_v2_query_qpc(hr_dev, hr_qp->qpn, &context);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5819
state = hr_reg_read(&context, QPC_QP_ST);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5828
qp_attr->path_mtu = (enum ib_mtu)hr_reg_read(&context, QPC_MTU);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5832
qp_attr->qkey = le32_to_cpu(context.qkey_xrcd);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5834
qp_attr->rq_psn = hr_reg_read(&context, QPC_RX_REQ_EPSN);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5835
qp_attr->sq_psn = (u32)hr_reg_read(&context, QPC_SQ_CUR_PSN);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5836
qp_attr->dest_qp_num = hr_reg_read(&context, QPC_DQPN);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5838
((hr_reg_read(&context, QPC_RRE)) << V2_QP_RRE_S) |
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5839
((hr_reg_read(&context, QPC_RWE)) << V2_QP_RWE_S) |
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5840
((hr_reg_read(&context, QPC_ATE)) << V2_QP_ATE_S);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5849
hr_reg_read(&context, QPC_SL));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5852
grh->flow_label = hr_reg_read(&context, QPC_FL);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5853
grh->sgid_index = hr_reg_read(&context, QPC_GMV_IDX);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5854
grh->hop_limit = hr_reg_read(&context, QPC_HOPLIMIT);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5855
grh->traffic_class = hr_reg_read(&context, QPC_TC);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5857
memcpy(grh->dgid.raw, context.dgid, sizeof(grh->dgid.raw));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5862
qp_attr->max_rd_atomic = 1 << hr_reg_read(&context, QPC_SR_MAX);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5863
qp_attr->max_dest_rd_atomic = 1 << hr_reg_read(&context, QPC_RR_MAX);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5865
qp_attr->min_rnr_timer = (u8)hr_reg_read(&context, QPC_MIN_RNR_TIME);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5866
qp_attr->timeout = get_qp_timeout_attr(hr_dev, &context);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5867
qp_attr->retry_cnt = hr_reg_read(&context, QPC_RETRY_NUM_INIT);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
5868
qp_attr->rnr_retry = hr_reg_read(&context, QPC_RNR_NUM_INIT);
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6290
struct hns_roce_v2_cq_context *context;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6298
context = mailbox->buf;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6308
memcpy(buffer, context, sizeof(*context));
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6319
struct hns_roce_v2_mpt_entry *context;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6327
context = mailbox->buf;
drivers/infiniband/hw/hns/hns_roce_hw_v2.c
6337
memcpy(buffer, context, sizeof(*context));
drivers/infiniband/hw/hns/hns_roce_main.c
400
static void hns_roce_dealloc_uar_entry(struct hns_roce_ucontext *context)
drivers/infiniband/hw/hns/hns_roce_main.c
402
if (context->db_mmap_entry)
drivers/infiniband/hw/hns/hns_roce_main.c
404
&context->db_mmap_entry->rdma_entry);
drivers/infiniband/hw/hns/hns_roce_main.c
409
struct hns_roce_ucontext *context = to_hr_ucontext(uctx);
drivers/infiniband/hw/hns/hns_roce_main.c
412
address = context->uar.pfn << PAGE_SHIFT;
drivers/infiniband/hw/hns/hns_roce_main.c
413
context->db_mmap_entry = hns_roce_user_mmap_entry_insert(
drivers/infiniband/hw/hns/hns_roce_main.c
415
if (!context->db_mmap_entry)
drivers/infiniband/hw/hns/hns_roce_main.c
424
struct hns_roce_ucontext *context = to_hr_ucontext(uctx);
drivers/infiniband/hw/hns/hns_roce_main.c
442
context->config = ucmd.config & HNS_ROCE_EXSGE_FLAGS;
drivers/infiniband/hw/hns/hns_roce_main.c
444
if (context->config & HNS_ROCE_EXSGE_FLAGS) {
drivers/infiniband/hw/hns/hns_roce_main.c
450
context->config |= ucmd.config & HNS_ROCE_RQ_INLINE_FLAGS;
drivers/infiniband/hw/hns/hns_roce_main.c
451
if (context->config & HNS_ROCE_RQ_INLINE_FLAGS)
drivers/infiniband/hw/hns/hns_roce_main.c
456
context->config |= ucmd.config & HNS_ROCE_CQE_INLINE_FLAGS;
drivers/infiniband/hw/hns/hns_roce_main.c
457
if (context->config & HNS_ROCE_CQE_INLINE_FLAGS)
drivers/infiniband/hw/hns/hns_roce_main.c
464
ret = hns_roce_uar_alloc(hr_dev, &context->uar);
drivers/infiniband/hw/hns/hns_roce_main.c
474
INIT_LIST_HEAD(&context->page_list);
drivers/infiniband/hw/hns/hns_roce_main.c
475
mutex_init(&context->page_mutex);
drivers/infiniband/hw/hns/hns_roce_main.c
485
hns_roce_get_cq_bankid_for_uctx(context);
drivers/infiniband/hw/hns/hns_roce_main.c
492
mutex_destroy(&context->page_mutex);
drivers/infiniband/hw/hns/hns_roce_main.c
493
hns_roce_dealloc_uar_entry(context);
drivers/infiniband/hw/hns/hns_roce_main.c
496
ida_free(&hr_dev->uar_ida.ida, (int)context->uar.logic_idx);
drivers/infiniband/hw/hns/hns_roce_main.c
506
struct hns_roce_ucontext *context = to_hr_ucontext(ibcontext);
drivers/infiniband/hw/hns/hns_roce_main.c
509
hns_roce_put_cq_bankid_for_uctx(context);
drivers/infiniband/hw/hns/hns_roce_main.c
513
mutex_destroy(&context->page_mutex);
drivers/infiniband/hw/hns/hns_roce_main.c
515
hns_roce_dealloc_uar_entry(context);
drivers/infiniband/hw/hns/hns_roce_main.c
517
ida_free(&hr_dev->uar_ida.ida, (int)context->uar.logic_idx);
drivers/infiniband/hw/hns/hns_roce_main.c
64
static int hns_roce_add_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/hns/hns_roce_main.c
78
static int hns_roce_del_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/hns/hns_roce_restrack.c
102
} context = {};
drivers/infiniband/hw/hns/hns_roce_restrack.c
109
ret = hr_dev->hw->query_qpc(hr_dev, hr_qp->qpn, &context.qpc);
drivers/infiniband/hw/hns/hns_roce_restrack.c
126
ret = hr_dev->hw->query_sccc(hr_dev, sccn, &context.sccc);
drivers/infiniband/hw/hns/hns_roce_restrack.c
133
ret = nla_put(msg, RDMA_NLDEV_ATTR_RES_RAW, sizeof(context), &context);
drivers/infiniband/hw/hns/hns_roce_restrack.c
172
struct hns_roce_v2_mpt_entry context;
drivers/infiniband/hw/hns/hns_roce_restrack.c
178
ret = hr_dev->hw->query_mpt(hr_dev, hr_mr->key, &context);
drivers/infiniband/hw/hns/hns_roce_restrack.c
182
ret = nla_put(msg, RDMA_NLDEV_ATTR_RES_RAW, sizeof(context), &context);
drivers/infiniband/hw/hns/hns_roce_restrack.c
221
struct hns_roce_srq_context context;
drivers/infiniband/hw/hns/hns_roce_restrack.c
227
ret = hr_dev->hw->query_srqc(hr_dev, hr_srq->srqn, &context);
drivers/infiniband/hw/hns/hns_roce_restrack.c
231
ret = nla_put(msg, RDMA_NLDEV_ATTR_RES_RAW, sizeof(context), &context);
drivers/infiniband/hw/hns/hns_roce_restrack.c
46
struct hns_roce_v2_cq_context context;
drivers/infiniband/hw/hns/hns_roce_restrack.c
52
ret = hr_dev->hw->query_cqc(hr_dev, hr_cq->cqn, &context);
drivers/infiniband/hw/hns/hns_roce_restrack.c
56
ret = nla_put(msg, RDMA_NLDEV_ATTR_RES_RAW, sizeof(context), &context);
drivers/infiniband/hw/ionic/ionic_ibdev.h
357
if (!uobj->context)
drivers/infiniband/hw/ionic/ionic_ibdev.h
360
return to_ionic_ctx(uobj->context);
drivers/infiniband/hw/irdma/cm.h
215
u32 context;
drivers/infiniband/hw/irdma/verbs.c
129
static void irdma_disassociate_ucontext(struct ib_ucontext *context)
drivers/infiniband/hw/irdma/verbs.c
185
static int irdma_mmap(struct ib_ucontext *context, struct vm_area_struct *vma)
drivers/infiniband/hw/irdma/verbs.c
193
ucontext = to_ucontext(context);
drivers/infiniband/hw/irdma/verbs.c
217
ret = rdma_user_mmap_io(context, vma, pfn, PAGE_SIZE,
drivers/infiniband/hw/irdma/verbs.c
222
ret = rdma_user_mmap_io(context, vma, pfn, PAGE_SIZE,
drivers/infiniband/hw/irdma/verbs.c
382
static void irdma_dealloc_ucontext(struct ib_ucontext *context)
drivers/infiniband/hw/irdma/verbs.c
384
struct irdma_ucontext *ucontext = to_ucontext(context);
drivers/infiniband/hw/mana/cq.c
154
gdma_cq->cq.context = cq;
drivers/infiniband/hw/mana/main.c
788
spec.eq.context = mdev;
drivers/infiniband/hw/mana/main.c
880
int mana_ib_gd_add_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/mana/main.c
910
int mana_ib_gd_del_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/mana/mana_ib.h
708
int mana_ib_gd_add_gid(const struct ib_gid_attr *attr, void **context);
drivers/infiniband/hw/mana/mana_ib.h
710
int mana_ib_gd_del_gid(const struct ib_gid_attr *attr, void **context);
drivers/infiniband/hw/mana/mana_ib.h
744
struct ib_dm *mana_ib_alloc_dm(struct ib_device *dev, struct ib_ucontext *context,
drivers/infiniband/hw/mana/mr.c
355
struct ib_ucontext *context,
drivers/infiniband/hw/mlx4/alias_GUID.c
289
void *context)
drivers/infiniband/hw/mlx4/alias_GUID.c
292
struct mlx4_alias_guid_work_context *cb_ctx = context;
drivers/infiniband/hw/mlx4/alias_GUID.c
301
if (!context)
drivers/infiniband/hw/mlx4/cq.c
190
struct mlx4_ib_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mlx4/cq.c
227
uar = &context->uar;
drivers/infiniband/hw/mlx4/cq.c
279
mlx4_ib_db_unmap_user(context, &cq->db);
drivers/infiniband/hw/mlx4/doorbell.c
50
struct mlx4_ib_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mlx4/doorbell.c
53
mutex_lock(&context->db_page_mutex);
drivers/infiniband/hw/mlx4/doorbell.c
55
list_for_each_entry(page, &context->db_page_list, list)
drivers/infiniband/hw/mlx4/doorbell.c
67
page->umem = ib_umem_get(context->ibucontext.device, virt & PAGE_MASK,
drivers/infiniband/hw/mlx4/doorbell.c
75
list_add(&page->list, &context->db_page_list);
drivers/infiniband/hw/mlx4/doorbell.c
84
mutex_unlock(&context->db_page_mutex);
drivers/infiniband/hw/mlx4/doorbell.c
89
void mlx4_ib_db_unmap_user(struct mlx4_ib_ucontext *context, struct mlx4_db *db)
drivers/infiniband/hw/mlx4/doorbell.c
91
mutex_lock(&context->db_page_mutex);
drivers/infiniband/hw/mlx4/doorbell.c
99
mutex_unlock(&context->db_page_mutex);
drivers/infiniband/hw/mlx4/mad.c
1018
if (mad_send_wc->send_buf->context[0])
drivers/infiniband/hw/mlx4/mad.c
1019
rdma_destroy_ah(mad_send_wc->send_buf->context[0], 0);
drivers/infiniband/hw/mlx4/main.c
1097
struct mlx4_ib_ucontext *context = to_mucontext(uctx);
drivers/infiniband/hw/mlx4/main.c
1118
err = mlx4_uar_alloc(to_mdev(ibdev)->dev, &context->uar);
drivers/infiniband/hw/mlx4/main.c
1122
INIT_LIST_HEAD(&context->db_page_list);
drivers/infiniband/hw/mlx4/main.c
1123
mutex_init(&context->db_page_mutex);
drivers/infiniband/hw/mlx4/main.c
1125
INIT_LIST_HEAD(&context->wqn_ranges_list);
drivers/infiniband/hw/mlx4/main.c
1126
mutex_init(&context->wqn_ranges_mutex);
drivers/infiniband/hw/mlx4/main.c
1134
mlx4_uar_free(to_mdev(ibdev)->dev, &context->uar);
drivers/infiniband/hw/mlx4/main.c
1143
struct mlx4_ib_ucontext *context = to_mucontext(ibcontext);
drivers/infiniband/hw/mlx4/main.c
1145
mlx4_uar_free(to_mdev(ibcontext->device)->dev, &context->uar);
drivers/infiniband/hw/mlx4/main.c
1152
static int mlx4_ib_mmap(struct ib_ucontext *context, struct vm_area_struct *vma)
drivers/infiniband/hw/mlx4/main.c
1154
struct mlx4_ib_dev *dev = to_mdev(context->device);
drivers/infiniband/hw/mlx4/main.c
1158
return rdma_user_mmap_io(context, vma,
drivers/infiniband/hw/mlx4/main.c
1159
to_mucontext(context)->uar.pfn,
drivers/infiniband/hw/mlx4/main.c
1168
context, vma,
drivers/infiniband/hw/mlx4/main.c
1169
to_mucontext(context)->uar.pfn +
drivers/infiniband/hw/mlx4/main.c
1183
context, vma,
drivers/infiniband/hw/mlx4/main.c
255
static int mlx4_ib_add_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/mlx4/main.c
274
if (!context)
drivers/infiniband/hw/mlx4/main.c
303
*context = port_gid_table->gids[free].ctx;
drivers/infiniband/hw/mlx4/main.c
314
*context = ctx;
drivers/infiniband/hw/mlx4/main.c
321
*context = NULL;
drivers/infiniband/hw/mlx4/main.c
336
*context = NULL;
drivers/infiniband/hw/mlx4/main.c
346
static int mlx4_ib_del_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/mlx4/main.c
348
struct gid_cache_context *ctx = *context;
drivers/infiniband/hw/mlx4/mlx4_ib.h
755
void mlx4_ib_db_unmap_user(struct mlx4_ib_ucontext *context, struct mlx4_db *db);
drivers/infiniband/hw/mlx4/qp.c
1000
struct mlx4_ib_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mlx4/qp.c
1271
mlx4_ib_db_unmap_user(context, &qp->db);
drivers/infiniband/hw/mlx4/qp.c
1935
struct mlx4_qp_context *context)
drivers/infiniband/hw/mlx4/qp.c
1942
context->pri_path.sched_queue = MLX4_IB_DEFAULT_SCHED_QUEUE | ((qp->port - 1) << 6);
drivers/infiniband/hw/mlx4/qp.c
1949
context->pri_path.grh_mylmc = 0x80 | (u8) smac_index;
drivers/infiniband/hw/mlx4/qp.c
2094
static void fill_qp_rss_context(struct mlx4_qp_context *context,
drivers/infiniband/hw/mlx4/qp.c
2099
rss_context = (void *)context + offsetof(struct mlx4_qp_context,
drivers/infiniband/hw/mlx4/qp.c
2131
struct mlx4_qp_context *context;
drivers/infiniband/hw/mlx4/qp.c
2166
context = kzalloc_obj(*context);
drivers/infiniband/hw/mlx4/qp.c
2167
if (!context)
drivers/infiniband/hw/mlx4/qp.c
2170
context->flags = cpu_to_be32((to_mlx4_state(new_state) << 28) |
drivers/infiniband/hw/mlx4/qp.c
2174
context->flags |= cpu_to_be32(MLX4_QP_PM_MIGRATED << 11);
drivers/infiniband/hw/mlx4/qp.c
2179
context->flags |= cpu_to_be32(MLX4_QP_PM_MIGRATED << 11);
drivers/infiniband/hw/mlx4/qp.c
2182
context->flags |= cpu_to_be32(MLX4_QP_PM_REARM << 11);
drivers/infiniband/hw/mlx4/qp.c
2185
context->flags |= cpu_to_be32(MLX4_QP_PM_ARMED << 11);
drivers/infiniband/hw/mlx4/qp.c
2191
context->param3 |= cpu_to_be32(1 << 25);
drivers/infiniband/hw/mlx4/qp.c
2194
context->param3 |= cpu_to_be32(1 << 29);
drivers/infiniband/hw/mlx4/qp.c
2197
context->mtu_msgmax = (IB_MTU_4096 << 5) | 11;
drivers/infiniband/hw/mlx4/qp.c
2199
context->mtu_msgmax = (MLX4_RAW_QP_MTU << 5) | MLX4_RAW_QP_MSGMAX;
drivers/infiniband/hw/mlx4/qp.c
2202
context->mtu_msgmax = (IB_MTU_4096 << 5) |
drivers/infiniband/hw/mlx4/qp.c
2205
context->mtu_msgmax = (IB_MTU_4096 << 5) | 13;
drivers/infiniband/hw/mlx4/qp.c
2212
context->mtu_msgmax = (attr->path_mtu << 5) |
drivers/infiniband/hw/mlx4/qp.c
2218
context->rq_size_stride = ilog2(qp->rq.wqe_cnt) << 3;
drivers/infiniband/hw/mlx4/qp.c
2219
context->rq_size_stride |= qp->rq.wqe_shift - 4;
drivers/infiniband/hw/mlx4/qp.c
2223
context->sq_size_stride = ilog2(qp->sq.wqe_cnt) << 3;
drivers/infiniband/hw/mlx4/qp.c
2224
context->sq_size_stride |= qp->sq.wqe_shift - 4;
drivers/infiniband/hw/mlx4/qp.c
2230
context->sq_size_stride |= !!qp->sq_no_prefetch << 7;
drivers/infiniband/hw/mlx4/qp.c
2231
context->xrcd = cpu_to_be32((u32) qp->xrcdn);
drivers/infiniband/hw/mlx4/qp.c
2233
context->param3 |= cpu_to_be32(1 << 30);
drivers/infiniband/hw/mlx4/qp.c
2237
context->usr_page = cpu_to_be32(
drivers/infiniband/hw/mlx4/qp.c
2240
context->usr_page = cpu_to_be32(
drivers/infiniband/hw/mlx4/qp.c
2244
context->remote_qpn = cpu_to_be32(attr->dest_qp_num);
drivers/infiniband/hw/mlx4/qp.c
2249
mlx4_set_sched(&context->pri_path, attr->port_num);
drivers/infiniband/hw/mlx4/qp.c
2265
context->pri_path.counter_index = counter_index;
drivers/infiniband/hw/mlx4/qp.c
2268
context->pri_path.fl |=
drivers/infiniband/hw/mlx4/qp.c
2270
context->pri_path.vlan_control |=
drivers/infiniband/hw/mlx4/qp.c
2274
context->pri_path.counter_index =
drivers/infiniband/hw/mlx4/qp.c
2287
context->rlkey_roce_mode |= (qpc_roce_mode << 6);
drivers/infiniband/hw/mlx4/qp.c
2293
context->pri_path.disable_pkey_check = 0x40;
drivers/infiniband/hw/mlx4/qp.c
2294
context->pri_path.pkey_index = attr->pkey_index;
drivers/infiniband/hw/mlx4/qp.c
2315
if (mlx4_set_path(dev, attr, attr_mask, qp, &context->pri_path,
drivers/infiniband/hw/mlx4/qp.c
2330
context->rlkey_roce_mode |= (qpc_roce_mode << 6);
drivers/infiniband/hw/mlx4/qp.c
2336
context->pri_path.ackto |= attr->timeout << 3;
drivers/infiniband/hw/mlx4/qp.c
2350
&context->alt_path,
drivers/infiniband/hw/mlx4/qp.c
2354
context->alt_path.pkey_index = attr->alt_pkey_index;
drivers/infiniband/hw/mlx4/qp.c
2355
context->alt_path.ackto = attr->alt_timeout << 3;
drivers/infiniband/hw/mlx4/qp.c
2359
context->pd = cpu_to_be32(pd->pdn);
drivers/infiniband/hw/mlx4/qp.c
2362
context->params1 = cpu_to_be32(MLX4_IB_ACK_REQ_FREQ << 28);
drivers/infiniband/hw/mlx4/qp.c
2368
context->cqn_send = cpu_to_be32(send_cq->mcq.cqn);
drivers/infiniband/hw/mlx4/qp.c
2369
context->cqn_recv = cpu_to_be32(recv_cq->mcq.cqn);
drivers/infiniband/hw/mlx4/qp.c
2373
context->params1 |= cpu_to_be32(1 << 11);
drivers/infiniband/hw/mlx4/qp.c
2376
context->params1 |= cpu_to_be32(attr->rnr_retry << 13);
drivers/infiniband/hw/mlx4/qp.c
2381
context->params1 |= cpu_to_be32(attr->retry_cnt << 16);
drivers/infiniband/hw/mlx4/qp.c
2387
context->params1 |=
drivers/infiniband/hw/mlx4/qp.c
2393
context->next_send_psn = cpu_to_be32(attr->sq_psn);
drivers/infiniband/hw/mlx4/qp.c
2397
context->params2 |=
drivers/infiniband/hw/mlx4/qp.c
2403
context->params2 |= to_mlx4_access_flags(qp, attr, attr_mask);
drivers/infiniband/hw/mlx4/qp.c
2408
context->params2 |= cpu_to_be32(MLX4_QP_BIT_RIC);
drivers/infiniband/hw/mlx4/qp.c
2411
context->rnr_nextrecvpsn |= cpu_to_be32(attr->min_rnr_timer << 24);
drivers/infiniband/hw/mlx4/qp.c
2415
context->rnr_nextrecvpsn |= cpu_to_be32(attr->rq_psn);
drivers/infiniband/hw/mlx4/qp.c
2421
context->qkey = cpu_to_be32(IB_QP_SET_QKEY);
drivers/infiniband/hw/mlx4/qp.c
2433
context->qkey = cpu_to_be32(attr->qkey);
drivers/infiniband/hw/mlx4/qp.c
2439
context->srqn = cpu_to_be32(1 << 24 |
drivers/infiniband/hw/mlx4/qp.c
2445
context->db_rec_addr = cpu_to_be64(qp->db.dma);
drivers/infiniband/hw/mlx4/qp.c
2451
context->pri_path.sched_queue = (qp->port - 1) << 6;
drivers/infiniband/hw/mlx4/qp.c
2455
context->pri_path.sched_queue |= MLX4_IB_DEFAULT_QP0_SCHED_QUEUE;
drivers/infiniband/hw/mlx4/qp.c
2457
context->pri_path.fl = 0x80;
drivers/infiniband/hw/mlx4/qp.c
2460
context->pri_path.fl = 0x80;
drivers/infiniband/hw/mlx4/qp.c
2461
context->pri_path.sched_queue |= MLX4_IB_DEFAULT_SCHED_QUEUE;
drivers/infiniband/hw/mlx4/qp.c
2467
context->pri_path.feup = 1 << 7; /* don't fsm */
drivers/infiniband/hw/mlx4/qp.c
2472
err = handle_eth_ud_smac_index(dev, qp, context);
drivers/infiniband/hw/mlx4/qp.c
2484
context->pri_path.ackto = (context->pri_path.ackto & 0xf8) |
drivers/infiniband/hw/mlx4/qp.c
2489
context->srqn = cpu_to_be32(7 << 28);
drivers/infiniband/hw/mlx4/qp.c
2498
context->pri_path.ackto = MLX4_IB_LINK_TYPE_ETH;
drivers/infiniband/hw/mlx4/qp.c
2512
context->rlkey_roce_mode |= (1 << 4);
drivers/infiniband/hw/mlx4/qp.c
2538
fill_qp_rss_context(context, qp);
drivers/infiniband/hw/mlx4/qp.c
2539
context->flags |= cpu_to_be32(1 << MLX4_RSS_QPC_FLAG_OFFSET);
drivers/infiniband/hw/mlx4/qp.c
2543
to_mlx4_state(new_state), context, optpar,
drivers/infiniband/hw/mlx4/qp.c
2629
kfree(context);
drivers/infiniband/hw/mlx4/qp.c
4022
struct mlx4_qp_context context;
drivers/infiniband/hw/mlx4/qp.c
4036
err = mlx4_qp_query(dev->dev, &qp->mqp, &context);
drivers/infiniband/hw/mlx4/qp.c
4042
mlx4_state = be32_to_cpu(context.flags) >> 28;
drivers/infiniband/hw/mlx4/qp.c
4046
qp_attr->path_mtu = context.mtu_msgmax >> 5;
drivers/infiniband/hw/mlx4/qp.c
4048
to_ib_mig_state((be32_to_cpu(context.flags) >> 11) & 0x3);
drivers/infiniband/hw/mlx4/qp.c
4049
qp_attr->qkey = be32_to_cpu(context.qkey);
drivers/infiniband/hw/mlx4/qp.c
4050
qp_attr->rq_psn = be32_to_cpu(context.rnr_nextrecvpsn) & 0xffffff;
drivers/infiniband/hw/mlx4/qp.c
4051
qp_attr->sq_psn = be32_to_cpu(context.next_send_psn) & 0xffffff;
drivers/infiniband/hw/mlx4/qp.c
4052
qp_attr->dest_qp_num = be32_to_cpu(context.remote_qpn) & 0xffffff;
drivers/infiniband/hw/mlx4/qp.c
4054
to_ib_qp_access_flags(be32_to_cpu(context.params2));
drivers/infiniband/hw/mlx4/qp.c
4059
to_rdma_ah_attr(dev, &qp_attr->ah_attr, &context.pri_path);
drivers/infiniband/hw/mlx4/qp.c
4060
to_rdma_ah_attr(dev, &qp_attr->alt_ah_attr, &context.alt_path);
drivers/infiniband/hw/mlx4/qp.c
4061
qp_attr->alt_pkey_index = context.alt_path.pkey_index & 0x7f;
drivers/infiniband/hw/mlx4/qp.c
4066
qp_attr->pkey_index = context.pri_path.pkey_index & 0x7f;
drivers/infiniband/hw/mlx4/qp.c
4070
qp_attr->port_num = context.pri_path.sched_queue & 0x40 ? 2 : 1;
drivers/infiniband/hw/mlx4/qp.c
4075
qp_attr->max_rd_atomic = 1 << ((be32_to_cpu(context.params1) >> 21) & 0x7);
drivers/infiniband/hw/mlx4/qp.c
4078
1 << ((be32_to_cpu(context.params2) >> 21) & 0x7);
drivers/infiniband/hw/mlx4/qp.c
4080
(be32_to_cpu(context.rnr_nextrecvpsn) >> 24) & 0x1f;
drivers/infiniband/hw/mlx4/qp.c
4081
qp_attr->timeout = context.pri_path.ackto >> 3;
drivers/infiniband/hw/mlx4/qp.c
4082
qp_attr->retry_cnt = (be32_to_cpu(context.params1) >> 16) & 0x7;
drivers/infiniband/hw/mlx4/qp.c
4083
qp_attr->rnr_retry = (be32_to_cpu(context.params1) >> 13) & 0x7;
drivers/infiniband/hw/mlx4/qp.c
4084
qp_attr->alt_timeout = context.alt_path.ackto >> 3;
drivers/infiniband/hw/mlx4/qp.c
781
static int mlx4_ib_alloc_wqn(struct mlx4_ib_ucontext *context,
drivers/infiniband/hw/mlx4/qp.c
784
struct mlx4_ib_dev *dev = to_mdev(context->ibucontext.device);
drivers/infiniband/hw/mlx4/qp.c
788
mutex_lock(&context->wqn_ranges_mutex);
drivers/infiniband/hw/mlx4/qp.c
790
range = list_first_entry_or_null(&context->wqn_ranges_list,
drivers/infiniband/hw/mlx4/qp.c
809
list_add(&range->list, &context->wqn_ranges_list);
drivers/infiniband/hw/mlx4/qp.c
826
mutex_unlock(&context->wqn_ranges_mutex);
drivers/infiniband/hw/mlx4/qp.c
831
static void mlx4_ib_release_wqn(struct mlx4_ib_ucontext *context,
drivers/infiniband/hw/mlx4/qp.c
834
struct mlx4_ib_dev *dev = to_mdev(context->ibucontext.device);
drivers/infiniband/hw/mlx4/qp.c
837
mutex_lock(&context->wqn_ranges_mutex);
drivers/infiniband/hw/mlx4/qp.c
856
mutex_unlock(&context->wqn_ranges_mutex);
drivers/infiniband/hw/mlx4/qp.c
865
struct mlx4_ib_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mlx4/qp.c
944
err = mlx4_ib_alloc_wqn(context, qp, range_size, &qpn);
drivers/infiniband/hw/mlx4/qp.c
981
mlx4_ib_release_wqn(context, qp, 0);
drivers/infiniband/hw/mlx4/qp.c
983
mlx4_ib_db_unmap_user(context, &qp->db);
drivers/infiniband/hw/mlx5/cq.c
731
struct mlx5_ib_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mlx5/cq.c
768
err = mlx5_ib_db_map_user(context, ucmd.db_addr, &cq->db);
drivers/infiniband/hw/mlx5/cq.c
801
} else if (context->bfregi.lib_uar_dyn) {
drivers/infiniband/hw/mlx5/cq.c
805
*index = context->bfregi.sys_pages[0];
drivers/infiniband/hw/mlx5/cq.c
851
MLX5_SET(create_cq_in, *cqb, uid, context->devx_uid);
drivers/infiniband/hw/mlx5/cq.c
858
mlx5_ib_db_unmap_user(context, &cq->db);
drivers/infiniband/hw/mlx5/cq.c
867
struct mlx5_ib_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mlx5/cq.c
870
mlx5_ib_db_unmap_user(context, &cq->db);
drivers/infiniband/hw/mlx5/devx.c
1808
static void devx_query_callback(int status, struct mlx5_async_work *context)
drivers/infiniband/hw/mlx5/devx.c
1811
container_of(context, struct devx_async_data, cb_work);
drivers/infiniband/hw/mlx5/devx.c
2661
static void devx_async_destroy_cb(int status, struct mlx5_async_work *context)
drivers/infiniband/hw/mlx5/devx.c
2663
struct mlx5_async_cmd *devx_out = container_of(context,
drivers/infiniband/hw/mlx5/dm.c
150
static int add_dm_mmap_entry(struct ib_ucontext *context,
drivers/infiniband/hw/mlx5/dm.c
158
context, &mentry->rdma_entry, size,
drivers/infiniband/hw/mlx5/dm.c
209
struct mlx5_ib_dev *dev = to_mdev(uobj->context->device);
drivers/infiniband/hw/mlx5/dm.c
244
err = add_dm_mmap_entry(uobj->context, &op_entry->mentry,
drivers/infiniband/hw/mlx5/dm.c
422
struct ib_ucontext *context,
drivers/infiniband/hw/mlx5/dm.c
440
return handle_alloc_dm_memic(context, attr, attrs);
drivers/infiniband/hw/mlx5/dm.c
445
return handle_alloc_dm_sw_icm(context, attr, attrs, type);
drivers/infiniband/hw/mlx5/dm.h
58
struct ib_ucontext *context,
drivers/infiniband/hw/mlx5/doorbell.c
104
mutex_unlock(&context->db_page_mutex);
drivers/infiniband/hw/mlx5/doorbell.c
48
int mlx5_ib_db_map_user(struct mlx5_ib_ucontext *context, unsigned long virt,
drivers/infiniband/hw/mlx5/doorbell.c
54
mutex_lock(&context->db_page_mutex);
drivers/infiniband/hw/mlx5/doorbell.c
56
list_for_each_entry(page, &context->db_page_list, list)
drivers/infiniband/hw/mlx5/doorbell.c
69
page->umem = ib_umem_get(context->ibucontext.device, virt & PAGE_MASK,
drivers/infiniband/hw/mlx5/doorbell.c
79
list_add(&page->list, &context->db_page_list);
drivers/infiniband/hw/mlx5/doorbell.c
88
mutex_unlock(&context->db_page_mutex);
drivers/infiniband/hw/mlx5/doorbell.c
93
void mlx5_ib_db_unmap_user(struct mlx5_ib_ucontext *context, struct mlx5_db *db)
drivers/infiniband/hw/mlx5/doorbell.c
95
mutex_lock(&context->db_page_mutex);
drivers/infiniband/hw/mlx5/fs.c
2973
if (!verify_context_caps(dev, uobj->context->enabled_caps)) {
drivers/infiniband/hw/mlx5/main.c
1932
static int allocate_uars(struct mlx5_ib_dev *dev, struct mlx5_ib_ucontext *context)
drivers/infiniband/hw/mlx5/main.c
1938
bfregi = &context->bfregi;
drivers/infiniband/hw/mlx5/main.c
1941
context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
1956
context->devx_uid))
drivers/infiniband/hw/mlx5/main.c
1963
struct mlx5_ib_ucontext *context)
drivers/infiniband/hw/mlx5/main.c
1968
bfregi = &context->bfregi;
drivers/infiniband/hw/mlx5/main.c
1973
context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2097
struct mlx5_ib_ucontext *context = to_mucontext(uctx);
drivers/infiniband/hw/mlx5/main.c
2098
struct mlx5_bfreg_info *bfregi = &context->bfregi;
drivers/infiniband/hw/mlx5/main.c
2116
resp->cqe_version = context->cqe_version;
drivers/infiniband/hw/mlx5/main.c
2183
struct mlx5_ib_ucontext *context = to_mucontext(uctx);
drivers/infiniband/hw/mlx5/main.c
2221
context->devx_uid = err;
drivers/infiniband/hw/mlx5/main.c
2225
context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2233
bfregi = &context->bfregi;
drivers/infiniband/hw/mlx5/main.c
2262
err = allocate_uars(dev, context);
drivers/infiniband/hw/mlx5/main.c
2267
err = mlx5_ib_alloc_transport_domain(dev, &context->tdn,
drivers/infiniband/hw/mlx5/main.c
2268
context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2272
INIT_LIST_HEAD(&context->db_page_list);
drivers/infiniband/hw/mlx5/main.c
2273
mutex_init(&context->db_page_mutex);
drivers/infiniband/hw/mlx5/main.c
2275
context->cqe_version = min_t(__u8,
drivers/infiniband/hw/mlx5/main.c
2290
context->lib_caps = req.lib_caps;
drivers/infiniband/hw/mlx5/main.c
2291
print_lib_caps(dev, context->lib_caps);
drivers/infiniband/hw/mlx5/main.c
2296
atomic_set(&context->tx_port_affinity,
drivers/infiniband/hw/mlx5/main.c
2304
mlx5_ib_dealloc_transport_domain(dev, context->tdn, context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2307
deallocate_uars(dev, context);
drivers/infiniband/hw/mlx5/main.c
2318
mlx5_cmd_remove_privileged_uid(dev->mdev, context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2322
mlx5_ib_devx_destroy(dev, context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2353
struct mlx5_ib_ucontext *context = to_mucontext(ibcontext);
drivers/infiniband/hw/mlx5/main.c
2357
bfregi = &context->bfregi;
drivers/infiniband/hw/mlx5/main.c
2358
mlx5_ib_dealloc_transport_domain(dev, context->tdn, context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2360
deallocate_uars(dev, context);
drivers/infiniband/hw/mlx5/main.c
2364
if (context->devx_uid) {
drivers/infiniband/hw/mlx5/main.c
2367
context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2368
mlx5_ib_devx_destroy(dev, context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2437
struct mlx5_ib_ucontext *context)
drivers/infiniband/hw/mlx5/main.c
2526
struct mlx5_ib_ucontext *context = to_mucontext(entry->ucontext);
drivers/infiniband/hw/mlx5/main.c
2542
context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2552
struct mlx5_ib_ucontext *context)
drivers/infiniband/hw/mlx5/main.c
2554
struct mlx5_bfreg_info *bfregi = &context->bfregi;
drivers/infiniband/hw/mlx5/main.c
2621
context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2633
err = rdma_user_mmap_io(&context->ibucontext, vma, pfn, PAGE_SIZE,
drivers/infiniband/hw/mlx5/main.c
2650
mlx5_cmd_uar_dealloc(dev->mdev, idx, context->devx_uid);
drivers/infiniband/hw/mlx5/main.c
2711
struct mlx5_ib_ucontext *context = to_mucontext(ibcontext);
drivers/infiniband/hw/mlx5/main.c
2725
return uar_mmap(dev, command, vma, context);
drivers/infiniband/hw/mlx5/main.c
2745
return rdma_user_mmap_io(&context->ibucontext, vma, pfn,
drivers/infiniband/hw/mlx5/main.c
2750
return mlx5_ib_mmap_clock_info_page(dev, vma, context);
drivers/infiniband/hw/mlx5/main.c
2768
struct mlx5_ib_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mlx5/main.c
2771
uid = context ? context->devx_uid : 0;
drivers/infiniband/hw/mlx5/main.c
681
__always_unused void **context)
drivers/infiniband/hw/mlx5/main.c
694
__always_unused void **context)
drivers/infiniband/hw/mlx5/mlx5_ib.h
1258
struct mlx5_ib_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mlx5/mlx5_ib.h
1261
return to_mdev(context->ibucontext.device);
drivers/infiniband/hw/mlx5/mlx5_ib.h
1334
int mlx5_ib_db_map_user(struct mlx5_ib_ucontext *context, unsigned long virt,
drivers/infiniband/hw/mlx5/mlx5_ib.h
1336
void mlx5_ib_db_unmap_user(struct mlx5_ib_ucontext *context, struct mlx5_db *db);
drivers/infiniband/hw/mlx5/mr.c
194
static void create_mkey_callback(int status, struct mlx5_async_work *context)
drivers/infiniband/hw/mlx5/mr.c
197
container_of(context, struct mlx5r_async_create_mkey, cb_work);
drivers/infiniband/hw/mlx5/mr.c
57
create_mkey_callback(int status, struct mlx5_async_work *context);
drivers/infiniband/hw/mlx5/qp.c
1042
resp->bfreg_index = adjust_bfregn(dev, &context->bfregi, bfregn);
drivers/infiniband/hw/mlx5/qp.c
1047
err = mlx5_ib_db_map_user(context, ucmd->db_addr, &qp->db);
drivers/infiniband/hw/mlx5/qp.c
1063
mlx5_ib_free_bfreg(dev, &context->bfregi, bfregn);
drivers/infiniband/hw/mlx5/qp.c
1070
struct mlx5_ib_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mlx5/qp.c
1075
mlx5_ib_db_unmap_user(context, &qp->db);
drivers/infiniband/hw/mlx5/qp.c
1083
mlx5_ib_free_bfreg(dev, &context->bfregi, qp->bfregn);
drivers/infiniband/hw/mlx5/qp.c
867
struct mlx5_ib_ucontext *context =
drivers/infiniband/hw/mlx5/qp.c
876
mlx5_ib_db_unmap_user(context, &rwq->db);
drivers/infiniband/hw/mlx5/qp.c
948
struct mlx5_ib_ucontext *context;
drivers/infiniband/hw/mlx5/qp.c
961
context = rdma_udata_to_drv_context(udata, struct mlx5_ib_ucontext,
drivers/infiniband/hw/mlx5/qp.c
971
uar_index = bfregn_to_uar_index(dev, &context->bfregi,
drivers/infiniband/hw/mlx5/qp.c
980
bfregn = alloc_bfreg(dev, &context->bfregi);
drivers/infiniband/hw/mlx5/qp.c
990
uar_index = bfregn_to_uar_index(dev, &context->bfregi, bfregn,
drivers/infiniband/hw/mlx5/umr.c
340
struct mlx5_ib_umr_context *context =
drivers/infiniband/hw/mlx5/umr.c
343
context->status = wc->status;
drivers/infiniband/hw/mlx5/umr.c
344
complete(&context->done);
drivers/infiniband/hw/mlx5/umr.c
347
static inline void mlx5r_umr_init_context(struct mlx5r_umr_context *context)
drivers/infiniband/hw/mlx5/umr.c
349
context->cqe.done = mlx5r_umr_done;
drivers/infiniband/hw/mlx5/umr.c
350
init_completion(&context->done);
drivers/infiniband/hw/mthca/mthca_cmd.c
392
struct mthca_cmd_context *context =
drivers/infiniband/hw/mthca/mthca_cmd.c
393
&dev->cmd.context[token & dev->cmd.token_mask];
drivers/infiniband/hw/mthca/mthca_cmd.c
396
if (token != context->token)
drivers/infiniband/hw/mthca/mthca_cmd.c
399
context->result = 0;
drivers/infiniband/hw/mthca/mthca_cmd.c
400
context->status = status;
drivers/infiniband/hw/mthca/mthca_cmd.c
401
context->out_param = out_param;
drivers/infiniband/hw/mthca/mthca_cmd.c
403
complete(&context->done);
drivers/infiniband/hw/mthca/mthca_cmd.c
416
struct mthca_cmd_context *context;
drivers/infiniband/hw/mthca/mthca_cmd.c
422
context = &dev->cmd.context[dev->cmd.free_head];
drivers/infiniband/hw/mthca/mthca_cmd.c
423
context->token += dev->cmd.token_mask + 1;
drivers/infiniband/hw/mthca/mthca_cmd.c
424
dev->cmd.free_head = context->next;
drivers/infiniband/hw/mthca/mthca_cmd.c
427
init_completion(&context->done);
drivers/infiniband/hw/mthca/mthca_cmd.c
432
op, context->token, 1);
drivers/infiniband/hw/mthca/mthca_cmd.c
436
if (!wait_for_completion_timeout(&context->done, timeout)) {
drivers/infiniband/hw/mthca/mthca_cmd.c
441
err = context->result;
drivers/infiniband/hw/mthca/mthca_cmd.c
445
if (context->status) {
drivers/infiniband/hw/mthca/mthca_cmd.c
447
op, context->status);
drivers/infiniband/hw/mthca/mthca_cmd.c
448
err = mthca_status_to_errno(context->status);
drivers/infiniband/hw/mthca/mthca_cmd.c
452
*out_param = context->out_param;
drivers/infiniband/hw/mthca/mthca_cmd.c
460
context->next = dev->cmd.free_head;
drivers/infiniband/hw/mthca/mthca_cmd.c
461
dev->cmd.free_head = context - dev->cmd.context;
drivers/infiniband/hw/mthca/mthca_cmd.c
562
dev->cmd.context = kmalloc_objs(struct mthca_cmd_context,
drivers/infiniband/hw/mthca/mthca_cmd.c
564
if (!dev->cmd.context)
drivers/infiniband/hw/mthca/mthca_cmd.c
568
dev->cmd.context[i].token = i;
drivers/infiniband/hw/mthca/mthca_cmd.c
569
dev->cmd.context[i].next = i + 1;
drivers/infiniband/hw/mthca/mthca_cmd.c
572
dev->cmd.context[dev->cmd.max_cmds - 1].next = -1;
drivers/infiniband/hw/mthca/mthca_cmd.c
603
kfree(dev->cmd.context);
drivers/infiniband/hw/mthca/mthca_dev.h
127
struct mthca_cmd_context *context;
drivers/infiniband/hw/mthca/mthca_provider.c
291
struct mthca_ucontext *context = to_mucontext(uctx);
drivers/infiniband/hw/mthca/mthca_provider.c
303
err = mthca_uar_alloc(to_mdev(ibdev), &context->uar);
drivers/infiniband/hw/mthca/mthca_provider.c
307
context->db_tab = mthca_init_user_db_tab(to_mdev(ibdev));
drivers/infiniband/hw/mthca/mthca_provider.c
308
if (IS_ERR(context->db_tab)) {
drivers/infiniband/hw/mthca/mthca_provider.c
309
err = PTR_ERR(context->db_tab);
drivers/infiniband/hw/mthca/mthca_provider.c
310
mthca_uar_free(to_mdev(ibdev), &context->uar);
drivers/infiniband/hw/mthca/mthca_provider.c
315
mthca_cleanup_user_db_tab(to_mdev(ibdev), &context->uar, context->db_tab);
drivers/infiniband/hw/mthca/mthca_provider.c
316
mthca_uar_free(to_mdev(ibdev), &context->uar);
drivers/infiniband/hw/mthca/mthca_provider.c
320
context->reg_mr_warned = 0;
drivers/infiniband/hw/mthca/mthca_provider.c
325
static void mthca_dealloc_ucontext(struct ib_ucontext *context)
drivers/infiniband/hw/mthca/mthca_provider.c
327
mthca_cleanup_user_db_tab(to_mdev(context->device), &to_mucontext(context)->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
328
to_mucontext(context)->db_tab);
drivers/infiniband/hw/mthca/mthca_provider.c
329
mthca_uar_free(to_mdev(context->device), &to_mucontext(context)->uar);
drivers/infiniband/hw/mthca/mthca_provider.c
332
static int mthca_mmap_uar(struct ib_ucontext *context,
drivers/infiniband/hw/mthca/mthca_provider.c
341
to_mucontext(context)->uar.pfn,
drivers/infiniband/hw/mthca/mthca_provider.c
396
struct mthca_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mthca/mthca_provider.c
408
err = mthca_map_user_db(to_mdev(ibsrq->device), &context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
409
context->db_tab, ucmd.db_index,
drivers/infiniband/hw/mthca/mthca_provider.c
423
mthca_unmap_user_db(to_mdev(ibsrq->device), &context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
424
context->db_tab, ucmd.db_index);
drivers/infiniband/hw/mthca/mthca_provider.c
429
if (context && ib_copy_to_udata(udata, &srq->srqn, sizeof(__u32))) {
drivers/infiniband/hw/mthca/mthca_provider.c
431
mthca_unmap_user_db(to_mdev(ibsrq->device), &context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
432
context->db_tab, ucmd.db_index);
drivers/infiniband/hw/mthca/mthca_provider.c
443
struct mthca_ucontext *context =
drivers/infiniband/hw/mthca/mthca_provider.c
449
mthca_unmap_user_db(to_mdev(srq->device), &context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
450
context->db_tab, to_msrq(srq)->db_index);
drivers/infiniband/hw/mthca/mthca_provider.c
459
struct mthca_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mthca/mthca_provider.c
478
err = mthca_map_user_db(dev, &context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
479
context->db_tab,
drivers/infiniband/hw/mthca/mthca_provider.c
485
err = mthca_map_user_db(dev, &context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
486
context->db_tab,
drivers/infiniband/hw/mthca/mthca_provider.c
490
mthca_unmap_user_db(dev, &context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
491
context->db_tab,
drivers/infiniband/hw/mthca/mthca_provider.c
508
mthca_unmap_user_db(dev, &context->uar, context->db_tab,
drivers/infiniband/hw/mthca/mthca_provider.c
510
mthca_unmap_user_db(dev, &context->uar, context->db_tab,
drivers/infiniband/hw/mthca/mthca_provider.c
556
struct mthca_ucontext *context =
drivers/infiniband/hw/mthca/mthca_provider.c
563
&context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
564
context->db_tab,
drivers/infiniband/hw/mthca/mthca_provider.c
567
&context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
568
context->db_tab,
drivers/infiniband/hw/mthca/mthca_provider.c
587
struct mthca_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mthca/mthca_provider.c
600
err = mthca_map_user_db(to_mdev(ibdev), &context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
601
context->db_tab, ucmd.set_db_index,
drivers/infiniband/hw/mthca/mthca_provider.c
606
err = mthca_map_user_db(to_mdev(ibdev), &context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
607
context->db_tab, ucmd.arm_db_index,
drivers/infiniband/hw/mthca/mthca_provider.c
624
err = mthca_init_cq(to_mdev(ibdev), nent, context,
drivers/infiniband/hw/mthca/mthca_provider.c
642
mthca_unmap_user_db(to_mdev(ibdev), &context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
643
context->db_tab, ucmd.arm_db_index);
drivers/infiniband/hw/mthca/mthca_provider.c
647
mthca_unmap_user_db(to_mdev(ibdev), &context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
648
context->db_tab, ucmd.set_db_index);
drivers/infiniband/hw/mthca/mthca_provider.c
777
struct mthca_ucontext *context =
drivers/infiniband/hw/mthca/mthca_provider.c
784
&context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
785
context->db_tab,
drivers/infiniband/hw/mthca/mthca_provider.c
788
&context->uar,
drivers/infiniband/hw/mthca/mthca_provider.c
789
context->db_tab,
drivers/infiniband/hw/mthca/mthca_provider.c
834
struct mthca_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mthca/mthca_provider.c
847
if (!context->reg_mr_warned) {
drivers/infiniband/hw/mthca/mthca_provider.c
852
++context->reg_mr_warned;
drivers/infiniband/hw/mthca/mthca_qp.c
162
struct mthca_qp_context context;
drivers/infiniband/hw/mthca/mthca_qp.c
438
struct mthca_qp_context *context;
drivers/infiniband/hw/mthca/mthca_qp.c
461
context = &qp_param->context;
drivers/infiniband/hw/mthca/mthca_qp.c
462
mthca_state = be32_to_cpu(context->flags) >> 28;
drivers/infiniband/hw/mthca/mthca_qp.c
466
qp_attr->path_mtu = context->mtu_msgmax >> 5;
drivers/infiniband/hw/mthca/mthca_qp.c
468
to_ib_mig_state((be32_to_cpu(context->flags) >> 11) & 0x3);
drivers/infiniband/hw/mthca/mthca_qp.c
469
qp_attr->qkey = be32_to_cpu(context->qkey);
drivers/infiniband/hw/mthca/mthca_qp.c
470
qp_attr->rq_psn = be32_to_cpu(context->rnr_nextrecvpsn) & 0xffffff;
drivers/infiniband/hw/mthca/mthca_qp.c
471
qp_attr->sq_psn = be32_to_cpu(context->next_send_psn) & 0xffffff;
drivers/infiniband/hw/mthca/mthca_qp.c
472
qp_attr->dest_qp_num = be32_to_cpu(context->remote_qpn) & 0xffffff;
drivers/infiniband/hw/mthca/mthca_qp.c
474
to_ib_qp_access_flags(be32_to_cpu(context->params2));
drivers/infiniband/hw/mthca/mthca_qp.c
477
to_rdma_ah_attr(dev, &qp_attr->ah_attr, &context->pri_path);
drivers/infiniband/hw/mthca/mthca_qp.c
478
to_rdma_ah_attr(dev, &qp_attr->alt_ah_attr, &context->alt_path);
drivers/infiniband/hw/mthca/mthca_qp.c
480
be32_to_cpu(context->alt_path.port_pkey) & 0x7f;
drivers/infiniband/hw/mthca/mthca_qp.c
485
qp_attr->pkey_index = be32_to_cpu(context->pri_path.port_pkey) & 0x7f;
drivers/infiniband/hw/mthca/mthca_qp.c
487
(be32_to_cpu(context->pri_path.port_pkey) >> 24) & 0x3;
drivers/infiniband/hw/mthca/mthca_qp.c
492
qp_attr->max_rd_atomic = 1 << ((be32_to_cpu(context->params1) >> 21) & 0x7);
drivers/infiniband/hw/mthca/mthca_qp.c
495
1 << ((be32_to_cpu(context->params2) >> 21) & 0x7);
drivers/infiniband/hw/mthca/mthca_qp.c
497
(be32_to_cpu(context->rnr_nextrecvpsn) >> 24) & 0x1f;
drivers/infiniband/hw/mthca/mthca_qp.c
498
qp_attr->timeout = context->pri_path.ackto >> 3;
drivers/infiniband/hw/mthca/mthca_qp.c
499
qp_attr->retry_cnt = (be32_to_cpu(context->params1) >> 16) & 0x7;
drivers/infiniband/hw/mthca/mthca_qp.c
500
qp_attr->rnr_retry = context->pri_path.rnr_retry >> 5;
drivers/infiniband/hw/mthca/mthca_qp.c
501
qp_attr->alt_timeout = context->alt_path.ackto >> 3;
drivers/infiniband/hw/mthca/mthca_qp.c
564
struct mthca_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/mthca/mthca_qp.c
578
qp_context = &qp_param->context;
drivers/infiniband/hw/mthca/mthca_qp.c
627
qp_context->usr_page = cpu_to_be32(context->uar.index);
drivers/infiniband/hw/mthca/mthca_srq.c
100
struct mthca_tavor_srq_context *context,
drivers/infiniband/hw/mthca/mthca_srq.c
106
memset(context, 0, sizeof *context);
drivers/infiniband/hw/mthca/mthca_srq.c
108
context->wqe_base_ds = cpu_to_be64(1 << (srq->wqe_shift - 4));
drivers/infiniband/hw/mthca/mthca_srq.c
109
context->state_pd = cpu_to_be32(pd->pd_num);
drivers/infiniband/hw/mthca/mthca_srq.c
110
context->lkey = cpu_to_be32(srq->mr.ibmr.lkey);
drivers/infiniband/hw/mthca/mthca_srq.c
113
context->uar = cpu_to_be32(ucontext->uar.index);
drivers/infiniband/hw/mthca/mthca_srq.c
115
context->uar = cpu_to_be32(dev->driver_uar.index);
drivers/infiniband/hw/mthca/mthca_srq.c
121
struct mthca_arbel_srq_context *context,
drivers/infiniband/hw/mthca/mthca_srq.c
128
memset(context, 0, sizeof *context);
drivers/infiniband/hw/mthca/mthca_srq.c
136
context->state_logsize_srqn = cpu_to_be32(logsize << 24 | srq->srqn);
drivers/infiniband/hw/mthca/mthca_srq.c
137
context->lkey = cpu_to_be32(srq->mr.ibmr.lkey);
drivers/infiniband/hw/mthca/mthca_srq.c
138
context->db_index = cpu_to_be32(srq->db_index);
drivers/infiniband/hw/mthca/mthca_srq.c
139
context->logstride_usrpage = cpu_to_be32((srq->wqe_shift - 4) << 29);
drivers/infiniband/hw/mthca/mthca_srq.c
141
context->logstride_usrpage |= cpu_to_be32(ucontext->uar.index);
drivers/infiniband/hw/mthca/mthca_srq.c
143
context->logstride_usrpage |= cpu_to_be32(dev->driver_uar.index);
drivers/infiniband/hw/mthca/mthca_srq.c
144
context->eq_pd = cpu_to_be32(MTHCA_EQ_ASYNC << 24 | pd->pd_num);
drivers/infiniband/hw/ocrdma/ocrdma_verbs.c
539
int ocrdma_mmap(struct ib_ucontext *context, struct vm_area_struct *vma)
drivers/infiniband/hw/ocrdma/ocrdma_verbs.c
541
struct ocrdma_ucontext *ucontext = get_ocrdma_ucontext(context);
drivers/infiniband/hw/ocrdma/ocrdma_verbs.c
542
struct ocrdma_dev *dev = get_ocrdma_dev(context->device);
drivers/infiniband/hw/qedr/main.c
634
static void qedr_unaffiliated_event(void *context, u8 event_code)
drivers/infiniband/hw/qedr/main.c
639
static void qedr_affiliated_event(void *context, u8 e_code, void *fw_handle)
drivers/infiniband/hw/qedr/main.c
645
struct qedr_dev *dev = (struct qedr_dev *)context;
drivers/infiniband/hw/qedr/main.c
808
events.context = dev;
drivers/infiniband/hw/qedr/qedr_iw_cm.c
104
qedr_iw_mpa_request(void *context, struct qed_iwarp_cm_event_params *params)
drivers/infiniband/hw/qedr/qedr_iw_cm.c
106
struct qedr_iw_listener *listener = (struct qedr_iw_listener *)context;
drivers/infiniband/hw/qedr/qedr_iw_cm.c
139
qedr_iw_issue_event(void *context,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
143
struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context;
drivers/infiniband/hw/qedr/qedr_iw_cm.c
171
qedr_iw_close_event(void *context, struct qed_iwarp_cm_event_params *params)
drivers/infiniband/hw/qedr/qedr_iw_cm.c
173
struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context;
drivers/infiniband/hw/qedr/qedr_iw_cm.c
176
qedr_iw_issue_event(context, params, IW_CM_EVENT_CLOSE);
drivers/infiniband/hw/qedr/qedr_iw_cm.c
182
qedr_iw_qp_event(void *context,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
186
struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context;
drivers/infiniband/hw/qedr/qedr_iw_cm.c
254
qedr_iw_disconnect_event(void *context,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
258
struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context;
drivers/infiniband/hw/qedr/qedr_iw_cm.c
280
qedr_iw_passive_complete(void *context,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
283
struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context;
drivers/infiniband/hw/qedr/qedr_iw_cm.c
297
qedr_iw_issue_event(context, params, IW_CM_EVENT_ESTABLISHED);
drivers/infiniband/hw/qedr/qedr_iw_cm.c
300
qedr_iw_close_event(context, params);
drivers/infiniband/hw/qedr/qedr_iw_cm.c
304
qedr_iw_active_complete(void *context,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
307
struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context;
drivers/infiniband/hw/qedr/qedr_iw_cm.c
310
qedr_iw_issue_event(context, params, IW_CM_EVENT_CONNECT_REPLY);
drivers/infiniband/hw/qedr/qedr_iw_cm.c
317
qedr_iw_mpa_reply(void *context, struct qed_iwarp_cm_event_params *params)
drivers/infiniband/hw/qedr/qedr_iw_cm.c
319
struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context;
drivers/infiniband/hw/qedr/qedr_iw_cm.c
329
qedr_iw_event_handler(void *context, struct qed_iwarp_cm_event_params *params)
drivers/infiniband/hw/qedr/qedr_iw_cm.c
331
struct qedr_iw_ep *ep = (struct qedr_iw_ep *)context;
drivers/infiniband/hw/qedr/qedr_iw_cm.c
336
qedr_iw_mpa_request(context, params);
drivers/infiniband/hw/qedr/qedr_iw_cm.c
339
qedr_iw_mpa_reply(context, params);
drivers/infiniband/hw/qedr/qedr_iw_cm.c
342
qedr_iw_passive_complete(context, params);
drivers/infiniband/hw/qedr/qedr_iw_cm.c
345
qedr_iw_active_complete(context, params);
drivers/infiniband/hw/qedr/qedr_iw_cm.c
348
qedr_iw_disconnect_event(context, params);
drivers/infiniband/hw/qedr/qedr_iw_cm.c
351
qedr_iw_close_event(context, params);
drivers/infiniband/hw/qedr/qedr_iw_cm.c
354
qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
358
qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
362
qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
366
qedr_iw_qp_event(context, params, IB_EVENT_QP_ACCESS_ERR,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
370
qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
374
qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
378
qedr_iw_qp_event(context, params, IB_EVENT_QP_ACCESS_ERR,
drivers/infiniband/hw/qedr/qedr_iw_cm.c
382
qedr_iw_qp_event(context, params, IB_EVENT_QP_FATAL,
drivers/infiniband/hw/qedr/verbs.c
462
struct qedr_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/qedr/verbs.c
472
pd->uctx = context;
drivers/infiniband/hw/usnic/usnic_ib_sysfs.c
237
static QPN_ATTR_RO(context);
drivers/infiniband/hw/usnic/usnic_ib_verbs.c
640
struct usnic_ib_ucontext *context = to_ucontext(uctx);
drivers/infiniband/hw/usnic/usnic_ib_verbs.c
644
INIT_LIST_HEAD(&context->qp_grp_list);
drivers/infiniband/hw/usnic/usnic_ib_verbs.c
646
list_add_tail(&context->link, &us_ibdev->ctx_list);
drivers/infiniband/hw/usnic/usnic_ib_verbs.c
654
struct usnic_ib_ucontext *context = to_uucontext(ibcontext);
drivers/infiniband/hw/usnic/usnic_ib_verbs.c
659
WARN_ON_ONCE(!list_empty(&context->qp_grp_list));
drivers/infiniband/hw/usnic/usnic_ib_verbs.c
660
list_del(&context->link);
drivers/infiniband/hw/usnic/usnic_ib_verbs.c
664
int usnic_ib_mmap(struct ib_ucontext *context,
drivers/infiniband/hw/usnic/usnic_ib_verbs.c
667
struct usnic_ib_ucontext *uctx = to_ucontext(context);
drivers/infiniband/hw/usnic/usnic_ib_verbs.c
678
us_ibdev = to_usdev(context->device);
drivers/infiniband/hw/usnic/usnic_ib_verbs.h
68
int usnic_ib_mmap(struct ib_ucontext *context,
drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c
118
struct pvrdma_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c
186
cmd->ctx_handle = context ? context->ctx_handle : 0;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c
204
cq->uar = &context->uar;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c
640
static int pvrdma_add_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c
65
static int pvrdma_add_gid(const struct ib_gid_attr *attr, void **context);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c
66
static int pvrdma_del_gid(const struct ib_gid_attr *attr, void **context);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_main.c
676
static int pvrdma_del_gid(const struct ib_gid_attr *attr, void **context)
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
289
struct pvrdma_ucontext *context = to_vucontext(uctx);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
300
context->dev = vdev;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
301
ret = pvrdma_uar_alloc(vdev, &context->uar);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
307
cmd->pfn = context->uar.pfn;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
309
cmd->pfn64 = context->uar.pfn;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
319
context->ctx_handle = resp->ctx_handle;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
325
pvrdma_uar_free(vdev, &context->uar);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
326
pvrdma_dealloc_ucontext(&context->ibucontext);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
333
pvrdma_uar_free(vdev, &context->uar);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
343
struct pvrdma_ucontext *context = to_vucontext(ibcontext);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
349
cmd->ctx_handle = context->ctx_handle;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
351
ret = pvrdma_cmd_post(context->dev, &req, NULL, 0);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
353
dev_warn(&context->dev->pdev->dev,
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
357
pvrdma_uar_free(to_vdev(ibcontext->device), &context->uar);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
369
struct pvrdma_ucontext *context = to_vucontext(ibcontext);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
374
dev_dbg(&context->dev->pdev->dev, "create mmap region\n");
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
377
dev_warn(&context->dev->pdev->dev,
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
385
if (io_remap_pfn_range(vma, start, context->uar.pfn, size,
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
410
struct pvrdma_ucontext *context = rdma_udata_to_drv_context(
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.c
418
cmd->ctx_handle = context ? context->ctx_handle : 0;
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.h
361
int pvrdma_mmap(struct ib_ucontext *context, struct vm_area_struct *vma);
drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.h
363
void pvrdma_dealloc_ucontext(struct ib_ucontext *context);
drivers/infiniband/sw/rdmavt/mmap.c
139
ip->context =
drivers/infiniband/sw/rdmavt/mmap.c
141
->context;
drivers/infiniband/sw/rdmavt/mmap.c
32
struct rvt_dev_info *rdi = ib_to_rvt(ip->context->device);
drivers/infiniband/sw/rdmavt/mmap.c
68
int rvt_mmap(struct ib_ucontext *context, struct vm_area_struct *vma)
drivers/infiniband/sw/rdmavt/mmap.c
70
struct rvt_dev_info *rdi = ib_to_rvt(context->device);
drivers/infiniband/sw/rdmavt/mmap.c
85
if (context != ip->context || (__u64)offset != ip->offset)
drivers/infiniband/sw/rdmavt/mmap.h
13
int rvt_mmap(struct ib_ucontext *context, struct vm_area_struct *vma);
drivers/infiniband/sw/rdmavt/vt.c
254
static void rvt_dealloc_ucontext(struct ib_ucontext *context)
drivers/infiniband/sw/rxe/rxe_loc.h
45
struct ib_ucontext *context;
drivers/infiniband/sw/rxe/rxe_loc.h
57
int rxe_mmap(struct ib_ucontext *context, struct vm_area_struct *vma);
drivers/infiniband/sw/rxe/rxe_mmap.c
141
ip->context =
drivers/infiniband/sw/rxe/rxe_mmap.c
143
->context;
drivers/infiniband/sw/rxe/rxe_mmap.c
20
struct rxe_dev *rxe = to_rdev(ip->context->device);
drivers/infiniband/sw/rxe/rxe_mmap.c
62
int rxe_mmap(struct ib_ucontext *context, struct vm_area_struct *vma)
drivers/infiniband/sw/rxe/rxe_mmap.c
64
struct rxe_dev *rxe = to_rdev(context->device);
drivers/infiniband/sw/rxe/rxe_mmap.c
77
if (context != ip->context || (__u64)offset != ip->info.offset)
drivers/infiniband/ulp/ipoib/ipoib_cm.c
1252
struct ipoib_cm_tx *tx = cm_id->context;
drivers/infiniband/ulp/ipoib/ipoib_cm.c
445
struct net_device *dev = cm_id->context;
drivers/infiniband/ulp/ipoib/ipoib_cm.c
457
cm_id->context = p;
drivers/infiniband/ulp/ipoib/ipoib_cm.c
517
p = cm_id->context;
drivers/infiniband/ulp/ipoib/ipoib_cm.c
988
struct ipoib_cm_tx *p = cm_id->context;
drivers/infiniband/ulp/ipoib/ipoib_multicast.c
359
struct ipoib_mcast *mcast = multicast->context;
drivers/infiniband/ulp/iser/iser_verbs.c
40
static void iser_qp_event_callback(struct ib_event *cause, void *context)
drivers/infiniband/ulp/iser/iser_verbs.c
480
struct iser_conn *iser_conn = cma_id->context;
drivers/infiniband/ulp/iser/iser_verbs.c
525
struct iser_conn *iser_conn = cma_id->context;
drivers/infiniband/ulp/iser/iser_verbs.c
577
struct iser_conn *iser_conn = cma_id->context;
drivers/infiniband/ulp/iser/iser_verbs.c
621
struct iser_conn *iser_conn = cma_id->context;
drivers/infiniband/ulp/iser/iser_verbs.c
653
struct iser_conn *iser_conn = cma_id->context;
drivers/infiniband/ulp/iser/iser_verbs.c
678
iser_conn = cma_id->context;
drivers/infiniband/ulp/iser/iser_verbs.c
681
event->status, cma_id->context, cma_id);
drivers/infiniband/ulp/isert/ib_isert.c
1025
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
1760
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
1811
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
1826
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
1848
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
1867
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
1885
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
1903
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
1938
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
2094
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
2140
ret = isert_rdma_rw_ctx_post(isert_cmd, conn->context,
drivers/infiniband/ulp/isert/ib_isert.c
2176
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
2231
isert_dbg("id %p context %p\n", id, id->context);
drivers/infiniband/ulp/isert/ib_isert.c
2336
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
2417
conn->context = isert_conn;
drivers/infiniband/ulp/isert/ib_isert.c
2556
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
2574
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/isert/ib_isert.c
413
struct isert_np *isert_np = cma_id->context;
drivers/infiniband/ulp/isert/ib_isert.c
428
cma_id, cma_id->context);
drivers/infiniband/ulp/isert/ib_isert.c
514
struct isert_np *isert_np = cma_id->context;
drivers/infiniband/ulp/isert/ib_isert.c
552
struct isert_np *isert_np = isert_conn->cm_id->context;
drivers/infiniband/ulp/isert/ib_isert.c
655
struct isert_np *isert_np = cma_id->context;
drivers/infiniband/ulp/isert/ib_isert.c
671
struct isert_np *isert_np = cma_id->context;
drivers/infiniband/ulp/isert/ib_isert.c
677
event->status, cma_id, cma_id->context);
drivers/infiniband/ulp/isert/ib_isert.c
680
return isert_np_cma_handler(cma_id->context, event->event);
drivers/infiniband/ulp/isert/ib_isert.c
83
isert_qp_event_callback(struct ib_event *e, void *context)
drivers/infiniband/ulp/isert/ib_isert.c
85
struct isert_conn *isert_conn = context;
drivers/infiniband/ulp/isert/ib_isert.c
915
struct isert_conn *isert_conn = conn->context;
drivers/infiniband/ulp/opa_vnic/opa_vnic_vema.c
633
port = mad_agent->context;
drivers/infiniband/ulp/rtrs/rtrs-clt.c
1979
struct rtrs_clt_con *con = cm_id->context;
drivers/infiniband/ulp/rtrs/rtrs-srv.c
1776
cm_id->context = &con->c;
drivers/infiniband/ulp/rtrs/rtrs-srv.c
1890
struct rtrs_srv_ctx *ctx = cm_id->context;
drivers/infiniband/ulp/rtrs/rtrs-srv.c
2033
c = cm_id->context;
drivers/infiniband/ulp/srp/ib_srp.c
1283
struct srp_terminate_context *context = context_ptr;
drivers/infiniband/ulp/srp/ib_srp.c
1284
struct srp_target_port *target = context->srp_target;
drivers/infiniband/ulp/srp/ib_srp.c
1289
srp_finish_req(ch, req, NULL, context->scsi_result);
drivers/infiniband/ulp/srp/ib_srp.c
1297
struct srp_terminate_context context = { .srp_target = target,
drivers/infiniband/ulp/srp/ib_srp.c
1300
scsi_host_busy_iter(target->scsi_host, srp_terminate_cmd, &context);
drivers/infiniband/ulp/srp/ib_srp.c
1357
struct srp_terminate_context context = {
drivers/infiniband/ulp/srp/ib_srp.c
1361
&context);
drivers/infiniband/ulp/srp/ib_srp.c
2511
struct srp_rdma_ch *ch = cm_id->context;
drivers/infiniband/ulp/srp/ib_srp.c
2623
struct srp_rdma_ch *ch = cm_id->context;
drivers/infiniband/ulp/srp/ib_srp.c
265
static void srp_qp_event(struct ib_event *event, void *context)
drivers/infiniband/ulp/srpt/ib_srpt.c
2291
rdma_cm_id->context = ch;
drivers/infiniband/ulp/srpt/ib_srpt.c
2294
ib_cm_id->context = ch;
drivers/infiniband/ulp/srpt/ib_srpt.c
2550
rdma_cm_id->context = NULL;
drivers/infiniband/ulp/srpt/ib_srpt.c
2552
ib_cm_id->context = NULL;
drivers/infiniband/ulp/srpt/ib_srpt.c
2598
return srpt_cm_req_recv(cm_id->context, cm_id, NULL, param->port,
drivers/infiniband/ulp/srpt/ib_srpt.c
2707
struct srpt_rdma_ch *ch = cm_id->context;
drivers/infiniband/ulp/srpt/ib_srpt.c
2759
struct srpt_rdma_ch *ch = cm_id->context;
drivers/infiniband/ulp/srpt/ib_srpt.c
539
struct srpt_port *sport = (struct srpt_port *)mad_agent->context;
drivers/input/joystick/iforce/iforce-usb.c
139
struct iforce_usb *iforce_usb = urb->context;
drivers/input/joystick/iforce/iforce-usb.c
173
struct iforce_usb *iforce_usb = urb->context;
drivers/input/joystick/pxrc.c
34
struct pxrc *pxrc = urb->context;
drivers/input/joystick/xpad.c
1234
struct usb_xpad *xpad = urb->context;
drivers/input/joystick/xpad.c
1377
struct usb_xpad *xpad = urb->context;
drivers/input/keyboard/applespi.c
1529
static void applespi_async_read_complete(void *context)
drivers/input/keyboard/applespi.c
1531
struct applespi_data *applespi = context;
drivers/input/keyboard/applespi.c
1549
static u32 applespi_notify(acpi_handle gpe_device, u32 gpe, void *context)
drivers/input/keyboard/applespi.c
1551
struct applespi_data *applespi = context;
drivers/input/keyboard/applespi.c
614
message->context = applespi;
drivers/input/keyboard/applespi.c
736
static void applespi_async_write_complete(void *context)
drivers/input/keyboard/applespi.c
738
struct applespi_data *applespi = context;
drivers/input/keyboard/iqs62x-keys.c
185
unsigned long event_flags, void *context)
drivers/input/keyboard/iqs62x-keys.c
187
struct iqs62x_event_data *event_data = context;
drivers/input/misc/ati_remote2.c
396
struct ati_remote2 *ar2 = urb->context;
drivers/input/misc/ati_remote2.c
425
struct ati_remote2 *ar2 = urb->context;
drivers/input/misc/cm109.c
392
struct cm109_dev *dev = urb->context;
drivers/input/misc/cm109.c
436
struct cm109_dev *dev = urb->context;
drivers/input/misc/ideapad_slidebar.c
124
struct serio *port, void *context)
drivers/input/misc/ims-pcu.c
1488
struct ims_pcu *pcu = urb->context;
drivers/input/misc/ims-pcu.c
918
void *context)
drivers/input/misc/ims-pcu.c
920
struct ims_pcu *pcu = context;
drivers/input/misc/iqs269a.c
1461
static irqreturn_t iqs269_irq(int irq, void *context)
drivers/input/misc/iqs269a.c
1463
struct iqs269_private *iqs269 = context;
drivers/input/misc/iqs626a.c
1605
static irqreturn_t iqs626_irq(int irq, void *context)
drivers/input/misc/iqs626a.c
1607
struct iqs626_private *iqs626 = context;
drivers/input/misc/iqs7222.c
3047
static irqreturn_t iqs7222_irq(int irq, void *context)
drivers/input/misc/iqs7222.c
3049
struct iqs7222_private *iqs7222 = context;
drivers/input/misc/keyspan_remote.c
374
struct usb_keyspan *dev = urb->context;
drivers/input/misc/powermate.c
196
struct powermate_device *pm = urb->context;
drivers/input/misc/powermate.c
88
struct powermate_device *pm = urb->context;
drivers/input/misc/yealink.c
414
struct yealink_dev *yld = urb->context;
drivers/input/misc/yealink.c
452
struct yealink_dev *yld = urb->context;
drivers/input/mouse/appletouch.c
446
struct atp *dev = urb->context;
drivers/input/mouse/appletouch.c
519
struct atp *dev = urb->context;
drivers/input/mouse/appletouch.c
659
struct atp *dev = urb->context;
drivers/input/mouse/bcm5974.c
729
struct bcm5974 *dev = urb->context;
drivers/input/mouse/bcm5974.c
760
struct bcm5974 *dev = urb->context;
drivers/input/mouse/synaptics_usb.c
190
struct synusb *synusb = urb->context;
drivers/input/serio/hyperv-keyboard.c
238
static void hv_kbd_on_channel_callback(void *context)
drivers/input/serio/hyperv-keyboard.c
241
struct hv_device *hv_dev = context;
drivers/input/serio/i8042.c
198
int i8042_install_filter(i8042_filter_t filter, void *context)
drivers/input/serio/i8042.c
206
i8042_platform_filter_context = context;
drivers/input/tablet/acecad.c
38
struct usb_acecad *acecad = urb->context;
drivers/input/tablet/aiptek.c
414
struct aiptek *aiptek = urb->context;
drivers/input/tablet/hanwang.c
251
struct hanwang *hanwang = urb->context;
drivers/input/tablet/kbtab.c
33
struct kbtab *kbtab = urb->context;
drivers/input/tablet/pegasus_notetaker.c
180
struct pegasus *pegasus = urb->context;
drivers/input/touchscreen/ad7877.c
647
m->context = ts;
drivers/input/touchscreen/ads7846.c
1110
m->context = ts;
drivers/input/touchscreen/edt-ft5x06.c
180
static int edt_M06_i2c_read(void *context, const void *reg_buf, size_t reg_size,
drivers/input/touchscreen/edt-ft5x06.c
183
struct device *dev = context;
drivers/input/touchscreen/edt-ft5x06.c
257
static int edt_M06_i2c_write(void *context, const void *data, size_t count)
drivers/input/touchscreen/edt-ft5x06.c
259
struct device *dev = context;
drivers/input/touchscreen/goodix_berlin_spi.c
35
static int goodix_berlin_spi_read(void *context, const void *reg_buf,
drivers/input/touchscreen/goodix_berlin_spi.c
39
struct spi_device *spi = context;
drivers/input/touchscreen/goodix_berlin_spi.c
79
static int goodix_berlin_spi_write(void *context, const void *data,
drivers/input/touchscreen/goodix_berlin_spi.c
83
struct spi_device *spi = context;
drivers/input/touchscreen/iqs7211.c
2339
static irqreturn_t iqs7211_irq(int irq, void *context)
drivers/input/touchscreen/iqs7211.c
2341
struct iqs7211_private *iqs7211 = context;
drivers/input/touchscreen/usbtouchscreen.c
1290
struct usbtouch_usb *usbtouch = urb->context;
drivers/iommu/intel/debugfs.c
217
struct context_entry *context;
drivers/iommu/intel/debugfs.c
237
context = iommu_context_addr(iommu, bus, devfn, 0);
drivers/iommu/intel/debugfs.c
238
if (!context)
drivers/iommu/intel/debugfs.c
241
if (!context_present(context))
drivers/iommu/intel/debugfs.c
247
tbl_wlk.ctx_entry = context;
drivers/iommu/intel/debugfs.c
251
pasid_dir_ptr = context->lo & VTD_PAGE_MASK;
drivers/iommu/intel/debugfs.c
252
pasid_dir_size = get_pasid_dir_size(context);
drivers/iommu/intel/debugfs.c
360
struct context_entry *context;
drivers/iommu/intel/debugfs.c
389
context = iommu_context_addr(iommu, bus, devfn, 0);
drivers/iommu/intel/debugfs.c
390
if (!context || !context_present(context))
drivers/iommu/intel/debugfs.c
399
pasid_dir_ptr = context->lo & VTD_PAGE_MASK;
drivers/iommu/intel/debugfs.c
436
u8 tt = (u8)(context->lo & GENMASK_ULL(3, 2)) >> 2;
drivers/iommu/intel/debugfs.c
445
pgd = context->lo & VTD_PAGE_MASK;
drivers/iommu/intel/debugfs.c
446
agaw = context->hi & 7;
drivers/iommu/intel/dmar.c
2323
void *context, void **retval)
drivers/iommu/intel/iommu.c
1097
struct context_entry *context,
drivers/iommu/intel/iommu.c
1107
did_old = context_domain_id(context);
drivers/iommu/intel/iommu.c
1108
context_clear_entry(context);
drivers/iommu/intel/iommu.c
1151
struct context_entry *context;
drivers/iommu/intel/iommu.c
1164
context = iommu_context_addr(iommu, bus, devfn, 1);
drivers/iommu/intel/iommu.c
1165
if (!context)
drivers/iommu/intel/iommu.c
1169
if (context_present(context) && !context_copied(iommu, bus, devfn))
drivers/iommu/intel/iommu.c
1172
copied_context_tear_down(iommu, context, bus, devfn);
drivers/iommu/intel/iommu.c
1173
context_clear_entry(context);
drivers/iommu/intel/iommu.c
1174
context_set_domain_id(context, did);
drivers/iommu/intel/iommu.c
1181
context_set_address_root(context, pt_info.ssptptr);
drivers/iommu/intel/iommu.c
1182
context_set_address_width(context, pt_info.aw);
drivers/iommu/intel/iommu.c
1183
context_set_translation_type(context, translation);
drivers/iommu/intel/iommu.c
1184
context_set_fault_enable(context);
drivers/iommu/intel/iommu.c
1185
context_set_present(context);
drivers/iommu/intel/iommu.c
1187
clflush_cache_range(context, sizeof(*context));
drivers/iommu/intel/iommu.c
1232
struct context_entry *context;
drivers/iommu/intel/iommu.c
1236
context = iommu_context_addr(iommu, bus, devfn, 0);
drivers/iommu/intel/iommu.c
1237
if (!context) {
drivers/iommu/intel/iommu.c
1242
did = context_domain_id(context);
drivers/iommu/intel/iommu.c
1243
context_clear_present(context);
drivers/iommu/intel/iommu.c
1244
__iommu_flush_cache(iommu, context, sizeof(*context));
drivers/iommu/intel/iommu.c
1246
intel_context_flush_no_pasid(info, context, did);
drivers/iommu/intel/iommu.c
1247
context_clear_entry(context);
drivers/iommu/intel/iommu.c
1248
__iommu_flush_cache(iommu, context, sizeof(*context));
drivers/iommu/intel/iommu.c
345
struct context_entry *context;
drivers/iommu/intel/iommu.c
364
context = phys_to_virt(*entry & VTD_PAGE_MASK);
drivers/iommu/intel/iommu.c
370
context = iommu_alloc_pages_node_sz(iommu->node, GFP_ATOMIC,
drivers/iommu/intel/iommu.c
372
if (!context)
drivers/iommu/intel/iommu.c
375
__iommu_flush_cache(iommu, (void *)context, CONTEXT_SIZE);
drivers/iommu/intel/iommu.c
376
phy_addr = virt_to_phys((void *)context);
drivers/iommu/intel/iommu.c
3772
struct context_entry *context;
drivers/iommu/intel/iommu.c
3775
context = iommu_context_addr(iommu, bus, devfn, 1);
drivers/iommu/intel/iommu.c
3776
if (!context) {
drivers/iommu/intel/iommu.c
3781
if (context_present(context) && !context_copied(iommu, bus, devfn)) {
drivers/iommu/intel/iommu.c
3786
copied_context_tear_down(iommu, context, bus, devfn);
drivers/iommu/intel/iommu.c
3787
context_clear_entry(context);
drivers/iommu/intel/iommu.c
3788
context_set_domain_id(context, FLPT_DEFAULT_DID);
drivers/iommu/intel/iommu.c
3794
context_set_address_width(context, iommu->msagaw);
drivers/iommu/intel/iommu.c
3795
context_set_translation_type(context, CONTEXT_TT_PASS_THROUGH);
drivers/iommu/intel/iommu.c
3796
context_set_fault_enable(context);
drivers/iommu/intel/iommu.c
3797
context_set_present(context);
drivers/iommu/intel/iommu.c
3799
clflush_cache_range(context, sizeof(*context));
drivers/iommu/intel/iommu.c
380
return &context[devfn];
drivers/iommu/intel/iommu.c
529
struct context_entry *context;
drivers/iommu/intel/iommu.c
536
context = iommu_context_addr(iommu, i, 0, 0);
drivers/iommu/intel/iommu.c
537
if (context)
drivers/iommu/intel/iommu.c
538
iommu_free_pages(context);
drivers/iommu/intel/iommu.c
543
context = iommu_context_addr(iommu, i, 0x80, 0);
drivers/iommu/intel/iommu.c
544
if (context)
drivers/iommu/intel/iommu.c
545
iommu_free_pages(context);
drivers/iommu/intel/iommu.h
1000
context_set_sm_rid2pasid(struct context_entry *context, unsigned long pasid)
drivers/iommu/intel/iommu.h
1002
context->hi |= pasid & ((1 << 20) - 1);
drivers/iommu/intel/iommu.h
1009
static inline void context_set_sm_dte(struct context_entry *context)
drivers/iommu/intel/iommu.h
1011
context->lo |= BIT_ULL(2);
drivers/iommu/intel/iommu.h
1018
static inline void context_set_sm_pre(struct context_entry *context)
drivers/iommu/intel/iommu.h
1020
context->lo |= BIT_ULL(4);
drivers/iommu/intel/iommu.h
1027
static inline void context_clear_sm_pre(struct context_entry *context)
drivers/iommu/intel/iommu.h
1029
context->lo &= ~BIT_ULL(4);
drivers/iommu/intel/iommu.h
1254
struct context_entry *context, u16 did);
drivers/iommu/intel/iommu.h
870
static inline bool context_present(struct context_entry *context)
drivers/iommu/intel/iommu.h
872
return (context->lo & 1);
drivers/iommu/intel/iommu.h
901
static inline void context_set_present(struct context_entry *context)
drivers/iommu/intel/iommu.h
906
val = READ_ONCE(context->lo) | 1;
drivers/iommu/intel/iommu.h
907
WRITE_ONCE(context->lo, val);
drivers/iommu/intel/iommu.h
916
static inline void context_clear_present(struct context_entry *context)
drivers/iommu/intel/iommu.h
920
val = READ_ONCE(context->lo) & GENMASK_ULL(63, 1);
drivers/iommu/intel/iommu.h
921
WRITE_ONCE(context->lo, val);
drivers/iommu/intel/iommu.h
925
static inline void context_set_fault_enable(struct context_entry *context)
drivers/iommu/intel/iommu.h
927
context->lo &= (((u64)-1) << 2) | 1;
drivers/iommu/intel/iommu.h
930
static inline void context_set_translation_type(struct context_entry *context,
drivers/iommu/intel/iommu.h
933
context->lo &= (((u64)-1) << 4) | 3;
drivers/iommu/intel/iommu.h
934
context->lo |= (value & 3) << 2;
drivers/iommu/intel/iommu.h
937
static inline void context_set_address_root(struct context_entry *context,
drivers/iommu/intel/iommu.h
940
context->lo &= ~VTD_PAGE_MASK;
drivers/iommu/intel/iommu.h
941
context->lo |= value & VTD_PAGE_MASK;
drivers/iommu/intel/iommu.h
944
static inline void context_set_address_width(struct context_entry *context,
drivers/iommu/intel/iommu.h
947
context->hi |= value & 7;
drivers/iommu/intel/iommu.h
950
static inline void context_set_domain_id(struct context_entry *context,
drivers/iommu/intel/iommu.h
953
context->hi |= (value & ((1 << 16) - 1)) << 8;
drivers/iommu/intel/iommu.h
956
static inline void context_set_pasid(struct context_entry *context)
drivers/iommu/intel/iommu.h
958
context->lo |= CONTEXT_PASIDE;
drivers/iommu/intel/iommu.h
966
static inline void context_clear_entry(struct context_entry *context)
drivers/iommu/intel/iommu.h
968
context->lo = 0;
drivers/iommu/intel/iommu.h
969
context->hi = 0;
drivers/iommu/intel/pasid.c
740
struct context_entry *context;
drivers/iommu/intel/pasid.c
744
context = iommu_context_addr(iommu, bus, devfn, false);
drivers/iommu/intel/pasid.c
745
if (!context) {
drivers/iommu/intel/pasid.c
750
did = context_domain_id(context);
drivers/iommu/intel/pasid.c
751
context_clear_entry(context);
drivers/iommu/intel/pasid.c
752
__iommu_flush_cache(iommu, context, sizeof(*context));
drivers/iommu/intel/pasid.c
754
intel_context_flush_no_pasid(info, context, did);
drivers/iommu/intel/pasid.c
796
static int context_entry_set_pasid_table(struct context_entry *context,
drivers/iommu/intel/pasid.c
804
context_clear_entry(context);
drivers/iommu/intel/pasid.c
807
context->lo = (u64)virt_to_phys(table->table) | context_pdts(pds);
drivers/iommu/intel/pasid.c
808
context_set_sm_rid2pasid(context, IOMMU_NO_PASID);
drivers/iommu/intel/pasid.c
811
context_set_sm_dte(context);
drivers/iommu/intel/pasid.c
813
context_set_pasid(context);
drivers/iommu/intel/pasid.c
815
context_set_sm_pre(context);
drivers/iommu/intel/pasid.c
817
context_set_fault_enable(context);
drivers/iommu/intel/pasid.c
818
context_set_present(context);
drivers/iommu/intel/pasid.c
819
__iommu_flush_cache(iommu, context, sizeof(*context));
drivers/iommu/intel/pasid.c
828
struct context_entry *context;
drivers/iommu/intel/pasid.c
831
context = iommu_context_addr(iommu, bus, devfn, true);
drivers/iommu/intel/pasid.c
832
if (!context) {
drivers/iommu/intel/pasid.c
837
if (context_present(context) && !context_copied(iommu, bus, devfn)) {
drivers/iommu/intel/pasid.c
843
context_clear_present(context);
drivers/iommu/intel/pasid.c
844
__iommu_flush_cache(iommu, context, sizeof(*context));
drivers/iommu/intel/pasid.c
863
context_clear_entry(context);
drivers/iommu/intel/pasid.c
864
__iommu_flush_cache(iommu, context, sizeof(*context));
drivers/iommu/intel/pasid.c
874
context_entry_set_pasid_table(context, dev);
drivers/iommu/intel/pasid.c
958
struct context_entry *context, u16 did)
drivers/irqchip/irq-sifive-plic.c
587
static int plic_parse_context_parent(struct fwnode_handle *fwnode, u32 context,
drivers/irqchip/irq-sifive-plic.c
595
hartid = acpi_rintc_ext_parent_to_hartid(id, context);
drivers/irqchip/irq-sifive-plic.c
604
rc = of_irq_parse_one(to_of_node(fwnode), context, &parent);
drivers/irqchip/qcom-irq-combiner.c
141
static acpi_status count_registers_cb(struct acpi_resource *ares, void *context)
drivers/irqchip/qcom-irq-combiner.c
143
int *count = context;
drivers/irqchip/qcom-irq-combiner.c
172
static acpi_status get_registers_cb(struct acpi_resource *ares, void *context)
drivers/irqchip/qcom-irq-combiner.c
174
struct get_registers_context *ctx = context;
drivers/isdn/hardware/mISDN/hfcsusb.c
106
struct hfcsusb *hw = (struct hfcsusb *) urb->context;
drivers/isdn/hardware/mISDN/hfcsusb.c
1067
(usb_complete_t)rx_iso_complete, urb->context);
drivers/isdn/hardware/mISDN/hfcsusb.c
1089
struct usb_fifo *fifo = (struct usb_fifo *) urb->context;
drivers/isdn/hardware/mISDN/hfcsusb.c
1160
struct iso_urb *context_iso_urb = (struct iso_urb *) urb->context;
drivers/isdn/hardware/mISDN/hfcsusb.c
1229
(usb_complete_t)tx_iso_complete, urb->context);
drivers/isdn/hardware/mISDN/hfcsusb.c
943
usb_complete_t complete, void *context)
drivers/isdn/hardware/mISDN/hfcsusb.c
948
complete, context);
drivers/isdn/hardware/mISDN/hfcsusb.c
966
struct iso_urb *context_iso_urb = (struct iso_urb *) urb->context;
drivers/leds/leds-lp55xx-common.c
534
static void lp55xx_firmware_loaded(const struct firmware *fw, void *context)
drivers/leds/leds-lp55xx-common.c
536
struct lp55xx_chip *chip = context;
drivers/md/dm-bio-prison-v1.c
271
void *context,
drivers/md/dm-bio-prison-v1.c
276
visit_fn(context, cell);
drivers/md/dm-bio-prison-v1.h
107
void *context, struct dm_bio_prison_cell *cell);
drivers/md/dm-bufio.c
1290
static void dmio_complete(unsigned long error, void *context)
drivers/md/dm-bufio.c
1292
struct dm_buffer *b = context;
drivers/md/dm-bufio.c
1305
.notify.context = b,
drivers/md/dm-bufio.c
1505
static enum evict_result is_clean(struct dm_buffer *b, void *context)
drivers/md/dm-bufio.c
1507
struct dm_bufio_client *c = context;
drivers/md/dm-bufio.c
1524
static enum evict_result is_dirty(struct dm_buffer *b, void *context)
drivers/md/dm-bufio.c
156
typedef bool (*iter_predicate)(struct lru_entry *le, void *context);
drivers/md/dm-bufio.c
164
iter_predicate pred, void *context)
drivers/md/dm-bufio.c
1691
static enum evict_result cleaned(struct dm_buffer *b, void *context)
drivers/md/dm-bufio.c
1712
static enum it_action write_one(struct dm_buffer *b, void *context)
drivers/md/dm-bufio.c
1714
struct write_context *wc = context;
drivers/md/dm-bufio.c
177
if (pred(e, context))
drivers/md/dm-bufio.c
2140
static bool is_writing(struct lru_entry *e, void *context)
drivers/md/dm-bufio.c
2283
static enum evict_result idle(struct dm_buffer *b, void *context)
drivers/md/dm-bufio.c
2354
static enum it_action warn_leak(struct dm_buffer *b, void *context)
drivers/md/dm-bufio.c
2356
bool *warned = context;
drivers/md/dm-bufio.c
269
switch (pred(le, context)) {
drivers/md/dm-bufio.c
2733
static enum evict_result select_for_evict(struct dm_buffer *b, void *context)
drivers/md/dm-bufio.c
656
static enum evict_result __evict_pred(struct lru_entry *le, void *context)
drivers/md/dm-bufio.c
673
struct evict_wrapper w = {.lh = lh, .pred = pred, .context = context};
drivers/md/dm-bufio.c
689
b_predicate pred, void *context)
drivers/md/dm-bufio.c
730
struct evict_wrapper w = {.lh = lh, .pred = pred, .context = context};
drivers/md/dm-bufio.c
744
b_predicate pred, void *context)
drivers/md/dm-bufio.c
786
switch (fn(b, context)) {
drivers/md/dm-bufio.c
800
iter_fn fn, void *context)
drivers/md/dm-cache-metadata.c
1162
load_discard_fn fn, void *context)
drivers/md/dm-cache-metadata.c
1183
r = fn(context, cmd->discard_block_size, to_dblock(b),
drivers/md/dm-cache-metadata.c
1200
r = fn(context, cmd->discard_block_size, to_dblock(b), false);
drivers/md/dm-cache-metadata.c
1210
load_discard_fn fn, void *context)
drivers/md/dm-cache-metadata.c
1215
r = __load_discards(cmd, fn, context);
drivers/md/dm-cache-metadata.c
1320
load_mapping_fn fn, void *context)
drivers/md/dm-cache-metadata.c
1346
r = fn(context, oblock, to_cblock(cb), dirty,
drivers/md/dm-cache-metadata.c
1362
load_mapping_fn fn, void *context)
drivers/md/dm-cache-metadata.c
1388
r = fn(context, oblock, to_cblock(cb), dirty,
drivers/md/dm-cache-metadata.c
1401
load_mapping_fn fn, void *context)
drivers/md/dm-cache-metadata.c
1441
fn, context);
drivers/md/dm-cache-metadata.c
1445
fn, context);
drivers/md/dm-cache-metadata.c
1490
load_mapping_fn fn, void *context)
drivers/md/dm-cache-metadata.c
1495
r = __load_mappings(cmd, policy, fn, context);
drivers/md/dm-cache-metadata.c
1556
static int is_dirty_callback(uint32_t index, bool *value, void *context)
drivers/md/dm-cache-metadata.c
1558
unsigned long *bits = context;
drivers/md/dm-cache-metadata.c
1661
static int get_hint(uint32_t index, void *value_le, void *context)
drivers/md/dm-cache-metadata.c
1664
struct dm_cache_policy *policy = context;
drivers/md/dm-cache-metadata.c
295
vt.context = NULL;
drivers/md/dm-cache-metadata.h
79
typedef int (*load_discard_fn)(void *context, sector_t discard_block_size,
drivers/md/dm-cache-metadata.h
82
load_discard_fn fn, void *context);
drivers/md/dm-cache-metadata.h
90
typedef int (*load_mapping_fn)(void *context, dm_oblock_t oblock,
drivers/md/dm-cache-metadata.h
96
void *context);
drivers/md/dm-cache-target.c
1090
static void copy_complete(int read_err, unsigned long write_err, void *context)
drivers/md/dm-cache-target.c
1092
struct dm_cache_migration *mg = container_of(context, struct dm_cache_migration, k);
drivers/md/dm-cache-target.c
1764
static blk_status_t commit_op(void *context)
drivers/md/dm-cache-target.c
1766
struct cache *cache = context;
drivers/md/dm-cache-target.c
2807
static int load_mapping(void *context, dm_oblock_t oblock, dm_cblock_t cblock,
drivers/md/dm-cache-target.c
2810
struct cache *cache = context;
drivers/md/dm-cache-target.c
2821
static int load_filtered_mapping(void *context, dm_oblock_t oblock, dm_cblock_t cblock,
drivers/md/dm-cache-target.c
2824
struct cache *cache = context;
drivers/md/dm-cache-target.c
2836
return load_mapping(context, oblock, cblock, dirty, hint, hint_valid);
drivers/md/dm-cache-target.c
2893
static int load_discard(void *context, sector_t discard_block_size,
drivers/md/dm-cache-target.c
2896
struct discard_load_info *li = context;
drivers/md/dm-cache-target.c
77
blk_status_t (*commit_op)(void *context);
drivers/md/dm-cache-target.c
822
static void issue_op(struct bio *bio, void *context)
drivers/md/dm-cache-target.c
824
struct cache *cache = context;
drivers/md/dm-cache-target.c
84
void (*issue_op)(struct bio *bio, void *context);
drivers/md/dm-clone-target.c
761
static void hydration_kcopyd_callback(int read_err, unsigned long write_err, void *context)
drivers/md/dm-clone-target.c
765
struct dm_clone_region_hydration *tmp, *hd = context;
drivers/md/dm-delay.c
324
delayed->context = dc;
drivers/md/dm-delay.c
51
struct delay_c *context;
drivers/md/dm-era-target.c
376
static void ws_inc(void *context, const void *value, unsigned int count)
drivers/md/dm-era-target.c
378
struct era_metadata *md = context;
drivers/md/dm-era-target.c
390
static void ws_dec(void *context, const void *value, unsigned int count)
drivers/md/dm-era-target.c
392
struct era_metadata *md = context;
drivers/md/dm-era-target.c
404
static int ws_eq(void *context, const void *value1, const void *value2)
drivers/md/dm-era-target.c
417
vt->context = md;
drivers/md/dm-era-target.c
428
vt.context = NULL;
drivers/md/dm-exception-store.h
126
void *context;
drivers/md/dm-integrity.c
1038
static void complete_journal_io(unsigned long error, void *context)
drivers/md/dm-integrity.c
1040
struct journal_completion *comp = context;
drivers/md/dm-integrity.c
1074
io_req.notify.context = comp;
drivers/md/dm-integrity.c
1194
io_req.notify.context = data;
drivers/md/dm-integrity.c
1520
fr.io_req.notify.context = &fr;
drivers/md/dm-integrity.c
2897
static void complete_copy_from_journal(unsigned long error, void *context)
drivers/md/dm-integrity.c
2899
struct journal_io *io = context;
drivers/md/dm-integrity.c
881
static void complete_journal_op(void *context)
drivers/md/dm-integrity.c
883
struct journal_completion *comp = context;
drivers/md/dm-io.c
123
void *context = io->context;
drivers/md/dm-io.c
130
fn(error_bits, context);
drivers/md/dm-io.c
39
void *context;
drivers/md/dm-io.c
414
struct dpages *dp, io_notify_fn fn, void *context,
drivers/md/dm-io.c
424
io->context = context;
drivers/md/dm-io.c
437
static void sync_io_complete(unsigned long error, void *context)
drivers/md/dm-io.c
439
struct sync_io *sio = context;
drivers/md/dm-io.c
522
io_req->notify.fn, io_req->notify.context, ioprio);
drivers/md/dm-kcopyd.c
377
void *context;
drivers/md/dm-kcopyd.c
494
void *context = job->context;
drivers/md/dm-kcopyd.c
510
fn(read_err, write_err, context);
drivers/md/dm-kcopyd.c
520
static void complete_io(unsigned long error, void *context)
drivers/md/dm-kcopyd.c
522
struct kcopyd_job *job = context;
drivers/md/dm-kcopyd.c
564
.notify.context = job,
drivers/md/dm-kcopyd.c
694
void *context)
drivers/md/dm-kcopyd.c
699
struct kcopyd_job *sub_job = context;
drivers/md/dm-kcopyd.c
743
sub_job->context = sub_job;
drivers/md/dm-kcopyd.c
780
unsigned int flags, dm_kcopyd_notify_fn fn, void *context)
drivers/md/dm-kcopyd.c
845
job->context = context;
drivers/md/dm-kcopyd.c
860
unsigned int flags, dm_kcopyd_notify_fn fn, void *context)
drivers/md/dm-kcopyd.c
862
dm_kcopyd_copy(kc, NULL, num_dests, dests, flags, fn, context);
drivers/md/dm-kcopyd.c
867
dm_kcopyd_notify_fn fn, void *context)
drivers/md/dm-kcopyd.c
876
job->context = context;
drivers/md/dm-log-userspace-base.c
322
log->context = lc;
drivers/md/dm-log-userspace-base.c
330
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
355
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
366
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
383
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
394
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
413
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
441
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
566
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
651
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
678
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
712
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
738
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
769
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
795
struct log_c *lc = log->context;
drivers/md/dm-log-userspace-base.c
835
struct log_c *lc = log->context;
drivers/md/dm-log.c
513
log->context = lc;
drivers/md/dm-log.c
533
struct log_c *lc = log->context;
drivers/md/dm-log.c
572
struct log_c *lc = log->context;
drivers/md/dm-log.c
593
struct log_c *lc = log->context;
drivers/md/dm-log.c
655
struct log_c *lc = log->context;
drivers/md/dm-log.c
662
struct log_c *lc = log->context;
drivers/md/dm-log.c
670
struct log_c *lc = log->context;
drivers/md/dm-log.c
677
struct log_c *lc = log->context;
drivers/md/dm-log.c
691
struct log_c *lc = log->context;
drivers/md/dm-log.c
730
struct log_c *lc = log->context;
drivers/md/dm-log.c
737
struct log_c *lc = log->context;
drivers/md/dm-log.c
745
struct log_c *lc = log->context;
drivers/md/dm-log.c
768
struct log_c *lc = log->context;
drivers/md/dm-log.c
782
struct log_c *lc = log->context;
drivers/md/dm-log.c
797
struct log_c *lc = log->context;
drivers/md/dm-log.c
822
struct log_c *lc = log->context;
drivers/md/dm-path-selector.h
27
void *context;
drivers/md/dm-ps-historical-service-time.c
149
struct selector *s = ps->context;
drivers/md/dm-ps-historical-service-time.c
164
struct selector *s = ps->context;
drivers/md/dm-ps-historical-service-time.c
210
ps->context = s;
drivers/md/dm-ps-historical-service-time.c
229
struct selector *s = ps->context;
drivers/md/dm-ps-historical-service-time.c
234
ps->context = NULL;
drivers/md/dm-ps-historical-service-time.c
244
struct selector *s = ps->context;
drivers/md/dm-ps-historical-service-time.c
270
struct selector *s = ps->context;
drivers/md/dm-ps-historical-service-time.c
321
struct selector *s = ps->context;
drivers/md/dm-ps-historical-service-time.c
333
struct selector *s = ps->context;
drivers/md/dm-ps-historical-service-time.c
370
struct selector *s = ps->context;
drivers/md/dm-ps-historical-service-time.c
433
struct selector *s = ps->context;
drivers/md/dm-ps-historical-service-time.c
496
struct selector *s = ps->context;
drivers/md/dm-ps-io-affinity.c
127
ps->context = s;
drivers/md/dm-ps-io-affinity.c
139
struct selector *s = ps->context;
drivers/md/dm-ps-io-affinity.c
149
ps->context = NULL;
drivers/md/dm-ps-io-affinity.c
155
struct selector *s = ps->context;
drivers/md/dm-ps-io-affinity.c
199
struct selector *s = ps->context;
drivers/md/dm-ps-io-affinity.c
46
struct selector *s = ps->context;
drivers/md/dm-ps-queue-length.c
118
struct selector *s = ps->context;
drivers/md/dm-ps-queue-length.c
166
struct selector *s = ps->context;
drivers/md/dm-ps-queue-length.c
177
struct selector *s = ps->context;
drivers/md/dm-ps-queue-length.c
193
struct selector *s = ps->context;
drivers/md/dm-ps-queue-length.c
63
ps->context = s;
drivers/md/dm-ps-queue-length.c
79
struct selector *s = ps->context;
drivers/md/dm-ps-queue-length.c
84
ps->context = NULL;
drivers/md/dm-ps-round-robin.c
124
struct selector *s = ps->context;
drivers/md/dm-ps-round-robin.c
168
struct selector *s = ps->context;
drivers/md/dm-ps-round-robin.c
179
struct selector *s = ps->context;
drivers/md/dm-ps-round-robin.c
192
struct selector *s = ps->context;
drivers/md/dm-ps-round-robin.c
77
ps->context = s;
drivers/md/dm-ps-round-robin.c
83
struct selector *s = ps->context;
drivers/md/dm-ps-round-robin.c
88
ps->context = NULL;
drivers/md/dm-ps-service-time.c
115
struct selector *s = ps->context;
drivers/md/dm-ps-service-time.c
178
struct selector *s = ps->context;
drivers/md/dm-ps-service-time.c
189
struct selector *s = ps->context;
drivers/md/dm-ps-service-time.c
279
struct selector *s = ps->context;
drivers/md/dm-ps-service-time.c
59
ps->context = s;
drivers/md/dm-ps-service-time.c
75
struct selector *s = ps->context;
drivers/md/dm-ps-service-time.c
80
ps->context = NULL;
drivers/md/dm-raid1.c
121
static void wakeup_all_recovery_waiters(void *context)
drivers/md/dm-raid1.c
141
static void dispatch_bios(void *context, struct bio_list *bio_list)
drivers/md/dm-raid1.c
143
struct mirror_set *ms = context;
drivers/md/dm-raid1.c
302
void *context)
drivers/md/dm-raid1.c
304
struct dm_region *reg = context;
drivers/md/dm-raid1.c
514
static void read_callback(unsigned long error, void *context)
drivers/md/dm-raid1.c
516
struct bio *bio = context;
drivers/md/dm-raid1.c
550
.notify.context = bio,
drivers/md/dm-raid1.c
603
static void write_callback(unsigned long error, void *context)
drivers/md/dm-raid1.c
606
struct bio *bio = context;
drivers/md/dm-raid1.c
664
.notify.context = bio,
drivers/md/dm-raid1.c
97
static void wakeup_mirrord(void *context)
drivers/md/dm-raid1.c
99
struct mirror_set *ms = context;
drivers/md/dm-region-hash.c
102
void (*wakeup_all_recovery_waiters)(void *context);
drivers/md/dm-region-hash.c
140
return reg->rh->context;
drivers/md/dm-region-hash.c
165
void *context, void (*dispatch_bios)(void *context,
drivers/md/dm-region-hash.c
167
void (*wakeup_workers)(void *context),
drivers/md/dm-region-hash.c
168
void (*wakeup_all_recovery_waiters)(void *context),
drivers/md/dm-region-hash.c
193
rh->context = context;
drivers/md/dm-region-hash.c
382
rh->dispatch_bios(rh->context, ®->delayed_bios);
drivers/md/dm-region-hash.c
384
rh->wakeup_all_recovery_waiters(rh->context);
drivers/md/dm-region-hash.c
583
rh->wakeup_workers(rh->context);
drivers/md/dm-region-hash.c
641
rh->wakeup_all_recovery_waiters(rh->context);
drivers/md/dm-region-hash.c
676
rh->wakeup_workers(rh->context);
drivers/md/dm-region-hash.c
721
rh->wakeup_workers(rh->context);
drivers/md/dm-region-hash.c
92
void *context;
drivers/md/dm-region-hash.c
96
void (*dispatch_bios)(void *context, struct bio_list *bios);
drivers/md/dm-region-hash.c
99
void (*wakeup_workers)(void *context);
drivers/md/dm-snap-persistent.c
570
return store->context;
drivers/md/dm-snap-persistent.c
721
cb->context = callback_context;
drivers/md/dm-snap-persistent.c
756
cb->callback(cb->context, ps->valid);
drivers/md/dm-snap-persistent.c
893
store->context = ps;
drivers/md/dm-snap-persistent.c
99
void *context;
drivers/md/dm-snap-transient.c
31
kfree(store->context);
drivers/md/dm-snap-transient.c
45
struct transient_c *tc = store->context;
drivers/md/dm-snap-transient.c
71
*sectors_allocated = ((struct transient_c *) store->context)->next_free;
drivers/md/dm-snap-transient.c
85
store->context = tc;
drivers/md/dm-snap.c
1011
void *context);
drivers/md/dm-snap.c
1119
static void merge_callback(int read_err, unsigned long write_err, void *context)
drivers/md/dm-snap.c
1121
struct dm_snapshot *s = context;
drivers/md/dm-snap.c
1634
static void pending_complete(void *context, int success)
drivers/md/dm-snap.c
1636
struct dm_snap_pending_exception *pe = context;
drivers/md/dm-snap.c
1735
static void copy_callback(int read_err, unsigned long write_err, void *context)
drivers/md/dm-snap.c
1737
struct dm_snap_pending_exception *pe = context;
drivers/md/dm-snap.c
1908
static void zero_callback(int read_err, unsigned long write_err, void *context)
drivers/md/dm-snap.c
1910
struct bio *bio = context;
drivers/md/dm-snap.c
823
static int dm_add_exception(void *context, chunk_t old, chunk_t new)
drivers/md/dm-snap.c
825
struct dm_snapshot *s = context;
drivers/md/dm-table.c
1536
void (*fn)(void *), void *context)
drivers/md/dm-table.c
1540
t->event_context = context;
drivers/md/dm-thin-metadata.c
2092
void *context)
drivers/md/dm-thin-metadata.c
2099
threshold, fn, context);
drivers/md/dm-thin-metadata.c
2108
void *context)
drivers/md/dm-thin-metadata.c
2112
pmd->pre_commit_context = context;
drivers/md/dm-thin-metadata.c
356
static void data_block_inc(void *context, const void *value_le, unsigned int count)
drivers/md/dm-thin-metadata.c
358
with_runs((struct dm_space_map *) context,
drivers/md/dm-thin-metadata.c
362
static void data_block_dec(void *context, const void *value_le, unsigned int count)
drivers/md/dm-thin-metadata.c
364
with_runs((struct dm_space_map *) context,
drivers/md/dm-thin-metadata.c
368
static int data_block_equal(void *context, const void *value1_le, const void *value2_le)
drivers/md/dm-thin-metadata.c
382
static void subtree_inc(void *context, const void *value, unsigned int count)
drivers/md/dm-thin-metadata.c
384
struct dm_btree_info *info = context;
drivers/md/dm-thin-metadata.c
392
static void subtree_dec(void *context, const void *value, unsigned int count)
drivers/md/dm-thin-metadata.c
394
struct dm_btree_info *info = context;
drivers/md/dm-thin-metadata.c
403
static int subtree_equal(void *context, const void *value1_le, const void *value2_le)
drivers/md/dm-thin-metadata.c
487
pmd->info.value_type.context = pmd->data_sm;
drivers/md/dm-thin-metadata.c
498
pmd->tl_info.value_type.context = &pmd->bl_info;
drivers/md/dm-thin-metadata.c
506
pmd->bl_info.value_type.context = pmd->data_sm;
drivers/md/dm-thin-metadata.c
514
pmd->details_info.value_type.context = NULL;
drivers/md/dm-thin-metadata.h
220
void *context);
drivers/md/dm-thin-metadata.h
234
typedef int (*dm_pool_pre_commit_fn)(void *context);
drivers/md/dm-thin-metadata.h
238
void *context);
drivers/md/dm-thin.c
1827
static void __remap_and_issue_shared_cell(void *context,
drivers/md/dm-thin.c
1830
struct remap_info *info = context;
drivers/md/dm-thin.c
3191
static void metadata_low_callback(void *context)
drivers/md/dm-thin.c
3193
struct pool *pool = context;
drivers/md/dm-thin.c
3212
static int metadata_pre_commit_callback(void *context)
drivers/md/dm-thin.c
3214
struct pool *pool = context;
drivers/md/dm-thin.c
477
void *context,
drivers/md/dm-thin.c
480
dm_cell_visit_release(pool->prison, fn, context, cell);
drivers/md/dm-thin.c
847
static void copy_complete(int read_err, unsigned long write_err, void *context)
drivers/md/dm-thin.c
849
struct dm_thin_new_mapping *m = context;
drivers/md/dm-thin.c
905
static void __inc_remap_and_issue_cell(void *context,
drivers/md/dm-thin.c
908
struct remap_info *info = context;
drivers/md/dm-vdo/action-manager.c
105
thread_id_t initiator_thread_id, void *context,
drivers/md/dm-vdo/action-manager.c
121
.context = context,
drivers/md/dm-vdo/action-manager.c
140
return manager->current_action->in_use ? manager->current_action->context : NULL;
drivers/md/dm-vdo/action-manager.c
148
return manager->get_zone_thread_id(manager->context, manager->acting_zone);
drivers/md/dm-vdo/action-manager.c
195
manager->current_action->zone_action(manager->context, zone, completion);
drivers/md/dm-vdo/action-manager.c
230
action->preamble(manager->context, &manager->completion);
drivers/md/dm-vdo/action-manager.c
247
manager->scheduler(manager->context));
drivers/md/dm-vdo/action-manager.c
266
result = action.conclusion(manager->context);
drivers/md/dm-vdo/action-manager.c
35
void *context;
drivers/md/dm-vdo/action-manager.c
356
void *context, struct vdo_completion *parent)
drivers/md/dm-vdo/action-manager.c
379
.context = context,
drivers/md/dm-vdo/action-manager.c
62
void *context;
drivers/md/dm-vdo/action-manager.c
73
static bool no_default_action(void *context __always_unused)
drivers/md/dm-vdo/action-manager.c
79
static void no_preamble(void *context __always_unused, struct vdo_completion *completion)
drivers/md/dm-vdo/action-manager.c
85
static int no_conclusion(void *context __always_unused)
drivers/md/dm-vdo/action-manager.h
108
void *context, struct vdo_completion *parent);
drivers/md/dm-vdo/action-manager.h
42
typedef void (*vdo_zone_action_fn)(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/action-manager.h
51
typedef void (*vdo_action_preamble_fn)(void *context, struct vdo_completion *parent);
drivers/md/dm-vdo/action-manager.h
60
typedef int (*vdo_action_conclusion_fn)(void *context);
drivers/md/dm-vdo/action-manager.h
68
typedef bool (*vdo_action_scheduler_fn)(void *context);
drivers/md/dm-vdo/action-manager.h
75
typedef thread_id_t (*vdo_zone_thread_getter_fn)(void *context, zone_count_t zone_number);
drivers/md/dm-vdo/action-manager.h
81
thread_id_t initiator_thread_id, void *context,
drivers/md/dm-vdo/block-map.c
1517
static void write_page_callback(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/block-map.c
1519
write_page(container_of(waiter, struct tree_page, waiter), context);
drivers/md/dm-vdo/block-map.c
1552
static void write_page_if_not_dirtied(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/block-map.c
1555
struct write_if_not_dirtied_context *write_context = context;
drivers/md/dm-vdo/block-map.c
1578
struct block_map_zone *zone = pooled->context;
drivers/md/dm-vdo/block-map.c
1590
struct write_if_not_dirtied_context context = {
drivers/md/dm-vdo/block-map.c
1596
write_page_if_not_dirtied, &context);
drivers/md/dm-vdo/block-map.c
1623
struct block_map_zone *zone = pooled->context;
drivers/md/dm-vdo/block-map.c
1636
struct block_map_zone *zone = pooled->context;
drivers/md/dm-vdo/block-map.c
1658
struct block_map_zone *zone = vio->context;
drivers/md/dm-vdo/block-map.c
1670
struct block_map_zone *zone = vio->context;
drivers/md/dm-vdo/block-map.c
1742
static void abort_lookup_for_waiter(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/block-map.c
1745
int result = *((int *) context);
drivers/md/dm-vdo/block-map.c
1832
static void continue_load_for_waiter(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/block-map.c
1837
continue_with_loaded_page(data_vio, context);
drivers/md/dm-vdo/block-map.c
1849
struct block_map_zone *zone = pooled->context;
drivers/md/dm-vdo/block-map.c
1889
static void load_page(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/block-map.c
1891
struct pooled_vio *pooled = context;
drivers/md/dm-vdo/block-map.c
1966
static void continue_allocation_for_waiter(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/block-map.c
1970
physical_block_number_t pbn = *((physical_block_number_t *) context);
drivers/md/dm-vdo/block-map.c
2652
static void launch_cursor(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/block-map.c
2655
struct pooled_vio *pooled = context;
drivers/md/dm-vdo/block-map.c
2805
static thread_id_t get_block_map_zone_thread_id(void *context, zone_count_t zone_number)
drivers/md/dm-vdo/block-map.c
2807
struct block_map *map = context;
drivers/md/dm-vdo/block-map.c
2813
static void prepare_for_era_advance(void *context, struct vdo_completion *parent)
drivers/md/dm-vdo/block-map.c
2815
struct block_map *map = context;
drivers/md/dm-vdo/block-map.c
2822
static void advance_block_map_zone_era(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/block-map.c
2825
struct block_map *map = context;
drivers/md/dm-vdo/block-map.c
2839
static bool schedule_era_advance(void *context)
drivers/md/dm-vdo/block-map.c
2841
struct block_map *map = context;
drivers/md/dm-vdo/block-map.c
3016
static void drain_zone(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/block-map.c
3019
struct block_map *map = context;
drivers/md/dm-vdo/block-map.c
3035
static void resume_block_map_zone(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/block-map.c
3038
struct block_map *map = context;
drivers/md/dm-vdo/block-map.c
3069
static void grow_forest(void *context, struct vdo_completion *completion)
drivers/md/dm-vdo/block-map.c
3071
replace_forest(context);
drivers/md/dm-vdo/block-map.c
570
static void set_persistent_error(struct vdo_page_cache *cache, const char *context,
drivers/md/dm-vdo/block-map.c
579
context);
drivers/md/dm-vdo/block-map.c
893
static bool completion_needs_page(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/block-map.c
895
physical_block_number_t *pbn = context;
drivers/md/dm-vdo/dedupe.c
1599
static bool decode_uds_advice(struct dedupe_context *context)
drivers/md/dm-vdo/dedupe.c
1601
const struct uds_request *request = &context->request;
drivers/md/dm-vdo/dedupe.c
1602
struct data_vio *data_vio = context->requestor;
drivers/md/dm-vdo/dedupe.c
1647
struct dedupe_context *context = agent->dedupe_context;
drivers/md/dm-vdo/dedupe.c
1649
if (context == NULL)
drivers/md/dm-vdo/dedupe.c
1652
if (change_context_state(context, DEDUPE_CONTEXT_COMPLETE, DEDUPE_CONTEXT_IDLE)) {
drivers/md/dm-vdo/dedupe.c
1653
agent->is_duplicate = decode_uds_advice(context);
drivers/md/dm-vdo/dedupe.c
1655
release_context(context);
drivers/md/dm-vdo/dedupe.c
2134
static void start_expiration_timer(struct dedupe_context *context)
drivers/md/dm-vdo/dedupe.c
2136
u64 start_time = context->submission_jiffies;
drivers/md/dm-vdo/dedupe.c
2139
if (!change_timer_state(context->zone, DEDUPE_QUERY_TIMER_IDLE,
drivers/md/dm-vdo/dedupe.c
2145
mod_timer(&context->zone->timer, end_time);
drivers/md/dm-vdo/dedupe.c
2226
struct dedupe_context *context = container_of(request, struct dedupe_context,
drivers/md/dm-vdo/dedupe.c
2229
if (change_context_state(context, DEDUPE_CONTEXT_PENDING,
drivers/md/dm-vdo/dedupe.c
2235
continue_data_vio(context->requestor);
drivers/md/dm-vdo/dedupe.c
2243
if (!change_context_state(context, DEDUPE_CONTEXT_TIMED_OUT,
drivers/md/dm-vdo/dedupe.c
2246
atomic_read(&context->state));
drivers/md/dm-vdo/dedupe.c
2249
vdo_funnel_queue_put(context->zone->timed_out_complete, &context->queue_entry);
drivers/md/dm-vdo/dedupe.c
2275
struct dedupe_context *context;
drivers/md/dm-vdo/dedupe.c
2282
context = container_of(entry, struct dedupe_context, queue_entry);
drivers/md/dm-vdo/dedupe.c
2283
atomic_set(&context->state, DEDUPE_CONTEXT_IDLE);
drivers/md/dm-vdo/dedupe.c
2284
list_add(&context->list_entry, &zone->available);
drivers/md/dm-vdo/dedupe.c
2296
struct dedupe_context *context, *tmp;
drivers/md/dm-vdo/dedupe.c
2303
list_for_each_entry_safe(context, tmp, &zone->pending, list_entry) {
drivers/md/dm-vdo/dedupe.c
2304
if (cutoff <= context->submission_jiffies) {
drivers/md/dm-vdo/dedupe.c
2309
start_expiration_timer(context);
drivers/md/dm-vdo/dedupe.c
2313
if (!change_context_state(context, DEDUPE_CONTEXT_PENDING,
drivers/md/dm-vdo/dedupe.c
2328
list_del_init(&context->list_entry);
drivers/md/dm-vdo/dedupe.c
2329
context->requestor->dedupe_context = NULL;
drivers/md/dm-vdo/dedupe.c
2330
continue_data_vio(context->requestor);
drivers/md/dm-vdo/dedupe.c
2384
struct dedupe_context *context = &zone->contexts[i];
drivers/md/dm-vdo/dedupe.c
2386
context->zone = zone;
drivers/md/dm-vdo/dedupe.c
2387
context->request.callback = finish_index_operation;
drivers/md/dm-vdo/dedupe.c
2388
context->request.session = zones->index_session;
drivers/md/dm-vdo/dedupe.c
2389
list_add(&context->list_entry, &zone->available);
drivers/md/dm-vdo/dedupe.c
2396
static thread_id_t get_thread_id_for_zone(void *context, zone_count_t zone_number)
drivers/md/dm-vdo/dedupe.c
2398
struct hash_zones *zones = context;
drivers/md/dm-vdo/dedupe.c
2519
static void suspend_index(void *context, struct vdo_completion *completion)
drivers/md/dm-vdo/dedupe.c
2521
struct hash_zones *zones = context;
drivers/md/dm-vdo/dedupe.c
2535
static void drain_hash_zone(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/dedupe.c
2538
struct hash_zones *zones = context;
drivers/md/dm-vdo/dedupe.c
2576
static void resume_index(void *context, struct vdo_completion *parent)
drivers/md/dm-vdo/dedupe.c
2578
struct hash_zones *zones = context;
drivers/md/dm-vdo/dedupe.c
2604
static void resume_hash_zone(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/dedupe.c
2607
struct hash_zone *zone = &(((struct hash_zones *) context)->zones[zone_number]);
drivers/md/dm-vdo/dedupe.c
2859
struct dedupe_context *context;
drivers/md/dm-vdo/dedupe.c
2866
context = list_first_entry(&zone->available, struct dedupe_context,
drivers/md/dm-vdo/dedupe.c
2868
list_del_init(&context->list_entry);
drivers/md/dm-vdo/dedupe.c
2869
return context;
drivers/md/dm-vdo/dedupe.c
2903
struct dedupe_context *context;
drivers/md/dm-vdo/dedupe.c
2914
context = acquire_context(zone);
drivers/md/dm-vdo/dedupe.c
2915
if (context == NULL) {
drivers/md/dm-vdo/dedupe.c
2921
data_vio->dedupe_context = context;
drivers/md/dm-vdo/dedupe.c
2922
context->requestor = data_vio;
drivers/md/dm-vdo/dedupe.c
2923
context->submission_jiffies = jiffies;
drivers/md/dm-vdo/dedupe.c
2924
prepare_uds_request(&context->request, data_vio, operation);
drivers/md/dm-vdo/dedupe.c
2925
atomic_set(&context->state, DEDUPE_CONTEXT_PENDING);
drivers/md/dm-vdo/dedupe.c
2926
list_add_tail(&context->list_entry, &zone->pending);
drivers/md/dm-vdo/dedupe.c
2927
start_expiration_timer(context);
drivers/md/dm-vdo/dedupe.c
2928
result = uds_launch_request(&context->request);
drivers/md/dm-vdo/dedupe.c
2930
context->request.status = result;
drivers/md/dm-vdo/dedupe.c
2931
finish_index_operation(&context->request);
drivers/md/dm-vdo/dedupe.c
327
static inline bool change_context_state(struct dedupe_context *context, int old, int new)
drivers/md/dm-vdo/dedupe.c
329
return (atomic_cmpxchg(&context->state, old, new) == old);
drivers/md/dm-vdo/dedupe.c
568
static void abort_waiter(struct vdo_waiter *waiter, void __always_unused *context)
drivers/md/dm-vdo/dedupe.c
716
static void release_context(struct dedupe_context *context)
drivers/md/dm-vdo/dedupe.c
718
struct hash_zone *zone = context->zone;
drivers/md/dm-vdo/dedupe.c
721
list_move(&context->list_entry, &zone->available);
drivers/md/dm-vdo/dedupe.c
726
struct dedupe_context *context = agent->dedupe_context;
drivers/md/dm-vdo/dedupe.c
728
if ((context == NULL) ||
drivers/md/dm-vdo/dedupe.c
729
!change_context_state(context, DEDUPE_CONTEXT_COMPLETE, DEDUPE_CONTEXT_IDLE))
drivers/md/dm-vdo/dedupe.c
733
release_context(context);
drivers/md/dm-vdo/dedupe.c
926
static void enter_forked_lock(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/dedupe.c
929
struct hash_lock *new_lock = context;
drivers/md/dm-vdo/dm-vdo-target.c
1137
static bool vdo_uses_device(struct vdo *vdo, const void *context)
drivers/md/dm-vdo/dm-vdo-target.c
1139
const struct device_config *config = context;
drivers/md/dm-vdo/dm-vdo-target.c
1501
static bool __must_check vdo_is_named(struct vdo *vdo, const void *context)
drivers/md/dm-vdo/dm-vdo-target.c
1506
return strcmp(device_name, context) == 0;
drivers/md/dm-vdo/dm-vdo-target.c
2534
static void copy_callback(int read_err, unsigned long write_err, void *context)
drivers/md/dm-vdo/dm-vdo-target.c
2536
struct vdo_completion *completion = context;
drivers/md/dm-vdo/funnel-workqueue.c
399
void *context = ((thread_privates != NULL) ? thread_privates[0] : NULL);
drivers/md/dm-vdo/funnel-workqueue.c
401
result = make_simple_work_queue(thread_name_prefix, name, owner, context,
drivers/md/dm-vdo/funnel-workqueue.c
434
void *context = ((thread_privates != NULL) ? thread_privates[i] : NULL);
drivers/md/dm-vdo/funnel-workqueue.c
438
context, type, &queue->service_queues[i]);
drivers/md/dm-vdo/funnel-workqueue.h
18
void (*start)(void *context);
drivers/md/dm-vdo/funnel-workqueue.h
19
void (*finish)(void *context);
drivers/md/dm-vdo/logical-zone.c
169
static void drain_logical_zone(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/logical-zone.c
172
struct logical_zones *zones = context;
drivers/md/dm-vdo/logical-zone.c
188
static void resume_logical_zone(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/logical-zone.c
191
struct logical_zone *zone = &(((struct logical_zones *) context)->zones[zone_number]);
drivers/md/dm-vdo/logical-zone.c
39
static thread_id_t get_thread_id_for_zone(void *context, zone_count_t zone_number)
drivers/md/dm-vdo/logical-zone.c
41
struct logical_zones *zones = context;
drivers/md/dm-vdo/physical-zone.c
521
static void retry_allocation(struct vdo_waiter *waiter, void __always_unused *context)
drivers/md/dm-vdo/recovery-journal.c
1088
static void assign_entry(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/recovery-journal.c
1091
struct recovery_journal_block *block = context;
drivers/md/dm-vdo/recovery-journal.c
1177
static void continue_committed_waiter(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/recovery-journal.c
1180
struct recovery_journal *journal = context;
drivers/md/dm-vdo/recovery-journal.c
1376
static void write_block(struct vdo_waiter *waiter, void __always_unused *context)
drivers/md/dm-vdo/recovery-journal.c
269
static void continue_waiter(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/recovery-journal.c
271
continue_data_vio_with_error(vdo_waiter_as_data_vio(waiter), *((int *) context));
drivers/md/dm-vdo/repair.c
851
void vdo_replay_into_slab_journals(struct block_allocator *allocator, void *context)
drivers/md/dm-vdo/repair.c
854
struct repair_completion *repair = context;
drivers/md/dm-vdo/repair.h
11
void vdo_replay_into_slab_journals(struct block_allocator *allocator, void *context);
drivers/md/dm-vdo/slab-depot.c
1015
static void finish_summary_update(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/slab-depot.c
1018
int result = *((int *) context);
drivers/md/dm-vdo/slab-depot.c
1030
static void write_reference_block(struct vdo_waiter *waiter, void *context);
drivers/md/dm-vdo/slab-depot.c
1041
static void launch_reference_block_write(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/slab-depot.c
1043
struct vdo_slab *slab = context;
drivers/md/dm-vdo/slab-depot.c
1185
static void write_reference_block(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/slab-depot.c
1189
struct pooled_vio *pooled = context;
drivers/md/dm-vdo/slab-depot.c
1215
completion->callback_thread_id = ((struct block_allocator *) pooled->context)->thread_id;
drivers/md/dm-vdo/slab-depot.c
126
static void release_journal_locks(struct vdo_waiter *waiter, void *context);
drivers/md/dm-vdo/slab-depot.c
1691
static void add_entry_from_waiter(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/slab-depot.c
1697
struct slab_journal *journal = context;
drivers/md/dm-vdo/slab-depot.c
2344
static void load_reference_block_group(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/slab-depot.c
2346
struct pooled_vio *pooled = context;
drivers/md/dm-vdo/slab-depot.c
2551
static void read_slab_journal_tail(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/slab-depot.c
2556
struct pooled_vio *pooled = context;
drivers/md/dm-vdo/slab-depot.c
3150
static void abort_waiter(struct vdo_waiter *waiter, void __always_unused *context)
drivers/md/dm-vdo/slab-depot.c
3466
void *context =
drivers/md/dm-vdo/slab-depot.c
3469
vdo_replay_into_slab_journals(allocator, context);
drivers/md/dm-vdo/slab-depot.c
3478
static void copy_callback(int read_err, unsigned long write_err, void *context)
drivers/md/dm-vdo/slab-depot.c
3480
struct block_allocator *allocator = context;
drivers/md/dm-vdo/slab-depot.c
3909
static thread_id_t get_allocator_thread_id(void *context, zone_count_t zone_number)
drivers/md/dm-vdo/slab-depot.c
3911
return ((struct slab_depot *) context)->allocators[zone_number].thread_id;
drivers/md/dm-vdo/slab-depot.c
3947
static void release_tail_block_locks(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/slab-depot.c
3951
struct slab_depot *depot = context;
drivers/md/dm-vdo/slab-depot.c
3970
static void prepare_for_tail_block_commit(void *context, struct vdo_completion *parent)
drivers/md/dm-vdo/slab-depot.c
3972
struct slab_depot *depot = context;
drivers/md/dm-vdo/slab-depot.c
3987
static bool schedule_tail_block_commit(void *context)
drivers/md/dm-vdo/slab-depot.c
3989
struct slab_depot *depot = context;
drivers/md/dm-vdo/slab-depot.c
450
static void flush_for_reaping(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/slab-depot.c
454
struct pooled_vio *pooled = context;
drivers/md/dm-vdo/slab-depot.c
4635
static void load_slab_summary(void *context, struct vdo_completion *parent)
drivers/md/dm-vdo/slab-depot.c
4639
struct slab_depot *depot = context;
drivers/md/dm-vdo/slab-depot.c
4663
static void load_allocator(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/slab-depot.c
4666
struct slab_depot *depot = context;
drivers/md/dm-vdo/slab-depot.c
4685
struct vdo_completion *parent, void *context)
drivers/md/dm-vdo/slab-depot.c
4692
NULL, context, parent);
drivers/md/dm-vdo/slab-depot.c
4696
static void prepare_to_allocate(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/slab-depot.c
4699
struct slab_depot *depot = context;
drivers/md/dm-vdo/slab-depot.c
4801
static int finish_registration(void *context)
drivers/md/dm-vdo/slab-depot.c
4803
struct slab_depot *depot = context;
drivers/md/dm-vdo/slab-depot.c
4814
static void register_new_slabs(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/slab-depot.c
4817
struct slab_depot *depot = context;
drivers/md/dm-vdo/slab-depot.c
4919
static void drain_allocator(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/slab-depot.c
4922
struct slab_depot *depot = context;
drivers/md/dm-vdo/slab-depot.c
5011
static void resume_allocator(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/slab-depot.c
5014
struct slab_depot *depot = context;
drivers/md/dm-vdo/slab-depot.c
5057
static void scrub_all_unrecovered_slabs(void *context, zone_count_t zone_number,
drivers/md/dm-vdo/slab-depot.c
5060
struct slab_depot *depot = context;
drivers/md/dm-vdo/slab-depot.c
559
static void release_journal_locks(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/slab-depot.c
564
int result = *((int *) context);
drivers/md/dm-vdo/slab-depot.c
744
static void write_slab_journal_block(struct vdo_waiter *waiter, void *context)
drivers/md/dm-vdo/slab-depot.c
746
struct pooled_vio *pooled = context;
drivers/md/dm-vdo/slab-depot.h
583
struct vdo_completion *parent, void *context);
drivers/md/dm-vdo/vdo.c
102
const void *context)
drivers/md/dm-vdo/vdo.c
107
if (filter(vdo, context))
drivers/md/dm-vdo/vdo.c
119
struct vdo *vdo_find_matching(vdo_filter_fn filter, const void *context)
drivers/md/dm-vdo/vdo.c
124
vdo = filter_vdos_locked(filter, context);
drivers/md/dm-vdo/vdo.c
87
static bool vdo_is_equal(struct vdo *vdo, const void *context)
drivers/md/dm-vdo/vdo.c
89
return (vdo == context);
drivers/md/dm-vdo/vdo.h
287
typedef bool (*vdo_filter_fn)(struct vdo *vdo, const void *context);
drivers/md/dm-vdo/vdo.h
290
struct vdo * __must_check vdo_find_matching(vdo_filter_fn filter, const void *context);
drivers/md/dm-vdo/vio.c
322
enum vio_type vio_type, enum vio_priority priority, void *context,
drivers/md/dm-vdo/vio.c
357
pooled->context = context;
drivers/md/dm-vdo/vio.h
199
enum vio_priority priority, void *context,
drivers/md/dm-vdo/vio.h
30
void *context;
drivers/md/dm-vdo/wait-queue.c
193
vdo_waiter_callback_fn callback, void *context)
drivers/md/dm-vdo/wait-queue.c
202
callback(waiter, context);
drivers/md/dm-vdo/wait-queue.c
84
vdo_waiter_callback_fn callback, void *context)
drivers/md/dm-vdo/wait-queue.c
97
vdo_waitq_notify_next_waiter(&waiters, callback, context);
drivers/md/dm-vdo/wait-queue.h
112
vdo_waiter_callback_fn callback, void *context);
drivers/md/dm-vdo/wait-queue.h
115
vdo_waiter_callback_fn callback, void *context);
drivers/md/dm-vdo/wait-queue.h
51
typedef void (*vdo_waiter_callback_fn)(struct vdo_waiter *waiter, void *context);
drivers/md/dm-vdo/wait-queue.h
58
typedef bool (*vdo_waiter_match_fn)(struct vdo_waiter *waiter, void *context);
drivers/md/dm-writecache.c
477
static void writecache_notify_io(unsigned long error, void *context)
drivers/md/dm-writecache.c
479
struct io_notify *endio = context;
drivers/md/dm-writecache.c
530
req.notify.context = &endio;
drivers/md/dm-writecache.c
568
req.notify.context = NULL;
drivers/md/dm-zoned-reclaim.c
100
void *context)
drivers/md/dm-zoned-reclaim.c
102
struct dmz_reclaim *zrc = context;
drivers/md/dm.c
2425
static void event_callback(void *context)
drivers/md/dm.c
2429
struct mapped_device *md = context;
drivers/md/dm.h
58
void (*fn)(void *), void *context);
drivers/md/persistent-data/dm-array.c
116
fn(info->value_type.context, element_at(info, ab, 0), nr_entries);
drivers/md/persistent-data/dm-array.c
187
vt->inc(vt->context, value, delta);
drivers/md/persistent-data/dm-array.c
210
vt->dec(vt->context, element_at(info, ab, new_nr - 1), delta);
drivers/md/persistent-data/dm-array.c
575
static void block_inc(void *context, const void *value, unsigned int count)
drivers/md/persistent-data/dm-array.c
578
struct dm_array_info *info = context;
drivers/md/persistent-data/dm-array.c
585
static void __block_dec(void *context, const void *value)
drivers/md/persistent-data/dm-array.c
593
struct dm_array_info *info = context;
drivers/md/persistent-data/dm-array.c
624
static void block_dec(void *context, const void *value, unsigned int count)
drivers/md/persistent-data/dm-array.c
629
__block_dec(context, value);
drivers/md/persistent-data/dm-array.c
632
static int block_equal(void *context, const void *value1, const void *value2)
drivers/md/persistent-data/dm-array.c
649
bvt->context = info;
drivers/md/persistent-data/dm-array.c
708
value_fn fn, void *context,
drivers/md/persistent-data/dm-array.c
719
r = fn(base + i, element_at(info, ab, i), context);
drivers/md/persistent-data/dm-array.c
724
vt->inc(vt->context, element_at(info, ab, i), 1);
drivers/md/persistent-data/dm-array.c
732
uint32_t size, value_fn fn, void *context)
drivers/md/persistent-data/dm-array.c
752
r = populate_ablock_with_values(info, ab, fn, context,
drivers/md/persistent-data/dm-array.c
834
(!vt->equal || !vt->equal(vt->context, old_value, value))) {
drivers/md/persistent-data/dm-array.c
835
vt->dec(vt->context, old_value, 1);
drivers/md/persistent-data/dm-array.c
837
vt->inc(vt->context, value, 1);
drivers/md/persistent-data/dm-array.c
861
int (*fn)(void *context, uint64_t key, void *leaf);
drivers/md/persistent-data/dm-array.c
862
void *context;
drivers/md/persistent-data/dm-array.c
865
static int walk_ablock(void *context, uint64_t *keys, void *leaf)
drivers/md/persistent-data/dm-array.c
867
struct walk_info *wi = context;
drivers/md/persistent-data/dm-array.c
884
r = wi->fn(wi->context, keys[0] * max_entries + i,
drivers/md/persistent-data/dm-array.c
897
void *context)
drivers/md/persistent-data/dm-array.c
903
wi.context = context;
drivers/md/persistent-data/dm-array.h
130
typedef int (*value_fn)(uint32_t index, void *value_le, void *context);
drivers/md/persistent-data/dm-array.h
132
uint32_t size, value_fn fn, void *context);
drivers/md/persistent-data/dm-array.h
181
int (*fn)(void *context, uint64_t key, void *leaf),
drivers/md/persistent-data/dm-array.h
182
void *context);
drivers/md/persistent-data/dm-bitset.c
20
.context = NULL,
drivers/md/persistent-data/dm-bitset.c
46
void *context;
drivers/md/persistent-data/dm-bitset.c
49
static int pack_bits(uint32_t index, void *value, void *context)
drivers/md/persistent-data/dm-bitset.c
52
struct packer_context *p = context;
drivers/md/persistent-data/dm-bitset.c
58
r = p->fn(index * 64 + bit, &bv, p->context);
drivers/md/persistent-data/dm-bitset.c
74
uint32_t size, bit_value_fn fn, void *context)
drivers/md/persistent-data/dm-bitset.c
80
p.context = context;
drivers/md/persistent-data/dm-bitset.h
108
typedef int (*bit_value_fn)(uint32_t index, bool *value, void *context);
drivers/md/persistent-data/dm-bitset.h
110
uint32_t size, bit_value_fn fn, void *context);
drivers/md/persistent-data/dm-btree-remove.c
622
info->value_type.dec(info->value_type.context,
drivers/md/persistent-data/dm-btree-remove.c
733
info->value_type.dec(info->value_type.context,
drivers/md/persistent-data/dm-btree-spine.c
236
static void le64_inc(void *context, const void *value_le, unsigned int count)
drivers/md/persistent-data/dm-btree-spine.c
238
dm_tm_with_runs(context, value_le, count, dm_tm_inc_range);
drivers/md/persistent-data/dm-btree-spine.c
241
static void le64_dec(void *context, const void *value_le, unsigned int count)
drivers/md/persistent-data/dm-btree-spine.c
243
dm_tm_with_runs(context, value_le, count, dm_tm_dec_range);
drivers/md/persistent-data/dm-btree-spine.c
246
static int le64_equal(void *context, const void *value1_le, const void *value2_le)
drivers/md/persistent-data/dm-btree-spine.c
258
vt->context = tm;
drivers/md/persistent-data/dm-btree.c
1299
info->value_type.context,
drivers/md/persistent-data/dm-btree.c
1302
info->value_type.dec(info->value_type.context,
drivers/md/persistent-data/dm-btree.c
1422
int (*fn)(void *context, uint64_t *keys, void *leaf),
drivers/md/persistent-data/dm-btree.c
1423
void *context)
drivers/md/persistent-data/dm-btree.c
1440
r = walk_node(info, value64(n, i), fn, context);
drivers/md/persistent-data/dm-btree.c
1445
r = fn(context, &keys, value_ptr(n, i));
drivers/md/persistent-data/dm-btree.c
1457
int (*fn)(void *context, uint64_t *keys, void *leaf),
drivers/md/persistent-data/dm-btree.c
1458
void *context)
drivers/md/persistent-data/dm-btree.c
1461
return walk_node(info, root, fn, context);
drivers/md/persistent-data/dm-btree.c
325
info->value_type.dec(info->value_type.context,
drivers/md/persistent-data/dm-btree.c
83
vt->inc(vt->context, value_ptr(n, 0), nr_entries);
drivers/md/persistent-data/dm-btree.h
177
int (*fn)(void *context, uint64_t *keys, void *leaf),
drivers/md/persistent-data/dm-btree.h
178
void *context);
drivers/md/persistent-data/dm-btree.h
42
void *context;
drivers/md/persistent-data/dm-btree.h
62
void (*inc)(void *context, const void *value, unsigned int count);
drivers/md/persistent-data/dm-btree.h
69
void (*dec)(void *context, const void *value, unsigned int count);
drivers/md/persistent-data/dm-btree.h
76
int (*equal)(void *context, const void *value1, const void *value2);
drivers/md/persistent-data/dm-space-map-metadata.c
30
void *context;
drivers/md/persistent-data/dm-space-map-metadata.c
40
dm_sm_threshold_fn fn, void *context)
drivers/md/persistent-data/dm-space-map-metadata.c
45
t->context = context;
drivers/md/persistent-data/dm-space-map-metadata.c
527
void *context)
drivers/md/persistent-data/dm-space-map-metadata.c
531
set_threshold(&smm->threshold, threshold, fn, context);
drivers/md/persistent-data/dm-space-map-metadata.c
62
t->fn(t->context);
drivers/md/persistent-data/dm-space-map.h
13
typedef void (*dm_sm_threshold_fn)(void *context);
drivers/md/persistent-data/dm-space-map.h
160
void *context)
drivers/md/persistent-data/dm-space-map.h
163
return sm->register_threshold_callback(sm, threshold, fn, context);
drivers/md/persistent-data/dm-space-map.h
73
void *context);
drivers/media/cec/platform/meson/ao-cec-g12a.c
372
static int meson_ao_cec_g12a_read(void *context, unsigned int addr,
drivers/media/cec/platform/meson/ao-cec-g12a.c
375
struct meson_ao_cec_g12a_device *ao_cec = context;
drivers/media/cec/platform/meson/ao-cec-g12a.c
396
static int meson_ao_cec_g12a_write(void *context, unsigned int addr,
drivers/media/cec/platform/meson/ao-cec-g12a.c
399
struct meson_ao_cec_g12a_device *ao_cec = context;
drivers/media/common/siano/smscoreapi.c
1010
rc = coredev->sendrequest_handler(coredev->context, msg,
drivers/media/common/siano/smscoreapi.c
1030
return coredev->postload_handler(coredev->context);
drivers/media/common/siano/smscoreapi.c
1373
coredev->detectmode_handler(coredev->context,
drivers/media/common/siano/smscoreapi.c
1377
rc = coredev->setmode_handler(coredev->context, mode);
drivers/media/common/siano/smscoreapi.c
1506
rc = client->onresponse_handler(client->context, cb);
drivers/media/common/siano/smscoreapi.c
1726
newclient->context = params->context;
drivers/media/common/siano/smscoreapi.c
1732
pr_debug("%p %d %d\n", params->context, params->data_type,
drivers/media/common/siano/smscoreapi.c
1761
pr_debug("%p\n", client->context);
drivers/media/common/siano/smscoreapi.c
1806
return coredev->sendrequest_handler(coredev->context, buffer, size);
drivers/media/common/siano/smscoreapi.c
1854
return coredev->sendrequest_handler(coredev->context,
drivers/media/common/siano/smscoreapi.c
1878
return coredev->sendrequest_handler(coredev->context,
drivers/media/common/siano/smscoreapi.c
44
void *context;
drivers/media/common/siano/smscoreapi.c
593
client->onremove_handler(client->context);
drivers/media/common/siano/smscoreapi.c
720
dev->context = params->context;
drivers/media/common/siano/smscoreapi.c
756
rc = coredev->sendrequest_handler(coredev->context, buffer, size);
drivers/media/common/siano/smscoreapi.c
844
coredev->sendrequest_handler(coredev->context, &mtu_msg,
drivers/media/common/siano/smscoreapi.c
857
coredev->sendrequest_handler(coredev->context, &crys_msg,
drivers/media/common/siano/smscoreapi.c
924
rc = coredev->preload_handler(coredev->context);
drivers/media/common/siano/smscoreapi.h
100
typedef int (*preload_t)(void *context);
drivers/media/common/siano/smscoreapi.h
101
typedef int (*postload_t)(void *context);
drivers/media/common/siano/smscoreapi.h
103
typedef int (*onresponse_t)(void *context, struct smscore_buffer_t *cb);
drivers/media/common/siano/smscoreapi.h
104
typedef void (*onremove_t)(void *context);
drivers/media/common/siano/smscoreapi.h
134
void *context;
drivers/media/common/siano/smscoreapi.h
143
void *context;
drivers/media/common/siano/smscoreapi.h
161
void *context;
drivers/media/common/siano/smscoreapi.h
97
typedef int (*setmode_t)(void *context, int mode);
drivers/media/common/siano/smscoreapi.h
98
typedef void (*detectmode_t)(void *context, int *mode);
drivers/media/common/siano/smscoreapi.h
99
typedef int (*sendrequest_t)(void *context, void *buffer, size_t size);
drivers/media/common/siano/smsdvb-main.c
1177
params.context = client;
drivers/media/common/siano/smsdvb-main.c
541
static int smsdvb_onresponse(void *context, struct smscore_buffer_t *cb)
drivers/media/common/siano/smsdvb-main.c
543
struct smsdvb_client_t *client = (struct smsdvb_client_t *) context;
drivers/media/common/siano/smsdvb-main.c
667
static void smsdvb_onremove(void *context)
drivers/media/common/siano/smsdvb-main.c
671
smsdvb_unregister_client((struct smsdvb_client_t *) context);
drivers/media/dvb-frontends/af9013.c
1362
static int af9013_regmap_write(void *context, const void *data, size_t count)
drivers/media/dvb-frontends/af9013.c
1364
struct i2c_client *client = context;
drivers/media/dvb-frontends/af9013.c
1400
static int af9013_regmap_read(void *context, const void *reg_buf,
drivers/media/dvb-frontends/af9013.c
1403
struct i2c_client *client = context;
drivers/media/dvb-frontends/drxk_hard.c
6225
void *context)
drivers/media/dvb-frontends/drxk_hard.c
6227
struct drxk_state *state = context;
drivers/media/dvb-frontends/rtl2830.c
690
static int rtl2830_regmap_read(void *context, const void *reg_buf,
drivers/media/dvb-frontends/rtl2830.c
693
struct i2c_client *client = context;
drivers/media/dvb-frontends/rtl2830.c
719
static int rtl2830_regmap_write(void *context, const void *data, size_t count)
drivers/media/dvb-frontends/rtl2830.c
721
struct i2c_client *client = context;
drivers/media/dvb-frontends/rtl2830.c
742
static int rtl2830_regmap_gather_write(void *context, const void *reg,
drivers/media/dvb-frontends/rtl2830.c
746
struct i2c_client *client = context;
drivers/media/dvb-frontends/rtl2832_sdr.c
219
struct rtl2832_sdr_dev *dev = urb->context;
drivers/media/firewire/firedtv-fw.c
128
fw_iso_context_queue_flush(ctx->context);
drivers/media/firewire/firedtv-fw.c
142
ctx->context = fw_iso_context_create(device->card,
drivers/media/firewire/firedtv-fw.c
145
if (IS_ERR(ctx->context)) {
drivers/media/firewire/firedtv-fw.c
146
err = PTR_ERR(ctx->context);
drivers/media/firewire/firedtv-fw.c
167
err = fw_iso_context_start(ctx->context, -1, 0,
drivers/media/firewire/firedtv-fw.c
178
fw_iso_context_destroy(ctx->context);
drivers/media/firewire/firedtv-fw.c
189
fw_iso_context_stop(ctx->context);
drivers/media/firewire/firedtv-fw.c
191
fw_iso_context_destroy(ctx->context);
drivers/media/firewire/firedtv-fw.c
78
struct fw_iso_context *context;
drivers/media/firewire/firedtv-fw.c
94
return fw_iso_context_queue(ctx->context, &p, &ctx->buffer,
drivers/media/firewire/firedtv-fw.c
98
static void handle_iso(struct fw_iso_context *context, u32 cycle,
drivers/media/i2c/ov08x40.c
1753
const void *context)
drivers/media/i2c/ov08x40.c
1756
const struct ov08x40 *ov08x = context;
drivers/media/mmc/siano/smssdio.c
258
params.context = smsdev;
drivers/media/mmc/siano/smssdio.c
88
static int smssdio_sendrequest(void *context, void *buffer, size_t size)
drivers/media/mmc/siano/smssdio.c
93
smsdev = context;
drivers/media/pci/intel/ipu6/ipu6-fw-isys.c
367
ipu6_fw_isys_get_resp(void *context, unsigned int queue)
drivers/media/pci/intel/ipu6/ipu6-fw-isys.c
369
return ipu6_recv_get_token(context, queue);
drivers/media/pci/intel/ipu6/ipu6-fw-isys.c
372
void ipu6_fw_isys_put_resp(void *context, unsigned int queue)
drivers/media/pci/intel/ipu6/ipu6-fw-isys.c
374
ipu6_recv_put_token(context, queue);
drivers/media/pci/intel/ipu6/ipu6-fw-isys.h
594
ipu6_fw_isys_get_resp(void *context, unsigned int queue);
drivers/media/pci/intel/ipu6/ipu6-fw-isys.h
595
void ipu6_fw_isys_put_resp(void *context, unsigned int queue);
drivers/media/pci/saa7164/saa7164-types.h
196
u8 *context;
drivers/media/platform/allegro-dvt/allegro-core.c
3794
static void allegro_fw_callback(const struct firmware *fw, void *context)
drivers/media/platform/allegro-dvt/allegro-core.c
3796
struct allegro_dev *dev = context;
drivers/media/platform/arm/mali-c55/mali-c55-common.h
258
struct mali_c55_context context;
drivers/media/platform/arm/mali-c55/mali-c55-core.c
200
return &mali_c55->context;
drivers/media/platform/arm/mali-c55/mali-c55-core.c
555
static irqreturn_t mali_c55_isr(int irq, void *context)
drivers/media/platform/arm/mali-c55/mali-c55-core.c
557
struct device *dev = context;
drivers/media/platform/arm/mali-c55/mali-c55-core.c
648
struct mali_c55_context *ctx = &mali_c55->context;
drivers/media/platform/arm/mali-c55/mali-c55-core.c
881
kfree(mali_c55->context.registers);
drivers/media/platform/arm/mali-c55/mali-c55-core.c
892
kfree(mali_c55->context.registers);
drivers/media/platform/chips-media/coda/coda-common.c
2962
static void coda_fw_callback(const struct firmware *fw, void *context);
drivers/media/platform/chips-media/coda/coda-common.c
2980
static void coda_fw_callback(const struct firmware *fw, void *context)
drivers/media/platform/chips-media/coda/coda-common.c
2982
struct coda_dev *dev = context;
drivers/media/platform/renesas/rzv2h-ivc/rzv2h-ivc-dev.c
100
struct device *dev = context;
drivers/media/platform/renesas/rzv2h-ivc/rzv2h-ivc-dev.c
98
static irqreturn_t rzv2h_ivc_isr(int irq, void *context)
drivers/media/platform/samsung/exynos4-is/fimc-is.c
377
static void fimc_is_load_firmware(const struct firmware *fw, void *context)
drivers/media/platform/samsung/exynos4-is/fimc-is.c
379
struct fimc_is *is = context;
drivers/media/platform/ti/vpe/vpdma.c
1082
static void vpdma_firmware_cb(const struct firmware *f, void *context)
drivers/media/platform/ti/vpe/vpdma.c
1084
struct vpdma_data *vpdma = context;
drivers/media/radio/si470x/radio-si470x-usb.c
364
struct si470x_device *radio = urb->context;
drivers/media/rc/ati_remote.c
383
struct ati_remote *ati_remote = urb->context;
drivers/media/rc/ati_remote.c
479
struct ati_remote *ati_remote = urb->context;
drivers/media/rc/ati_remote.c
652
struct ati_remote *ati_remote = urb->context;
drivers/media/rc/igorplugusb.c
97
struct igorplugusb *ir = urb->context;
drivers/media/rc/iguanair.c
151
ir = urb->context;
drivers/media/rc/iguanair.c
176
struct iguanair *ir = urb->context;
drivers/media/rc/imon.c
1086
ictx = (struct imon_context *)urb->context;
drivers/media/rc/imon.c
1751
ictx = (struct imon_context *)urb->context;
drivers/media/rc/imon.c
1797
ictx = (struct imon_context *)urb->context;
drivers/media/rc/imon_raw.c
82
struct imon *imon = urb->context;
drivers/media/rc/ir_toy.c
202
struct irtoy *irtoy = urb->context;
drivers/media/rc/ir_toy.c
214
struct irtoy *irtoy = urb->context;
drivers/media/rc/mceusb.c
1372
ir = urb->context;
drivers/media/rc/mceusb.c
800
complete(urb->context);
drivers/media/rc/redrat3.c
690
struct redrat3_dev *rr3 = urb->context;
drivers/media/rc/redrat3.c
885
struct redrat3_dev *rr3 = urb->context;
drivers/media/rc/redrat3.c
904
struct redrat3_dev *rr3 = urb->context;
drivers/media/rc/streamzap.c
209
sz = urb->context;
drivers/media/rc/ttusbir.c
158
struct ttusbir *tt = urb->context;
drivers/media/rc/ttusbir.c
248
urb->context = tt;
drivers/media/rc/ttusbir.c
83
struct ttusbir *tt = urb->context;
drivers/media/rc/xbox_remote.c
119
struct xbox_remote *xbox_remote = urb->context;
drivers/media/rc/xbox_remote.c
91
struct xbox_remote *xbox_remote = urb->context;
drivers/media/tuners/xc2028.c
1361
void *context)
drivers/media/tuners/xc2028.c
1363
struct dvb_frontend *fe = context;
drivers/media/usb/airspy/airspy.c
269
struct airspy *s = urb->context;
drivers/media/usb/as102/as102_usb_drv.c
213
struct as102_dev_t *as102_dev = urb->context;
drivers/media/usb/as102/as10x_cmd.h
450
union as10x_fw_context context;
drivers/media/usb/as102/as10x_cmd_cfg.c
103
sizeof(pcmd->body.context.req)
drivers/media/usb/as102/as10x_cmd_cfg.c
106
sizeof(prsp->body.context.rsp)
drivers/media/usb/as102/as10x_cmd_cfg.c
185
err = prsp->body.context.rsp.error;
drivers/media/usb/as102/as10x_cmd_cfg.c
188
(le16_to_cpu(prsp->body.context.rsp.proc_id) == proc_id)) {
drivers/media/usb/as102/as10x_cmd_cfg.c
34
sizeof(pcmd->body.context.req));
drivers/media/usb/as102/as10x_cmd_cfg.c
37
pcmd->body.context.req.proc_id = cpu_to_le16(CONTROL_PROC_CONTEXT);
drivers/media/usb/as102/as10x_cmd_cfg.c
38
pcmd->body.context.req.tag = cpu_to_le16(tag);
drivers/media/usb/as102/as10x_cmd_cfg.c
39
pcmd->body.context.req.type = cpu_to_le16(GET_CONTEXT_DATA);
drivers/media/usb/as102/as10x_cmd_cfg.c
45
sizeof(pcmd->body.context.req)
drivers/media/usb/as102/as10x_cmd_cfg.c
48
sizeof(prsp->body.context.rsp)
drivers/media/usb/as102/as10x_cmd_cfg.c
63
*pvalue = le32_to_cpu((__force __le32)prsp->body.context.rsp.reg_val.u.value32);
drivers/media/usb/as102/as10x_cmd_cfg.c
90
sizeof(pcmd->body.context.req));
drivers/media/usb/as102/as10x_cmd_cfg.c
93
pcmd->body.context.req.proc_id = cpu_to_le16(CONTROL_PROC_CONTEXT);
drivers/media/usb/as102/as10x_cmd_cfg.c
95
pcmd->body.context.req.reg_val.u.value32 = (__force u32)cpu_to_le32(value);
drivers/media/usb/as102/as10x_cmd_cfg.c
96
pcmd->body.context.req.tag = cpu_to_le16(tag);
drivers/media/usb/as102/as10x_cmd_cfg.c
97
pcmd->body.context.req.type = cpu_to_le16(SET_CONTEXT_DATA);
drivers/media/usb/au0828/au0828-dvb.c
110
struct au0828_dev *dev = purb->context;
drivers/media/usb/au0828/au0828-video.c
112
struct au0828_dmaqueue *dma_q = urb->context;
drivers/media/usb/au0828/au0828-video.c
493
struct au0828_dmaqueue *dma_q = urb->context;
drivers/media/usb/b2c2/flexcop-usb.c
376
struct flexcop_usb *fc_usb = urb->context;
drivers/media/usb/b2c2/flexcop-usb.c
464
urb->context = fc_usb;
drivers/media/usb/cx231xx/cx231xx-417.c
1317
struct cx231xx_dmaqueue *dma_q = urb->context;
drivers/media/usb/cx231xx/cx231xx-417.c
1343
struct cx231xx_dmaqueue *dma_q = urb->context;
drivers/media/usb/cx231xx/cx231xx-audio.c
173
struct cx231xx *dev = urb->context;
drivers/media/usb/cx231xx/cx231xx-audio.c
290
urb->context = dev;
drivers/media/usb/cx231xx/cx231xx-audio.c
351
urb->context = dev;
drivers/media/usb/cx231xx/cx231xx-audio.c
81
struct cx231xx *dev = urb->context;
drivers/media/usb/cx231xx/cx231xx-core.c
781
struct cx231xx_dmaqueue *dma_q = urb->context;
drivers/media/usb/cx231xx/cx231xx-core.c
827
struct cx231xx_dmaqueue *dma_q = urb->context;
drivers/media/usb/cx231xx/cx231xx-vbi.c
257
struct cx231xx_dmaqueue *dma_q = urb->context;
drivers/media/usb/cx231xx/cx231xx-vbi.c
73
struct cx231xx_dmaqueue *dma_q = urb->context;
drivers/media/usb/cx231xx/cx231xx-video.c
264
struct cx231xx_dmaqueue *dma_q = urb->context;
drivers/media/usb/cx231xx/cx231xx-video.c
361
struct cx231xx_dmaqueue *dma_q = urb->context;
drivers/media/usb/dvb-usb-v2/af9015.c
1295
static int af9015_regmap_write(void *context, const void *data, size_t count)
drivers/media/usb/dvb-usb-v2/af9015.c
1297
struct dvb_usb_device *d = context;
drivers/media/usb/dvb-usb-v2/af9015.c
1315
static int af9015_regmap_read(void *context, const void *reg_buf,
drivers/media/usb/dvb-usb-v2/af9015.c
1318
struct dvb_usb_device *d = context;
drivers/media/usb/dvb-usb-v2/lmedm04.c
269
struct dvb_usb_adapter *adap = lme_urb->context;
drivers/media/usb/dvb-usb-v2/usb_urb.c
184
urb->context = stream;
drivers/media/usb/dvb-usb-v2/usb_urb.c
21
struct usb_data_stream *stream = urb->context;
drivers/media/usb/dvb-usb/cxusb-analog.c
632
struct dvb_usb_device *dvbdev = u->context;
drivers/media/usb/dvb-usb/cxusb-analog.c
839
surb->context = dvbdev;
drivers/media/usb/dvb-usb/dib0700_core.c
710
struct dvb_usb_device *d = purb->context;
drivers/media/usb/dvb-usb/usb-urb.c
17
struct usb_data_stream *stream = urb->context;
drivers/media/usb/dvb-usb/usb-urb.c
192
urb->context = stream;
drivers/media/usb/em28xx/em28xx-audio.c
68
struct em28xx *dev = urb->context;
drivers/media/usb/em28xx/em28xx-audio.c
784
urb->context = dev;
drivers/media/usb/em28xx/em28xx-core.c
772
struct em28xx *dev = urb->context;
drivers/media/usb/go7007/go7007-usb.c
747
struct go7007 *go = (struct go7007 *)urb->context;
drivers/media/usb/go7007/go7007-usb.c
787
struct go7007 *go = (struct go7007 *)urb->context;
drivers/media/usb/go7007/go7007-usb.c
810
struct go7007 *go = (struct go7007 *)urb->context;
drivers/media/usb/gspca/benq.c
141
struct gspca_dev *gspca_dev = (struct gspca_dev *) urb->context;
drivers/media/usb/gspca/benq.c
98
urb->context = gspca_dev;
drivers/media/usb/gspca/gspca.c
323
struct gspca_dev *gspca_dev = (struct gspca_dev *) urb->context;
drivers/media/usb/gspca/gspca.c
336
struct gspca_dev *gspca_dev = (struct gspca_dev *) urb->context;
drivers/media/usb/gspca/gspca.c
712
urb->context = gspca_dev;
drivers/media/usb/gspca/gspca.c
82
struct gspca_dev *gspca_dev = (struct gspca_dev *) urb->context;
drivers/media/usb/gspca/konica.c
219
urb->context = gspca_dev;
drivers/media/usb/gspca/konica.c
256
struct gspca_dev *gspca_dev = (struct gspca_dev *) urb->context;
drivers/media/usb/hackrf/hackrf.c
483
struct hackrf_dev *dev = urb->context;
drivers/media/usb/hackrf/hackrf.c
528
struct hackrf_dev *dev = urb->context;
drivers/media/usb/hdpvr/hdpvr-video.c
76
struct hdpvr_buffer *buf = (struct hdpvr_buffer *)urb->context;
drivers/media/usb/msi2500/msi2500.c
365
struct msi2500_dev *dev = (struct msi2500_dev *)urb->context;
drivers/media/usb/msi2500/msi2500.c
517
urb->context = dev;
drivers/media/usb/pvrusb2/pvrusb2-hdw.c
3541
struct pvr2_hdw *hdw = urb->context;
drivers/media/usb/pvrusb2/pvrusb2-hdw.c
3550
struct pvr2_hdw *hdw = urb->context;
drivers/media/usb/pvrusb2/pvrusb2-io.c
417
struct pvr2_buffer *bp = urb->context;
drivers/media/usb/pwc/pwc-if.c
286
struct pwc_device *pdev = (struct pwc_device *)urb->context;
drivers/media/usb/pwc/pwc-if.c
476
urb->context = pdev;
drivers/media/usb/s2255/s2255drv.c
2038
pipe_info = purb->context;
drivers/media/usb/s2255/s2255drv.c
494
struct s2255_fw *data = urb->context;
drivers/media/usb/siano/smsusb.c
208
static int smsusb_sendrequest(void *context, void *buffer, size_t size)
drivers/media/usb/siano/smsusb.c
210
struct smsusb_device_t *dev = (struct smsusb_device_t *) context;
drivers/media/usb/siano/smsusb.c
301
static void smsusb1_detectmode(void *context, int *mode)
drivers/media/usb/siano/smsusb.c
304
((struct smsusb_device_t *) context)->udev->product;
drivers/media/usb/siano/smsusb.c
323
static int smsusb1_setmode(void *context, int mode)
drivers/media/usb/siano/smsusb.c
333
return smsusb_sendrequest(context, &msg, sizeof(msg));
drivers/media/usb/siano/smsusb.c
452
params.context = dev;
drivers/media/usb/siano/smsusb.c
88
struct smsusb_urb_t *surb = (struct smsusb_urb_t *) urb->context;
drivers/media/usb/stk1160/stk1160-video.c
301
struct stk1160 *dev = urb->context;
drivers/media/usb/stk1160/stk1160-video.c
480
urb->context = dev;
drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c
717
struct ttusb *ttusb = urb->context;
drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c
810
urb->context = ttusb;
drivers/media/usb/ttusb-dec/ttusb_dec.c
251
struct ttusb_dec *dec = urb->context;
drivers/media/usb/ttusb-dec/ttusb_dec.c
797
struct ttusb_dec *dec = urb->context;
drivers/media/usb/ttusb-dec/ttusb_dec.c
850
urb->context = dec;
drivers/media/usb/usbtv/usbtv-audio.c
100
struct usbtv *chip = urb->context;
drivers/media/usb/usbtv/usbtv-video.c
467
struct usbtv *usbtv = (struct usbtv *)ip->context;
drivers/media/usb/usbtv/usbtv-video.c
513
ip->context = usbtv;
drivers/media/usb/uvc/uvc_status.c
169
struct uvc_device *dev = urb->context;
drivers/media/usb/uvc/uvc_status.c
205
struct uvc_device *dev = urb->context;
drivers/media/usb/uvc/uvc_video.c
1689
struct uvc_urb *uvc_urb = urb->context;
drivers/media/usb/uvc/uvc_video.c
1905
urb->context = uvc_urb;
drivers/media/v4l2-core/v4l2-common.c
166
const void *context),
drivers/media/v4l2-core/v4l2-common.c
167
const void *context)
drivers/media/v4l2-core/v4l2-common.c
180
if (func && !func(array, i, context))
drivers/memory/omap-gpmc.c
2427
gpmc_context = &gpmc->context;
drivers/memory/omap-gpmc.c
246
struct omap3_gpmc_regs context;
drivers/memory/omap-gpmc.c
2465
gpmc_context = &gpmc->context;
drivers/memory/renesas-rpc-if.c
131
static int rpcif_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/memory/renesas-rpc-if.c
133
struct rpcif_priv *rpc = context;
drivers/memory/renesas-rpc-if.c
180
static int xspi_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/memory/renesas-rpc-if.c
182
struct rpcif_priv *xspi = context;
drivers/memory/renesas-rpc-if.c
188
static int xspi_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/memory/renesas-rpc-if.c
190
struct rpcif_priv *xspi = context;
drivers/memory/renesas-rpc-if.c
95
static int rpcif_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/memory/renesas-rpc-if.c
97
struct rpcif_priv *rpc = context;
drivers/mfd/acer-ec-a500.c
36
static int a500_ec_read(void *context, const void *reg_buf, size_t reg_size,
drivers/mfd/acer-ec-a500.c
39
struct i2c_client *client = context;
drivers/mfd/acer-ec-a500.c
67
static int a500_ec_write(void *context, const void *data, size_t count)
drivers/mfd/acer-ec-a500.c
69
struct i2c_client *client = context;
drivers/mfd/atmel-hlcdc.c
36
static int regmap_atmel_hlcdc_reg_write(void *context, unsigned int reg,
drivers/mfd/atmel-hlcdc.c
39
struct atmel_hlcdc_regmap *hregmap = context;
drivers/mfd/atmel-hlcdc.c
61
static int regmap_atmel_hlcdc_reg_read(void *context, unsigned int reg,
drivers/mfd/atmel-hlcdc.c
64
struct atmel_hlcdc_regmap *hregmap = context;
drivers/mfd/cs40l50-core.c
273
static void cs40l50_dsp_bringup(const struct firmware *bin, void *context)
drivers/mfd/cs40l50-core.c
275
struct cs40l50 *cs40l50 = context;
drivers/mfd/cs40l50-core.c
310
static void cs40l50_request_firmware(const struct firmware *fw, void *context)
drivers/mfd/cs40l50-core.c
312
struct cs40l50 *cs40l50 = context;
drivers/mfd/cs42l43.c
672
static void cs42l43_mcu_load_firmware(const struct firmware *firmware, void *context)
drivers/mfd/cs42l43.c
674
struct cs42l43 *cs42l43 = context;
drivers/mfd/dln2.c
236
struct dln2_dev *dln2 = urb->context;
drivers/mfd/ene-kb3930.c
84
static int kb3930_ec_ram_reg_write(void *context, unsigned int reg,
drivers/mfd/ene-kb3930.c
87
struct kb3930 *ddata = context;
drivers/mfd/ene-kb3930.c
93
static int kb3930_ec_ram_reg_read(void *context, unsigned int reg,
drivers/mfd/ene-kb3930.c
96
struct kb3930 *ddata = context;
drivers/mfd/gateworks-gsc.c
32
int gsc_write(void *context, unsigned int reg, unsigned int val)
drivers/mfd/gateworks-gsc.c
34
struct i2c_client *client = context;
drivers/mfd/gateworks-gsc.c
51
int gsc_read(void *context, unsigned int reg, unsigned int *val)
drivers/mfd/gateworks-gsc.c
53
struct i2c_client *client = context;
drivers/mfd/intel-m10-bmc-pmci.c
62
static int indirect_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/mfd/intel-m10-bmc-pmci.c
64
struct indirect_ctx *ctx = context;
drivers/mfd/intel-m10-bmc-pmci.c
94
static int indirect_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/mfd/intel-m10-bmc-pmci.c
96
struct indirect_ctx *ctx = context;
drivers/mfd/intel_soc_pmic_bxtwc.c
302
static int regmap_ipc_byte_reg_read(void *context, unsigned int reg,
drivers/mfd/intel_soc_pmic_bxtwc.c
309
struct intel_soc_pmic *pmic = context;
drivers/mfd/intel_soc_pmic_bxtwc.c
334
static int regmap_ipc_byte_reg_write(void *context, unsigned int reg,
drivers/mfd/intel_soc_pmic_bxtwc.c
339
struct intel_soc_pmic *pmic = context;
drivers/mfd/intel_soc_pmic_chtwc.c
72
static int cht_wc_byte_reg_read(void *context, unsigned int reg,
drivers/mfd/intel_soc_pmic_chtwc.c
75
struct i2c_client *client = context;
drivers/mfd/intel_soc_pmic_chtwc.c
94
static int cht_wc_byte_reg_write(void *context, unsigned int reg,
drivers/mfd/intel_soc_pmic_chtwc.c
97
struct i2c_client *client = context;
drivers/mfd/intel_soc_pmic_mrfld.c
73
static int bcove_ipc_byte_reg_read(void *context, unsigned int reg,
drivers/mfd/intel_soc_pmic_mrfld.c
76
struct intel_soc_pmic *pmic = context;
drivers/mfd/intel_soc_pmic_mrfld.c
88
static int bcove_ipc_byte_reg_write(void *context, unsigned int reg,
drivers/mfd/intel_soc_pmic_mrfld.c
91
struct intel_soc_pmic *pmic = context;
drivers/mfd/iqs62x.c
454
static irqreturn_t iqs62x_irq(int irq, void *context)
drivers/mfd/iqs62x.c
456
struct iqs62x_core *iqs62x = context;
drivers/mfd/iqs62x.c
583
static void iqs62x_firmware_load(const struct firmware *fw, void *context)
drivers/mfd/iqs62x.c
585
struct iqs62x_core *iqs62x = context;
drivers/mfd/mc13xxx-spi.c
59
static int mc13xxx_spi_read(void *context, const void *reg, size_t reg_size,
drivers/mfd/mc13xxx-spi.c
65
struct device *dev = context;
drivers/mfd/mc13xxx-spi.c
88
static int mc13xxx_spi_write(void *context, const void *data, size_t count)
drivers/mfd/mc13xxx-spi.c
90
struct device *dev = context;
drivers/mfd/mt6360-core.c
400
static int mt6360_regmap_read(void *context, const void *reg, size_t reg_size,
drivers/mfd/mt6360-core.c
403
struct mt6360_ddata *ddata = context;
drivers/mfd/mt6360-core.c
451
static int mt6360_regmap_write(void *context, const void *val, size_t val_size)
drivers/mfd/mt6360-core.c
453
struct mt6360_ddata *ddata = context;
drivers/mfd/mt6370.c
182
static int mt6370_regmap_read(void *context, const void *reg_buf,
drivers/mfd/mt6370.c
185
struct mt6370_info *info = context;
drivers/mfd/mt6370.c
204
static int mt6370_regmap_write(void *context, const void *data, size_t count)
drivers/mfd/mt6370.c
206
struct mt6370_info *info = context;
drivers/mfd/nct6694.c
194
struct nct6694 *nct6694 = urb->context;
drivers/mfd/ntxec.c
103
struct regmap *regmap = context;
drivers/mfd/ntxec.c
110
static int regmap_wrap_read(void *context, unsigned int reg,
drivers/mfd/ntxec.c
113
struct regmap *regmap = context;
drivers/mfd/ntxec.c
99
static int regmap_ignore_write(void *context,
drivers/mfd/ocelot-spi.c
141
static int ocelot_spi_regmap_bus_read(void *context, const void *reg, size_t reg_size,
drivers/mfd/ocelot-spi.c
145
struct device *dev = context;
drivers/mfd/ocelot-spi.c
171
static int ocelot_spi_regmap_bus_write(void *context, const void *data, size_t count)
drivers/mfd/ocelot-spi.c
173
struct device *dev = context;
drivers/mfd/retu-mfd.c
189
static int retu_regmap_read(void *context, const void *reg, size_t reg_size,
drivers/mfd/retu-mfd.c
193
struct device *dev = context;
drivers/mfd/retu-mfd.c
206
static int retu_regmap_write(void *context, const void *data, size_t count)
drivers/mfd/retu-mfd.c
210
struct device *dev = context;
drivers/mfd/rk8xx-spi.c
41
static int rk806_spi_bus_write(void *context, const void *vdata, size_t count)
drivers/mfd/rk8xx-spi.c
43
struct device *dev = context;
drivers/mfd/rk8xx-spi.c
63
static int rk806_spi_bus_read(void *context, const void *vreg, size_t reg_size,
drivers/mfd/rk8xx-spi.c
66
struct device *dev = context;
drivers/mfd/rohm-bd71828.c
401
static int regmap_write_wrapper(void *context, unsigned int reg, unsigned int val)
drivers/mfd/rohm-bd71828.c
403
struct bd72720_regmaps *maps = context;
drivers/mfd/rohm-bd71828.c
413
static int regmap_read_wrapper(void *context, unsigned int reg, unsigned int *val)
drivers/mfd/rohm-bd71828.c
415
struct bd72720_regmaps *maps = context;
drivers/mfd/rsmu_i2c.c
164
static int rsmu_i2c_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/mfd/rsmu_i2c.c
166
struct rsmu_ddata *rsmu = i2c_get_clientdata((struct i2c_client *)context);
drivers/mfd/rsmu_i2c.c
181
static int rsmu_i2c_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/mfd/rsmu_i2c.c
183
struct rsmu_ddata *rsmu = i2c_get_clientdata((struct i2c_client *)context);
drivers/mfd/rsmu_i2c.c
200
static int rsmu_smbus_i2c_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/mfd/rsmu_i2c.c
202
struct rsmu_ddata *rsmu = i2c_get_clientdata((struct i2c_client *)context);
drivers/mfd/rsmu_i2c.c
217
static int rsmu_smbus_i2c_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/mfd/rsmu_i2c.c
219
struct rsmu_ddata *rsmu = i2c_get_clientdata((struct i2c_client *)context);
drivers/mfd/rsmu_spi.c
144
static int rsmu_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/mfd/rsmu_spi.c
146
struct rsmu_ddata *rsmu = spi_get_drvdata((struct spi_device *)context);
drivers/mfd/rsmu_spi.c
161
static int rsmu_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/mfd/rsmu_spi.c
163
struct rsmu_ddata *rsmu = spi_get_drvdata((struct spi_device *)context);
drivers/mfd/sec-acpm.c
389
static int sec_pmic_acpm_bus_write(void *context, const void *data,
drivers/mfd/sec-acpm.c
392
struct sec_pmic_acpm_bus_context *ctx = context;
drivers/mfd/sec-acpm.c
409
static int sec_pmic_acpm_bus_read(void *context, const void *reg_buf, size_t reg_size,
drivers/mfd/sec-acpm.c
412
struct sec_pmic_acpm_bus_context *ctx = context;
drivers/mfd/sec-acpm.c
428
static int sec_pmic_acpm_bus_reg_update_bits(void *context, unsigned int reg, unsigned int mask,
drivers/mfd/sec-acpm.c
431
struct sec_pmic_acpm_bus_context *ctx = context;
drivers/mfd/si476x-prop.c
191
static int si476x_core_regmap_write(void *context, unsigned int reg,
drivers/mfd/si476x-prop.c
194
return si476x_core_cmd_set_property(context, reg, val);
drivers/mfd/si476x-prop.c
197
static int si476x_core_regmap_read(void *context, unsigned int reg,
drivers/mfd/si476x-prop.c
200
struct si476x_core *core = context;
drivers/mfd/smpro-core.c
27
static int smpro_core_write(void *context, const void *data, size_t count)
drivers/mfd/smpro-core.c
29
struct device *dev = context;
drivers/mfd/smpro-core.c
40
static int smpro_core_read(void *context, const void *reg, size_t reg_size,
drivers/mfd/smpro-core.c
43
struct device *dev = context;
drivers/mfd/sprd-sc27xx-spi.c
107
static int sprd_pmic_spi_write(void *context, const void *data, size_t count)
drivers/mfd/sprd-sc27xx-spi.c
109
struct device *dev = context;
drivers/mfd/sprd-sc27xx-spi.c
115
static int sprd_pmic_spi_read(void *context,
drivers/mfd/sprd-sc27xx-spi.c
119
struct device *dev = context;
drivers/mfd/tps6594-i2c.c
108
static int tps6594_i2c_read(void *context, const void *reg_buf, size_t reg_size,
drivers/mfd/tps6594-i2c.c
111
struct i2c_client *client = context;
drivers/mfd/tps6594-i2c.c
147
static int tps6594_i2c_write(void *context, const void *data, size_t count)
drivers/mfd/tps6594-i2c.c
149
struct i2c_client *client = context;
drivers/mfd/tps6594-spi.c
31
static int tps6594_spi_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/mfd/tps6594-spi.c
33
struct spi_device *spi = context;
drivers/mfd/tps6594-spi.c
57
static int tps6594_spi_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/mfd/tps6594-spi.c
59
struct spi_device *spi = context;
drivers/mfd/upboard-fpga.c
42
static int upboard_fpga_read(void *context, unsigned int reg, unsigned int *val)
drivers/mfd/upboard-fpga.c
44
struct upboard_fpga *fpga = context;
drivers/mfd/upboard-fpga.c
75
static int upboard_fpga_write(void *context, unsigned int reg, unsigned int val)
drivers/mfd/upboard-fpga.c
77
struct upboard_fpga *fpga = context;
drivers/misc/lattice-ecp3-config.c
65
static void firmware_load(const struct firmware *fw, void *context)
drivers/misc/lattice-ecp3-config.c
67
struct spi_device *spi = (struct spi_device *)context;
drivers/misc/mei/platform-vsc.c
293
static void mei_vsc_event_cb(void *context)
drivers/misc/mei/platform-vsc.c
295
struct mei_device *mei_dev = context;
drivers/misc/mei/vsc-tp.c
403
void *context)
drivers/misc/mei/vsc-tp.c
408
tp->event_notify_context = context;
drivers/misc/mei/vsc-tp.h
29
typedef void (*vsc_tp_event_cb_t)(void *context);
drivers/misc/mei/vsc-tp.h
38
void *context);
drivers/misc/ocxl/context.c
45
*context = ctx;
drivers/misc/ocxl/context.c
7
int ocxl_context_alloc(struct ocxl_context **context, struct ocxl_afu *afu,
drivers/misc/ocxl/context.c
83
pidr = mm->context.id;
drivers/misc/ocxl/link.c
247
WARN_ON(pe_data->mm->context.id != pid);
drivers/misc/ocxl/link.c
502
pid = mm->context.id;
drivers/misc/ocxl/link.c
595
trace_ocxl_init_mmu_notifier(pasid, mm->context.id);
drivers/misc/ocxl/link.c
732
pe_data->mm->context.id);
drivers/misc/ocxl/link.c
737
pe_data->mm->context.id,
drivers/misc/vmw_balloon.c
1379
b->vmci_doorbell.context,
drivers/misc/vmw_vmci/vmci_context.c
1001
if (src_cid != handle.context) {
drivers/misc/vmw_vmci/vmci_context.c
1005
VMCI_CONTEXT_IS_VM(handle.context)) {
drivers/misc/vmw_vmci/vmci_context.c
1007
src_cid, handle.context);
drivers/misc/vmw_vmci/vmci_context.c
1015
handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_context.c
1030
if (handle.context == VMCI_HOST_CONTEXT_ID) {
drivers/misc/vmw_vmci/vmci_context.c
1062
bool vmci_ctx_supports_host_qp(struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_context.c
1064
return context && context->user_version >= VMCI_VERSION_HOSTQP;
drivers/misc/vmw_vmci/vmci_context.c
107
context = kzalloc_obj(*context);
drivers/misc/vmw_vmci/vmci_context.c
1071
int vmci_ctx_qp_create(struct vmci_ctx *context, struct vmci_handle handle)
drivers/misc/vmw_vmci/vmci_context.c
1075
if (context == NULL || vmci_handle_is_invalid(handle))
drivers/misc/vmw_vmci/vmci_context.c
1078
if (!vmci_handle_arr_has_entry(context->queue_pair_array, handle))
drivers/misc/vmw_vmci/vmci_context.c
108
if (!context) {
drivers/misc/vmw_vmci/vmci_context.c
1080
&context->queue_pair_array, handle);
drivers/misc/vmw_vmci/vmci_context.c
1091
int vmci_ctx_qp_destroy(struct vmci_ctx *context, struct vmci_handle handle)
drivers/misc/vmw_vmci/vmci_context.c
1095
if (context == NULL || vmci_handle_is_invalid(handle))
drivers/misc/vmw_vmci/vmci_context.c
1098
hndl = vmci_handle_arr_remove_entry(context->queue_pair_array, handle);
drivers/misc/vmw_vmci/vmci_context.c
1108
bool vmci_ctx_qp_exists(struct vmci_ctx *context, struct vmci_handle handle)
drivers/misc/vmw_vmci/vmci_context.c
1110
if (context == NULL || vmci_handle_is_invalid(handle))
drivers/misc/vmw_vmci/vmci_context.c
1113
return vmci_handle_arr_has_entry(context->queue_pair_array, handle);
drivers/misc/vmw_vmci/vmci_context.c
1126
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_context.c
1128
context = vmci_ctx_get(context_id);
drivers/misc/vmw_vmci/vmci_context.c
1129
if (!context)
drivers/misc/vmw_vmci/vmci_context.c
1132
flags = context->priv_flags;
drivers/misc/vmw_vmci/vmci_context.c
1133
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
114
kref_init(&context->kref);
drivers/misc/vmw_vmci/vmci_context.c
115
spin_lock_init(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
1152
struct vmci_ctx *context = vmci_ctx_get(context_id);
drivers/misc/vmw_vmci/vmci_context.c
1153
if (context) {
drivers/misc/vmw_vmci/vmci_context.c
1154
if (context->cred)
drivers/misc/vmw_vmci/vmci_context.c
1155
is_owner = uid_eq(context->cred->uid, uid);
drivers/misc/vmw_vmci/vmci_context.c
1156
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
116
INIT_LIST_HEAD(&context->list_item);
drivers/misc/vmw_vmci/vmci_context.c
117
INIT_LIST_HEAD(&context->datagram_queue);
drivers/misc/vmw_vmci/vmci_context.c
118
INIT_LIST_HEAD(&context->notifier_list);
drivers/misc/vmw_vmci/vmci_context.c
121
init_waitqueue_head(&context->host_context.wait_queue);
drivers/misc/vmw_vmci/vmci_context.c
123
context->queue_pair_array =
drivers/misc/vmw_vmci/vmci_context.c
125
if (!context->queue_pair_array) {
drivers/misc/vmw_vmci/vmci_context.c
130
context->doorbell_array =
drivers/misc/vmw_vmci/vmci_context.c
132
if (!context->doorbell_array) {
drivers/misc/vmw_vmci/vmci_context.c
137
context->pending_doorbell_array =
drivers/misc/vmw_vmci/vmci_context.c
139
if (!context->pending_doorbell_array) {
drivers/misc/vmw_vmci/vmci_context.c
144
context->user_version = user_version;
drivers/misc/vmw_vmci/vmci_context.c
146
context->priv_flags = priv_flags;
drivers/misc/vmw_vmci/vmci_context.c
149
context->cred = get_cred(cred);
drivers/misc/vmw_vmci/vmci_context.c
151
context->notify = &ctx_dummy_notify;
drivers/misc/vmw_vmci/vmci_context.c
152
context->notify_page = NULL;
drivers/misc/vmw_vmci/vmci_context.c
168
context->cid = cid;
drivers/misc/vmw_vmci/vmci_context.c
170
list_add_tail_rcu(&context->list_item, &ctx_list.head);
drivers/misc/vmw_vmci/vmci_context.c
173
return context;
drivers/misc/vmw_vmci/vmci_context.c
176
vmci_handle_arr_destroy(context->doorbell_array);
drivers/misc/vmw_vmci/vmci_context.c
178
vmci_handle_arr_destroy(context->queue_pair_array);
drivers/misc/vmw_vmci/vmci_context.c
180
kfree(context);
drivers/misc/vmw_vmci/vmci_context.c
188
void vmci_ctx_destroy(struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_context.c
191
list_del_rcu(&context->list_item);
drivers/misc/vmw_vmci/vmci_context.c
195
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
264
ev.msg.hdr.dst.context);
drivers/misc/vmw_vmci/vmci_context.c
279
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_context.c
290
context = vmci_ctx_get(cid);
drivers/misc/vmw_vmci/vmci_context.c
291
if (!context) {
drivers/misc/vmw_vmci/vmci_context.c
300
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
308
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
319
if (context->datagram_queue_size + vmci_dg_size >=
drivers/misc/vmw_vmci/vmci_context.c
325
context->datagram_queue_size + vmci_dg_size >=
drivers/misc/vmw_vmci/vmci_context.c
327
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
328
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
334
list_add(&dq_entry->list_item, &context->datagram_queue);
drivers/misc/vmw_vmci/vmci_context.c
335
context->pending_datagrams++;
drivers/misc/vmw_vmci/vmci_context.c
336
context->datagram_queue_size += vmci_dg_size;
drivers/misc/vmw_vmci/vmci_context.c
337
ctx_signal_notify(context);
drivers/misc/vmw_vmci/vmci_context.c
338
wake_up(&context->host_context.wait_queue);
drivers/misc/vmw_vmci/vmci_context.c
339
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
340
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
352
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_context.c
357
list_for_each_entry_rcu(context, &ctx_list.head, list_item) {
drivers/misc/vmw_vmci/vmci_context.c
358
if (context->cid == cid) {
drivers/misc/vmw_vmci/vmci_context.c
373
struct vmci_ctx *c, *context = NULL;
drivers/misc/vmw_vmci/vmci_context.c
389
context = c;
drivers/misc/vmw_vmci/vmci_context.c
390
kref_get(&context->kref);
drivers/misc/vmw_vmci/vmci_context.c
396
return context;
drivers/misc/vmw_vmci/vmci_context.c
406
struct vmci_ctx *context = container_of(kref, struct vmci_ctx, kref);
drivers/misc/vmw_vmci/vmci_context.c
415
ctx_fire_notification(context->cid, context->priv_flags);
drivers/misc/vmw_vmci/vmci_context.c
422
temp_handle = vmci_handle_arr_get_entry(context->queue_pair_array, 0);
drivers/misc/vmw_vmci/vmci_context.c
425
context) < VMCI_SUCCESS) {
drivers/misc/vmw_vmci/vmci_context.c
432
vmci_handle_arr_remove_entry(context->queue_pair_array,
drivers/misc/vmw_vmci/vmci_context.c
436
vmci_handle_arr_get_entry(context->queue_pair_array, 0);
drivers/misc/vmw_vmci/vmci_context.c
44
static void ctx_signal_notify(struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_context.c
444
&context->datagram_queue, list_item) {
drivers/misc/vmw_vmci/vmci_context.c
452
&context->notifier_list, node) {
drivers/misc/vmw_vmci/vmci_context.c
457
vmci_handle_arr_destroy(context->queue_pair_array);
drivers/misc/vmw_vmci/vmci_context.c
458
vmci_handle_arr_destroy(context->doorbell_array);
drivers/misc/vmw_vmci/vmci_context.c
459
vmci_handle_arr_destroy(context->pending_doorbell_array);
drivers/misc/vmw_vmci/vmci_context.c
46
*context->notify = true;
drivers/misc/vmw_vmci/vmci_context.c
460
vmci_ctx_unset_notify(context);
drivers/misc/vmw_vmci/vmci_context.c
461
if (context->cred)
drivers/misc/vmw_vmci/vmci_context.c
462
put_cred(context->cred);
drivers/misc/vmw_vmci/vmci_context.c
463
kfree(context);
drivers/misc/vmw_vmci/vmci_context.c
475
void vmci_ctx_put(struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_context.c
477
kref_put(&context->kref, ctx_free_ctx);
drivers/misc/vmw_vmci/vmci_context.c
488
int vmci_ctx_dequeue_datagram(struct vmci_ctx *context,
drivers/misc/vmw_vmci/vmci_context.c
49
static void ctx_clear_notify(struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_context.c
497
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
498
if (context->pending_datagrams == 0) {
drivers/misc/vmw_vmci/vmci_context.c
499
ctx_clear_notify_call(context);
drivers/misc/vmw_vmci/vmci_context.c
500
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
505
list_item = context->datagram_queue.next;
drivers/misc/vmw_vmci/vmci_context.c
51
*context->notify = false;
drivers/misc/vmw_vmci/vmci_context.c
513
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
520
context->pending_datagrams--;
drivers/misc/vmw_vmci/vmci_context.c
521
context->datagram_queue_size -= dq_entry->dg_size;
drivers/misc/vmw_vmci/vmci_context.c
522
if (context->pending_datagrams == 0) {
drivers/misc/vmw_vmci/vmci_context.c
523
ctx_clear_notify_call(context);
drivers/misc/vmw_vmci/vmci_context.c
531
list_item = context->datagram_queue.next;
drivers/misc/vmw_vmci/vmci_context.c
542
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
556
void vmci_ctx_unset_notify(struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_context.c
560
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
562
notify_page = context->notify_page;
drivers/misc/vmw_vmci/vmci_context.c
563
context->notify = &ctx_dummy_notify;
drivers/misc/vmw_vmci/vmci_context.c
564
context->notify_page = NULL;
drivers/misc/vmw_vmci/vmci_context.c
566
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
58
static void ctx_clear_notify_call(struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_context.c
580
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_context.c
585
context = vmci_ctx_get(context_id);
drivers/misc/vmw_vmci/vmci_context.c
586
if (!context)
drivers/misc/vmw_vmci/vmci_context.c
596
if (context->priv_flags & VMCI_PRIVILEGE_FLAG_RESTRICTED) {
drivers/misc/vmw_vmci/vmci_context.c
60
if (context->pending_datagrams == 0 &&
drivers/misc/vmw_vmci/vmci_context.c
61
vmci_handle_arr_get_size(context->pending_doorbell_array) == 0)
drivers/misc/vmw_vmci/vmci_context.c
610
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
612
if (context->n_notifiers < VMCI_MAX_CONTEXTS) {
drivers/misc/vmw_vmci/vmci_context.c
613
list_for_each_entry(n, &context->notifier_list, node) {
drivers/misc/vmw_vmci/vmci_context.c
62
ctx_clear_notify(context);
drivers/misc/vmw_vmci/vmci_context.c
625
&context->notifier_list);
drivers/misc/vmw_vmci/vmci_context.c
626
context->n_notifiers++;
drivers/misc/vmw_vmci/vmci_context.c
634
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
637
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
647
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_context.c
651
context = vmci_ctx_get(context_id);
drivers/misc/vmw_vmci/vmci_context.c
652
if (!context)
drivers/misc/vmw_vmci/vmci_context.c
657
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
659
&context->notifier_list, node) {
drivers/misc/vmw_vmci/vmci_context.c
662
context->n_notifiers--;
drivers/misc/vmw_vmci/vmci_context.c
667
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
672
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
677
static int vmci_ctx_get_chkpt_notifiers(struct vmci_ctx *context,
drivers/misc/vmw_vmci/vmci_context.c
685
if (context->n_notifiers == 0) {
drivers/misc/vmw_vmci/vmci_context.c
69
void vmci_ctx_check_signal_notify(struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_context.c
691
data_size = context->n_notifiers * sizeof(*notifiers);
drivers/misc/vmw_vmci/vmci_context.c
701
list_for_each_entry(entry, &context->notifier_list, node)
drivers/misc/vmw_vmci/vmci_context.c
702
notifiers[i++] = entry->handle.context;
drivers/misc/vmw_vmci/vmci_context.c
709
static int vmci_ctx_get_chkpt_doorbells(struct vmci_ctx *context,
drivers/misc/vmw_vmci/vmci_context.c
71
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
715
n_doorbells = vmci_handle_arr_get_size(context->doorbell_array);
drivers/misc/vmw_vmci/vmci_context.c
72
if (context->pending_datagrams)
drivers/misc/vmw_vmci/vmci_context.c
729
context->doorbell_array, i);
drivers/misc/vmw_vmci/vmci_context.c
73
ctx_signal_notify(context);
drivers/misc/vmw_vmci/vmci_context.c
74
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
749
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_context.c
752
context = vmci_ctx_get(context_id);
drivers/misc/vmw_vmci/vmci_context.c
753
if (!context)
drivers/misc/vmw_vmci/vmci_context.c
756
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
760
result = vmci_ctx_get_chkpt_notifiers(context, buf_size, pbuf);
drivers/misc/vmw_vmci/vmci_context.c
775
result = vmci_ctx_get_chkpt_doorbells(context, buf_size, pbuf);
drivers/misc/vmw_vmci/vmci_context.c
784
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
785
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
841
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_context.c
844
context = vmci_ctx_get(context_id);
drivers/misc/vmw_vmci/vmci_context.c
845
if (context == NULL)
drivers/misc/vmw_vmci/vmci_context.c
848
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
85
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_context.c
850
*db_handle_array = context->pending_doorbell_array;
drivers/misc/vmw_vmci/vmci_context.c
851
context->pending_doorbell_array =
drivers/misc/vmw_vmci/vmci_context.c
853
if (!context->pending_doorbell_array) {
drivers/misc/vmw_vmci/vmci_context.c
854
context->pending_doorbell_array = *db_handle_array;
drivers/misc/vmw_vmci/vmci_context.c
860
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
861
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
877
struct vmci_ctx *context = vmci_ctx_get(context_id);
drivers/misc/vmw_vmci/vmci_context.c
879
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
891
context->pending_doorbell_array);
drivers/misc/vmw_vmci/vmci_context.c
899
context->pending_doorbell_array);
drivers/misc/vmw_vmci/vmci_context.c
901
vmci_handle_arr_destroy(context->pending_doorbell_array);
drivers/misc/vmw_vmci/vmci_context.c
902
context->pending_doorbell_array = db_handle_array;
drivers/misc/vmw_vmci/vmci_context.c
905
ctx_clear_notify_call(context);
drivers/misc/vmw_vmci/vmci_context.c
907
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
908
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
923
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_context.c
929
context = vmci_ctx_get(context_id);
drivers/misc/vmw_vmci/vmci_context.c
930
if (context == NULL)
drivers/misc/vmw_vmci/vmci_context.c
933
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
934
if (!vmci_handle_arr_has_entry(context->doorbell_array, handle))
drivers/misc/vmw_vmci/vmci_context.c
935
result = vmci_handle_arr_append_entry(&context->doorbell_array,
drivers/misc/vmw_vmci/vmci_context.c
940
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
941
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
952
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_context.c
958
context = vmci_ctx_get(context_id);
drivers/misc/vmw_vmci/vmci_context.c
959
if (context == NULL)
drivers/misc/vmw_vmci/vmci_context.c
962
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
964
vmci_handle_arr_remove_entry(context->doorbell_array, handle);
drivers/misc/vmw_vmci/vmci_context.c
965
vmci_handle_arr_remove_entry(context->pending_doorbell_array, handle);
drivers/misc/vmw_vmci/vmci_context.c
966
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_context.c
968
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_context.c
995
dst_context = vmci_ctx_get(handle.context);
drivers/misc/vmw_vmci/vmci_context.c
997
pr_devel("Invalid context (ID=0x%x)\n", handle.context);
drivers/misc/vmw_vmci/vmci_context.h
129
void vmci_ctx_destroy(struct vmci_ctx *context);
drivers/misc/vmw_vmci/vmci_context.h
131
bool vmci_ctx_supports_host_qp(struct vmci_ctx *context);
drivers/misc/vmw_vmci/vmci_context.h
133
int vmci_ctx_dequeue_datagram(struct vmci_ctx *context,
drivers/misc/vmw_vmci/vmci_context.h
136
void vmci_ctx_put(struct vmci_ctx *context);
drivers/misc/vmw_vmci/vmci_context.h
146
int vmci_ctx_qp_create(struct vmci_ctx *context, struct vmci_handle handle);
drivers/misc/vmw_vmci/vmci_context.h
147
int vmci_ctx_qp_destroy(struct vmci_ctx *context, struct vmci_handle handle);
drivers/misc/vmw_vmci/vmci_context.h
148
bool vmci_ctx_qp_exists(struct vmci_ctx *context, struct vmci_handle handle);
drivers/misc/vmw_vmci/vmci_context.h
150
void vmci_ctx_check_signal_notify(struct vmci_ctx *context);
drivers/misc/vmw_vmci/vmci_context.h
151
void vmci_ctx_unset_notify(struct vmci_ctx *context);
drivers/misc/vmw_vmci/vmci_context.h
165
static inline u32 vmci_ctx_get_id(struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_context.h
167
if (!context)
drivers/misc/vmw_vmci/vmci_context.h
169
return context->cid;
drivers/misc/vmw_vmci/vmci_datagram.c
166
if (dg->dst.context == VMCI_HYPERVISOR_CONTEXT_ID)
drivers/misc/vmw_vmci/vmci_datagram.c
170
if (dg->src.context != context_id) {
drivers/misc/vmw_vmci/vmci_datagram.c
172
context_id, dg->src.context, dg->src.resource);
drivers/misc/vmw_vmci/vmci_datagram.c
181
dg->src.context, dg->src.resource);
drivers/misc/vmw_vmci/vmci_datagram.c
186
if (dg->dst.context == VMCI_HOST_CONTEXT_ID) {
drivers/misc/vmw_vmci/vmci_datagram.c
191
if (dg->src.context == VMCI_HYPERVISOR_CONTEXT_ID &&
drivers/misc/vmw_vmci/vmci_datagram.c
200
dg->dst.context, dg->dst.resource);
drivers/misc/vmw_vmci/vmci_datagram.c
217
dg->src.context == VMCI_HOST_CONTEXT_ID) {
drivers/misc/vmw_vmci/vmci_datagram.c
254
if (context_id != dg->dst.context) {
drivers/misc/vmw_vmci/vmci_datagram.c
257
(dg->dst.context))) {
drivers/misc/vmw_vmci/vmci_datagram.c
266
context_id, dg->dst.context);
drivers/misc/vmw_vmci/vmci_datagram.c
276
retval = vmci_ctx_enqueue_datagram(dg->dst.context, new_dg);
drivers/misc/vmw_vmci/vmci_datagram.c
333
dg->src.context, dg->dst.context, retval);
drivers/misc/vmw_vmci/vmci_datagram.c
364
dg->dst.context, dg->dst.resource);
drivers/misc/vmw_vmci/vmci_datagram.c
469
handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_datagram.c
92
handle.context, handle.resource, result);
drivers/misc/vmw_vmci/vmci_doorbell.c
102
} else if (handle.context == VMCI_HYPERVISOR_CONTEXT_ID) {
drivers/misc/vmw_vmci/vmci_doorbell.c
109
*priv_flags = vmci_context_get_priv_flags(handle.context);
drivers/misc/vmw_vmci/vmci_doorbell.c
282
handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_doorbell.c
290
handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_doorbell.c
432
if (handle->context == VMCI_HOST_CONTEXT_ID ||
drivers/misc/vmw_vmci/vmci_doorbell.c
434
vmci_get_context_id() == handle->context)) {
drivers/misc/vmw_vmci/vmci_doorbell.c
440
handle->context, handle->resource);
drivers/misc/vmw_vmci/vmci_doorbell.c
462
new_handle.context, new_handle.resource, result);
drivers/misc/vmw_vmci/vmci_doorbell.c
508
handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_doorbell.c
536
handle.context, handle.resource, result);
drivers/misc/vmw_vmci/vmci_doorbell.c
87
if (priv_flags == NULL || handle.context == VMCI_INVALID_ID)
drivers/misc/vmw_vmci/vmci_doorbell.c
90
if (handle.context == VMCI_HOST_CONTEXT_ID) {
drivers/misc/vmw_vmci/vmci_guest.c
405
if (dg->src.context == VMCI_HYPERVISOR_CONTEXT_ID &&
drivers/misc/vmw_vmci/vmci_host.c
143
vmci_ctx_destroy(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
144
vmci_host_dev->context = NULL;
drivers/misc/vmw_vmci/vmci_host.c
168
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_host.c
176
context = vmci_host_dev->context;
drivers/misc/vmw_vmci/vmci_host.c
180
poll_wait(filp, &context->host_context.wait_queue,
drivers/misc/vmw_vmci/vmci_host.c
183
spin_lock(&context->lock);
drivers/misc/vmw_vmci/vmci_host.c
184
if (context->pending_datagrams > 0 ||
drivers/misc/vmw_vmci/vmci_host.c
186
context->pending_doorbell_array) > 0) {
drivers/misc/vmw_vmci/vmci_host.c
189
spin_unlock(&context->lock);
drivers/misc/vmw_vmci/vmci_host.c
227
static int vmci_host_setup_notify(struct vmci_ctx *context,
drivers/misc/vmw_vmci/vmci_host.c
233
if (context->notify_page) {
drivers/misc/vmw_vmci/vmci_host.c
251
context->notify_page = page;
drivers/misc/vmw_vmci/vmci_host.c
256
context->notify = kmap(context->notify_page) + (uva & (PAGE_SIZE - 1));
drivers/misc/vmw_vmci/vmci_host.c
257
vmci_ctx_check_signal_notify(context);
drivers/misc/vmw_vmci/vmci_host.c
324
vmci_host_dev->context = vmci_ctx_create(init_block.cid,
drivers/misc/vmw_vmci/vmci_host.c
329
if (IS_ERR(vmci_host_dev->context)) {
drivers/misc/vmw_vmci/vmci_host.c
330
retval = PTR_ERR(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
339
init_block.cid = vmci_ctx_get_id(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
341
vmci_ctx_destroy(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
342
vmci_host_dev->context = NULL;
drivers/misc/vmw_vmci/vmci_host.c
403
dg->dst.context, dg->dst.resource,
drivers/misc/vmw_vmci/vmci_host.c
404
dg->src.context, dg->src.resource,
drivers/misc/vmw_vmci/vmci_host.c
408
cid = vmci_ctx_get_id(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
433
recv_info.result = vmci_ctx_dequeue_datagram(vmci_host_dev->context,
drivers/misc/vmw_vmci/vmci_host.c
477
vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
502
vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
508
vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
543
vmci_host_dev->context,
drivers/misc/vmw_vmci/vmci_host.c
551
vmci_host_dev->context, 0);
drivers/misc/vmw_vmci/vmci_host.c
608
vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
652
vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
678
cid = vmci_ctx_get_id(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
701
cid = vmci_ctx_get_id(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
725
cid = vmci_ctx_get_id(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
762
cid = vmci_ctx_get_id(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
797
vmci_host_setup_notify(vmci_host_dev->context,
drivers/misc/vmw_vmci/vmci_host.c
800
vmci_ctx_unset_notify(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
828
cid = vmci_ctx_get_id(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_host.c
85
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_host.c
887
cid = vmci_ctx_get_id(vmci_host_dev->context);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1202
if (queue_pair_entry->qp.handle.context != context_id ||
drivers/misc/vmw_vmci/vmci_queue_pair.c
1289
struct vmci_ctx *context,
drivers/misc/vmw_vmci/vmci_queue_pair.c
1294
const u32 context_id = vmci_ctx_get_id(context);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1308
if (handle.context != context_id && handle.context != peer)
drivers/misc/vmw_vmci/vmci_queue_pair.c
1325
if (vmci_ctx_get_id(context) == VMCI_HOST_CONTEXT_ID && !is_local) {
drivers/misc/vmw_vmci/vmci_queue_pair.c
1351
!!(context->priv_flags & VMCI_PRIVILEGE_FLAG_RESTRICTED);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1419
handle.context, handle.resource, result);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1431
vmci_ctx_qp_create(context, entry->qp.handle);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1518
struct vmci_ctx *context,
drivers/misc/vmw_vmci/vmci_queue_pair.c
1523
const u32 context_id = vmci_ctx_get_id(context);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1549
if ((context->priv_flags & VMCI_PRIVILEGE_FLAG_RESTRICTED) &&
drivers/misc/vmw_vmci/vmci_queue_pair.c
1574
if (!vmci_ctx_supports_host_qp(context))
drivers/misc/vmw_vmci/vmci_queue_pair.c
1668
entry->create_id, entry->qp.handle.context,
drivers/misc/vmw_vmci/vmci_queue_pair.c
1684
vmci_ctx_qp_create(context, entry->qp.handle);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1703
struct vmci_ctx *context,
drivers/misc/vmw_vmci/vmci_queue_pair.c
1709
const u32 context_id = vmci_ctx_get_id(context);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1718
!context || context_id == VMCI_INVALID_ID ||
drivers/misc/vmw_vmci/vmci_queue_pair.c
1719
handle.context == VMCI_INVALID_ID) {
drivers/misc/vmw_vmci/vmci_queue_pair.c
1733
if (!is_local && vmci_ctx_qp_exists(context, handle)) {
drivers/misc/vmw_vmci/vmci_queue_pair.c
1735
context_id, handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1748
context, wakeup_cb, client_data, ent);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1754
context, wakeup_cb, client_data, ent);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1782
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_queue_pair.c
1793
context = vmci_ctx_get(VMCI_HOST_CONTEXT_ID);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1797
produce_size, consume_size, NULL, context,
drivers/misc/vmw_vmci/vmci_queue_pair.c
1820
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1865
struct vmci_ctx *context;
drivers/misc/vmw_vmci/vmci_queue_pair.c
1867
context = vmci_ctx_get(VMCI_HOST_CONTEXT_ID);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1869
result = vmci_qp_broker_detach(handle, context);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1871
vmci_ctx_put(context);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1937
struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_queue_pair.c
1944
page_store, context, NULL, NULL, NULL, NULL);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1966
struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_queue_pair.c
1970
const u32 context_id = vmci_ctx_get_id(context);
drivers/misc/vmw_vmci/vmci_queue_pair.c
1972
if (vmci_handle_is_invalid(handle) || !context ||
drivers/misc/vmw_vmci/vmci_queue_pair.c
1986
if (!vmci_ctx_qp_exists(context, handle)) {
drivers/misc/vmw_vmci/vmci_queue_pair.c
1988
context_id, handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2042
entry->create_id, entry->qp.handle.context,
drivers/misc/vmw_vmci/vmci_queue_pair.c
2082
int vmci_qp_broker_detach(struct vmci_handle handle, struct vmci_ctx *context)
drivers/misc/vmw_vmci/vmci_queue_pair.c
2085
const u32 context_id = vmci_ctx_get_id(context);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2090
if (vmci_handle_is_invalid(handle) || !context ||
drivers/misc/vmw_vmci/vmci_queue_pair.c
2097
if (!vmci_ctx_qp_exists(context, handle)) {
drivers/misc/vmw_vmci/vmci_queue_pair.c
2099
context_id, handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2107
context_id, handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2149
handle.context, handle.resource,
drivers/misc/vmw_vmci/vmci_queue_pair.c
2186
vmci_ctx_qp_destroy(context, handle);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2197
vmci_ctx_qp_destroy(context, handle);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2213
struct vmci_ctx *context,
drivers/misc/vmw_vmci/vmci_queue_pair.c
2217
const u32 context_id = vmci_ctx_get_id(context);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2220
if (vmci_handle_is_invalid(handle) || !context ||
drivers/misc/vmw_vmci/vmci_queue_pair.c
2226
if (!vmci_ctx_qp_exists(context, handle)) {
drivers/misc/vmw_vmci/vmci_queue_pair.c
2228
context_id, handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2236
context_id, handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2323
struct vmci_ctx *context,
drivers/misc/vmw_vmci/vmci_queue_pair.c
2327
const u32 context_id = vmci_ctx_get_id(context);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2330
if (vmci_handle_is_invalid(handle) || !context ||
drivers/misc/vmw_vmci/vmci_queue_pair.c
2336
if (!vmci_ctx_qp_exists(context, handle)) {
drivers/misc/vmw_vmci/vmci_queue_pair.c
2338
context_id, handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2346
context_id, handle.context, handle.resource);
drivers/misc/vmw_vmci/vmci_queue_pair.c
2362
handle.context, handle.resource, result);
drivers/misc/vmw_vmci/vmci_queue_pair.c
918
handle.context, handle.resource, result);
drivers/misc/vmw_vmci/vmci_queue_pair.h
146
struct vmci_ctx *context);
drivers/misc/vmw_vmci/vmci_queue_pair.h
149
struct vmci_ctx *context);
drivers/misc/vmw_vmci/vmci_queue_pair.h
150
int vmci_qp_broker_detach(struct vmci_handle handle, struct vmci_ctx *context);
drivers/misc/vmw_vmci/vmci_queue_pair.h
161
struct vmci_ctx *context, u64 guest_mem);
drivers/misc/vmw_vmci/vmci_queue_pair.h
163
struct vmci_ctx *context, u32 gid);
drivers/misc/vmw_vmci/vmci_resource.c
110
handle.resource = vmci_resource_find_id(handle.context,
drivers/misc/vmw_vmci/vmci_resource.c
47
u32 cid = r->handle.context;
drivers/misc/vmw_vmci/vmci_resource.c
52
(cid == handle.context || cid == VMCI_INVALID_ID ||
drivers/misc/vmw_vmci/vmci_resource.c
53
handle.context == VMCI_INVALID_ID)) {
drivers/misc/vmw_vmci/vmci_route.c
113
if (VMCI_INVALID_ID == src->context)
drivers/misc/vmw_vmci/vmci_route.c
114
src->context = vmci_get_context_id();
drivers/misc/vmw_vmci/vmci_route.c
130
if (VMCI_INVALID_ID == src->context) {
drivers/misc/vmw_vmci/vmci_route.c
139
src->context = VMCI_HOST_CONTEXT_ID;
drivers/misc/vmw_vmci/vmci_route.c
153
if (vmci_ctx_exists(dst->context)) {
drivers/misc/vmw_vmci/vmci_route.c
154
if (VMCI_INVALID_ID == src->context) {
drivers/misc/vmw_vmci/vmci_route.c
165
src->context = VMCI_HOST_CONTEXT_ID;
drivers/misc/vmw_vmci/vmci_route.c
166
} else if (VMCI_CONTEXT_IS_VM(src->context) &&
drivers/misc/vmw_vmci/vmci_route.c
167
src->context != dst->context) {
drivers/misc/vmw_vmci/vmci_route.c
209
if (VMCI_INVALID_ID == src->context)
drivers/misc/vmw_vmci/vmci_route.c
210
src->context = vmci_get_context_id();
drivers/misc/vmw_vmci/vmci_route.c
42
if (VMCI_INVALID_ID == dst->context)
drivers/misc/vmw_vmci/vmci_route.c
46
if (VMCI_HYPERVISOR_CONTEXT_ID == dst->context) {
drivers/misc/vmw_vmci/vmci_route.c
64
if (VMCI_HOST_CONTEXT_ID == src->context)
drivers/misc/vmw_vmci/vmci_route.c
75
if (VMCI_INVALID_ID == src->context &&
drivers/misc/vmw_vmci/vmci_route.c
77
src->context = vmci_get_context_id();
drivers/misc/vmw_vmci/vmci_route.c
85
if (VMCI_HOST_CONTEXT_ID == dst->context) {
drivers/misc/vmw_vmci/vmci_route.c
94
if (src->context == VMCI_HYPERVISOR_CONTEXT_ID) {
drivers/mmc/host/ushc.c
165
struct ushc_data *ushc = urb->context;
drivers/mmc/host/ushc.c
199
struct ushc_data *ushc = urb->context;
drivers/mmc/host/ushc.c
209
struct ushc_data *ushc = urb->context;
drivers/mmc/host/ushc.c
217
struct ushc_data *ushc = urb->context;
drivers/mmc/host/vub300.c
1390
complete((struct completion *)urb->context);
drivers/mmc/host/vub300.c
1407
vub300->urb->context = &done;
drivers/mmc/host/vub300.c
445
struct vub300_mmc_host *vub300 = (struct vub300_mmc_host *)urb->context;
drivers/mmc/host/vub300.c
453
struct vub300_mmc_host *vub300 = (struct vub300_mmc_host *)urb->context;
drivers/mmc/host/vub300.c
791
struct vub300_mmc_host *vub300 = (struct vub300_mmc_host *)urb->context;
drivers/mmc/host/vub300.c
820
struct vub300_mmc_host *vub300 = (struct vub300_mmc_host *)urb->context;
drivers/most/core.c
102
struct most_channel *c = mbo->context;
drivers/most/core.c
1029
struct most_channel *c = mbo->context;
drivers/most/core.c
1052
struct most_channel *c = mbo->context;
drivers/most/core.c
739
struct most_channel *c = mbo->context;
drivers/most/core.c
763
struct most_channel *c = mbo->context;
drivers/most/core.c
840
c = mbo->context;
drivers/most/core.c
887
mbo->context = c;
drivers/most/core.c
927
if (WARN_ONCE(!mbo || !mbo->context,
drivers/most/core.c
946
c = mbo->context;
drivers/most/most_usb.c
342
struct mbo *mbo = urb->context;
drivers/most/most_usb.c
394
struct mbo *mbo = urb->context;
drivers/net/can/m_can/tcan4x5x-regmap.c
17
static int tcan4x5x_regmap_gather_write(void *context,
drivers/net/can/m_can/tcan4x5x-regmap.c
21
struct spi_device *spi = context;
drivers/net/can/m_can/tcan4x5x-regmap.c
39
static int tcan4x5x_regmap_write(void *context, const void *data, size_t count)
drivers/net/can/m_can/tcan4x5x-regmap.c
41
return tcan4x5x_regmap_gather_write(context, data, sizeof(__be32),
drivers/net/can/m_can/tcan4x5x-regmap.c
46
static int tcan4x5x_regmap_read(void *context,
drivers/net/can/m_can/tcan4x5x-regmap.c
50
struct spi_device *spi = context;
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
124
mcp251xfd_regmap_nocrc_update_bits(void *context, unsigned int reg,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
127
struct spi_device *spi = context;
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
16
_mcp251xfd_regmap_nocrc_gather_write(void *context,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
185
return mcp251xfd_regmap_nocrc_gather_write(context, &buf_tx->cmd, 2, &tmp_le32, len);
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
189
mcp251xfd_regmap_nocrc_read(void *context,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
193
struct spi_device *spi = context;
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
20
struct spi_device *spi = context;
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
239
_mcp251xfd_regmap_crc_gather_write(void *context,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
243
struct spi_device *spi = context;
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
273
mcp251xfd_regmap_crc_gather_write(void *context,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
295
ret = _mcp251xfd_regmap_crc_gather_write(context, ®, reg_len, val, len);
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
301
return _mcp251xfd_regmap_crc_gather_write(context, ®, reg_len,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
306
return _mcp251xfd_regmap_crc_gather_write(context, reg_p, reg_len,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
311
mcp251xfd_regmap_crc_write(void *context,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
317
return mcp251xfd_regmap_crc_gather_write(context,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
360
mcp251xfd_regmap_crc_read(void *context,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
364
struct spi_device *spi = context;
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
43
mcp251xfd_regmap_nocrc_gather_write(void *context,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
65
ret = _mcp251xfd_regmap_nocrc_gather_write(context, reg_p, reg_len, val, len);
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
72
return _mcp251xfd_regmap_nocrc_gather_write(context, ®, reg_len,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
77
return _mcp251xfd_regmap_nocrc_gather_write(context, reg_p, reg_len,
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
82
mcp251xfd_regmap_nocrc_write(void *context, const void *data, size_t count)
drivers/net/can/spi/mcp251xfd/mcp251xfd-regmap.c
86
return mcp251xfd_regmap_nocrc_gather_write(context, data, data_offset,
drivers/net/can/usb/ems_usb.c
264
struct ems_usb *dev = urb->context;
drivers/net/can/usb/ems_usb.c
417
struct ems_usb *dev = urb->context;
drivers/net/can/usb/ems_usb.c
514
struct ems_tx_urb_context *context = urb->context;
drivers/net/can/usb/ems_usb.c
518
BUG_ON(!context);
drivers/net/can/usb/ems_usb.c
520
dev = context->dev;
drivers/net/can/usb/ems_usb.c
539
netdev->stats.tx_bytes += can_get_echo_skb(netdev, context->echo_index,
drivers/net/can/usb/ems_usb.c
543
context->echo_index = MAX_TX_URBS;
drivers/net/can/usb/ems_usb.c
759
struct ems_tx_urb_context *context = NULL;
drivers/net/can/usb/ems_usb.c
806
context = &dev->tx_contexts[i];
drivers/net/can/usb/ems_usb.c
815
if (!context) {
drivers/net/can/usb/ems_usb.c
824
context->dev = dev;
drivers/net/can/usb/ems_usb.c
825
context->echo_index = i;
drivers/net/can/usb/ems_usb.c
828
size, ems_usb_write_bulk_callback, context);
drivers/net/can/usb/ems_usb.c
832
can_put_echo_skb(skb, netdev, context->echo_index, 0);
drivers/net/can/usb/ems_usb.c
838
can_free_echo_skb(netdev, context->echo_index, NULL);
drivers/net/can/usb/esd_usb.c
462
struct esd_tx_urb_context *context;
drivers/net/can/usb/esd_usb.c
467
context = &priv->tx_contexts[msg->txdone.hnd & (ESD_USB_MAX_TX_URBS - 1)];
drivers/net/can/usb/esd_usb.c
471
stats->tx_bytes += can_get_echo_skb(netdev, context->echo_index,
drivers/net/can/usb/esd_usb.c
475
can_free_echo_skb(netdev, context->echo_index, NULL);
drivers/net/can/usb/esd_usb.c
479
context->echo_index = ESD_USB_MAX_TX_URBS;
drivers/net/can/usb/esd_usb.c
487
struct esd_usb *dev = urb->context;
drivers/net/can/usb/esd_usb.c
569
struct esd_tx_urb_context *context = urb->context;
drivers/net/can/usb/esd_usb.c
574
WARN_ON(!context);
drivers/net/can/usb/esd_usb.c
576
priv = context->priv;
drivers/net/can/usb/esd_usb.c
822
struct esd_tx_urb_context *context = NULL;
drivers/net/can/usb/esd_usb.c
884
context = &priv->tx_contexts[i];
drivers/net/can/usb/esd_usb.c
890
if (!context) {
drivers/net/can/usb/esd_usb.c
896
context->priv = priv;
drivers/net/can/usb/esd_usb.c
897
context->echo_index = i;
drivers/net/can/usb/esd_usb.c
904
esd_usb_write_bulk_callback, context);
drivers/net/can/usb/esd_usb.c
910
can_put_echo_skb(skb, netdev, context->echo_index, 0);
drivers/net/can/usb/esd_usb.c
920
can_free_echo_skb(netdev, context->echo_index, NULL);
drivers/net/can/usb/etas_es58x/es58x_core.c
1412
struct es58x_device *es58x_dev = urb->context;
drivers/net/can/usb/etas_es58x/es58x_core.c
1498
struct net_device *netdev = urb->context;
drivers/net/can/usb/etas_es58x/es58x_core.c
1637
urb->context = netdev;
drivers/net/can/usb/f81604.c
415
struct net_device *netdev = urb->context;
drivers/net/can/usb/f81604.c
616
struct net_device *netdev = urb->context;
drivers/net/can/usb/f81604.c
889
struct net_device *netdev = urb->context;
drivers/net/can/usb/gs_usb.c
611
struct gs_usb *parent = urb->context;
drivers/net/can/usb/gs_usb.c
817
struct gs_tx_context *txc = urb->context;
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
274
struct net_device *netdev = urb->context;
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
336
struct kvaser_usb *dev = urb->context;
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
643
struct kvaser_usb_tx_urb_context *context = urb->context;
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
647
if (WARN_ON(!context))
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
650
priv = context->priv;
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
669
struct kvaser_usb_tx_urb_context *context = NULL;
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
690
context = &priv->tx_contexts[i];
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
692
context->echo_index = i;
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
703
if (!context) {
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
710
buf = ops->dev_frame_to_cmd(priv, skb, &cmd_len, context->echo_index);
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
716
context->echo_index = dev->max_tx_urbs;
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
724
context->priv = priv;
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
726
can_put_echo_skb(skb, netdev, context->echo_index, 0);
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
732
context);
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
739
can_free_echo_skb(netdev, context->echo_index, NULL);
drivers/net/can/usb/kvaser_usb/kvaser_usb_core.c
740
context->echo_index = dev->max_tx_urbs;
drivers/net/can/usb/kvaser_usb/kvaser_usb_hydra.c
1182
struct kvaser_usb_tx_urb_context *context;
drivers/net/can/usb/kvaser_usb/kvaser_usb_hydra.c
1212
context = &priv->tx_contexts[transid % dev->max_tx_urbs];
drivers/net/can/usb/kvaser_usb/kvaser_usb_hydra.c
1216
skb = priv->can.echo_skb[context->echo_index];
drivers/net/can/usb/kvaser_usb/kvaser_usb_hydra.c
1219
len = can_get_echo_skb(priv->netdev, context->echo_index, NULL);
drivers/net/can/usb/kvaser_usb/kvaser_usb_hydra.c
1220
context->echo_index = dev->max_tx_urbs;
drivers/net/can/usb/kvaser_usb/kvaser_usb_leaf.c
1007
struct kvaser_usb_tx_urb_context *context;
drivers/net/can/usb/kvaser_usb/kvaser_usb_leaf.c
1030
context = &priv->tx_contexts[tid % dev->max_tx_urbs];
drivers/net/can/usb/kvaser_usb/kvaser_usb_leaf.c
1063
skb = priv->can.echo_skb[context->echo_index];
drivers/net/can/usb/kvaser_usb/kvaser_usb_leaf.c
1068
context->echo_index, NULL);
drivers/net/can/usb/kvaser_usb/kvaser_usb_leaf.c
1069
context->echo_index = dev->max_tx_urbs;
drivers/net/can/usb/mcba_usb.c
221
struct mcba_usb_ctx *ctx = urb->context;
drivers/net/can/usb/mcba_usb.c
564
struct mcba_priv *priv = urb->context;
drivers/net/can/usb/peak_usb/pcan_usb.c
334
struct pcan_usb *pdev = urb->context;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
235
struct peak_usb_device *dev = urb->context;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
300
struct peak_tx_urb_context *context = urb->context;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
305
BUG_ON(!context);
drivers/net/can/usb/peak_usb/pcan_usb_core.c
307
dev = context->dev;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
336
tx_bytes = can_get_echo_skb(netdev, context->echo_index, NULL);
drivers/net/can/usb/peak_usb/pcan_usb_core.c
337
context->echo_index = PCAN_USB_MAX_TX_URBS;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
356
struct peak_tx_urb_context *context = NULL;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
368
context = dev->tx_contexts + i;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
372
if (!context) {
drivers/net/can/usb/peak_usb/pcan_usb_core.c
377
urb = context->urb;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
389
context->echo_index = i;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
393
can_put_echo_skb(skb, netdev, context->echo_index, 0);
drivers/net/can/usb/peak_usb/pcan_usb_core.c
399
can_free_echo_skb(netdev, context->echo_index, NULL);
drivers/net/can/usb/peak_usb/pcan_usb_core.c
404
context->echo_index = PCAN_USB_MAX_TX_URBS;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
494
struct peak_tx_urb_context *context;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
512
context = dev->tx_contexts + i;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
513
context->dev = dev;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
514
context->urb = urb;
drivers/net/can/usb/peak_usb/pcan_usb_core.c
519
peak_usb_write_bulk_callback, context);
drivers/net/can/usb/peak_usb/pcan_usb_pro.c
483
peak_usb_restart_complete(urb->context);
drivers/net/can/usb/ucan.c
1051
struct ucan_urb_context *context,
drivers/net/can/usb/ucan.c
1098
m, mlen, ucan_write_bulk_callback, context);
drivers/net/can/usb/ucan.c
1119
struct ucan_urb_context *context;
drivers/net/can/usb/ucan.c
1128
context = ucan_alloc_context(up);
drivers/net/can/usb/ucan.c
1129
echo_index = context - up->context_array;
drivers/net/can/usb/ucan.c
1131
if (WARN_ON_ONCE(!context))
drivers/net/can/usb/ucan.c
1135
urb = ucan_prepare_tx_urb(up, context, cf, echo_index);
drivers/net/can/usb/ucan.c
1153
if (!ucan_release_context(up, context))
drivers/net/can/usb/ucan.c
1183
if (!ucan_release_context(up, context))
drivers/net/can/usb/ucan.c
644
struct ucan_urb_context *context;
drivers/net/can/usb/ucan.c
664
context = &up->context_array[echo_index];
drivers/net/can/usb/ucan.c
669
if (!ucan_release_context(up, context))
drivers/net/can/usb/ucan.c
692
struct ucan_priv *up = urb->context;
drivers/net/can/usb/ucan.c
821
struct ucan_urb_context *context = urb->context;
drivers/net/can/usb/ucan.c
824
if (WARN_ON_ONCE(!context))
drivers/net/can/usb/ucan.c
833
up = context->up;
drivers/net/can/usb/ucan.c
849
can_free_echo_skb(up->netdev, context - up->context_array, NULL);
drivers/net/can/usb/ucan.c
855
if (!ucan_release_context(up, context))
drivers/net/can/usb/usb_8dev.c
498
struct usb_8dev_priv *priv = urb->context;
drivers/net/can/usb/usb_8dev.c
566
struct usb_8dev_tx_urb_context *context = urb->context;
drivers/net/can/usb/usb_8dev.c
570
BUG_ON(!context);
drivers/net/can/usb/usb_8dev.c
572
priv = context->priv;
drivers/net/can/usb/usb_8dev.c
589
netdev->stats.tx_bytes += can_get_echo_skb(netdev, context->echo_index, NULL);
drivers/net/can/usb/usb_8dev.c
592
context->echo_index = MAX_TX_URBS;
drivers/net/can/usb/usb_8dev.c
606
struct usb_8dev_tx_urb_context *context = NULL;
drivers/net/can/usb/usb_8dev.c
645
context = &priv->tx_contexts[i];
drivers/net/can/usb/usb_8dev.c
653
if (!context)
drivers/net/can/usb/usb_8dev.c
656
context->priv = priv;
drivers/net/can/usb/usb_8dev.c
657
context->echo_index = i;
drivers/net/can/usb/usb_8dev.c
661
buf, size, usb_8dev_write_bulk_callback, context);
drivers/net/can/usb/usb_8dev.c
665
can_put_echo_skb(skb, netdev, context->echo_index, 0);
drivers/net/can/usb/usb_8dev.c
671
can_free_echo_skb(netdev, context->echo_index, NULL);
drivers/net/dsa/lantiq/mxl-gsw1xx.c
67
static int gsw1xx_regmap_read(void *context, unsigned int reg,
drivers/net/dsa/lantiq/mxl-gsw1xx.c
70
struct gsw1xx_priv *priv = context;
drivers/net/dsa/lantiq/mxl-gsw1xx.c
92
static int gsw1xx_regmap_write(void *context, unsigned int reg,
drivers/net/dsa/lantiq/mxl-gsw1xx.c
95
struct gsw1xx_priv *priv = context;
drivers/net/dsa/microchip/ksz_spi.c
61
static int ksz8463_spi_read(void *context,
drivers/net/dsa/microchip/ksz_spi.c
65
struct device *dev = context;
drivers/net/dsa/microchip/ksz_spi.c
97
static int ksz8463_spi_write(void *context, const void *data, size_t count)
drivers/net/dsa/microchip/ksz_spi.c
99
struct device *dev = context;
drivers/net/dsa/mt7530-mdio.c
19
mt7530_regmap_write(void *context, unsigned int reg, unsigned int val)
drivers/net/dsa/mt7530-mdio.c
21
struct mt7530_priv *priv = context;
drivers/net/dsa/mt7530-mdio.c
44
mt7530_regmap_read(void *context, unsigned int reg, unsigned int *val)
drivers/net/dsa/mt7530-mdio.c
46
struct mt7530_priv *priv = context;
drivers/net/dsa/qca/ar9331.c
943
static int ar9331_sw_bus_write(void *context, const void *data, size_t count)
drivers/net/dsa/qca/ar9331.c
948
return ar9331_mdio_write(context, reg, val);
drivers/net/dsa/xrs700x/xrs700x_i2c.c
18
static int xrs700x_i2c_reg_read(void *context, unsigned int reg,
drivers/net/dsa/xrs700x/xrs700x_i2c.c
21
struct device *dev = context;
drivers/net/dsa/xrs700x/xrs700x_i2c.c
44
static int xrs700x_i2c_reg_write(void *context, unsigned int reg,
drivers/net/dsa/xrs700x/xrs700x_i2c.c
47
struct device *dev = context;
drivers/net/dsa/xrs700x/xrs700x_mdio.c
24
static int xrs700x_mdio_reg_read(void *context, unsigned int reg,
drivers/net/dsa/xrs700x/xrs700x_mdio.c
27
struct mdio_device *mdiodev = context;
drivers/net/dsa/xrs700x/xrs700x_mdio.c
59
static int xrs700x_mdio_reg_write(void *context, unsigned int reg,
drivers/net/dsa/xrs700x/xrs700x_mdio.c
62
struct mdio_device *mdiodev = context;
drivers/net/dsa/yt921x.c
317
static int yt921x_reg_mdio_read(void *context, u32 reg, u32 *valp)
drivers/net/dsa/yt921x.c
319
struct yt921x_reg_mdio *mdio = context;
drivers/net/dsa/yt921x.c
358
static int yt921x_reg_mdio_write(void *context, u32 reg, u32 val)
drivers/net/dsa/yt921x.c
360
struct yt921x_reg_mdio *mdio = context;
drivers/net/dsa/yt921x.h
597
int (*read)(void *context, u32 reg, u32 *valp);
drivers/net/dsa/yt921x.h
598
int (*write)(void *context, u32 reg, u32 val);
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2228
unsigned int last, error, context_next, context;
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2278
context = XGMAC_GET_BITS(packet->attributes,
drivers/net/ethernet/amd/xgbe/xgbe-drv.c
2291
if (!context) {
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c
1135
void hw_atl_rpo_outer_vlan_tag_mode_set(void *context,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c
1138
aq_hw_write_reg_bit(context, HW_ATL_RPO_OUTER_VL_INS_MODE_ADR,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c
1144
u32 hw_atl_rpo_outer_vlan_tag_mode_get(void *context)
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.c
1146
return aq_hw_read_reg_bit(context, HW_ATL_RPO_OUTER_VL_INS_MODE_ADR,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.h
548
void hw_atl_rpo_outer_vlan_tag_mode_set(void *context,
drivers/net/ethernet/aquantia/atlantic/hw_atl/hw_atl_llh.h
551
u32 hw_atl_rpo_outer_vlan_tag_mode_get(void *context);
drivers/net/ethernet/broadcom/bnx2x/bnx2x.h
1649
struct hw_context context[ILT_MAX_L2_LINES];
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
14455
&bp->context[cxt_index].
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
7998
ilt->lines[cdu_ilt_start + i].page = bp->context[i].vcxt;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
8000
bp->context[i].cxt_mapping;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
8001
ilt->lines[cdu_ilt_start + i].size = bp->context[i].size;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
8301
BNX2X_PCI_FREE(bp->context[i].vcxt, bp->context[i].cxt_mapping,
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
8302
bp->context[i].size);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
8391
bp->context[i].size = min(CDU_ILT_PAGE_SZ,
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
8393
bp->context[i].vcxt = BNX2X_PCI_ALLOC(&bp->context[i].cxt_mapping,
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
8394
bp->context[i].size);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
8395
if (!bp->context[i].vcxt)
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
8397
allocated += bp->context[i].size;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c
8787
&bp->context[cxt_index].vcxt[cxt_offset].eth;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c
1334
struct hw_dma *cxt = &bp->vfdb->context[i];
drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.h
329
struct hw_dma context[BNX2X_VF_CIDS/ILT_PAGE_CIDS];
drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.h
330
#define BP_VF_CXT_PAGE(bp, i) (&(bp)->vfdb->context[i])
drivers/net/ethernet/broadcom/bnxt/bnxt.c
10967
unsigned long context;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
10970
xa_for_each(&bp->dev->ethtool->rss_ctx, context, ctx) {
drivers/net/ethernet/broadcom/bnxt/bnxt.c
10986
unsigned long context;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
10988
xa_for_each(&bp->dev->ethtool->rss_ctx, context, ctx) {
drivers/net/ethernet/broadcom/bnxt/bnxt.c
11006
unsigned long context;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
11008
xa_for_each(&bp->dev->ethtool->rss_ctx, context, ctx) {
drivers/net/ethernet/broadcom/cnic.c
3587
u32 l5_cid, struct cnic_sock **csk, void *context)
drivers/net/ethernet/broadcom/cnic.c
3613
csk1->context = context;
drivers/net/ethernet/broadcom/cnic_if.h
263
void *context;
drivers/net/ethernet/cavium/thunder/thunder_bgx.c
1406
u32 lvl, void *context, void **rv)
drivers/net/ethernet/cavium/thunder/thunder_bgx.c
1408
struct bgx *bgx = context;
drivers/net/ethernet/cavium/thunder/thunder_bgx.c
1427
void *context, void **ret_val)
drivers/net/ethernet/cavium/thunder/thunder_bgx.c
1430
struct bgx *bgx = context;
drivers/net/ethernet/davicom/dm9051.c
381
static int dm9051_phyread(void *context, unsigned int reg, unsigned int *val)
drivers/net/ethernet/davicom/dm9051.c
383
struct board_info *db = context;
drivers/net/ethernet/davicom/dm9051.c
409
static int dm9051_phywrite(void *context, unsigned int reg, unsigned int val)
drivers/net/ethernet/davicom/dm9051.c
411
struct board_info *db = context;
drivers/net/ethernet/emulex/benet/be_cmds.c
1019
AMAP_SET_BITS(struct amap_eq_context, valid, req->context, 1);
drivers/net/ethernet/emulex/benet/be_cmds.c
1021
AMAP_SET_BITS(struct amap_eq_context, size, req->context, 0);
drivers/net/ethernet/emulex/benet/be_cmds.c
1022
AMAP_SET_BITS(struct amap_eq_context, count, req->context,
drivers/net/ethernet/emulex/benet/be_cmds.c
1024
be_dws_cpu_to_le(req->context, sizeof(req->context));
drivers/net/ethernet/emulex/benet/be_cmds.c
1173
ctxt = &req->context;
drivers/net/ethernet/emulex/benet/be_cmds.c
1210
be_dws_cpu_to_le(ctxt, sizeof(req->context));
drivers/net/ethernet/emulex/benet/be_cmds.c
1251
ctxt = &req->context;
drivers/net/ethernet/emulex/benet/be_cmds.c
1285
be_dws_cpu_to_le(ctxt, sizeof(req->context));
drivers/net/ethernet/emulex/benet/be_cmds.c
1316
ctxt = &req->context;
drivers/net/ethernet/emulex/benet/be_cmds.c
1329
be_dws_cpu_to_le(ctxt, sizeof(req->context));
drivers/net/ethernet/emulex/benet/be_cmds.c
2364
ctxt = &req->context;
drivers/net/ethernet/emulex/benet/be_cmds.c
2375
be_dws_cpu_to_le(ctxt, sizeof(req->context));
drivers/net/ethernet/emulex/benet/be_cmds.c
3906
ctxt = &req->context;
drivers/net/ethernet/emulex/benet/be_cmds.c
3934
be_dws_cpu_to_le(req->context, sizeof(req->context));
drivers/net/ethernet/emulex/benet/be_cmds.c
3961
ctxt = &req->context;
drivers/net/ethernet/emulex/benet/be_cmds.c
3977
be_dws_cpu_to_le(req->context, sizeof(req->context));
drivers/net/ethernet/emulex/benet/be_cmds.c
3984
be_dws_le_to_cpu(&resp->context, sizeof(resp->context));
drivers/net/ethernet/emulex/benet/be_cmds.c
3986
pvid, &resp->context);
drivers/net/ethernet/emulex/benet/be_cmds.c
3991
port_fwd_type, &resp->context);
drivers/net/ethernet/emulex/benet/be_cmds.c
3995
spoofchk, &resp->context);
drivers/net/ethernet/emulex/benet/be_cmds.h
1470
u8 context[sizeof(struct amap_lancer_write_obj_context) / 8];
drivers/net/ethernet/emulex/benet/be_cmds.h
1867
u8 context[sizeof(struct amap_set_hsw_context) / 8];
drivers/net/ethernet/emulex/benet/be_cmds.h
1891
u8 context[sizeof(struct amap_get_hsw_req_context) / 8];
drivers/net/ethernet/emulex/benet/be_cmds.h
1896
u8 context[sizeof(struct amap_get_hsw_resp_context) / 8];
drivers/net/ethernet/emulex/benet/be_cmds.h
392
u8 context[sizeof(struct amap_eq_context) / 8];
drivers/net/ethernet/emulex/benet/be_cmds.h
495
u8 context[sizeof(struct amap_cq_context_be) / 8];
drivers/net/ethernet/emulex/benet/be_cmds.h
557
u8 context[sizeof(struct amap_mcc_context_be) / 8];
drivers/net/ethernet/emulex/benet/be_cmds.h
566
u8 context[sizeof(struct amap_mcc_context_v1) / 8];
drivers/net/ethernet/huawei/hinic/hinic_port.c
786
rss_type->ipv4 = HINIC_RSS_TYPE_GET(ctx_tbl.context, IPV4);
drivers/net/ethernet/huawei/hinic/hinic_port.c
787
rss_type->ipv6 = HINIC_RSS_TYPE_GET(ctx_tbl.context, IPV6);
drivers/net/ethernet/huawei/hinic/hinic_port.c
788
rss_type->ipv6_ext = HINIC_RSS_TYPE_GET(ctx_tbl.context, IPV6_EXT);
drivers/net/ethernet/huawei/hinic/hinic_port.c
789
rss_type->tcp_ipv4 = HINIC_RSS_TYPE_GET(ctx_tbl.context, TCP_IPV4);
drivers/net/ethernet/huawei/hinic/hinic_port.c
790
rss_type->tcp_ipv6 = HINIC_RSS_TYPE_GET(ctx_tbl.context, TCP_IPV6);
drivers/net/ethernet/huawei/hinic/hinic_port.c
791
rss_type->tcp_ipv6_ext = HINIC_RSS_TYPE_GET(ctx_tbl.context,
drivers/net/ethernet/huawei/hinic/hinic_port.c
793
rss_type->udp_ipv4 = HINIC_RSS_TYPE_GET(ctx_tbl.context, UDP_IPV4);
drivers/net/ethernet/huawei/hinic/hinic_port.c
794
rss_type->udp_ipv6 = HINIC_RSS_TYPE_GET(ctx_tbl.context, UDP_IPV6);
drivers/net/ethernet/huawei/hinic/hinic_port.h
402
u32 context;
drivers/net/ethernet/huawei/hinic3/hinic3_mgmt_interface.h
190
u32 context;
drivers/net/ethernet/huawei/hinic3/hinic3_rss.c
150
ctx_tbl.context = ctx;
drivers/net/ethernet/intel/ice/ice_flex_pipe.c
3098
prof->context = 0;
drivers/net/ethernet/intel/ice/ice_flex_type.h
182
u64 context;
drivers/net/ethernet/intel/ice/ice_fw_update.c
1024
priv.context.mode = PLDMFW_UPDATE_MODE_SINGLE_COMPONENT;
drivers/net/ethernet/intel/ice/ice_fw_update.c
1025
priv.context.component_identifier = NVM_COMP_ID_NVM;
drivers/net/ethernet/intel/ice/ice_fw_update.c
1032
priv.context.ops = &ice_fwu_ops_e822;
drivers/net/ethernet/intel/ice/ice_fw_update.c
1034
priv.context.ops = &ice_fwu_ops_e810;
drivers/net/ethernet/intel/ice/ice_fw_update.c
1035
priv.context.dev = dev;
drivers/net/ethernet/intel/ice/ice_fw_update.c
1054
err = pldmfw_flash_image(&priv.context, params->fw);
drivers/net/ethernet/intel/ice/ice_fw_update.c
13
struct pldmfw context;
drivers/net/ethernet/intel/ice/ice_fw_update.c
212
ice_send_component_table(struct pldmfw *context, struct pldmfw_component *component,
drivers/net/ethernet/intel/ice/ice_fw_update.c
215
struct ice_fwu_priv *priv = container_of(context, struct ice_fwu_priv, context);
drivers/net/ethernet/intel/ice/ice_fw_update.c
219
struct device *dev = context->dev;
drivers/net/ethernet/intel/ice/ice_fw_update.c
49
ice_send_package_data(struct pldmfw *context, const u8 *data, u16 length)
drivers/net/ethernet/intel/ice/ice_fw_update.c
51
struct ice_fwu_priv *priv = container_of(context, struct ice_fwu_priv, context);
drivers/net/ethernet/intel/ice/ice_fw_update.c
53
struct device *dev = context->dev;
drivers/net/ethernet/intel/ice/ice_fw_update.c
640
ice_flash_component(struct pldmfw *context, struct pldmfw_component *component)
drivers/net/ethernet/intel/ice/ice_fw_update.c
642
struct ice_fwu_priv *priv = container_of(context, struct ice_fwu_priv, context);
drivers/net/ethernet/intel/ice/ice_fw_update.c
701
static int ice_finalize_update(struct pldmfw *context)
drivers/net/ethernet/intel/ice/ice_fw_update.c
703
struct ice_fwu_priv *priv = container_of(context, struct ice_fwu_priv, context);
drivers/net/ethernet/intel/ice/ice_fw_update.c
772
ice_op_pci_match_record(struct pldmfw *context, struct pldmfw_record *record)
drivers/net/ethernet/intel/ice/ice_fw_update.c
774
struct pci_dev *pdev = to_pci_dev(context->dev);
drivers/net/ethernet/intel/idpf/idpf_controlq.c
309
memcpy(&desc->params, msg->ctx.indirect.context,
drivers/net/ethernet/intel/idpf/idpf_controlq.c
587
memcpy(q_msg[i].ctx.indirect.context,
drivers/net/ethernet/intel/idpf/idpf_controlq_api.h
69
u8 context[IDPF_INDIRECT_CTX_SIZE];
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
12
struct pldmfw context;
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
157
static int ixgbe_send_component_table(struct pldmfw *context,
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
161
struct ixgbe_fwu_priv *priv = container_of(context,
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
163
context);
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
36
static int ixgbe_send_package_data(struct pldmfw *context,
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
39
struct ixgbe_fwu_priv *priv = container_of(context,
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
41
context);
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
411
static int ixgbe_flash_component(struct pldmfw *context,
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
414
struct ixgbe_fwu_priv *priv = container_of(context,
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
416
context);
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
467
static int ixgbe_finalize_update(struct pldmfw *context)
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
469
struct ixgbe_fwu_priv *priv = container_of(context,
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
471
context);
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
676
priv.context.ops = &ixgbe_fwu_ops_e610;
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
677
priv.context.dev = dev;
drivers/net/ethernet/intel/ixgbe/ixgbe_fw_update.c
696
err = pldmfw_flash_image(&priv.context, params->fw);
drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c
1706
u32 context = 0;
drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c
1712
ret = mvpp22_rss_context_create(port, &context);
drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c
1716
table = mvpp22_rss_table_get(port->priv, context);
drivers/net/ethernet/marvell/mvpp2/mvpp2_cls.c
1720
port->rss_ctx[0] = context;
drivers/net/ethernet/mellanox/mlx4/cmd.c
2622
priv->cmd.context = kmalloc_objs(struct mlx4_cmd_context,
drivers/net/ethernet/mellanox/mlx4/cmd.c
2624
if (!priv->cmd.context)
drivers/net/ethernet/mellanox/mlx4/cmd.c
2631
priv->cmd.context[i].token = i;
drivers/net/ethernet/mellanox/mlx4/cmd.c
2632
priv->cmd.context[i].next = i + 1;
drivers/net/ethernet/mellanox/mlx4/cmd.c
2637
init_completion(&priv->cmd.context[i].done);
drivers/net/ethernet/mellanox/mlx4/cmd.c
2640
priv->cmd.context[priv->cmd.max_cmds - 1].next = -1;
drivers/net/ethernet/mellanox/mlx4/cmd.c
2676
kfree(priv->cmd.context);
drivers/net/ethernet/mellanox/mlx4/cmd.c
2677
priv->cmd.context = NULL;
drivers/net/ethernet/mellanox/mlx4/cmd.c
2745
struct mlx4_cmd_context *context;
drivers/net/ethernet/mellanox/mlx4/cmd.c
2749
if (priv->cmd.context) {
drivers/net/ethernet/mellanox/mlx4/cmd.c
2751
context = &priv->cmd.context[i];
drivers/net/ethernet/mellanox/mlx4/cmd.c
2752
context->fw_status = CMD_STAT_INTERNAL_ERR;
drivers/net/ethernet/mellanox/mlx4/cmd.c
2753
context->result =
drivers/net/ethernet/mellanox/mlx4/cmd.c
2755
complete(&context->done);
drivers/net/ethernet/mellanox/mlx4/cmd.c
342
struct mlx4_cmd_context *context;
drivers/net/ethernet/mellanox/mlx4/cmd.c
350
context = &cmd->context[cmd->free_head];
drivers/net/ethernet/mellanox/mlx4/cmd.c
351
context->token += cmd->token_mask + 1;
drivers/net/ethernet/mellanox/mlx4/cmd.c
352
cmd->free_head = context->next;
drivers/net/ethernet/mellanox/mlx4/cmd.c
355
reinit_completion(&context->done);
drivers/net/ethernet/mellanox/mlx4/cmd.c
365
if (!wait_for_completion_timeout(&context->done,
drivers/net/ethernet/mellanox/mlx4/cmd.c
372
err = context->result;
drivers/net/ethernet/mellanox/mlx4/cmd.c
373
if (err && context->fw_status != CMD_STAT_MULTI_FUNC_REQ) {
drivers/net/ethernet/mellanox/mlx4/cmd.c
375
vhcr_cmd, context->fw_status);
drivers/net/ethernet/mellanox/mlx4/cmd.c
376
if (mlx4_closing_cmd_fatal_error(op, context->fw_status))
drivers/net/ethernet/mellanox/mlx4/cmd.c
399
context->next = cmd->free_head;
drivers/net/ethernet/mellanox/mlx4/cmd.c
400
cmd->free_head = context - cmd->context;
drivers/net/ethernet/mellanox/mlx4/cmd.c
663
struct mlx4_cmd_context *context =
drivers/net/ethernet/mellanox/mlx4/cmd.c
664
&priv->cmd.context[token & priv->cmd.token_mask];
drivers/net/ethernet/mellanox/mlx4/cmd.c
667
if (token != context->token)
drivers/net/ethernet/mellanox/mlx4/cmd.c
670
context->fw_status = status;
drivers/net/ethernet/mellanox/mlx4/cmd.c
671
context->result = mlx4_status_to_errno(status);
drivers/net/ethernet/mellanox/mlx4/cmd.c
672
context->out_param = out_param;
drivers/net/ethernet/mellanox/mlx4/cmd.c
674
complete(&context->done);
drivers/net/ethernet/mellanox/mlx4/cmd.c
682
struct mlx4_cmd_context *context;
drivers/net/ethernet/mellanox/mlx4/cmd.c
690
context = &cmd->context[cmd->free_head];
drivers/net/ethernet/mellanox/mlx4/cmd.c
691
context->token += cmd->token_mask + 1;
drivers/net/ethernet/mellanox/mlx4/cmd.c
692
cmd->free_head = context->next;
drivers/net/ethernet/mellanox/mlx4/cmd.c
702
reinit_completion(&context->done);
drivers/net/ethernet/mellanox/mlx4/cmd.c
705
in_modifier, op_modifier, op, context->token, 1);
drivers/net/ethernet/mellanox/mlx4/cmd.c
711
wait_for_completion_interruptible_timeout(&context->done,
drivers/net/ethernet/mellanox/mlx4/cmd.c
714
context->fw_status = 0;
drivers/net/ethernet/mellanox/mlx4/cmd.c
715
context->out_param = 0;
drivers/net/ethernet/mellanox/mlx4/cmd.c
716
context->result = 0;
drivers/net/ethernet/mellanox/mlx4/cmd.c
719
ret_wait = (long)wait_for_completion_timeout(&context->done,
drivers/net/ethernet/mellanox/mlx4/cmd.c
734
err = context->result;
drivers/net/ethernet/mellanox/mlx4/cmd.c
744
context->fw_status == CMD_STAT_BAD_SIZE)
drivers/net/ethernet/mellanox/mlx4/cmd.c
746
op, context->fw_status);
drivers/net/ethernet/mellanox/mlx4/cmd.c
749
op, context->fw_status);
drivers/net/ethernet/mellanox/mlx4/cmd.c
752
else if (mlx4_closing_cmd_fatal_error(op, context->fw_status))
drivers/net/ethernet/mellanox/mlx4/cmd.c
759
*out_param = context->out_param;
drivers/net/ethernet/mellanox/mlx4/cmd.c
766
context->next = cmd->free_head;
drivers/net/ethernet/mellanox/mlx4/cmd.c
767
cmd->free_head = context - cmd->context;
drivers/net/ethernet/mellanox/mlx4/en_resources.c
42
int user_prio, struct mlx4_qp_context *context)
drivers/net/ethernet/mellanox/mlx4/en_resources.c
47
memset(context, 0, sizeof(*context));
drivers/net/ethernet/mellanox/mlx4/en_resources.c
48
context->flags = cpu_to_be32(7 << 16 | rss << MLX4_RSS_QPC_FLAG_OFFSET);
drivers/net/ethernet/mellanox/mlx4/en_resources.c
49
context->pd = cpu_to_be32(mdev->priv_pdn);
drivers/net/ethernet/mellanox/mlx4/en_resources.c
50
context->mtu_msgmax = 0xff;
drivers/net/ethernet/mellanox/mlx4/en_resources.c
52
context->rq_size_stride = ilog2(size) << 3 | (ilog2(stride) - 4);
drivers/net/ethernet/mellanox/mlx4/en_resources.c
54
context->sq_size_stride = ilog2(size) << 3 | (ilog2(stride) - 4);
drivers/net/ethernet/mellanox/mlx4/en_resources.c
56
context->params2 |= cpu_to_be32(MLX4_QP_BIT_FPP);
drivers/net/ethernet/mellanox/mlx4/en_resources.c
59
context->sq_size_stride = ilog2(TXBB_SIZE) - 4;
drivers/net/ethernet/mellanox/mlx4/en_resources.c
61
context->usr_page = cpu_to_be32(mlx4_to_hw_uar_index(mdev->dev,
drivers/net/ethernet/mellanox/mlx4/en_resources.c
63
context->local_qpn = cpu_to_be32(qpn);
drivers/net/ethernet/mellanox/mlx4/en_resources.c
64
context->pri_path.ackto = 1 & 0x07;
drivers/net/ethernet/mellanox/mlx4/en_resources.c
65
context->pri_path.sched_queue = 0x83 | (priv->port - 1) << 6;
drivers/net/ethernet/mellanox/mlx4/en_resources.c
68
context->pri_path.sched_queue |= user_prio << 3;
drivers/net/ethernet/mellanox/mlx4/en_resources.c
69
context->pri_path.feup = MLX4_FEUP_FORCE_ETH_UP;
drivers/net/ethernet/mellanox/mlx4/en_resources.c
71
context->pri_path.counter_index = priv->counter_index;
drivers/net/ethernet/mellanox/mlx4/en_resources.c
72
context->cqn_send = cpu_to_be32(cqn);
drivers/net/ethernet/mellanox/mlx4/en_resources.c
73
context->cqn_recv = cpu_to_be32(cqn);
drivers/net/ethernet/mellanox/mlx4/en_resources.c
76
context->pri_path.counter_index !=
drivers/net/ethernet/mellanox/mlx4/en_resources.c
80
context->pri_path.fl |= MLX4_FL_ETH_SRC_CHECK_MC_LB;
drivers/net/ethernet/mellanox/mlx4/en_resources.c
81
context->pri_path.control |= MLX4_CTRL_ETH_SRC_CHECK_IF_COUNTER;
drivers/net/ethernet/mellanox/mlx4/en_resources.c
83
context->db_rec_addr = cpu_to_be64(priv->res.db.dma << 2);
drivers/net/ethernet/mellanox/mlx4/en_resources.c
85
context->param3 |= cpu_to_be32(1 << 30);
drivers/net/ethernet/mellanox/mlx4/en_resources.c
90
context->srqn = cpu_to_be32(7 << 28); /* this fills bits 30:28 */
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1094
struct mlx4_qp_context *context;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1097
context = kzalloc_obj(*context);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1098
if (!context)
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1109
qpn, ring->cqn, -1, context);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1110
context->db_rec_addr = cpu_to_be64(ring->wqres.db.dma);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1114
context->param3 |= cpu_to_be32(1 << 29);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1122
err = mlx4_qp_to_ready(mdev->dev, &ring->wqres.mtt, context, qp, state);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1129
kfree(context);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1170
struct mlx4_qp_context context;
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1226
priv->rx_ring[0]->cqn, -1, &context);
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1233
ptr = ((void *) &context) + offsetof(struct mlx4_qp_context, pri_path)
drivers/net/ethernet/mellanox/mlx4/en_rx.c
1263
err = mlx4_qp_to_ready(mdev->dev, &priv->res.mtt, &context,
drivers/net/ethernet/mellanox/mlx4/fw.c
3027
struct mlx4_set_port_general_context *context;
drivers/net/ethernet/mellanox/mlx4/fw.c
3034
context = mailbox->buf;
drivers/net/ethernet/mellanox/mlx4/fw.c
3036
context->flags2 |= SET_PORT_GEN_PHV_VALID;
drivers/net/ethernet/mellanox/mlx4/fw.c
3038
context->phv_en |= SET_PORT_GEN_PHV_EN;
drivers/net/ethernet/mellanox/mlx4/fw_qos.c
100
context->prio2tc[i >> 1] = prio2tc[i] << 4 | prio2tc[i + 1];
drivers/net/ethernet/mellanox/mlx4/fw_qos.c
115
struct mlx4_set_port_scheduler_context *context;
drivers/net/ethernet/mellanox/mlx4/fw_qos.c
124
context = mailbox->buf;
drivers/net/ethernet/mellanox/mlx4/fw_qos.c
127
struct mlx4_port_scheduler_tc_cfg_be *tc = &context->tc[i];
drivers/net/ethernet/mellanox/mlx4/fw_qos.c
88
struct mlx4_set_port_prio2tc_context *context;
drivers/net/ethernet/mellanox/mlx4/fw_qos.c
97
context = mailbox->buf;
drivers/net/ethernet/mellanox/mlx4/mlx4.h
640
struct mlx4_cmd_context *context;
drivers/net/ethernet/mellanox/mlx4/mlx4_en.h
743
struct mlx4_qp_context *context);
drivers/net/ethernet/mellanox/mlx4/port.c
1592
struct mlx4_set_port_general_context *context;
drivers/net/ethernet/mellanox/mlx4/port.c
1599
context = mailbox->buf;
drivers/net/ethernet/mellanox/mlx4/port.c
1600
context->flags = SET_PORT_GEN_ALL_VALID;
drivers/net/ethernet/mellanox/mlx4/port.c
1601
context->mtu = cpu_to_be16(mtu);
drivers/net/ethernet/mellanox/mlx4/port.c
1602
context->pptx = (pptx * (!pfctx)) << 7;
drivers/net/ethernet/mellanox/mlx4/port.c
1603
context->pfctx = pfctx;
drivers/net/ethernet/mellanox/mlx4/port.c
1604
context->pprx = (pprx * (!pfcrx)) << 7;
drivers/net/ethernet/mellanox/mlx4/port.c
1605
context->pfcrx = pfcrx;
drivers/net/ethernet/mellanox/mlx4/port.c
1608
context->flags |= SET_PORT_ROCE_2_FLAGS;
drivers/net/ethernet/mellanox/mlx4/port.c
1609
context->roce_mode |=
drivers/net/ethernet/mellanox/mlx4/port.c
1626
struct mlx4_set_port_rqp_calc_context *context;
drivers/net/ethernet/mellanox/mlx4/port.c
1638
context = mailbox->buf;
drivers/net/ethernet/mellanox/mlx4/port.c
1639
context->base_qpn = cpu_to_be32(base_qpn);
drivers/net/ethernet/mellanox/mlx4/port.c
1640
context->n_mac = dev->caps.log_num_macs;
drivers/net/ethernet/mellanox/mlx4/port.c
1641
context->promisc = cpu_to_be32(promisc << SET_PORT_PROMISC_SHIFT |
drivers/net/ethernet/mellanox/mlx4/port.c
1643
context->mcast = cpu_to_be32(m_promisc << SET_PORT_MC_PROMISC_SHIFT |
drivers/net/ethernet/mellanox/mlx4/port.c
1645
context->intra_no_vlan = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
1646
context->no_vlan = MLX4_NO_VLAN_IDX;
drivers/net/ethernet/mellanox/mlx4/port.c
1647
context->intra_vlan_miss = 0;
drivers/net/ethernet/mellanox/mlx4/port.c
1648
context->vlan_miss = MLX4_VLAN_MISS_IDX;
drivers/net/ethernet/mellanox/mlx4/port.c
1663
struct mlx4_set_port_general_context *context;
drivers/net/ethernet/mellanox/mlx4/port.c
1670
context = mailbox->buf;
drivers/net/ethernet/mellanox/mlx4/port.c
1671
context->flags2 |= MLX4_FLAG2_V_USER_MTU_MASK;
drivers/net/ethernet/mellanox/mlx4/port.c
1672
context->user_mtu = cpu_to_be16(user_mtu);
drivers/net/ethernet/mellanox/mlx4/port.c
1687
struct mlx4_set_port_general_context *context;
drivers/net/ethernet/mellanox/mlx4/port.c
1694
context = mailbox->buf;
drivers/net/ethernet/mellanox/mlx4/port.c
1695
context->flags2 |= MLX4_FLAG2_V_USER_MAC_MASK;
drivers/net/ethernet/mellanox/mlx4/port.c
1696
memcpy(context->user_mac, user_mac, sizeof(context->user_mac));
drivers/net/ethernet/mellanox/mlx4/port.c
1711
struct mlx4_set_port_general_context *context;
drivers/net/ethernet/mellanox/mlx4/port.c
1718
context = mailbox->buf;
drivers/net/ethernet/mellanox/mlx4/port.c
1719
context->flags2 |= MLX4_FLAG2_V_IGNORE_FCS_MASK;
drivers/net/ethernet/mellanox/mlx4/port.c
1721
context->ignore_fcs |= MLX4_IGNORE_FCS_MASK;
drivers/net/ethernet/mellanox/mlx4/port.c
1723
context->ignore_fcs &= ~MLX4_IGNORE_FCS_MASK;
drivers/net/ethernet/mellanox/mlx4/port.c
1754
struct mlx4_set_port_vxlan_context *context;
drivers/net/ethernet/mellanox/mlx4/port.c
1759
context = mailbox->buf;
drivers/net/ethernet/mellanox/mlx4/port.c
1760
memset(context, 0, sizeof(*context));
drivers/net/ethernet/mellanox/mlx4/port.c
1762
context->modify_flags = VXLAN_ENABLE_MODIFY | VXLAN_STEERING_MODIFY;
drivers/net/ethernet/mellanox/mlx4/port.c
1764
context->enable_flags = VXLAN_ENABLE;
drivers/net/ethernet/mellanox/mlx4/port.c
1765
context->steering = steering;
drivers/net/ethernet/mellanox/mlx4/qp.c
170
context->mtt_base_addr_h = mtt_addr >> 32;
drivers/net/ethernet/mellanox/mlx4/qp.c
171
context->mtt_base_addr_l = cpu_to_be32(mtt_addr & 0xffffffff);
drivers/net/ethernet/mellanox/mlx4/qp.c
172
context->log_page_size = mtt->page_shift - MLX4_ICM_PAGE_SHIFT;
drivers/net/ethernet/mellanox/mlx4/qp.c
178
context->roce_entropy =
drivers/net/ethernet/mellanox/mlx4/qp.c
182
memcpy(mailbox->buf + 8, context, sizeof(*context));
drivers/net/ethernet/mellanox/mlx4/qp.c
215
struct mlx4_qp_context *context,
drivers/net/ethernet/mellanox/mlx4/qp.c
219
return __mlx4_qp_modify(dev, mtt, cur_state, new_state, context,
drivers/net/ethernet/mellanox/mlx4/qp.c
898
struct mlx4_qp_context *context)
drivers/net/ethernet/mellanox/mlx4/qp.c
911
memcpy(context, mailbox->buf + 8, sizeof(*context));
drivers/net/ethernet/mellanox/mlx4/qp.c
919
struct mlx4_qp_context *context,
drivers/net/ethernet/mellanox/mlx4/qp.c
932
context->flags &= cpu_to_be32(~(0xf << 28));
drivers/net/ethernet/mellanox/mlx4/qp.c
933
context->flags |= cpu_to_be32(states[i + 1] << 28);
drivers/net/ethernet/mellanox/mlx4/qp.c
935
context->params2 &= ~cpu_to_be32(MLX4_QP_BIT_FPP);
drivers/net/ethernet/mellanox/mlx4/qp.c
937
context, 0, 0, qp);
drivers/net/ethernet/mellanox/mlx4/qp.c
94
struct mlx4_qp_context *context,
drivers/net/ethernet/mellanox/mlx4/qp.c
953
struct mlx4_qp_context context;
drivers/net/ethernet/mellanox/mlx4/qp.c
958
err = mlx4_qp_query(dev, &qp, &context);
drivers/net/ethernet/mellanox/mlx4/qp.c
960
u32 dest_qpn = be32_to_cpu(context.remote_qpn) & 0xffffff;
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
2929
struct mlx4_qp_context *context)
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
2938
context->qkey = cpu_to_be32(qkey);
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3755
struct mlx4_qp_context *context = inbox->buf + 8;
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3756
adjust_proxy_tun_qkey(dev, vhcr, context);
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3882
struct mlx4_qp_context *context = inbox->buf + 8;
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3884
err = adjust_qp_sched_queue(dev, slave, context, inbox);
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3893
adjust_proxy_tun_qkey(dev, vhcr, context);
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3904
struct mlx4_qp_context *context = inbox->buf + 8;
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3906
err = adjust_qp_sched_queue(dev, slave, context, inbox);
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3915
adjust_proxy_tun_qkey(dev, vhcr, context);
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3926
struct mlx4_qp_context *context = inbox->buf + 8;
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3927
int err = adjust_qp_sched_queue(dev, slave, context, inbox);
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3930
adjust_proxy_tun_qkey(dev, vhcr, context);
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3941
struct mlx4_qp_context *context = inbox->buf + 8;
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3943
err = adjust_qp_sched_queue(dev, slave, context, inbox);
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3950
adjust_proxy_tun_qkey(dev, vhcr, context);
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3963
struct mlx4_qp_context *context = inbox->buf + 8;
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3965
err = adjust_qp_sched_queue(dev, slave, context, inbox);
drivers/net/ethernet/mellanox/mlx4/resource_tracker.c
3972
adjust_proxy_tun_qkey(dev, vhcr, context);
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
1001
ent->callback(-EBUSY, ent->context);
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
1016
ent->callback(-EAGAIN, ent->context);
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
121
mlx5_cmd_cbk_t cbk, void *context, int page_queue)
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
1259
void *context, int page_queue,
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
1282
callback, context, page_queue);
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
136
ent->context = context;
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
1737
void *context;
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
1806
context = ent->context;
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
1821
callback(err, context);
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
1953
int out_size, mlx5_cmd_cbk_t callback, void *context,
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
2007
err = mlx5_cmd_invoke(dev, inb, outb, out, out_size, callback, context,
drivers/net/ethernet/mellanox/mlx5/core/en.h
1117
typedef int (*mlx5e_fp_preactivate)(struct mlx5e_priv *priv, void *context);
drivers/net/ethernet/mellanox/mlx5/core/en.h
1119
int fn##_ctx(struct mlx5e_priv *priv, void *context) \
drivers/net/ethernet/mellanox/mlx5/core/en.h
1127
void *context, bool reset);
drivers/net/ethernet/mellanox/mlx5/core/en.h
1129
int mlx5e_num_channels_changed_ctx(struct mlx5e_priv *priv, void *context);
drivers/net/ethernet/mellanox/mlx5/core/en.h
1130
int mlx5e_update_tc_and_tx_queues_ctx(struct mlx5e_priv *priv, void *context);
drivers/net/ethernet/mellanox/mlx5/core/en.h
1201
int mlx5e_set_dev_port_mtu_ctx(struct mlx5e_priv *priv, void *context);
drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c
212
void *context,
drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c
216
struct mlx5e_err_ctx *err_ctx = context;
drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c
599
struct devlink_fmsg *fmsg, void *context,
drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c
603
struct mlx5e_err_ctx *err_ctx = context;
drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c
252
void *context,
drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c
256
struct mlx5e_err_ctx *err_ctx = context;
drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c
523
struct devlink_fmsg *fmsg, void *context,
drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c
527
struct mlx5e_err_ctx *err_ctx = context;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
128
struct mlx5_async_work context;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
168
static void create_tis_callback(int status, struct mlx5_async_work *context)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
171
container_of(context, struct mlx5e_async_ctx, context);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
183
static void destroy_tis_callback(int status, struct mlx5_async_work *context)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
186
container_of(context, struct mlx5e_async_ctx, context);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
214
create_tis_callback, &async->context);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
237
destroy_tis_callback, &async->context);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
64
struct mlx5_async_work *context)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
72
out, outlen, callback, context);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
79
struct mlx5_async_work *context)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
87
out, outlen, callback, context);
drivers/net/ethernet/mellanox/mlx5/core/en_dcbnl.c
1160
static int mlx5e_update_trust_state_hw(struct mlx5e_priv *priv, void *context)
drivers/net/ethernet/mellanox/mlx5/core/en_dcbnl.c
1162
u8 *trust_state = context;
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
3341
void *context)
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
3351
err = preactivate(priv, context);
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
3366
void *context)
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
3383
err = preactivate(priv, context);
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
3409
void *context, bool reset)
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
3416
return mlx5e_switch_priv_params(priv, params, preactivate, context);
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
3436
preactivate, context);
drivers/net/ethernet/mellanox/mlx5/core/lib/hv.c
53
int mlx5_hv_register_invalidate(struct mlx5_core_dev *dev, void *context,
drivers/net/ethernet/mellanox/mlx5/core/lib/hv.c
54
void (*block_invalidate)(void *context,
drivers/net/ethernet/mellanox/mlx5/core/lib/hv.c
57
return hyperv_reg_block_invalidate(dev->pdev, context,
drivers/net/ethernet/mellanox/mlx5/core/lib/hv.h
16
int mlx5_hv_register_invalidate(struct mlx5_core_dev *dev, void *context,
drivers/net/ethernet/mellanox/mlx5/core/lib/hv.h
17
void (*block_invalidate)(void *context,
drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.c
96
void mlx5_hv_vhca_invalidate(void *context, u64 block_mask)
drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.c
98
struct mlx5_hv_vhca *hv_vhca = (struct mlx5_hv_vhca *)context;
drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.h
36
void mlx5_hv_vhca_invalidate(void *context, u64 block_mask);
drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.h
46
void *context);
drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.h
74
static inline void mlx5_hv_vhca_invalidate(void *context,
drivers/net/ethernet/mellanox/mlx5/core/lib/hv_vhca.h
87
void *context)
drivers/net/ethernet/mellanox/mlx5/core/mlx5_core.h
250
void *context, u32 *element_id);
drivers/net/ethernet/mellanox/mlx5/core/mlx5_core.h
252
void *context, u32 element_id,
drivers/net/ethernet/meta/fbnic/fbnic_devlink.c
116
fbnic_pldm_match_record(struct pldmfw *context, struct pldmfw_record *record)
drivers/net/ethernet/meta/fbnic/fbnic_devlink.c
125
if (!pldmfw_op_pci_match_record(context, record))
drivers/net/ethernet/meta/fbnic/fbnic_devlink.c
128
pdev = to_pci_dev(context->dev);
drivers/net/ethernet/meta/fbnic/fbnic_devlink.c
194
fbnic_flash_component(struct pldmfw *context,
drivers/net/ethernet/meta/fbnic/fbnic_devlink.c
209
pdev = to_pci_dev(context->dev);
drivers/net/ethernet/meta/fbnic/fbnic_devlink.c
324
struct pldmfw context;
drivers/net/ethernet/meta/fbnic/fbnic_devlink.c
328
context.ops = &fbnic_pldmfw_ops;
drivers/net/ethernet/meta/fbnic/fbnic_devlink.c
329
context.dev = dev;
drivers/net/ethernet/meta/fbnic/fbnic_devlink.c
331
err = pldmfw_flash_image(&context, fw);
drivers/net/ethernet/microchip/encx24j600-regmap.c
168
static int regmap_encx24j600_sfr_write(void *context, u8 reg, u8 *val,
drivers/net/ethernet/microchip/encx24j600-regmap.c
171
struct encx24j600_context *ctx = context;
drivers/net/ethernet/microchip/encx24j600-regmap.c
188
static int regmap_encx24j600_reg_update_bits(void *context, unsigned int reg,
drivers/net/ethernet/microchip/encx24j600-regmap.c
192
struct encx24j600_context *ctx = context;
drivers/net/ethernet/microchip/encx24j600-regmap.c
220
int regmap_encx24j600_spi_write(void *context, u8 reg, const u8 *data,
drivers/net/ethernet/microchip/encx24j600-regmap.c
223
struct encx24j600_context *ctx = context;
drivers/net/ethernet/microchip/encx24j600-regmap.c
233
int regmap_encx24j600_spi_read(void *context, u8 reg, u8 *data, size_t count)
drivers/net/ethernet/microchip/encx24j600-regmap.c
235
struct encx24j600_context *ctx = context;
drivers/net/ethernet/microchip/encx24j600-regmap.c
244
static int regmap_encx24j600_write(void *context, const void *data,
drivers/net/ethernet/microchip/encx24j600-regmap.c
253
return regmap_encx24j600_spi_write(context, reg, dout, len);
drivers/net/ethernet/microchip/encx24j600-regmap.c
258
return regmap_encx24j600_sfr_write(context, reg, dout, len);
drivers/net/ethernet/microchip/encx24j600-regmap.c
261
static int regmap_encx24j600_read(void *context,
drivers/net/ethernet/microchip/encx24j600-regmap.c
273
return regmap_encx24j600_spi_read(context, reg, val, val_size);
drivers/net/ethernet/microchip/encx24j600-regmap.c
280
return regmap_encx24j600_sfr_read(context, reg, val, val_size);
drivers/net/ethernet/microchip/encx24j600-regmap.c
345
static int regmap_encx24j600_phy_reg_read(void *context, unsigned int reg,
drivers/net/ethernet/microchip/encx24j600-regmap.c
348
struct encx24j600_context *ctx = context;
drivers/net/ethernet/microchip/encx24j600-regmap.c
383
static int regmap_encx24j600_phy_reg_write(void *context, unsigned int reg,
drivers/net/ethernet/microchip/encx24j600-regmap.c
386
struct encx24j600_context *ctx = context;
drivers/net/ethernet/microchip/encx24j600-regmap.c
46
static void regmap_lock_mutex(void *context)
drivers/net/ethernet/microchip/encx24j600-regmap.c
48
struct encx24j600_context *ctx = context;
drivers/net/ethernet/microchip/encx24j600-regmap.c
53
static void regmap_unlock_mutex(void *context)
drivers/net/ethernet/microchip/encx24j600-regmap.c
55
struct encx24j600_context *ctx = context;
drivers/net/ethernet/microchip/encx24j600-regmap.c
60
static int regmap_encx24j600_sfr_read(void *context, u8 reg, u8 *val,
drivers/net/ethernet/microchip/encx24j600-regmap.c
63
struct encx24j600_context *ctx = context;
drivers/net/ethernet/microchip/encx24j600_hw.h
433
int regmap_encx24j600_spi_write(void *context, u8 reg, const u8 *data,
drivers/net/ethernet/microchip/encx24j600_hw.h
435
int regmap_encx24j600_spi_read(void *context, u8 reg, u8 *data, size_t count);
drivers/net/ethernet/microchip/lan743x_main.c
219
static void lan743x_tx_isr(void *context, u32 int_sts, u32 flags)
drivers/net/ethernet/microchip/lan743x_main.c
221
struct lan743x_tx *tx = context;
drivers/net/ethernet/microchip/lan743x_main.c
260
static void lan743x_rx_isr(void *context, u32 int_sts, u32 flags)
drivers/net/ethernet/microchip/lan743x_main.c
262
struct lan743x_rx *rx = context;
drivers/net/ethernet/microchip/lan743x_main.c
301
static void lan743x_intr_shared_isr(void *context, u32 int_sts, u32 flags)
drivers/net/ethernet/microchip/lan743x_main.c
303
struct lan743x_adapter *adapter = context;
drivers/net/ethernet/microchip/lan743x_main.c
387
vector->handler(vector->context,
drivers/net/ethernet/microchip/lan743x_main.c
435
void *context)
drivers/net/ethernet/microchip/lan743x_main.c
446
vector->context = context;
drivers/net/ethernet/microchip/lan743x_main.c
454
vector->context = NULL;
drivers/net/ethernet/microchip/lan743x_main.c
469
vector->context = NULL;
drivers/net/ethernet/microchip/lan743x_main.h
900
typedef void(*lan743x_vector_handler)(void *context, u32 int_sts, u32 flags);
drivers/net/ethernet/microchip/lan743x_main.h
926
void *context;
drivers/net/ethernet/microchip/lan743x_ptp.c
1356
void lan743x_ptp_isr(void *context)
drivers/net/ethernet/microchip/lan743x_ptp.c
1358
struct lan743x_adapter *adapter = (struct lan743x_adapter *)context;
drivers/net/ethernet/microchip/lan743x_ptp.h
44
void lan743x_ptp_isr(void *context);
drivers/net/ethernet/microsoft/mana/gdma_main.c
589
cq->cq.callback(cq->cq.context, cq);
drivers/net/ethernet/microsoft/mana/gdma_main.c
609
eq->eq.callback(eq->eq.context, eq, &event);
drivers/net/ethernet/microsoft/mana/gdma_main.c
871
queue->eq.context = spec->eq.context;
drivers/net/ethernet/microsoft/mana/gdma_main.c
899
queue->cq.context = spec->cq.context;
drivers/net/ethernet/microsoft/mana/hw_channel.c
322
spec.cq.context = ctx;
drivers/net/ethernet/microsoft/mana/hw_channel.c
339
spec.eq.context = ctx;
drivers/net/ethernet/microsoft/mana/mana_en.c
1650
spec.eq.context = ac->eqs;
drivers/net/ethernet/microsoft/mana/mana_en.c
2205
static int mana_cq_handler(void *context, struct gdma_queue *gdma_queue)
drivers/net/ethernet/microsoft/mana/mana_en.c
2207
struct mana_cq *cq = context;
drivers/net/ethernet/microsoft/mana/mana_en.c
2251
static void mana_schedule_napi(void *context, struct gdma_queue *gdma_queue)
drivers/net/ethernet/microsoft/mana/mana_en.c
2253
struct mana_cq *cq = context;
drivers/net/ethernet/microsoft/mana/mana_en.c
2401
spec.cq.context = cq;
drivers/net/ethernet/microsoft/mana/mana_en.c
2704
spec.cq.context = cq;
drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c
513
u32 reg_sz, u32 context, void *dest)
drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c
523
result = nfp_cpp_writel(cpp, cpp_id, csr_ctx_ptr_offs, context);
drivers/net/ethernet/qlogic/qed/qed_iwarp.c
3131
events.affiliated_event(events.context,
drivers/net/ethernet/qlogic/qed/qed_iwarp.c
3140
events.affiliated_event(events.context,
drivers/net/ethernet/qlogic/qed/qed_iwarp.c
3148
p_hwfn->p_rdma_info->events.context,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
129
init_nvmetcp_task_params(struct e5_nvmetcp_task_context *context,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
133
context->ystorm_st_context.state.cccid = task_params->host_cccid;
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
134
SET_FIELD(context->ustorm_st_context.error_flags, USTORM_NVMETCP_TASK_ST_CTX_NVME_TCP, 1);
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
135
context->ustorm_st_context.nvme_tcp_opaque_lo = cpu_to_le32(task_params->opq.lo);
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
136
context->ustorm_st_context.nvme_tcp_opaque_hi = cpu_to_le32(task_params->opq.hi);
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
145
struct e5_nvmetcp_task_context *context = task_params->context;
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
146
const u8 val_byte = context->mstorm_ag_context.cdu_validation;
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
149
memset(context, 0, sizeof(*context));
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
150
init_nvmetcp_task_params(context, task_params,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
159
context->ystorm_st_context.pdu_hdr.task_hdr.reg[dw_index] =
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
165
context->ystorm_st_context.pdu_hdr.task_hdr.reg[dw_index] =
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
171
context->ystorm_st_context.pdu_hdr.task_hdr.reg[dw_index] =
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
176
context->mstorm_ag_context.cdu_validation = val_byte;
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
177
context->mstorm_st_context.task_type = (u8)(task_type);
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
178
context->mstorm_ag_context.task_cid = cpu_to_le16(task_params->conn_icid);
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
181
SET_FIELD(context->ustorm_ag_context.flags1, E5_USTORM_NVMETCP_TASK_AG_CTX_R2T2RECV, 1);
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
182
context->ustorm_st_context.task_type = (u8)(task_type);
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
183
context->ustorm_st_context.cq_rss_number = task_params->cq_rss_number;
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
184
context->ustorm_ag_context.icid = cpu_to_le16(task_params->conn_icid);
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
206
set_local_completion_context(struct e5_nvmetcp_task_context *context)
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
208
SET_FIELD(context->ystorm_st_context.state.flags,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
210
SET_FIELD(context->ustorm_st_context.flags,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
221
struct e5_nvmetcp_task_context *context = task_params->context;
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
231
init_scsi_sgl_context(&context->ystorm_st_context.state.sgl_params,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
232
&context->ystorm_st_context.state.data_desc,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
241
SET_FIELD(context->ystorm_st_context.state.flags,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
246
init_scsi_sgl_context(&context->mstorm_st_context.sgl_params,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
247
&context->mstorm_st_context.data_desc,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
255
context->mstorm_st_context.rem_task_size = cpu_to_le32(task_size);
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
259
init_ustorm_task_contexts(&context->ustorm_st_context,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
260
&context->ustorm_ag_context,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
272
context->ustorm_ag_context.exp_data_acked = task_size;
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
274
context->ustorm_ag_context.exp_data_acked = 0;
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
276
context->ustorm_ag_context.exp_data_acked = 0;
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
279
context->ustorm_ag_context.exp_cont_len = 0;
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
327
struct e5_nvmetcp_task_context *context = task_params->context;
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
333
init_ustorm_task_contexts(&context->ustorm_st_context,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
334
&context->ustorm_ag_context,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
348
init_scsi_sgl_context(&context->ystorm_st_context.state.sgl_params,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
349
&context->ystorm_st_context.state.data_desc,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
352
init_scsi_sgl_context(&context->mstorm_st_context.sgl_params,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
353
&context->mstorm_st_context.data_desc,
drivers/net/ethernet/qlogic/qed/qed_nvmetcp_fw_funcs.c
356
context->mstorm_st_context.rem_task_size =
drivers/net/ethernet/qlogic/qed/qed_rdma.c
405
events->context = params->events->context;
drivers/net/ethernet/qlogic/qed/qed_roce.c
59
events.affiliated_event(events.context, fw_event_code,
drivers/net/ethernet/qlogic/qed/qed_roce.c
62
events.affiliated_event(events.context, fw_event_code,
drivers/net/ethernet/sfc/ef10.c
2140
struct efx_msi_context *context = dev_id;
drivers/net/ethernet/sfc/ef10.c
2141
struct efx_nic *efx = context->efx;
drivers/net/ethernet/sfc/ef10.c
2148
if (context->index == efx->irq_level)
drivers/net/ethernet/sfc/ef10.c
2152
efx_schedule_channel_irq(efx->channel[context->index]);
drivers/net/ethernet/sfc/ef100_nic.c
330
struct efx_msi_context *context = dev_id;
drivers/net/ethernet/sfc/ef100_nic.c
331
struct efx_nic *efx = context->efx;
drivers/net/ethernet/sfc/ef100_nic.c
338
if (context->index == efx->irq_level)
drivers/net/ethernet/sfc/ef100_nic.c
342
efx_schedule_channel_irq(efx->channel[context->index]);
drivers/net/ethernet/sfc/falcon/farch.c
1586
struct ef4_msi_context *context = dev_id;
drivers/net/ethernet/sfc/falcon/farch.c
1587
struct ef4_nic *efx = context->efx;
drivers/net/ethernet/sfc/falcon/farch.c
1599
if (context->index == efx->irq_level) {
drivers/net/ethernet/sfc/falcon/farch.c
1607
ef4_schedule_channel_irq(efx->channel[context->index]);
drivers/net/ethernet/sfc/mcdi_filters.c
1864
static int efx_mcdi_get_rss_context_flags(struct efx_nic *efx, u32 context,
drivers/net/ethernet/sfc/mcdi_filters.c
1892
MCDI_SET_DWORD(inbuf, RSS_CONTEXT_GET_FLAGS_IN_RSS_CONTEXT_ID, context);
drivers/net/ethernet/sfc/mcdi_filters.c
1984
static int efx_mcdi_filter_free_rss_context(struct efx_nic *efx, u32 context)
drivers/net/ethernet/sfc/mcdi_filters.c
1989
context);
drivers/net/ethernet/sfc/mcdi_filters.c
1994
static int efx_mcdi_filter_populate_rss_table(struct efx_nic *efx, u32 context,
drivers/net/ethernet/sfc/mcdi_filters.c
2002
context);
drivers/net/ethernet/sfc/mcdi_filters.c
2022
context);
drivers/net/ethernet/sfc/mcdi_filters.c
2192
unsigned long context;
drivers/net/ethernet/sfc/mcdi_filters.c
2200
xa_for_each(&efx->net_dev->ethtool->rss_ctx, context, ctx) {
drivers/net/ethernet/sfc/mcdi_filters.c
2217
context, rc);
drivers/net/ethernet/sfc/siena/farch.c
1600
struct efx_msi_context *context = dev_id;
drivers/net/ethernet/sfc/siena/farch.c
1601
struct efx_nic *efx = context->efx;
drivers/net/ethernet/sfc/siena/farch.c
1613
if (context->index == efx->irq_level) {
drivers/net/ethernet/sfc/siena/farch.c
1621
efx_schedule_channel_irq(efx->channel[context->index]);
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1113
unsigned int context_next, context;
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1169
context = XLGMAC_GET_REG_BITS(
drivers/net/ethernet/synopsys/dwc-xlgmac-net.c
1186
if (!context) {
drivers/net/ethernet/ti/icssg/icss_iep.c
916
static int icss_iep_regmap_write(void *context, unsigned int reg,
drivers/net/ethernet/ti/icssg/icss_iep.c
919
struct icss_iep *iep = context;
drivers/net/ethernet/ti/icssg/icss_iep.c
926
static int icss_iep_regmap_read(void *context, unsigned int reg,
drivers/net/ethernet/ti/icssg/icss_iep.c
929
struct icss_iep *iep = context;
drivers/net/ethernet/ti/netcp_core.c
96
void (*txtstamp)(void *context, struct sk_buff *skb);
drivers/net/ethernet/ti/netcp_ethss.c
2529
static void gbe_txtstamp(void *context, struct sk_buff *skb)
drivers/net/ethernet/ti/netcp_ethss.c
2531
struct gbe_intf *gbe_intf = context;
drivers/net/ethernet/via/via-velocity.c
3107
static void velocity_save_context(struct velocity_info *vptr, struct velocity_context *context)
drivers/net/ethernet/via/via-velocity.c
3114
*((u32 *) (context->mac_reg + i)) = readl(ptr + i);
drivers/net/ethernet/via/via-velocity.c
3117
*((u32 *) (context->mac_reg + i)) = readl(ptr + i);
drivers/net/ethernet/via/via-velocity.c
3120
*((u32 *) (context->mac_reg + i)) = readl(ptr + i);
drivers/net/ethernet/via/via-velocity.c
3141
velocity_save_context(vptr, &vptr->context);
drivers/net/ethernet/via/via-velocity.c
3148
velocity_save_context(vptr, &vptr->context);
drivers/net/ethernet/via/via-velocity.c
3167
static void velocity_restore_context(struct velocity_info *vptr, struct velocity_context *context)
drivers/net/ethernet/via/via-velocity.c
3174
writel(*((u32 *) (context->mac_reg + i)), ptr + i);
drivers/net/ethernet/via/via-velocity.c
3179
writeb(~(*((u8 *) (context->mac_reg + i))), ptr + i + 4);
drivers/net/ethernet/via/via-velocity.c
3181
writeb(*((u8 *) (context->mac_reg + i)), ptr + i);
drivers/net/ethernet/via/via-velocity.c
3185
writel(*((u32 *) (context->mac_reg + i)), ptr + i);
drivers/net/ethernet/via/via-velocity.c
3188
writel(*((u32 *) (context->mac_reg + i)), ptr + i);
drivers/net/ethernet/via/via-velocity.c
3191
writeb(*((u8 *) (context->mac_reg + i)), ptr + i);
drivers/net/ethernet/via/via-velocity.c
3214
velocity_restore_context(vptr, &vptr->context);
drivers/net/ethernet/via/via-velocity.h
1438
struct velocity_context context;
drivers/net/ethernet/wangxun/txgbe/txgbe_phy.c
447
static int txgbe_i2c_read(void *context, unsigned int reg, unsigned int *val)
drivers/net/ethernet/wangxun/txgbe/txgbe_phy.c
449
struct wx *wx = context;
drivers/net/ethernet/wangxun/txgbe/txgbe_phy.c
456
static int txgbe_i2c_write(void *context, unsigned int reg, unsigned int val)
drivers/net/ethernet/wangxun/txgbe/txgbe_phy.c
458
struct wx *wx = context;
drivers/net/fjes/fjes_main.c
1475
void *context, void **return_value)
drivers/net/fjes/fjes_main.c
1478
bool *found = context;
drivers/net/hyperv/hyperv_net.h
239
void netvsc_channel_cb(void *context);
drivers/net/hyperv/netvsc.c
1740
void netvsc_channel_cb(void *context)
drivers/net/hyperv/netvsc.c
1742
struct netvsc_channel *nvchan = context;
drivers/net/hyperv/netvsc_drv.c
2146
struct net_device *ndev, int context)
drivers/net/hyperv/netvsc_drv.c
2172
if (context == VF_REG_IN_NOTIFIER)
drivers/net/hyperv/netvsc_drv.c
2314
static int netvsc_register_vf(struct net_device *vf_netdev, int context)
drivers/net/hyperv/netvsc_drv.c
2354
if (netvsc_vf_join(vf_netdev, ndev, context) != 0)
drivers/net/ieee802154/at86rf230.c
104
const u8 state, void (*complete)(void *context));
drivers/net/ieee802154/at86rf230.c
327
at86rf230_async_error_recover_complete(void *context)
drivers/net/ieee802154/at86rf230.c
329
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
342
at86rf230_async_error_recover(void *context)
drivers/net/ieee802154/at86rf230.c
344
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
370
void (*complete)(void *context))
drivers/net/ieee802154/at86rf230.c
386
void (*complete)(void *context))
drivers/net/ieee802154/at86rf230.c
399
at86rf230_async_state_assert(void *context)
drivers/net/ieee802154/at86rf230.c
401
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
454
ctx->complete(context);
drivers/net/ieee802154/at86rf230.c
471
at86rf230_async_state_delay(void *context)
drivers/net/ieee802154/at86rf230.c
473
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
562
at86rf230_async_state_change_start(void *context)
drivers/net/ieee802154/at86rf230.c
564
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
580
ctx->complete(context);
drivers/net/ieee802154/at86rf230.c
597
const u8 state, void (*complete)(void *context))
drivers/net/ieee802154/at86rf230.c
607
at86rf230_sync_state_change_complete(void *context)
drivers/net/ieee802154/at86rf230.c
609
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
638
at86rf230_tx_complete(void *context)
drivers/net/ieee802154/at86rf230.c
640
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
652
at86rf230_tx_on(void *context)
drivers/net/ieee802154/at86rf230.c
654
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
662
at86rf230_tx_trac_check(void *context)
drivers/net/ieee802154/at86rf230.c
664
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
687
at86rf230_rx_read_frame_complete(void *context)
drivers/net/ieee802154/at86rf230.c
689
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
70
void (*complete)(void *context);
drivers/net/ieee802154/at86rf230.c
715
at86rf230_rx_trac_check(void *context)
drivers/net/ieee802154/at86rf230.c
717
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
733
at86rf230_irq_trx_end(void *context)
drivers/net/ieee802154/at86rf230.c
735
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
749
at86rf230_irq_status(void *context)
drivers/net/ieee802154/at86rf230.c
751
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
774
state->msg.context = state;
drivers/net/ieee802154/at86rf230.c
814
at86rf230_write_frame_complete(void *context)
drivers/net/ieee802154/at86rf230.c
816
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
829
at86rf230_write_frame(void *context)
drivers/net/ieee802154/at86rf230.c
831
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
852
at86rf230_xmit_tx_on(void *context)
drivers/net/ieee802154/at86rf230.c
854
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/at86rf230.c
862
at86rf230_xmit_start(void *context)
drivers/net/ieee802154/at86rf230.c
864
struct at86rf230_state_change *ctx = context;
drivers/net/ieee802154/atusb.c
140
struct sk_buff *skb = urb->context;
drivers/net/ieee802154/atusb.c
162
urb->context = NULL;
drivers/net/ieee802154/atusb.c
219
struct sk_buff *skb = urb->context;
drivers/net/ieee802154/atusb.c
271
urb->context = NULL; /* skb is gone */
drivers/net/ieee802154/atusb.c
277
struct sk_buff *skb = urb->context;
drivers/net/ieee802154/atusb.c
285
urb->context = NULL;
drivers/net/ieee802154/atusb.c
308
kfree_skb(urb->context);
drivers/net/ieee802154/ca8210.c
797
static void ca8210_spi_transfer_complete(void *context)
drivers/net/ieee802154/ca8210.c
799
struct cas_control *cas_ctl = context;
drivers/net/ieee802154/ca8210.c
911
cas_ctl->msg.context = cas_ctl;
drivers/net/ieee802154/mcr20a.c
1014
lp->tx_buf_msg.context = lp;
drivers/net/ieee802154/mcr20a.c
1032
lp->reg_msg.context = lp;
drivers/net/ieee802154/mcr20a.c
1045
lp->rx_buf_msg.context = lp;
drivers/net/ieee802154/mcr20a.c
1065
lp->irq_msg.context = lp;
drivers/net/ieee802154/mcr20a.c
444
mcr20a_write_tx_buf_complete(void *context)
drivers/net/ieee802154/mcr20a.c
446
struct mcr20a_local *lp = context;
drivers/net/ieee802154/mcr20a.c
781
mcr20a_handle_rx_read_buf_complete(void *context)
drivers/net/ieee802154/mcr20a.c
783
struct mcr20a_local *lp = context;
drivers/net/ieee802154/mcr20a.c
814
mcr20a_handle_rx_read_len_complete(void *context)
drivers/net/ieee802154/mcr20a.c
816
struct mcr20a_local *lp = context;
drivers/net/ieee802154/mcr20a.c
882
mcr20a_irq_clean_complete(void *context)
drivers/net/ieee802154/mcr20a.c
884
struct mcr20a_local *lp = context;
drivers/net/ieee802154/mcr20a.c
931
static void mcr20a_irq_status_complete(void *context)
drivers/net/ieee802154/mcr20a.c
934
struct mcr20a_local *lp = context;
drivers/net/ieee802154/mrf24j40.c
1020
static void mrf24j40_intstat_complete(void *context)
drivers/net/ieee802154/mrf24j40.c
1022
struct mrf24j40 *devrec = context;
drivers/net/ieee802154/mrf24j40.c
1184
devrec->tx_msg.context = devrec;
drivers/net/ieee802154/mrf24j40.c
1195
devrec->tx_post_msg.context = devrec;
drivers/net/ieee802154/mrf24j40.c
1205
devrec->rx_msg.context = devrec;
drivers/net/ieee802154/mrf24j40.c
1212
devrec->rx_buf_msg.context = devrec;
drivers/net/ieee802154/mrf24j40.c
1228
devrec->irq_msg.context = devrec;
drivers/net/ieee802154/mrf24j40.c
505
static int mrf24j40_long_regmap_write(void *context, const void *data,
drivers/net/ieee802154/mrf24j40.c
508
struct spi_device *spi = context;
drivers/net/ieee802154/mrf24j40.c
525
mrf24j40_long_regmap_read(void *context, const void *reg, size_t reg_size,
drivers/net/ieee802154/mrf24j40.c
528
struct spi_device *spi = context;
drivers/net/ieee802154/mrf24j40.c
540
static void write_tx_buf_complete(void *context)
drivers/net/ieee802154/mrf24j40.c
542
struct mrf24j40 *devrec = context;
drivers/net/ieee802154/mrf24j40.c
752
static void mrf24j40_handle_rx_read_buf_complete(void *context)
drivers/net/ieee802154/mrf24j40.c
754
struct mrf24j40 *devrec = context;
drivers/net/ieee802154/mrf24j40.c
779
static void mrf24j40_handle_rx_read_buf(void *context)
drivers/net/ieee802154/mrf24j40.c
781
struct mrf24j40 *devrec = context;
drivers/net/ieee802154/mrf24j40.c
800
static void mrf24j40_handle_rx_read_len(void *context)
drivers/net/ieee802154/mrf24j40.c
802
struct mrf24j40 *devrec = context;
drivers/net/mctp/mctp-usb.c
156
struct sk_buff *skb = urb->context;
drivers/net/mctp/mctp-usb.c
40
struct sk_buff *skb = urb->context;
drivers/net/mdio/mdio-xgene.c
280
void *context, void **ret)
drivers/net/mdio/mdio-xgene.c
282
struct mii_bus *mdio = context;
drivers/net/phy/qcom/at803x.c
238
struct at803x_context *context)
drivers/net/phy/qcom/at803x.c
240
context->bmcr = phy_read(phydev, MII_BMCR);
drivers/net/phy/qcom/at803x.c
241
context->advertise = phy_read(phydev, MII_ADVERTISE);
drivers/net/phy/qcom/at803x.c
242
context->control1000 = phy_read(phydev, MII_CTRL1000);
drivers/net/phy/qcom/at803x.c
243
context->int_enable = phy_read(phydev, AT803X_INTR_ENABLE);
drivers/net/phy/qcom/at803x.c
244
context->smart_speed = phy_read(phydev, AT803X_SMART_SPEED);
drivers/net/phy/qcom/at803x.c
245
context->led_control = phy_read(phydev, AT803X_LED_CONTROL);
drivers/net/phy/qcom/at803x.c
250
const struct at803x_context *context)
drivers/net/phy/qcom/at803x.c
252
phy_write(phydev, MII_BMCR, context->bmcr);
drivers/net/phy/qcom/at803x.c
253
phy_write(phydev, MII_ADVERTISE, context->advertise);
drivers/net/phy/qcom/at803x.c
254
phy_write(phydev, MII_CTRL1000, context->control1000);
drivers/net/phy/qcom/at803x.c
255
phy_write(phydev, AT803X_INTR_ENABLE, context->int_enable);
drivers/net/phy/qcom/at803x.c
256
phy_write(phydev, AT803X_SMART_SPEED, context->smart_speed);
drivers/net/phy/qcom/at803x.c
257
phy_write(phydev, AT803X_LED_CONTROL, context->led_control);
drivers/net/phy/qcom/at803x.c
540
struct at803x_context context;
drivers/net/phy/qcom/at803x.c
542
at803x_context_save(phydev, &context);
drivers/net/phy/qcom/at803x.c
549
at803x_context_restore(phydev, &context);
drivers/net/usb/catc.c
218
struct catc *catc = urb->context;
drivers/net/usb/catc.c
284
struct catc *catc = urb->context;
drivers/net/usb/catc.c
378
struct catc *catc = urb->context;
drivers/net/usb/catc.c
504
struct catc *catc = urb->context;
drivers/net/usb/cdc-phonet.c
139
struct net_device *dev = req->context;
drivers/net/usb/cdc-phonet.c
85
struct sk_buff *skb = req->context;
drivers/net/usb/cdc_eem.c
43
dev_kfree_skb(urb->context);
drivers/net/usb/hso.c
1176
struct hso_serial *serial = urb->context;
drivers/net/usb/hso.c
1442
struct hso_serial *serial = urb->context;
drivers/net/usb/hso.c
1829
struct hso_shared_int *shared_int = urb->context;
drivers/net/usb/hso.c
1901
struct hso_serial *serial = urb->context;
drivers/net/usb/hso.c
1952
struct hso_serial *serial = urb->context;
drivers/net/usb/hso.c
740
struct hso_net *odev = urb->context;
drivers/net/usb/hso.c
981
struct hso_net *odev = urb->context;
drivers/net/usb/ipheth.c
209
dev = urb->context;
drivers/net/usb/ipheth.c
239
dev = urb->context;
drivers/net/usb/ipheth.c
291
dev = urb->context;
drivers/net/usb/ipheth.c
344
dev = urb->context;
drivers/net/usb/kaweth.c
411
struct kaweth_device *kaweth = u->context;
drivers/net/usb/kaweth.c
497
struct kaweth_device *kaweth = urb->context;
drivers/net/usb/kaweth.c
681
struct kaweth_device *kaweth = urb->context;
drivers/net/usb/lan78xx.c
3572
struct sk_buff *skb = (struct sk_buff *)urb->context;
drivers/net/usb/lan78xx.c
3971
struct sk_buff *skb = (struct sk_buff *)urb->context;
drivers/net/usb/lan78xx.c
4486
struct lan78xx_net *dev = urb->context;
drivers/net/usb/lan78xx.c
5207
struct sk_buff *skb = urb->context;
drivers/net/usb/pegasus.c
467
pegasus_t *pegasus = urb->context;
drivers/net/usb/pegasus.c
613
pegasus_t *pegasus = urb->context;
drivers/net/usb/pegasus.c
649
pegasus_t *pegasus = urb->context;
drivers/net/usb/pegasus.c
99
struct usb_ctrlrequest *req = (struct usb_ctrlrequest *)urb->context;
drivers/net/usb/r8152.c
1846
agg = urb->context;
drivers/net/usb/r8152.c
1850
tp = agg->context;
drivers/net/usb/r8152.c
1915
agg = urb->context;
drivers/net/usb/r8152.c
1919
tp = agg->context;
drivers/net/usb/r8152.c
1960
tp = urb->context;
drivers/net/usb/r8152.c
2061
rx_agg->context = tp;
drivers/net/usb/r8152.c
2158
tp->tx_info[i].context = tp;
drivers/net/usb/r8152.c
848
struct r8152 *context;
drivers/net/usb/r8152.c
856
struct r8152 *context;
drivers/net/usb/rtl8150.c
175
struct async_req *req = (struct async_req *)urb->context;
drivers/net/usb/rtl8150.c
388
dev = urb->context;
drivers/net/usb/rtl8150.c
457
dev = urb->context;
drivers/net/usb/rtl8150.c
477
dev = urb->context;
drivers/net/usb/usbnet.c
1300
struct sk_buff *skb = (struct sk_buff *) urb->context;
drivers/net/usb/usbnet.c
1690
dev_kfree_skb(urb->context);
drivers/net/usb/usbnet.c
195
struct usbnet *dev = urb->context;
drivers/net/usb/usbnet.c
1989
skb = (struct sk_buff *)res->context;
drivers/net/usb/usbnet.c
2197
struct usb_ctrlrequest *req = (struct usb_ctrlrequest *)urb->context;
drivers/net/usb/usbnet.c
603
struct sk_buff *skb = (struct sk_buff *) urb->context;
drivers/net/wan/fsl_qmc_hdlc.c
248
static void qmc_hcld_recv_complete(void *context, size_t length, unsigned int flags)
drivers/net/wan/fsl_qmc_hdlc.c
250
struct qmc_hdlc_desc *desc = context;
drivers/net/wan/fsl_qmc_hdlc.c
330
static void qmc_hdlc_xmit_complete(void *context)
drivers/net/wan/fsl_qmc_hdlc.c
332
struct qmc_hdlc_desc *desc = context;
drivers/net/wireless/ath/ar5523/ar5523.c
227
struct ar5523_tx_cmd *cmd = urb->context;
drivers/net/wireless/ath/ar5523/ar5523.c
536
struct ar5523_rx_data *data = urb->context;
drivers/net/wireless/ath/ar5523/ar5523.c
748
struct sk_buff *skb = urb->context;
drivers/net/wireless/ath/ar5523/ar5523.c
94
struct ar5523 *ar = urb->context;
drivers/net/wireless/ath/ath10k/usb.c
127
struct ath10k_urb_context *urb_context = urb->context;
drivers/net/wireless/ath/ath10k/usb.c
184
struct ath10k_urb_context *urb_context = urb->context;
drivers/net/wireless/ath/ath6kl/core.h
901
void ath6kl_tx_complete(struct htc_target *context,
drivers/net/wireless/ath/ath6kl/hif.c
53
int ath6kl_hif_rw_comp_handler(void *context, int status)
drivers/net/wireless/ath/ath6kl/hif.c
55
struct htc_packet *packet = context;
drivers/net/wireless/ath/ath6kl/hif.c
61
packet->completion(packet->context, packet);
drivers/net/wireless/ath/ath6kl/hif.h
269
int ath6kl_hif_rw_comp_handler(void *context, int status);
drivers/net/wireless/ath/ath6kl/htc.h
329
struct htc_target *context;
drivers/net/wireless/ath/ath6kl/htc.h
642
static inline void set_htc_pkt_info(struct htc_packet *packet, void *context,
drivers/net/wireless/ath/ath6kl/htc.h
646
packet->pkt_cntxt = context;
drivers/net/wireless/ath/ath6kl/htc.h
659
static inline void set_htc_rxpkt_info(struct htc_packet *packet, void *context,
drivers/net/wireless/ath/ath6kl/htc.h
663
packet->pkt_cntxt = context;
drivers/net/wireless/ath/ath6kl/htc_mbox.c
1566
static void htc_ctrl_rx(struct htc_target *context, struct htc_packet *packets)
drivers/net/wireless/ath/ath6kl/htc_mbox.c
1574
reclaim_rx_ctrl_buf(context, packets);
drivers/net/wireless/ath/ath6kl/htc_mbox.c
1588
htc_reclaim_rxbuf(context, packets, &context->endpoint[0]);
drivers/net/wireless/ath/ath6kl/htc_mbox.c
636
packet->context = target;
drivers/net/wireless/ath/ath6kl/htc_mbox.c
930
packet->completion(packet->context, packet);
drivers/net/wireless/ath/ath6kl/htc_pipe.c
1159
static void htc_rxctrl_complete(struct htc_target *context,
drivers/net/wireless/ath/ath6kl/sdio.c
1226
void *context;
drivers/net/wireless/ath/ath6kl/sdio.c
1243
context = req->packet;
drivers/net/wireless/ath/ath6kl/sdio.c
1245
ath6kl_hif_rw_comp_handler(context, -ECANCELED);
drivers/net/wireless/ath/ath6kl/sdio.c
450
void *context;
drivers/net/wireless/ath/ath6kl/sdio.c
456
context = req->packet;
drivers/net/wireless/ath/ath6kl/sdio.c
458
ath6kl_hif_rw_comp_handler(context, status);
drivers/net/wireless/ath/ath6kl/usb.c
504
struct ath6kl_urb_context *urb_context = urb->context;
drivers/net/wireless/ath/ath6kl/usb.c
561
struct ath6kl_urb_context *urb_context = urb->context;
drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c
125
static void owl_fw_cb(const struct firmware *fw, void *context)
drivers/net/wireless/ath/ath9k/ath9k_pci_owl_loader.c
127
struct owl_ctx *ctx = context;
drivers/net/wireless/ath/ath9k/hif_usb.c
1169
static void ath9k_hif_usb_firmware_cb(const struct firmware *fw, void *context);
drivers/net/wireless/ath/ath9k/hif_usb.c
1241
static void ath9k_hif_usb_firmware_cb(const struct firmware *fw, void *context)
drivers/net/wireless/ath/ath9k/hif_usb.c
1243
struct hif_device_usb *hif_dev = context;
drivers/net/wireless/ath/ath9k/hif_usb.c
137
struct cmd_buf *cmd = urb->context;
drivers/net/wireless/ath/ath9k/hif_usb.c
255
struct tx_buf *tx_buf = urb->context;
drivers/net/wireless/ath/ath9k/hif_usb.c
690
struct rx_buf *rx_buf = urb->context;
drivers/net/wireless/ath/ath9k/hif_usb.c
73
struct cmd_buf *cmd = urb->context;
drivers/net/wireless/ath/ath9k/hif_usb.c
736
struct rx_buf *rx_buf = urb->context;
drivers/net/wireless/ath/ath9k/hif_usb.c
801
urb->context = NULL;
drivers/net/wireless/ath/carl9170/usb.c
1029
void *context)
drivers/net/wireless/ath/carl9170/usb.c
1031
struct ar9170 *ar = context;
drivers/net/wireless/ath/carl9170/usb.c
172
dev_kfree_skb_irq(urb->context);
drivers/net/wireless/ath/carl9170/usb.c
181
carl9170_tx_callback(ar, urb->context);
drivers/net/wireless/ath/carl9170/usb.c
241
struct ar9170 *ar = urb->context;
drivers/net/wireless/ath/carl9170/usb.c
279
struct ar9170 *ar = urb->context;
drivers/net/wireless/ath/carl9170/usb.c
372
struct sk_buff *skb = urb->context;
drivers/net/wireless/ath/carl9170/usb.c
400
struct ar9170 *ar = urb->context;
drivers/net/wireless/ath/carl9170/usb.c
569
struct sk_buff *skb = urb->context;
drivers/net/wireless/atmel/at76c50x-usb.c
1194
struct at76_priv *priv = urb->context;
drivers/net/wireless/atmel/at76c50x-usb.c
1724
struct at76_priv *priv = urb->context;
drivers/net/wireless/broadcom/b43/main.c
2179
static void b43_fw_cb(const struct firmware *firmware, void *context)
drivers/net/wireless/broadcom/b43/main.c
2181
struct b43_request_fw_context *ctx = context;
drivers/net/wireless/broadcom/b43legacy/main.c
1486
static void b43legacy_fw_cb(const struct firmware *firmware, void *context)
drivers/net/wireless/broadcom/b43legacy/main.c
1488
struct b43legacy_wldev *dev = context;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/usb.c
217
(struct brcmf_usbdev_info *)urb->context;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/usb.c
229
(struct brcmf_usbdev_info *)urb->context;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/usb.c
487
struct brcmf_usbreq *req = (struct brcmf_usbreq *)urb->context;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/usb.c
509
struct brcmf_usbreq *req = (struct brcmf_usbreq *)urb->context;
drivers/net/wireless/broadcom/brcm80211/brcmfmac/usb.c
734
(struct brcmf_usbdev_info *)urb->context;
drivers/net/wireless/intel/iwlegacy/4965-mac.c
4662
void *context);
drivers/net/wireless/intel/iwlegacy/4965-mac.c
4759
il4965_ucode_callback(const struct firmware *ucode_raw, void *context)
drivers/net/wireless/intel/iwlegacy/4965-mac.c
4761
struct il_priv *il = context;
drivers/net/wireless/intel/iwlwifi/dvm/tx.c
238
static int iwl_sta_id_or_broadcast(struct iwl_rxon_context *context,
drivers/net/wireless/intel/iwlwifi/dvm/tx.c
244
return context->bcast_sta_id;
drivers/net/wireless/intel/iwlwifi/iwl-drv.c
1598
static void iwl_req_fw_callback(const struct firmware *ucode_raw, void *context)
drivers/net/wireless/intel/iwlwifi/iwl-drv.c
1601
struct iwl_drv *drv = context;
drivers/net/wireless/intel/iwlwifi/iwl-drv.c
295
void *context);
drivers/net/wireless/intel/iwlwifi/iwl-op-mode.h
101
enum iwl_fw_error_context context;
drivers/net/wireless/intel/iwlwifi/iwl-trans.c
631
mode.context = IWL_ERR_CONTEXT_FROM_OPMODE;
drivers/net/wireless/intel/iwlwifi/iwl-trans.c
632
trans->restart.mode.context = IWL_ERR_CONTEXT_ABORT;
drivers/net/wireless/intel/iwlwifi/iwl-trans.h
1081
trans->restart.mode.context = IWL_ERR_CONTEXT_WORKER;
drivers/net/wireless/intel/iwlwifi/mld/mld.c
686
if (mode->context == IWL_ERR_CONTEXT_FROM_OPMODE) {
drivers/net/wireless/intel/iwlwifi/mld/mld.c
691
if (mode->context != IWL_ERR_CONTEXT_ABORT)
drivers/net/wireless/intel/iwlwifi/mld/mld.h
497
u8 context;
drivers/net/wireless/intel/iwlwifi/mld/notif.c
48
.context = _context, \
drivers/net/wireless/intel/iwlwifi/mld/notif.c
560
if (rx_h->context == RX_HANDLER_SYNC) {
drivers/net/wireless/intel/iwlwifi/mld/notif.c
57
.context = _context, \
drivers/net/wireless/intel/iwlwifi/mld/notif.c
63
.context = _context, \
drivers/net/wireless/intel/iwlwifi/mld/notif.c
88
.context = RX_HANDLER_ASYNC, \
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
1611
enum iwl_rx_handler_context context;
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
1645
if (!(BIT(entry->context) & contexts))
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
1653
if (entry->context != RX_HANDLER_ASYNC_UNLOCKED)
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
1658
if (entry->context != RX_HANDLER_ASYNC_UNLOCKED)
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
1745
if (rx_h->context == RX_HANDLER_SYNC) {
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
1759
entry->context = rx_h->context;
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
1763
if (rx_h->context == RX_HANDLER_ASYNC_LOCKED_WIPHY)
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
1990
if (mode->context == IWL_ERR_CONTEXT_FROM_OPMODE) {
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
1995
if (mode->context != IWL_ERR_CONTEXT_ABORT)
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
295
enum iwl_rx_handler_context context;
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
300
{ .cmd_id = _cmd_id, .fn = _fn, .context = _context, }
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
302
{ .cmd_id = WIDE_ID(_grp, _cmd), .fn = _fn, .context = _context, }
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
305
.context = _context, .min_size = sizeof(_struct), }
drivers/net/wireless/intel/iwlwifi/mvm/ops.c
308
.context = _context, .min_size = sizeof(_struct), }
drivers/net/wireless/intel/iwlwifi/pcie/gen1_2/trans-gen2.c
139
.context = IWL_ERR_CONTEXT_FROM_OPMODE,
drivers/net/wireless/intersil/p54/p54pci.c
497
void *context)
drivers/net/wireless/intersil/p54/p54pci.c
499
struct p54p_priv *priv = context;
drivers/net/wireless/intersil/p54/p54usb.c
142
struct sk_buff *skb = (struct sk_buff *) urb->context;
drivers/net/wireless/intersil/p54/p54usb.c
174
urb->context = skb;
drivers/net/wireless/intersil/p54/p54usb.c
197
struct sk_buff *skb = urb->context;
drivers/net/wireless/intersil/p54/p54usb.c
920
void *context)
drivers/net/wireless/intersil/p54/p54usb.c
922
struct p54u_priv *priv = context;
drivers/net/wireless/marvell/libertas/firmware.c
32
void (*cb)(const struct firmware *fw, void *context))
drivers/net/wireless/marvell/libertas/firmware.c
45
static void main_firmware_cb(const struct firmware *firmware, void *context)
drivers/net/wireless/marvell/libertas/firmware.c
47
struct lbs_private *priv = context;
drivers/net/wireless/marvell/libertas/firmware.c
64
static void helper_firmware_cb(const struct firmware *firmware, void *context)
drivers/net/wireless/marvell/libertas/firmware.c
66
struct lbs_private *priv = context;
drivers/net/wireless/marvell/libertas/if_usb.c
496
struct if_usb_card *cardp = urb->context;
drivers/net/wireless/marvell/libertas/if_usb.c
660
struct if_usb_card *cardp = urb->context;
drivers/net/wireless/marvell/libertas/if_usb.c
87
struct if_usb_card *cardp = (struct if_usb_card *) urb->context;
drivers/net/wireless/marvell/libertas_tf/if_usb.c
464
struct if_usb_card *cardp = urb->context;
drivers/net/wireless/marvell/libertas_tf/if_usb.c
628
struct if_usb_card *cardp = urb->context;
drivers/net/wireless/marvell/mwifiex/main.c
535
static int _mwifiex_fw_dpc(const struct firmware *firmware, void *context)
drivers/net/wireless/marvell/mwifiex/main.c
539
struct mwifiex_adapter *adapter = context;
drivers/net/wireless/marvell/mwifiex/main.c
689
static void mwifiex_fw_dpc(const struct firmware *firmware, void *context)
drivers/net/wireless/marvell/mwifiex/main.c
691
_mwifiex_fw_dpc(firmware, context);
drivers/net/wireless/marvell/mwifiex/pcie.c
2455
static irqreturn_t mwifiex_pcie_interrupt(int irq, void *context)
drivers/net/wireless/marvell/mwifiex/pcie.c
2457
struct mwifiex_msix_context *ctx = context;
drivers/net/wireless/marvell/mwifiex/usb.c
1069
context = &port->tx_data_list[port->tx_data_ix++];
drivers/net/wireless/marvell/mwifiex/usb.c
1071
context, skb_send);
drivers/net/wireless/marvell/mwifiex/usb.c
1099
context = &port->tx_data_list[port->tx_data_ix++];
drivers/net/wireless/marvell/mwifiex/usb.c
1101
context, skb_send);
drivers/net/wireless/marvell/mwifiex/usb.c
1169
struct urb_context *context = NULL;
drivers/net/wireless/marvell/mwifiex/usb.c
1187
context = &card->tx_cmd;
drivers/net/wireless/marvell/mwifiex/usb.c
1219
context = &port->tx_data_list[port->tx_data_ix++];
drivers/net/wireless/marvell/mwifiex/usb.c
1222
return mwifiex_usb_construct_send_urb(adapter, port, ep, context, skb);
drivers/net/wireless/marvell/mwifiex/usb.c
154
struct urb_context *context = (struct urb_context *)urb->context;
drivers/net/wireless/marvell/mwifiex/usb.c
155
struct mwifiex_adapter *adapter = context->adapter;
drivers/net/wireless/marvell/mwifiex/usb.c
156
struct sk_buff *skb = context->skb;
drivers/net/wireless/marvell/mwifiex/usb.c
167
if (card->rx_cmd_ep == context->ep)
drivers/net/wireless/marvell/mwifiex/usb.c
178
if (card->rx_cmd_ep != context->ep)
drivers/net/wireless/marvell/mwifiex/usb.c
187
status = mwifiex_usb_recv(adapter, skb, context->ep);
drivers/net/wireless/marvell/mwifiex/usb.c
199
if (card->rx_cmd_ep == context->ep)
drivers/net/wireless/marvell/mwifiex/usb.c
207
if (card->rx_cmd_ep != context->ep)
drivers/net/wireless/marvell/mwifiex/usb.c
220
if (card->rx_cmd_ep != context->ep)
drivers/net/wireless/marvell/mwifiex/usb.c
227
if (card->rx_cmd_ep == context->ep)
drivers/net/wireless/marvell/mwifiex/usb.c
232
if (card->rx_cmd_ep == context->ep) {
drivers/net/wireless/marvell/mwifiex/usb.c
233
mwifiex_usb_submit_rx_urb(context, size);
drivers/net/wireless/marvell/mwifiex/usb.c
236
mwifiex_usb_submit_rx_urb(context, size);
drivers/net/wireless/marvell/mwifiex/usb.c
238
context->skb = NULL;
drivers/net/wireless/marvell/mwifiex/usb.c
247
struct urb_context *context = (struct urb_context *)(urb->context);
drivers/net/wireless/marvell/mwifiex/usb.c
248
struct mwifiex_adapter *adapter = context->adapter;
drivers/net/wireless/marvell/mwifiex/usb.c
256
if (context->ep == card->tx_cmd_ep) {
drivers/net/wireless/marvell/mwifiex/usb.c
264
mwifiex_write_data_complete(adapter, context->skb, 0,
drivers/net/wireless/marvell/mwifiex/usb.c
268
if (context->ep == port->tx_data_ep) {
drivers/net/wireless/marvell/mwifiex/usb.c
807
struct urb_context *context,
drivers/net/wireless/marvell/mwifiex/usb.c
814
context->adapter = adapter;
drivers/net/wireless/marvell/mwifiex/usb.c
815
context->ep = ep;
drivers/net/wireless/marvell/mwifiex/usb.c
816
context->skb = skb_send;
drivers/net/wireless/marvell/mwifiex/usb.c
817
tx_urb = context->urb;
drivers/net/wireless/marvell/mwifiex/usb.c
824
(void *)context, card->tx_cmd_interval);
drivers/net/wireless/marvell/mwifiex/usb.c
829
mwifiex_usb_tx_complete, (void *)context);
drivers/net/wireless/marvell/mwifiex/usb.c
959
struct urb_context *context = NULL;
drivers/net/wireless/marvell/mwl8k.c
521
static void mwl8k_fw_state_machine(const struct firmware *fw, void *context);
drivers/net/wireless/marvell/mwl8k.c
5812
static void mwl8k_fw_state_machine(const struct firmware *fw, void *context)
drivers/net/wireless/marvell/mwl8k.c
5814
struct mwl8k_priv *priv = context;
drivers/net/wireless/mediatek/mt76/mt7915/coredump.c
283
dump->context.idx = idx;
drivers/net/wireless/mediatek/mt76/mt7915/coredump.c
284
dump->context.handler = mt76_rr(dev, MT_FW_CIRQ_LISR);
drivers/net/wireless/mediatek/mt76/mt7915/coredump.c
296
dump->context.idx = idx;
drivers/net/wireless/mediatek/mt76/mt7915/coredump.c
297
dump->context.handler = id;
drivers/net/wireless/mediatek/mt76/mt7915/coredump.h
71
} context;
drivers/net/wireless/mediatek/mt76/usb.c
420
void *context)
drivers/net/wireless/mediatek/mt76/usb.c
434
urb->context = context;
drivers/net/wireless/mediatek/mt76/usb.c
558
struct mt76_queue *q = urb->context;
drivers/net/wireless/mediatek/mt76/usb.c
824
struct mt76_queue_entry *e = urb->context;
drivers/net/wireless/mediatek/mt7601u/dma.c
193
struct mt7601u_dev *dev = urb->context;
drivers/net/wireless/mediatek/mt7601u/dma.c
241
struct mt7601u_tx_queue *q = urb->context;
drivers/net/wireless/mediatek/mt7601u/usb.c
57
usb_complete_t complete_fn, void *context)
drivers/net/wireless/mediatek/mt7601u/usb.c
69
complete_fn, context);
drivers/net/wireless/mediatek/mt7601u/usb.c
83
struct completion *cmpl = urb->context;
drivers/net/wireless/mediatek/mt7601u/usb.h
61
usb_complete_t complete_fn, void *context);
drivers/net/wireless/purelifi/plfxlc/usb.c
344
skb = urb->context;
drivers/net/wireless/purelifi/plfxlc/usb.c
493
void *context)
drivers/net/wireless/purelifi/plfxlc/usb.c
503
(void *)buffer, buffer_len, complete_fn, context);
drivers/net/wireless/purelifi/plfxlc/usb.c
83
if (!urb->context) {
drivers/net/wireless/purelifi/plfxlc/usb.c
87
usb = urb->context;
drivers/net/wireless/purelifi/plfxlc/usb.h
158
usb_complete_t complete_fn, void *context);
drivers/net/wireless/ralink/rt2x00/rt2500usb.c
1294
struct queue_entry *entry = (struct queue_entry *)urb->context;
drivers/net/wireless/ralink/rt2x00/rt2x00usb.c
180
struct rt2x00_async_read_data *rd = urb->context;
drivers/net/wireless/ralink/rt2x00/rt2x00usb.c
270
struct queue_entry *entry = (struct queue_entry *)urb->context;
drivers/net/wireless/ralink/rt2x00/rt2x00usb.c
374
struct queue_entry *entry = (struct queue_entry *)urb->context;
drivers/net/wireless/realtek/rtl818x/rtl8187/dev.c
118
kfree(urb->context);
drivers/net/wireless/realtek/rtl818x/rtl8187/dev.c
188
struct sk_buff *skb = (struct sk_buff *)urb->context;
drivers/net/wireless/realtek/rtl818x/rtl8187/dev.c
327
struct sk_buff *skb = (struct sk_buff *)urb->context;
drivers/net/wireless/realtek/rtl818x/rtl8187/dev.c
409
urb->context = skb;
drivers/net/wireless/realtek/rtl818x/rtl8187/dev.c
473
struct ieee80211_hw *hw = (struct ieee80211_hw *)urb->context;
drivers/net/wireless/realtek/rtl8xxxu/core.c
5115
struct sk_buff *skb = (struct sk_buff *)urb->context;
drivers/net/wireless/realtek/rtl8xxxu/core.c
5804
skb = (struct sk_buff *)rx_urb->urb.context;
drivers/net/wireless/realtek/rtl8xxxu/core.c
5852
skb = (struct sk_buff *)rx_urb->urb.context;
drivers/net/wireless/realtek/rtl8xxxu/core.c
6504
struct sk_buff *skb = (struct sk_buff *)urb->context;
drivers/net/wireless/realtek/rtl8xxxu/core.c
6513
rx_urb->urb.context = NULL;
drivers/net/wireless/realtek/rtl8xxxu/core.c
6559
struct rtl8xxxu_priv *priv = (struct rtl8xxxu_priv *)urb->context;
drivers/net/wireless/realtek/rtl8xxxu/core.c
7419
skb = (struct sk_buff *)rx_urb->urb.context;
drivers/net/wireless/realtek/rtlwifi/core.c
105
void rtl_fw_cb(const struct firmware *firmware, void *context)
drivers/net/wireless/realtek/rtlwifi/core.c
107
rtl_fw_do_work(firmware, context, false);
drivers/net/wireless/realtek/rtlwifi/core.c
111
void rtl_wowlan_fw_cb(const struct firmware *firmware, void *context)
drivers/net/wireless/realtek/rtlwifi/core.c
113
rtl_fw_do_work(firmware, context, true);
drivers/net/wireless/realtek/rtlwifi/core.c
61
static void rtl_fw_do_work(const struct firmware *firmware, void *context,
drivers/net/wireless/realtek/rtlwifi/core.c
64
struct ieee80211_hw *hw = context;
drivers/net/wireless/realtek/rtlwifi/core.h
56
void rtl_fw_cb(const struct firmware *firmware, void *context);
drivers/net/wireless/realtek/rtlwifi/core.h
57
void rtl_wowlan_fw_cb(const struct firmware *firmware, void *context);
drivers/net/wireless/realtek/rtlwifi/rtl8192se/sw.c
58
static void rtl92se_fw_cb(const struct firmware *firmware, void *context)
drivers/net/wireless/realtek/rtlwifi/rtl8192se/sw.c
60
struct ieee80211_hw *hw = context;
drivers/net/wireless/realtek/rtlwifi/usb.c
541
struct rtl_usb *rtlusb = (struct rtl_usb *)_urb->context;
drivers/net/wireless/realtek/rtlwifi/usb.c
782
skb = (struct sk_buff *)_urb->context;
drivers/net/wireless/realtek/rtlwifi/usb.c
812
struct sk_buff *skb = (struct sk_buff *)urb->context;
drivers/net/wireless/realtek/rtw88/main.c
1804
static void rtw_load_firmware_cb(const struct firmware *firmware, void *context)
drivers/net/wireless/realtek/rtw88/main.c
1806
struct rtw_fw_state *fw = context;
drivers/net/wireless/realtek/rtw88/usb.c
321
struct rtw_usb_txcb *txcb = urb->context;
drivers/net/wireless/realtek/rtw88/usb.c
366
usb_complete_t cb, void *context)
drivers/net/wireless/realtek/rtw88/usb.c
383
usb_fill_bulk_urb(urb, usbd, pipe, skb->data, skb->len, cb, context);
drivers/net/wireless/realtek/rtw88/usb.c
490
struct sk_buff *skb = urb->context;
drivers/net/wireless/realtek/rtw88/usb.c
739
struct rx_usb_ctrl_block *rxcb = urb->context;
drivers/net/wireless/realtek/rtw89/usb.c
172
struct rtw89_usb_tx_ctrl_block *txcb = urb->context;
drivers/net/wireless/realtek/rtw89/usb.c
236
void *data, int len, void *context)
drivers/net/wireless/realtek/rtw89/usb.c
256
rtw89_usb_write_port_complete, context);
drivers/net/wireless/realtek/rtw89/usb.c
522
struct rtw89_usb_rx_ctrl_block *rxcb = urb->context;
drivers/net/wireless/rsi/rsi_91x_usb.c
268
struct rx_usb_ctrl_block *rx_cb = urb->context;
drivers/net/wireless/silabs/wfx/debug.c
242
struct dbgfs_hif_msg *context = file->private_data;
drivers/net/wireless/silabs/wfx/debug.c
243
struct wfx_dev *wdev = context->wdev;
drivers/net/wireless/silabs/wfx/debug.c
246
if (completion_done(&context->complete)) {
drivers/net/wireless/silabs/wfx/debug.c
257
memset(context->reply, 0xFF, sizeof(context->reply));
drivers/net/wireless/silabs/wfx/debug.c
265
context->ret = wfx_cmd_send(wdev, request, context->reply, sizeof(context->reply), false);
drivers/net/wireless/silabs/wfx/debug.c
268
complete(&context->complete);
drivers/net/wireless/silabs/wfx/debug.c
275
struct dbgfs_hif_msg *context = file->private_data;
drivers/net/wireless/silabs/wfx/debug.c
278
if (count > sizeof(context->reply))
drivers/net/wireless/silabs/wfx/debug.c
280
ret = wait_for_completion_interruptible(&context->complete);
drivers/net/wireless/silabs/wfx/debug.c
283
if (context->ret < 0)
drivers/net/wireless/silabs/wfx/debug.c
284
return context->ret;
drivers/net/wireless/silabs/wfx/debug.c
286
if (copy_to_user(user_buf, context->reply, count))
drivers/net/wireless/silabs/wfx/debug.c
294
struct dbgfs_hif_msg *context = kzalloc_obj(*context);
drivers/net/wireless/silabs/wfx/debug.c
296
if (!context)
drivers/net/wireless/silabs/wfx/debug.c
298
context->wdev = inode->i_private;
drivers/net/wireless/silabs/wfx/debug.c
299
init_completion(&context->complete);
drivers/net/wireless/silabs/wfx/debug.c
300
file->private_data = context;
drivers/net/wireless/silabs/wfx/debug.c
306
struct dbgfs_hif_msg *context = file->private_data;
drivers/net/wireless/silabs/wfx/debug.c
308
kfree(context);
drivers/net/wireless/ti/wlcore/main.c
6547
static void wlcore_nvs_cb(const struct firmware *fw, void *context)
drivers/net/wireless/ti/wlcore/main.c
6549
struct wl1271 *wl = context;
drivers/net/wireless/zydas/zd1211rw/zd_mac.c
487
struct ieee80211_hw * hw = zd_usb_to_hw(urb->context);
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
1773
struct zd_usb *usb = urb->context;
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
359
struct zd_usb *usb = urb->context;
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
374
struct zd_usb *usb = urb->context;
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
384
struct zd_mac *mac = zd_hw_mac(zd_usb_to_hw(urb->context));
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
461
usb = urb->context;
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
675
usb = urb->context;
drivers/net/wireless/zydas/zd1211rw/zd_usb.c
965
skb = (struct sk_buff *)urb->context;
drivers/nfc/microread/microread.c
357
static void microread_im_transceive_cb(void *context, struct sk_buff *skb,
drivers/nfc/microread/microread.c
360
const struct microread_info *info = context;
drivers/nfc/nfcmrvl/usb.c
149
struct sk_buff *skb = urb->context;
drivers/nfc/nfcmrvl/usb.c
59
struct nfcmrvl_usb_drv_data *drv_data = urb->context;
drivers/nfc/pn533/usb.c
180
cntx = phy->out_urb->context;
drivers/nfc/pn533/usb.c
181
phy->out_urb->context = &arg;
drivers/nfc/pn533/usb.c
188
phy->out_urb->context = cntx;
drivers/nfc/pn533/usb.c
369
struct pn533_acr122_poweron_rdr_arg *arg = urb->context;
drivers/nfc/pn533/usb.c
395
cntx = phy->in_urb->context; /* backup context */
drivers/nfc/pn533/usb.c
398
phy->in_urb->context = &arg;
drivers/nfc/pn533/usb.c
420
phy->in_urb->context = cntx; /* restore context */
drivers/nfc/pn533/usb.c
427
struct pn533_out_arg *arg = urb->context;
drivers/nfc/pn533/usb.c
451
struct pn533_usb_phy *phy = urb->context;
drivers/nfc/pn533/usb.c
61
struct pn533_usb_phy *phy = urb->context;
drivers/nfc/pn533/usb.c
86
struct pn533_usb_phy *phy = urb->context;
drivers/nfc/pn544/pn544.c
573
static void pn544_hci_data_exchange_cb(void *context, struct sk_buff *skb,
drivers/nfc/pn544/pn544.c
576
struct pn544_hci_info *info = context;
drivers/nfc/pn544/pn544.h
16
typedef int (*fw_download_t)(void *context, const char *firmware_name,
drivers/nfc/port100.c
616
struct port100 *dev = urb->context;
drivers/nfc/port100.c
669
struct port100 *dev = urb->context;
drivers/nfc/port100.c
933
struct port100 *dev = urb->context;
drivers/nfc/st21nfca/core.c
736
static void st21nfca_hci_data_exchange_cb(void *context, struct sk_buff *skb,
drivers/nfc/st21nfca/core.c
739
struct st21nfca_hci_info *info = context;
drivers/nfc/st21nfca/dep.c
442
static void st21nfca_im_recv_atr_res_cb(void *context, struct sk_buff *skb,
drivers/nfc/st21nfca/dep.c
445
struct st21nfca_hci_info *info = context;
drivers/nfc/st21nfca/dep.c
553
static void st21nfca_im_recv_dep_res_cb(void *context, struct sk_buff *skb,
drivers/nfc/st21nfca/dep.c
556
struct st21nfca_hci_info *info = context;
drivers/nvme/common/auth.c
703
const u8 *context, unsigned int contextlen,
drivers/nvme/common/auth.c
732
memcpy(info + 4 + prefixlen + labellen, context, contextlen);
drivers/nvme/host/rdma.c
1889
struct nvme_rdma_queue *queue = cm_id->context;
drivers/nvme/host/rdma.c
239
static void nvme_rdma_qp_event(struct ib_event *event, void *context)
drivers/nvme/target/rdma.c
1196
struct nvmet_rdma_port *port = cm_id->context;
drivers/nvme/target/rdma.c
1428
struct nvmet_rdma_port *port = cm_id->context;
drivers/nvme/target/rdma.c
1749
port = cm_id->context;
drivers/nvme/target/rdma.c
1789
struct nvmet_rdma_port *port = cm_id->context;
drivers/nvmem/an8855-efuse.c
17
static int an8855_efuse_read(void *context, unsigned int offset,
drivers/nvmem/an8855-efuse.c
20
struct regmap *regmap = context;
drivers/nvmem/apple-efuses.c
18
static int apple_efuses_read(void *context, unsigned int offset, void *val,
drivers/nvmem/apple-efuses.c
21
struct apple_efuses_priv *priv = context;
drivers/nvmem/bcm-ocotp.c
151
static int bcm_otpc_read(void *context, unsigned int offset, void *val,
drivers/nvmem/bcm-ocotp.c
154
struct otpc_priv *priv = context;
drivers/nvmem/bcm-ocotp.c
182
static int bcm_otpc_write(void *context, unsigned int offset, void *val,
drivers/nvmem/bcm-ocotp.c
185
struct otpc_priv *priv = context;
drivers/nvmem/brcm_nvram.c
50
static int brcm_nvram_read(void *context, unsigned int offset, void *val,
drivers/nvmem/brcm_nvram.c
53
struct brcm_nvram *priv = context;
drivers/nvmem/brcm_nvram.c
99
static int brcm_nvram_read_post_process_macaddr(void *context, const char *id, int index,
drivers/nvmem/imx-iim.c
32
static int imx_iim_read(void *context, unsigned int offset,
drivers/nvmem/imx-iim.c
35
struct iim_priv *iim = context;
drivers/nvmem/imx-ocotp-ele.c
115
static int imx_ocotp_cell_pp(void *context, const char *id, int index,
drivers/nvmem/imx-ocotp-ele.c
48
static enum fuse_type imx_ocotp_fuse_type(void *context, u32 index)
drivers/nvmem/imx-ocotp-ele.c
50
struct imx_ocotp_priv *priv = context;
drivers/nvmem/imx-ocotp-ele.c
66
static int imx_ocotp_reg_read(void *context, unsigned int offset, void *val, size_t bytes)
drivers/nvmem/imx-ocotp-ele.c
68
struct imx_ocotp_priv *priv = context;
drivers/nvmem/imx-ocotp-ele.c
94
type = imx_ocotp_fuse_type(context, i);
drivers/nvmem/imx-ocotp-scu.c
132
static int imx_scu_ocotp_read(void *context, unsigned int offset,
drivers/nvmem/imx-ocotp-scu.c
135
struct ocotp_priv *priv = context;
drivers/nvmem/imx-ocotp-scu.c
157
if (in_hole(context, i)) {
drivers/nvmem/imx-ocotp-scu.c
180
static int imx_scu_ocotp_write(void *context, unsigned int offset,
drivers/nvmem/imx-ocotp-scu.c
183
struct ocotp_priv *priv = context;
drivers/nvmem/imx-ocotp-scu.c
196
if (in_hole(context, index))
drivers/nvmem/imx-ocotp-scu.c
199
if (in_ecc(context, index)) {
drivers/nvmem/imx-ocotp-scu.c
75
static bool in_hole(void *context, u32 index)
drivers/nvmem/imx-ocotp-scu.c
77
struct ocotp_priv *priv = context;
drivers/nvmem/imx-ocotp-scu.c
92
static bool in_ecc(void *context, u32 index)
drivers/nvmem/imx-ocotp-scu.c
94
struct ocotp_priv *priv = context;
drivers/nvmem/imx-ocotp.c
158
static int imx_ocotp_read(void *context, unsigned int offset,
drivers/nvmem/imx-ocotp.c
161
struct ocotp_priv *priv = context;
drivers/nvmem/imx-ocotp.c
224
static int imx_ocotp_cell_pp(void *context, const char *id, int index,
drivers/nvmem/imx-ocotp.c
313
static int imx_ocotp_write(void *context, unsigned int offset, void *val,
drivers/nvmem/imx-ocotp.c
316
struct ocotp_priv *priv = context;
drivers/nvmem/jz4780-efuse.c
69
static int jz4780_efuse_read(void *context, unsigned int offset,
drivers/nvmem/jz4780-efuse.c
72
struct jz4780_efuse *efuse = context;
drivers/nvmem/lan9662-otpc.c
122
static int lan9662_otp_read(void *context, unsigned int offset,
drivers/nvmem/lan9662-otpc.c
125
struct lan9662_otp *otp = context;
drivers/nvmem/lan9662-otpc.c
142
static int lan9662_otp_write(void *context, unsigned int offset,
drivers/nvmem/lan9662-otpc.c
145
struct lan9662_otp *otp = context;
drivers/nvmem/layerscape-sfp.c
29
static int layerscape_sfp_read(void *context, unsigned int offset, void *val,
drivers/nvmem/layerscape-sfp.c
32
struct layerscape_sfp_priv *priv = context;
drivers/nvmem/layouts/u-boot-env.c
36
static int u_boot_env_read_post_process_ethaddr(void *context, const char *id, int index,
drivers/nvmem/lpc18xx_eeprom.c
129
static int lpc18xx_eeprom_read(void *context, unsigned int offset,
drivers/nvmem/lpc18xx_eeprom.c
132
struct lpc18xx_eeprom_dev *eeprom = context;
drivers/nvmem/lpc18xx_eeprom.c
90
static int lpc18xx_eeprom_gather_write(void *context, unsigned int reg,
drivers/nvmem/lpc18xx_eeprom.c
93
struct lpc18xx_eeprom_dev *eeprom = context;
drivers/nvmem/lpc18xx_otp.c
40
static int lpc18xx_otp_read(void *context, unsigned int offset,
drivers/nvmem/lpc18xx_otp.c
43
struct lpc18xx_otp *otp = context;
drivers/nvmem/meson-efuse.c
17
static int meson_efuse_read(void *context, unsigned int offset,
drivers/nvmem/meson-efuse.c
20
struct meson_sm_firmware *fw = context;
drivers/nvmem/meson-efuse.c
29
static int meson_efuse_write(void *context, unsigned int offset,
drivers/nvmem/meson-efuse.c
32
struct meson_sm_firmware *fw = context;
drivers/nvmem/meson-mx-efuse.c
134
static int meson_mx_efuse_read(void *context, unsigned int offset,
drivers/nvmem/meson-mx-efuse.c
137
struct meson_mx_efuse *efuse = context;
drivers/nvmem/mtk-efuse.c
23
static int mtk_reg_read(void *context,
drivers/nvmem/mtk-efuse.c
26
struct mtk_efuse_priv *priv = context;
drivers/nvmem/mtk-efuse.c
37
static int mtk_efuse_gpu_speedbin_pp(void *context, const char *id, int index,
drivers/nvmem/mxs-ocotp.c
59
static int mxs_ocotp_read(void *context, unsigned int offset,
drivers/nvmem/mxs-ocotp.c
62
struct mxs_ocotp *otp = context;
drivers/nvmem/nintendo-otp.c
48
static int nintendo_otp_reg_read(void *context,
drivers/nvmem/nintendo-otp.c
51
struct nintendo_otp_priv *priv = context;
drivers/nvmem/qfprom.c
256
static int qfprom_reg_write(void *context, unsigned int reg, void *_val,
drivers/nvmem/qfprom.c
259
struct qfprom_priv *priv = context;
drivers/nvmem/qfprom.c
320
static int qfprom_reg_read(void *context,
drivers/nvmem/qfprom.c
323
struct qfprom_priv *priv = context;
drivers/nvmem/qoriq-efuse.c
17
static int qoriq_efuse_read(void *context, unsigned int offset, void *val,
drivers/nvmem/qoriq-efuse.c
20
struct qoriq_efuse_priv *priv = context;
drivers/nvmem/rmem.c
32
static int rmem_read(void *context, unsigned int offset,
drivers/nvmem/rmem.c
35
struct rmem *priv = context;
drivers/nvmem/rockchip-efuse.c
150
static int rockchip_rk3399_efuse_read(void *context, unsigned int offset,
drivers/nvmem/rockchip-efuse.c
153
struct rockchip_efuse_chip *efuse = context;
drivers/nvmem/rockchip-efuse.c
55
static int rockchip_rk3288_efuse_read(void *context, unsigned int offset,
drivers/nvmem/rockchip-efuse.c
58
struct rockchip_efuse_chip *efuse = context;
drivers/nvmem/rockchip-efuse.c
95
static int rockchip_rk3328_efuse_read(void *context, unsigned int offset,
drivers/nvmem/rockchip-efuse.c
98
struct rockchip_efuse_chip *efuse = context;
drivers/nvmem/rockchip-otp.c
147
static int px30_otp_read(void *context, unsigned int offset,
drivers/nvmem/rockchip-otp.c
150
struct rockchip_otp *otp = context;
drivers/nvmem/rockchip-otp.c
187
static int rk3588_otp_read(void *context, unsigned int offset,
drivers/nvmem/rockchip-otp.c
190
struct rockchip_otp *otp = context;
drivers/nvmem/rockchip-otp.c
233
static int rockchip_otp_read(void *context, unsigned int offset,
drivers/nvmem/rockchip-otp.c
236
struct rockchip_otp *otp = context;
drivers/nvmem/rockchip-otp.c
248
ret = otp->data->reg_read(context, offset, val, bytes);
drivers/nvmem/s32g-ocotp-nvmem.c
19
static int s32g_ocotp_read(void *context, unsigned int offset,
drivers/nvmem/s32g-ocotp-nvmem.c
22
struct s32g_ocotp_priv *s32g_data = context;
drivers/nvmem/sc27xx-efuse.c
124
static int sc27xx_efuse_read(void *context, u32 offset, void *val, size_t bytes)
drivers/nvmem/sc27xx-efuse.c
126
struct sc27xx_efuse *efuse = context;
drivers/nvmem/sec-qfprom.c
23
static int sec_qfprom_reg_read(void *context, unsigned int reg, void *_val, size_t bytes)
drivers/nvmem/sec-qfprom.c
25
struct sec_qfprom *priv = context;
drivers/nvmem/snvs_lpgpr.c
53
static int snvs_lpgpr_write(void *context, unsigned int offset, void *val,
drivers/nvmem/snvs_lpgpr.c
56
struct snvs_lpgpr_priv *priv = context;
drivers/nvmem/snvs_lpgpr.c
79
static int snvs_lpgpr_read(void *context, unsigned int offset, void *val,
drivers/nvmem/snvs_lpgpr.c
82
struct snvs_lpgpr_priv *priv = context;
drivers/nvmem/sprd-efuse.c
294
static int sprd_efuse_read(void *context, u32 offset, void *val, size_t bytes)
drivers/nvmem/sprd-efuse.c
296
struct sprd_efuse *efuse = context;
drivers/nvmem/sprd-efuse.c
324
static int sprd_efuse_write(void *context, u32 offset, void *val, size_t bytes)
drivers/nvmem/sprd-efuse.c
326
struct sprd_efuse *efuse = context;
drivers/nvmem/stm32-romem.c
121
static int stm32_bsec_write(void *context, unsigned int offset, void *buf,
drivers/nvmem/stm32-romem.c
124
struct stm32_romem_priv *priv = context;
drivers/nvmem/stm32-romem.c
148
static int stm32_bsec_pta_read(void *context, unsigned int offset, void *buf,
drivers/nvmem/stm32-romem.c
151
struct stm32_romem_priv *priv = context;
drivers/nvmem/stm32-romem.c
156
static int stm32_bsec_pta_write(void *context, unsigned int offset, void *buf,
drivers/nvmem/stm32-romem.c
159
struct stm32_romem_priv *priv = context;
drivers/nvmem/stm32-romem.c
43
static int stm32_romem_read(void *context, unsigned int offset, void *buf,
drivers/nvmem/stm32-romem.c
46
struct stm32_romem_priv *priv = context;
drivers/nvmem/stm32-romem.c
74
static int stm32_bsec_read(void *context, unsigned int offset, void *buf,
drivers/nvmem/stm32-romem.c
77
struct stm32_romem_priv *priv = context;
drivers/nvmem/sunxi_sid.c
39
static int sunxi_sid_read(void *context, unsigned int offset,
drivers/nvmem/sunxi_sid.c
42
struct sunxi_sid *sid = context;
drivers/nvmem/sunxi_sid.c
93
static int sun8i_sid_read_by_reg(void *context, unsigned int offset,
drivers/nvmem/sunxi_sid.c
96
struct sunxi_sid *sid = context;
drivers/nvmem/u-boot-env.c
24
static int u_boot_env_read(void *context, unsigned int offset, void *val,
drivers/nvmem/u-boot-env.c
27
struct u_boot_env *priv = context;
drivers/nvmem/uniphier-efuse.c
19
static int uniphier_reg_read(void *context,
drivers/nvmem/uniphier-efuse.c
22
struct uniphier_efuse_priv *priv = context;
drivers/nvmem/vf610-ocotp.c
146
static int vf610_ocotp_read(void *context, unsigned int offset,
drivers/nvmem/vf610-ocotp.c
149
struct vf610_ocotp *ocotp = context;
drivers/nvmem/zynqmp_nvmem.c
146
static int zynqmp_nvmem_read(void *context, unsigned int offset, void *val, size_t bytes)
drivers/nvmem/zynqmp_nvmem.c
148
struct device *dev = context;
drivers/nvmem/zynqmp_nvmem.c
173
ret = zynqmp_efuse_access(context, offset, val,
drivers/nvmem/zynqmp_nvmem.c
185
static int zynqmp_nvmem_write(void *context,
drivers/nvmem/zynqmp_nvmem.c
196
return zynqmp_efuse_access(context, offset,
drivers/nvmem/zynqmp_nvmem.c
59
static int zynqmp_efuse_access(void *context, unsigned int offset,
drivers/nvmem/zynqmp_nvmem.c
63
struct device *dev = context;
drivers/pci/controller/pci-hyperv-intf.c
43
int hyperv_reg_block_invalidate(struct pci_dev *dev, void *context,
drivers/pci/controller/pci-hyperv-intf.c
44
void (*block_invalidate)(void *context,
drivers/pci/controller/pci-hyperv-intf.c
50
return hvpci_block_ops.reg_blk_invalidate(dev, context,
drivers/pci/controller/pci-hyperv.c
1018
static void hv_pci_generic_compl(void *context, struct pci_response *resp,
drivers/pci/controller/pci-hyperv.c
1021
struct hv_pci_compl *comp_pkt = context;
drivers/pci/controller/pci-hyperv.c
1445
static void hv_pci_read_config_compl(void *context, struct pci_response *resp,
drivers/pci/controller/pci-hyperv.c
1448
struct hv_read_config_compl *comp = context;
drivers/pci/controller/pci-hyperv.c
1545
static void hv_pci_write_config_compl(void *context, struct pci_response *resp,
drivers/pci/controller/pci-hyperv.c
1548
struct hv_pci_compl *comp_pkt = context;
drivers/pci/controller/pci-hyperv.c
1633
static int hv_register_block_invalidate(struct pci_dev *pdev, void *context,
drivers/pci/controller/pci-hyperv.c
1634
void (*block_invalidate)(void *context,
drivers/pci/controller/pci-hyperv.c
1647
hpdev->invalidate_context = context;
drivers/pci/controller/pci-hyperv.c
1734
static void hv_pci_compose_compl(void *context, struct pci_response *resp,
drivers/pci/controller/pci-hyperv.c
1737
struct compose_comp_ctxt *comp_pkt = context;
drivers/pci/controller/pci-hyperv.c
2555
static void q_resource_requirements(void *context, struct pci_response *resp,
drivers/pci/controller/pci-hyperv.c
2558
struct q_res_req_compl *completion = context;
drivers/pci/controller/pci-hyperv.c
3037
static void hv_pci_onchannelcallback(void *context)
drivers/pci/controller/pci-hyperv.c
3041
struct hv_pcibus_device *hbus = context;
drivers/pci/controller/pci-hyperv.c
311
void (*completion_func)(void *context, struct pci_response *resp,
drivers/pci/controller/pci-hyperv.c
560
void (*block_invalidate)(void *context, u64 block_mask);
drivers/pci/controller/pci-hyperv.c
575
static void hv_pci_onchannelcallback(void *context);
drivers/pci/hotplug/acpi_pcihp.c
182
check_hotplug(acpi_handle handle, u32 lvl, void *context, void **rv)
drivers/pci/hotplug/acpi_pcihp.c
184
int *found = (int *)context;
drivers/pci/hotplug/acpiphp.h
61
struct acpiphp_context *context;
drivers/pci/hotplug/acpiphp_glue.c
100
static void acpiphp_put_context(struct acpiphp_context *context)
drivers/pci/hotplug/acpiphp_glue.c
102
if (--context->refcount)
drivers/pci/hotplug/acpiphp_glue.c
105
WARN_ON(context->bridge);
drivers/pci/hotplug/acpiphp_glue.c
106
context->hp.self->hp = NULL;
drivers/pci/hotplug/acpiphp_glue.c
107
kfree(context);
drivers/pci/hotplug/acpiphp_glue.c
122
struct acpiphp_context *context;
drivers/pci/hotplug/acpiphp_glue.c
126
context = acpiphp_get_context(adev);
drivers/pci/hotplug/acpiphp_glue.c
127
if (!context)
drivers/pci/hotplug/acpiphp_glue.c
130
if (context->func.parent->is_going_away) {
drivers/pci/hotplug/acpiphp_glue.c
131
acpiphp_put_context(context);
drivers/pci/hotplug/acpiphp_glue.c
132
context = NULL;
drivers/pci/hotplug/acpiphp_glue.c
136
get_bridge(context->func.parent);
drivers/pci/hotplug/acpiphp_glue.c
137
acpiphp_put_context(context);
drivers/pci/hotplug/acpiphp_glue.c
141
return context;
drivers/pci/hotplug/acpiphp_glue.c
144
static void acpiphp_let_context_go(struct acpiphp_context *context)
drivers/pci/hotplug/acpiphp_glue.c
146
put_bridge(context->func.parent);
drivers/pci/hotplug/acpiphp_glue.c
151
struct acpiphp_context *context;
drivers/pci/hotplug/acpiphp_glue.c
167
context = bridge->context;
drivers/pci/hotplug/acpiphp_glue.c
169
if (context) {
drivers/pci/hotplug/acpiphp_glue.c
171
put_bridge(context->func.parent);
drivers/pci/hotplug/acpiphp_glue.c
172
context->bridge = NULL;
drivers/pci/hotplug/acpiphp_glue.c
173
acpiphp_put_context(context);
drivers/pci/hotplug/acpiphp_glue.c
191
struct acpiphp_context *context = acpiphp_grab_context(adev);
drivers/pci/hotplug/acpiphp_glue.c
195
if (!context)
drivers/pci/hotplug/acpiphp_glue.c
198
bus = context->func.slot->bus;
drivers/pci/hotplug/acpiphp_glue.c
216
acpiphp_let_context_go(context);
drivers/pci/hotplug/acpiphp_glue.c
231
struct acpiphp_context *context;
drivers/pci/hotplug/acpiphp_glue.c
256
context = acpiphp_init_context(adev);
drivers/pci/hotplug/acpiphp_glue.c
257
if (!context) {
drivers/pci/hotplug/acpiphp_glue.c
262
newfunc = &context->func;
drivers/pci/hotplug/acpiphp_glue.c
285
acpiphp_put_context(context);
drivers/pci/hotplug/acpiphp_glue.c
50
static void hotplug_event(u32 type, struct acpiphp_context *context);
drivers/pci/hotplug/acpiphp_glue.c
61
struct acpiphp_context *context;
drivers/pci/hotplug/acpiphp_glue.c
63
context = kzalloc_obj(*context);
drivers/pci/hotplug/acpiphp_glue.c
64
if (!context)
drivers/pci/hotplug/acpiphp_glue.c
67
context->refcount = 1;
drivers/pci/hotplug/acpiphp_glue.c
68
context->hp.notify = acpiphp_hotplug_notify;
drivers/pci/hotplug/acpiphp_glue.c
69
context->hp.fixup = acpiphp_post_dock_fixup;
drivers/pci/hotplug/acpiphp_glue.c
70
acpi_set_hp_context(adev, &context->hp);
drivers/pci/hotplug/acpiphp_glue.c
71
return context;
drivers/pci/hotplug/acpiphp_glue.c
783
static void hotplug_event(u32 type, struct acpiphp_context *context)
drivers/pci/hotplug/acpiphp_glue.c
785
acpi_handle handle = context->hp.self->handle;
drivers/pci/hotplug/acpiphp_glue.c
786
struct acpiphp_func *func = &context->func;
drivers/pci/hotplug/acpiphp_glue.c
791
bridge = context->bridge;
drivers/pci/hotplug/acpiphp_glue.c
82
struct acpiphp_context *context;
drivers/pci/hotplug/acpiphp_glue.c
839
struct acpiphp_context *context;
drivers/pci/hotplug/acpiphp_glue.c
841
context = acpiphp_grab_context(adev);
drivers/pci/hotplug/acpiphp_glue.c
842
if (!context)
drivers/pci/hotplug/acpiphp_glue.c
845
hotplug_event(type, context);
drivers/pci/hotplug/acpiphp_glue.c
846
acpiphp_let_context_go(context);
drivers/pci/hotplug/acpiphp_glue.c
87
context = to_acpiphp_context(adev->hp);
drivers/pci/hotplug/acpiphp_glue.c
88
context->refcount++;
drivers/pci/hotplug/acpiphp_glue.c
89
return context;
drivers/pci/hotplug/acpiphp_glue.c
899
struct acpiphp_context *context;
drivers/pci/hotplug/acpiphp_glue.c
907
context = acpiphp_get_context(adev);
drivers/pci/hotplug/acpiphp_glue.c
908
if (!context)
drivers/pci/hotplug/acpiphp_glue.c
911
bridge->context = context;
drivers/pci/hotplug/acpiphp_glue.c
912
context->bridge = bridge;
drivers/pci/hotplug/acpiphp_glue.c
914
get_bridge(context->func.parent);
drivers/pci/hotplug/acpiphp_ibm.c
251
static void ibm_handle_events(acpi_handle handle, u32 event, void *context)
drivers/pci/hotplug/acpiphp_ibm.c
255
struct notification *note = context;
drivers/pci/hotplug/acpiphp_ibm.c
385
u32 lvl, void *context, void **rv)
drivers/pci/hotplug/acpiphp_ibm.c
387
acpi_handle *phandle = (acpi_handle *)context;
drivers/pci/hotplug/acpiphp_ibm.c
84
static void ibm_handle_events(acpi_handle handle, u32 event, void *context);
drivers/pci/hotplug/acpiphp_ibm.c
90
u32 lvl, void *context, void **rv);
drivers/pci/pci-acpi.c
65
static acpi_status acpi_match_rc(acpi_handle handle, u32 lvl, void *context,
drivers/pci/pci-acpi.c
68
u16 *segment = context;
drivers/pci/pci-acpi.c
838
static void pci_acpi_wake_bus(struct acpi_device_wakeup_context *context)
drivers/pci/pci-acpi.c
840
pci_pme_wakeup_bus(to_pci_host_bridge(context->dev)->bus);
drivers/pci/pci-acpi.c
847
static void pci_acpi_wake_dev(struct acpi_device_wakeup_context *context)
drivers/pci/pci-acpi.c
851
pci_dev = to_pci_dev(context->dev);
drivers/pci/pcie/aer.c
1451
static irqreturn_t aer_isr(int irq, void *context)
drivers/pci/pcie/aer.c
1453
struct pcie_device *dev = (struct pcie_device *)context;
drivers/pci/pcie/aer.c
1472
static irqreturn_t aer_irq(int irq, void *context)
drivers/pci/pcie/aer.c
1474
struct pcie_device *pdev = (struct pcie_device *)context;
drivers/pci/pcie/bwctrl.c
211
static irqreturn_t pcie_bwnotif_irq(int irq, void *context)
drivers/pci/pcie/bwctrl.c
213
struct pcie_device *srv = context;
drivers/pci/pcie/dpc.c
362
static irqreturn_t dpc_handler(int irq, void *context)
drivers/pci/pcie/dpc.c
364
struct pci_dev *pdev = context;
drivers/pci/pcie/dpc.c
383
static irqreturn_t dpc_irq(int irq, void *context)
drivers/pci/pcie/dpc.c
385
struct pci_dev *pdev = context;
drivers/pci/pcie/pme.c
266
static irqreturn_t pcie_pme_irq(int irq, void *context)
drivers/pci/pcie/pme.c
273
port = ((struct pcie_device *)context)->port;
drivers/pci/pcie/pme.c
274
data = get_service_data((struct pcie_device *)context);
drivers/phy/amlogic/phy-meson-g12a-usb3-pcie.c
101
static int phy_g12a_usb3_pcie_cr_bus_read(void *context, unsigned int addr,
drivers/phy/amlogic/phy-meson-g12a-usb3-pcie.c
104
struct phy_g12a_usb3_pcie_priv *priv = context;
drivers/phy/amlogic/phy-meson-g12a-usb3-pcie.c
134
static int phy_g12a_usb3_pcie_cr_bus_write(void *context, unsigned int addr,
drivers/phy/amlogic/phy-meson-g12a-usb3-pcie.c
137
struct phy_g12a_usb3_pcie_priv *priv = context;
drivers/phy/cadence/phy-cadence-sierra.c
414
static int cdns_regmap_write(void *context, unsigned int reg, unsigned int val)
drivers/phy/cadence/phy-cadence-sierra.c
416
struct cdns_regmap_cdb_context *ctx = context;
drivers/phy/cadence/phy-cadence-sierra.c
424
static int cdns_regmap_read(void *context, unsigned int reg, unsigned int *val)
drivers/phy/cadence/phy-cadence-sierra.c
426
struct cdns_regmap_cdb_context *ctx = context;
drivers/phy/cadence/phy-cadence-torrent.c
483
static int cdns_regmap_write(void *context, unsigned int reg, unsigned int val)
drivers/phy/cadence/phy-cadence-torrent.c
485
struct cdns_regmap_cdb_context *ctx = context;
drivers/phy/cadence/phy-cadence-torrent.c
493
static int cdns_regmap_read(void *context, unsigned int reg, unsigned int *val)
drivers/phy/cadence/phy-cadence-torrent.c
495
struct cdns_regmap_cdb_context *ctx = context;
drivers/phy/cadence/phy-cadence-torrent.c
502
static int cdns_regmap_dptx_write(void *context, unsigned int reg,
drivers/phy/cadence/phy-cadence-torrent.c
505
struct cdns_regmap_cdb_context *ctx = context;
drivers/phy/cadence/phy-cadence-torrent.c
513
static int cdns_regmap_dptx_read(void *context, unsigned int reg,
drivers/phy/cadence/phy-cadence-torrent.c
516
struct cdns_regmap_cdb_context *ctx = context;
drivers/phy/tegra/xusb-tegra186.c
1541
priv->context.vbus_id = padctl_readl(padctl, USB2_VBUS_ID);
drivers/phy/tegra/xusb-tegra186.c
1542
priv->context.usb2_pad_mux = padctl_readl(padctl, XUSB_PADCTL_USB2_PAD_MUX);
drivers/phy/tegra/xusb-tegra186.c
1543
priv->context.usb2_port_cap = padctl_readl(padctl, XUSB_PADCTL_USB2_PORT_CAP);
drivers/phy/tegra/xusb-tegra186.c
1544
priv->context.ss_port_cap = padctl_readl(padctl, XUSB_PADCTL_SS_PORT_CAP);
drivers/phy/tegra/xusb-tegra186.c
1551
padctl_writel(padctl, priv->context.usb2_pad_mux, XUSB_PADCTL_USB2_PAD_MUX);
drivers/phy/tegra/xusb-tegra186.c
1552
padctl_writel(padctl, priv->context.usb2_port_cap, XUSB_PADCTL_USB2_PORT_CAP);
drivers/phy/tegra/xusb-tegra186.c
1553
padctl_writel(padctl, priv->context.ss_port_cap, XUSB_PADCTL_SS_PORT_CAP);
drivers/phy/tegra/xusb-tegra186.c
1554
padctl_writel(padctl, priv->context.vbus_id, USB2_VBUS_ID);
drivers/phy/tegra/xusb-tegra186.c
278
struct tegra186_xusb_padctl_context context;
drivers/phy/tegra/xusb-tegra210.c
3195
priv->context.usb2_pad_mux =
drivers/phy/tegra/xusb-tegra210.c
3197
priv->context.usb2_port_cap =
drivers/phy/tegra/xusb-tegra210.c
3199
priv->context.ss_port_map =
drivers/phy/tegra/xusb-tegra210.c
3201
priv->context.usb3_pad_mux =
drivers/phy/tegra/xusb-tegra210.c
3210
padctl_writel(padctl, priv->context.usb2_pad_mux,
drivers/phy/tegra/xusb-tegra210.c
3212
padctl_writel(padctl, priv->context.usb2_port_cap,
drivers/phy/tegra/xusb-tegra210.c
3214
padctl_writel(padctl, priv->context.ss_port_map,
drivers/phy/tegra/xusb-tegra210.c
3222
padctl_writel(padctl, priv->context.usb3_pad_mux,
drivers/phy/tegra/xusb-tegra210.c
428
struct tegra210_xusb_padctl_context context;
drivers/pinctrl/intel/pinctrl-baytrail.c
1528
vg->context.pads = devm_kcalloc(vg->dev, gc->ngpio, sizeof(*vg->context.pads),
drivers/pinctrl/intel/pinctrl-baytrail.c
1530
if (!vg->context.pads)
drivers/pinctrl/intel/pinctrl-baytrail.c
1648
vg->context.pads[i].conf0 = value;
drivers/pinctrl/intel/pinctrl-baytrail.c
1656
vg->context.pads[i].val = value;
drivers/pinctrl/intel/pinctrl-baytrail.c
1681
vg->context.pads[i].conf0) {
drivers/pinctrl/intel/pinctrl-baytrail.c
1683
value |= vg->context.pads[i].conf0;
drivers/pinctrl/intel/pinctrl-baytrail.c
1695
vg->context.pads[i].val) {
drivers/pinctrl/intel/pinctrl-baytrail.c
1699
v |= vg->context.pads[i].val;
drivers/pinctrl/intel/pinctrl-cherryview.c
1244
struct intel_community_context *cctx = &pctrl->context.communities[0];
drivers/pinctrl/intel/pinctrl-cherryview.c
1274
struct intel_community_context *cctx = &pctrl->context.communities[0];
drivers/pinctrl/intel/pinctrl-cherryview.c
1397
struct intel_community_context *cctx = &pctrl->context.communities[0];
drivers/pinctrl/intel/pinctrl-cherryview.c
1619
pctrl->context.pads = devm_kcalloc(dev, pctrl->soc->npins,
drivers/pinctrl/intel/pinctrl-cherryview.c
1620
sizeof(*pctrl->context.pads),
drivers/pinctrl/intel/pinctrl-cherryview.c
1622
if (!pctrl->context.pads)
drivers/pinctrl/intel/pinctrl-cherryview.c
1626
pctrl->context.communities = devm_kcalloc(dev, pctrl->soc->ncommunities,
drivers/pinctrl/intel/pinctrl-cherryview.c
1627
sizeof(*pctrl->context.communities),
drivers/pinctrl/intel/pinctrl-cherryview.c
1629
if (!pctrl->context.communities)
drivers/pinctrl/intel/pinctrl-cherryview.c
1632
cctx = &pctrl->context.communities[0];
drivers/pinctrl/intel/pinctrl-cherryview.c
1678
struct intel_community_context *cctx = &pctrl->context.communities[0];
drivers/pinctrl/intel/pinctrl-cherryview.c
1687
struct intel_pad_context *ctx = &pctrl->context.pads[i];
drivers/pinctrl/intel/pinctrl-cherryview.c
1705
struct intel_community_context *cctx = &pctrl->context.communities[0];
drivers/pinctrl/intel/pinctrl-cherryview.c
1719
struct intel_pad_context *ctx = &pctrl->context.pads[i];
drivers/pinctrl/intel/pinctrl-cherryview.c
750
struct intel_community_context *cctx = &pctrl->context.communities[0];
drivers/pinctrl/intel/pinctrl-intel.c
1544
pctrl->context.pads = pads;
drivers/pinctrl/intel/pinctrl-intel.c
1545
pctrl->context.communities = communities;
drivers/pinctrl/intel/pinctrl-intel.c
1796
pads = pctrl->context.pads;
drivers/pinctrl/intel/pinctrl-intel.c
1815
communities = pctrl->context.communities;
drivers/pinctrl/intel/pinctrl-intel.c
1908
pads = pctrl->context.pads;
drivers/pinctrl/intel/pinctrl-intel.c
1925
communities = pctrl->context.communities;
drivers/pinctrl/intel/pinctrl-intel.h
264
struct intel_pinctrl_context context;
drivers/pinctrl/pinctrl-mcp23s08_spi.c
26
static int mcp23sxx_spi_write(void *context, const void *data, size_t count)
drivers/pinctrl/pinctrl-mcp23s08_spi.c
28
struct mcp23s08 *mcp = context;
drivers/pinctrl/pinctrl-mcp23s08_spi.c
41
static int mcp23sxx_spi_gather_write(void *context,
drivers/pinctrl/pinctrl-mcp23s08_spi.c
45
struct mcp23s08 *mcp = context;
drivers/pinctrl/pinctrl-mcp23s08_spi.c
60
static int mcp23sxx_spi_read(void *context, const void *reg, size_t reg_size,
drivers/pinctrl/pinctrl-mcp23s08_spi.c
63
struct mcp23s08 *mcp = context;
drivers/pinctrl/pinctrl-sx150x.c
1021
static int sx150x_regmap_reg_read(void *context, unsigned int reg,
drivers/pinctrl/pinctrl-sx150x.c
1025
struct sx150x_pinctrl *pctl = context;
drivers/pinctrl/pinctrl-sx150x.c
1074
static int sx150x_regmap_reg_write(void *context, unsigned int reg,
drivers/pinctrl/pinctrl-sx150x.c
1078
struct sx150x_pinctrl *pctl = context;
drivers/platform/arm64/lenovo-thinkpad-t14s.c
115
static int t14s_ec_write(void *context, unsigned int reg,
drivers/platform/arm64/lenovo-thinkpad-t14s.c
118
struct t14s_ec *ec = context;
drivers/platform/arm64/lenovo-thinkpad-t14s.c
131
static int t14s_ec_read(void *context, unsigned int reg,
drivers/platform/arm64/lenovo-thinkpad-t14s.c
134
struct t14s_ec *ec = context;
drivers/platform/chrome/cros_ec_lpc.c
478
void *context, void **retval)
drivers/platform/chrome/cros_ec_lpc.c
480
*(struct acpi_device **)context = acpi_fetch_acpi_dev(handle);
drivers/platform/mellanox/mlx-platform.c
7274
mlxplat_mlxcpld_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/platform/mellanox/mlx-platform.c
7276
struct mlxplat_mlxcpld_regmap_context *ctx = context;
drivers/platform/mellanox/mlx-platform.c
7283
mlxplat_mlxcpld_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/platform/mellanox/mlx-platform.c
7285
struct mlxplat_mlxcpld_regmap_context *ctx = context;
drivers/platform/olpc/olpc-xo175-ec.c
286
priv->msg.context = priv;
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
1071
copy_message_data(copy_callback, context,
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
1134
copy_message_data(copy_callback, context,
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
1173
ssize_t (*copy_callback)(void *context, void *dest,
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
1175
void *context, int size)
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
1211
callback_result = copy_message_data(copy_callback, context,
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
3586
ssize_t (*copy_callback)(void *context, void *dest,
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
3588
void *context,
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
3616
copy_callback, context, size,
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
3621
copy_callback, context, size);
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
901
memcpy_copy_callback(void *context, void *dest, size_t offset, size_t maxsize)
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
903
memcpy(dest + offset, context + offset, maxsize);
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
908
copy_message_data(ssize_t (*copy_callback)(void *context, void *dest, size_t offset,
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
910
void *context,
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
920
callback_result = copy_callback(context, dest + pos, pos,
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
942
ssize_t (*copy_callback)(void *context, void *dest,
drivers/platform/raspberrypi/vchiq-interface/vchiq_core.c
944
void *context, size_t size, int flags)
drivers/platform/raspberrypi/vchiq-interface/vchiq_dev.c
114
struct vchiq_io_copy_callback_context context;
drivers/platform/raspberrypi/vchiq-interface/vchiq_dev.c
119
context.element = elements;
drivers/platform/raspberrypi/vchiq-interface/vchiq_dev.c
120
context.element_offset = 0;
drivers/platform/raspberrypi/vchiq-interface/vchiq_dev.c
121
context.elements_to_go = count;
drivers/platform/raspberrypi/vchiq-interface/vchiq_dev.c
131
&context, total_size);
drivers/platform/raspberrypi/vchiq-interface/vchiq_dev.c
71
static ssize_t vchiq_ioc_copy_element_data(void *context, void *dest,
drivers/platform/raspberrypi/vchiq-interface/vchiq_dev.c
74
struct vchiq_io_copy_callback_context *cc = context;
drivers/platform/raspberrypi/vchiq-mmal/mmal-msg.h
90
u32 context; /* a u32 per message context */
drivers/platform/raspberrypi/vchiq-mmal/mmal-vchiq.c
414
m.h.context = msg_context->handle;
drivers/platform/raspberrypi/vchiq-mmal/mmal-vchiq.c
594
if (!msg->h.context) {
drivers/platform/raspberrypi/vchiq-mmal/mmal-vchiq.c
601
msg->h.context);
drivers/platform/raspberrypi/vchiq-mmal/mmal-vchiq.c
604
msg->h.context);
drivers/platform/raspberrypi/vchiq-mmal/mmal-vchiq.c
678
msg->h.context = msg_context->handle;
drivers/platform/surface/surface_acpi_notify.c
740
void *context, void **rv)
drivers/platform/surface/surface_acpi_notify.c
743
struct platform_device *pdev = context;
drivers/platform/wmi/core.c
1106
struct wmi_guid_count_context *context = data;
drivers/platform/wmi/core.c
1109
if (guid_equal(&wblock->gblock.guid, context->guid))
drivers/platform/wmi/core.c
1110
context->count++;
drivers/platform/wmi/core.c
1117
struct wmi_guid_count_context context = {
drivers/platform/wmi/core.c
1123
ret = bus_for_each_dev(&wmi_bus_type, NULL, &context, wmi_count_guids);
drivers/platform/wmi/core.c
1127
return context.count;
drivers/platform/wmi/core.c
126
return id->context;
drivers/platform/wmi/core.c
1428
static void acpi_wmi_notify_handler(acpi_handle handle, u32 event, void *context)
drivers/platform/wmi/core.c
1430
struct device *wmi_bus_dev = context;
drivers/platform/x86/acer-wmi.c
2471
static void acer_wmi_notify(union acpi_object *obj, void *context)
drivers/platform/x86/adv_swbutton.c
28
static void adv_swbutton_notify(acpi_handle handle, u32 event, void *context)
drivers/platform/x86/adv_swbutton.c
30
struct platform_device *device = context;
drivers/platform/x86/apple-gmux.c
663
static void gmux_notify_handler(acpi_handle device, u32 value, void *context)
drivers/platform/x86/apple-gmux.c
666
struct pnp_dev *pnp = (struct pnp_dev *)context;
drivers/platform/x86/asus-nb-wmi.c
54
void *context)
drivers/platform/x86/asus-wmi.c
4604
static void asus_wmi_notify(union acpi_object *obj, void *context)
drivers/platform/x86/asus-wmi.c
4606
struct asus_wmi *asus = context;
drivers/platform/x86/dell/alienware-wmi-legacy.c
58
static int legacy_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/dell/alienware-wmi-wmax.c
1595
static int wmax_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/dell/dell-laptop.c
729
void *context)
drivers/platform/x86/dell/dell-rbtn.c
245
static void ACPI_SYSTEM_XFACE rbtn_clear_suspended_flag(void *context)
drivers/platform/x86/dell/dell-rbtn.c
247
struct rbtn_data *rbtn_data = context;
drivers/platform/x86/dell/dell-smbios-wmi.c
221
static int dell_smbios_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/dell/dell-wmi-aio.c
73
static void dell_wmi_aio_notify(union acpi_object *obj, void *context)
drivers/platform/x86/dell/dell-wmi-base.c
793
static int dell_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/dell/dell-wmi-ddv.c
1047
static int dell_wmi_ddv_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/dell/dell-wmi-descriptor.c
102
const void *context)
drivers/platform/x86/dell/dell-wmi-privacy.c
294
static int dell_privacy_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/dell/dell-wmi-sysman/biosattr-interface.c
147
static int bios_attr_set_interface_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/dell/dell-wmi-sysman/passwordattr-interface.c
113
static int bios_attr_pass_interface_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/dell/dell_rbu.c
499
static void callbackfn_rbu(const struct firmware *fw, void *context)
drivers/platform/x86/dell/dell_rbu.c
579
&rbu_device->dev, GFP_KERNEL, &context,
drivers/platform/x86/dell/dell_rbu.c
83
static int context;
drivers/platform/x86/gigabyte-wmi.c
131
static int gigabyte_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/hp/hp-bioscfg/biosattr-interface.c
275
static int hp_attr_set_interface_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/hp/hp-wmi.c
1100
static void hp_wmi_notify(union acpi_object *obj, void *context)
drivers/platform/x86/hp/hp_accel.c
270
struct serio *port, void *context)
drivers/platform/x86/huawei-wmi.c
741
static void huawei_wmi_input_notify(union acpi_object *obj, void *context)
drivers/platform/x86/huawei-wmi.c
743
struct input_dev *idev = (struct input_dev *)context;
drivers/platform/x86/inspur_platform_profile.c
182
static int inspur_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/intel/hid.c
560
static void notify_handler(acpi_handle handle, u32 event, void *context)
drivers/platform/x86/intel/hid.c
562
struct platform_device *device = context;
drivers/platform/x86/intel/int1092/intel_sar.c
130
struct wwan_sar_context *context = dev_get_drvdata(&device->dev);
drivers/platform/x86/intel/int1092/intel_sar.c
135
out = acpi_evaluate_dsm_typed(context->handle, &context->guid, rev,
drivers/platform/x86/intel/int1092/intel_sar.c
142
context->sar_data.device_mode = out->integer.value;
drivers/platform/x86/intel/int1092/intel_sar.c
143
update_sar_data(context);
drivers/platform/x86/intel/int1092/intel_sar.c
159
struct wwan_sar_context *context = dev_get_drvdata(dev);
drivers/platform/x86/intel/int1092/intel_sar.c
161
return sysfs_emit(buf, "%d %d %d %d\n", context->sar_data.device_mode,
drivers/platform/x86/intel/int1092/intel_sar.c
162
context->sar_data.bandtable_index,
drivers/platform/x86/intel/int1092/intel_sar.c
163
context->sar_data.antennatable_index,
drivers/platform/x86/intel/int1092/intel_sar.c
164
context->sar_data.sartable_index);
drivers/platform/x86/intel/int1092/intel_sar.c
170
struct wwan_sar_context *context = dev_get_drvdata(dev);
drivers/platform/x86/intel/int1092/intel_sar.c
172
return sysfs_emit(buf, "%d\n", context->reg_value);
drivers/platform/x86/intel/int1092/intel_sar.c
178
struct wwan_sar_context *context = dev_get_drvdata(dev);
drivers/platform/x86/intel/int1092/intel_sar.c
189
context->reg_value = value;
drivers/platform/x86/intel/int1092/intel_sar.c
190
update_sar_data(context);
drivers/platform/x86/intel/int1092/intel_sar.c
216
static void sar_get_data(int reg, struct wwan_sar_context *context)
drivers/platform/x86/intel/int1092/intel_sar.c
223
out = acpi_evaluate_dsm_typed(context->handle, &context->guid, rev,
drivers/platform/x86/intel/int1092/intel_sar.c
232
context->config_data[reg].version = out->package.elements[0].integer.value;
drivers/platform/x86/intel/int1092/intel_sar.c
233
context->config_data[reg].total_dev_mode =
drivers/platform/x86/intel/int1092/intel_sar.c
235
if (context->config_data[reg].total_dev_mode <= 0 ||
drivers/platform/x86/intel/int1092/intel_sar.c
236
context->config_data[reg].total_dev_mode > MAX_DEV_MODES) {
drivers/platform/x86/intel/int1092/intel_sar.c
240
parse_package(context, &out->package.elements[2]);
drivers/platform/x86/intel/int1092/intel_sar.c
247
struct wwan_sar_context *context;
drivers/platform/x86/intel/int1092/intel_sar.c
251
context = kzalloc_obj(*context);
drivers/platform/x86/intel/int1092/intel_sar.c
252
if (!context)
drivers/platform/x86/intel/int1092/intel_sar.c
255
context->sar_device = device;
drivers/platform/x86/intel/int1092/intel_sar.c
256
context->handle = ACPI_HANDLE(&device->dev);
drivers/platform/x86/intel/int1092/intel_sar.c
257
dev_set_drvdata(&device->dev, context);
drivers/platform/x86/intel/int1092/intel_sar.c
259
result = guid_parse(SAR_DSM_UUID, &context->guid);
drivers/platform/x86/intel/int1092/intel_sar.c
266
sar_get_data(reg, context);
drivers/platform/x86/intel/int1092/intel_sar.c
291
kfree(context);
drivers/platform/x86/intel/int1092/intel_sar.c
297
struct wwan_sar_context *context = dev_get_drvdata(&device->dev);
drivers/platform/x86/intel/int1092/intel_sar.c
304
kfree(context->config_data[reg].device_mode_info);
drivers/platform/x86/intel/int1092/intel_sar.c
306
kfree(context);
drivers/platform/x86/intel/int1092/intel_sar.c
38
static void update_sar_data(struct wwan_sar_context *context)
drivers/platform/x86/intel/int1092/intel_sar.c
41
&context->config_data[context->reg_value];
drivers/platform/x86/intel/int1092/intel_sar.c
44
context->sar_data.device_mode < config->total_dev_mode) {
drivers/platform/x86/intel/int1092/intel_sar.c
48
if (context->sar_data.device_mode ==
drivers/platform/x86/intel/int1092/intel_sar.c
53
context->sar_data.antennatable_index = dev_mode->antennatable_index;
drivers/platform/x86/intel/int1092/intel_sar.c
54
context->sar_data.bandtable_index = dev_mode->bandtable_index;
drivers/platform/x86/intel/int1092/intel_sar.c
55
context->sar_data.sartable_index = dev_mode->sartable_index;
drivers/platform/x86/intel/int1092/intel_sar.c
77
static acpi_status parse_package(struct wwan_sar_context *context, union acpi_object *item)
drivers/platform/x86/intel/int1092/intel_sar.c
89
data = &context->config_data[reg];
drivers/platform/x86/intel/vbtn.c
151
static void notify_handler(acpi_handle handle, u32 event, void *context)
drivers/platform/x86/intel/vbtn.c
153
struct platform_device *device = context;
drivers/platform/x86/intel/wmi/sbl-fw-update.c
102
const void *context)
drivers/platform/x86/lenovo/ideapad-laptop.c
2257
static int ideapad_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/lenovo/ideapad-laptop.c
2265
*wpriv = *(const struct ideapad_wmi_private *)context;
drivers/platform/x86/lenovo/think-lmi.c
1827
static int tlmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/lenovo/thinkpad_acpi.c
767
u32 level, void *context, void **return_value)
drivers/platform/x86/lenovo/thinkpad_acpi.c
769
if (!strcmp(context, "video")) {
drivers/platform/x86/lenovo/wmi-camera.c
100
static int lenovo_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/lenovo/wmi-capdata.c
708
static int lwmi_cd_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/lenovo/wmi-capdata.c
710
const struct lwmi_cd_info *info = context;
drivers/platform/x86/lenovo/wmi-capdata.c
803
.context = &lwmi_cd_table[_type],
drivers/platform/x86/lenovo/wmi-events.c
157
static int lwmi_events_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/lenovo/wmi-events.c
161
if (!context)
drivers/platform/x86/lenovo/wmi-events.c
169
priv->type = *(enum lwmi_events_type *)context;
drivers/platform/x86/lenovo/wmi-events.c
25
.guid_string = (guid), .context = &(enum lwmi_events_type) \
drivers/platform/x86/lenovo/wmi-gamezone.c
357
static int lwmi_gz_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/lenovo/wmi-hotkey-utilities.c
190
static int lenovo_super_hotkey_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/lenovo/wmi-other.c
1093
static int lwmi_other_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/lenovo/yogabook.c
334
static int yogabook_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/lg-laptop.c
214
static void wmi_notify(union acpi_object *obj, void *context)
drivers/platform/x86/lg-laptop.c
216
long data = (long)context;
drivers/platform/x86/msi-laptop.c
810
void *context)
drivers/platform/x86/msi-wmi-platform.c
408
static int msi_wmi_platform_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/msi-wmi.c
173
static void msi_wmi_notify(union acpi_object *obj, void *context)
drivers/platform/x86/panasonic-laptop.c
263
struct serio *port, void *context)
drivers/platform/x86/quickstart.c
147
static void quickstart_notify_remove(void *context)
drivers/platform/x86/quickstart.c
149
struct quickstart_data *data = context;
drivers/platform/x86/quickstart.c
74
static void quickstart_notify(acpi_handle handle, u32 event, void *context)
drivers/platform/x86/quickstart.c
76
struct quickstart_data *data = context;
drivers/platform/x86/redmi-wmi.c
67
static int redmi_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/samsung-galaxybook.c
1171
void *context)
drivers/platform/x86/samsung-galaxybook.c
1173
struct samsung_galaxybook *galaxybook = context;
drivers/platform/x86/sony-laptop.c
1273
void *context, void **return_value)
drivers/platform/x86/sony-laptop.c
4155
sony_pic_read_possible_resource(struct acpi_resource *resource, void *context)
drivers/platform/x86/sony-laptop.c
4158
struct sony_pic_dev *dev = (struct sony_pic_dev *)context;
drivers/platform/x86/toshiba-wmi.c
35
static void toshiba_wmi_notify(union acpi_object *obj, void *context)
drivers/platform/x86/toshiba_acpi.c
2758
struct serio *port, void *context)
drivers/platform/x86/tuxedo/nb04/wmi_ab.c
852
static int tux_probe(struct wmi_device *wdev, const void *context __always_unused)
drivers/platform/x86/uniwill/uniwill-acpi.c
1421
static void uniwill_disable_manual_control(void *context)
drivers/platform/x86/uniwill/uniwill-acpi.c
1423
struct uniwill_data *data = context;
drivers/platform/x86/uniwill/uniwill-acpi.c
436
static int uniwill_ec_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/platform/x86/uniwill/uniwill-acpi.c
452
struct uniwill_data *data = context;
drivers/platform/x86/uniwill/uniwill-acpi.c
468
static int uniwill_ec_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/platform/x86/uniwill/uniwill-acpi.c
478
struct uniwill_data *data = context;
drivers/platform/x86/wmi-bmof.c
56
static int wmi_bmof_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/xiaomi-wmi.c
20
.context = &(const unsigned int){key}
drivers/platform/x86/xiaomi-wmi.c
28
static int xiaomi_wmi_probe(struct wmi_device *wdev, const void *context)
drivers/platform/x86/xiaomi-wmi.c
33
if (!context)
drivers/platform/x86/xiaomi-wmi.c
51
data->key_code = *((const unsigned int *)context);
drivers/pnp/pnpacpi/core.c
287
u32 lvl, void *context,
drivers/pwm/pwm-iqs620a.c
140
unsigned long event_flags, void *context)
drivers/pwm/pwm-iqs620a.c
176
static void iqs620_pwm_notifier_unregister(void *context)
drivers/pwm/pwm-iqs620a.c
178
struct iqs620_pwm_private *iqs620_pwm = context;
drivers/rapidio/devices/rio_mport_cdev.c
1498
static int rio_mport_pw_handler(struct rio_mport *mport, void *context,
drivers/rapidio/devices/rio_mport_cdev.c
1501
struct mport_dev *md = context;
drivers/rapidio/rio.c
1212
pwrite->pwcback(mport, pwrite->context, pw_msg, 0);
drivers/rapidio/rio.c
38
int (*pwcback)(struct rio_mport *mport, void *context,
drivers/rapidio/rio.c
40
void *context;
drivers/rapidio/rio.c
562
int rio_add_mport_pw_handler(struct rio_mport *mport, void *context,
drivers/rapidio/rio.c
564
void *context, union rio_pw_msg *msg, int step))
drivers/rapidio/rio.c
572
pwrite->context = context;
drivers/rapidio/rio.c
589
int rio_del_mport_pw_handler(struct rio_mport *mport, void *context,
drivers/rapidio/rio.c
591
void *context, union rio_pw_msg *msg, int step))
drivers/rapidio/rio.c
598
if (pwrite->pwcback == pwcback && pwrite->context == context) {
drivers/rapidio/rio_cm.c
1227
static int riocm_ch_bind(u16 ch_id, u8 mport_id, void *context)
drivers/rapidio/rio_cm.c
1263
ch->context = context;
drivers/rapidio/rio_cm.c
185
void *context;
drivers/rapidio/rio_cm.c
960
ch->context = NULL;
drivers/regulator/pf0900-regulator.c
345
static int pf0900_regmap_read(void *context, unsigned int reg,
drivers/regulator/pf0900-regulator.c
348
struct device *dev = context;
drivers/regulator/pf0900-regulator.c
388
static int pf0900_regmap_write(void *context, unsigned int reg,
drivers/regulator/pf0900-regulator.c
391
struct device *dev = context;
drivers/regulator/rt5133-regulator.c
368
static int rt5133_regmap_hw_read(void *context, const void *reg_buf,
drivers/regulator/rt5133-regulator.c
372
struct rt5133_priv *priv = context;
drivers/regulator/rt5133-regulator.c
412
static int rt5133_regmap_hw_write(void *context, const void *data, size_t count)
drivers/regulator/rt5133-regulator.c
414
struct rt5133_priv *priv = context;
drivers/regulator/rt6245-regulator.c
144
static int rt6245_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/regulator/rt6245-regulator.c
146
struct i2c_client *i2c = context;
drivers/remoteproc/imx_dsp_rproc.c
1229
static void imx_dsp_load_firmware(const struct firmware *fw, void *context)
drivers/remoteproc/imx_dsp_rproc.c
1231
struct rproc *rproc = context;
drivers/remoteproc/remoteproc_core.c
1667
static void rproc_auto_boot_callback(const struct firmware *fw, void *context)
drivers/remoteproc/remoteproc_core.c
1669
struct rproc *rproc = context;
drivers/rtc/rtc-cmos.c
747
static u32 rtc_handler(void *context)
drivers/rtc/rtc-cmos.c
749
struct device *dev = context;
drivers/rtc/rtc-gamecube.c
125
static int exi_write(void *context, u32 reg, u32 data)
drivers/rtc/rtc-gamecube.c
127
struct priv *d = (struct priv *)context;
drivers/rtc/rtc-gamecube.c
96
static int exi_read(void *context, u32 reg, u32 *data)
drivers/rtc/rtc-gamecube.c
98
struct priv *d = (struct priv *)context;
drivers/rtc/rtc-meson.c
157
static int meson_rtc_serial_bus_reg_read(void *context, unsigned int reg,
drivers/rtc/rtc-meson.c
160
struct meson_rtc *rtc = context;
drivers/rtc/rtc-meson.c
176
static int meson_rtc_serial_bus_reg_write(void *context, unsigned int reg,
drivers/rtc/rtc-meson.c
179
struct meson_rtc *rtc = context;
drivers/rtc/rtc-meson.c
256
static int meson_rtc_regmem_read(void *context, unsigned int offset,
drivers/rtc/rtc-meson.c
259
struct meson_rtc *rtc = context;
drivers/rtc/rtc-meson.c
268
static int meson_rtc_regmem_write(void *context, unsigned int offset,
drivers/rtc/rtc-meson.c
271
struct meson_rtc *rtc = context;
drivers/rtc/rtc-palmas.c
199
static irqreturn_t palmas_rtc_interrupt(int irq, void *context)
drivers/rtc/rtc-palmas.c
201
struct palmas_rtc *palmas_rtc = context;
drivers/rtc/rtc-pcf2127.c
1375
static int pcf2127_i2c_write(void *context, const void *data, size_t count)
drivers/rtc/rtc-pcf2127.c
1377
struct device *dev = context;
drivers/rtc/rtc-pcf2127.c
1388
static int pcf2127_i2c_gather_write(void *context,
drivers/rtc/rtc-pcf2127.c
1392
struct device *dev = context;
drivers/rtc/rtc-pcf2127.c
1417
static int pcf2127_i2c_read(void *context, const void *reg, size_t reg_size,
drivers/rtc/rtc-pcf2127.c
1420
struct device *dev = context;
drivers/scsi/aacraid/aachba.c
1001
scsicmd = (struct scsi_cmnd *) context;
drivers/scsi/aacraid/aachba.c
1226
static void io_callback(void *context, struct fib * fibptr);
drivers/scsi/aacraid/aachba.c
1577
static void aac_srb_callback(void *context, struct fib * fibptr);
drivers/scsi/aacraid/aachba.c
2316
static void io_callback(void *context, struct fib * fibptr)
drivers/scsi/aacraid/aachba.c
2323
scsicmd = (struct scsi_cmnd *) context;
drivers/scsi/aacraid/aachba.c
2603
static void synchronize_callback(void *context, struct fib *fibptr)
drivers/scsi/aacraid/aachba.c
2606
struct scsi_cmnd *cmd = context;
drivers/scsi/aacraid/aachba.c
2691
static void aac_start_stop_callback(void *context, struct fib *fibptr)
drivers/scsi/aacraid/aachba.c
2693
struct scsi_cmnd *scsicmd = context;
drivers/scsi/aacraid/aachba.c
3375
static void aac_srb_callback(void *context, struct fib * fibptr)
drivers/scsi/aacraid/aachba.c
3380
scsicmd = (struct scsi_cmnd *) context;
drivers/scsi/aacraid/aachba.c
3649
void aac_hba_callback(void *context, struct fib *fibptr)
drivers/scsi/aacraid/aachba.c
3657
scsicmd = (struct scsi_cmnd *) context;
drivers/scsi/aacraid/aachba.c
532
static void get_container_name_callback(void *context, struct fib * fibptr)
drivers/scsi/aacraid/aachba.c
537
scsicmd = (struct scsi_cmnd *) context;
drivers/scsi/aacraid/aachba.c
633
static void _aac_probe_container2(void * context, struct fib * fibptr)
drivers/scsi/aacraid/aachba.c
637
struct scsi_cmnd *scsicmd = context;
drivers/scsi/aacraid/aachba.c
693
static void _aac_probe_container1(void * context, struct fib * fibptr)
drivers/scsi/aacraid/aachba.c
705
_aac_probe_container2(context, fibptr);
drivers/scsi/aacraid/aachba.c
709
scsicmd = (struct scsi_cmnd *) context;
drivers/scsi/aacraid/aachba.c
741
_aac_probe_container2(context, fibptr);
drivers/scsi/aacraid/aachba.c
994
static void get_container_serial_callback(void *context, struct fib * fibptr)
drivers/scsi/aacraid/aacraid.h
2701
void aac_fib_free(struct fib * context);
drivers/scsi/aacraid/aacraid.h
2702
void aac_fib_init(struct fib * context);
drivers/scsi/aacraid/aacraid.h
2704
int aac_fib_send(u16 command, struct fib * context, unsigned long size, int priority, int wait, int reply, fib_callback callback, void *ctxt);
drivers/scsi/aacraid/aacraid.h
2705
int aac_hba_send(u8 command, struct fib *context,
drivers/scsi/aacraid/aacraid.h
2709
int aac_fib_complete(struct fib * context);
drivers/scsi/aacraid/aacraid.h
2710
void aac_hba_callback(void *context, struct fib *fibptr);
drivers/scsi/aacraid/commctrl.c
178
struct aac_fib_context * context;
drivers/scsi/aacraid/commctrl.c
209
context = list_entry(entry, struct aac_fib_context, next);
drivers/scsi/aacraid/commctrl.c
210
if (context->unique == fibctx->unique) {
drivers/scsi/aacraid/dpcsup.c
228
static void aac_aif_callback(void *context, struct fib * fibptr)
drivers/scsi/aacraid/dpcsup.c
234
fibctx = (struct fib *)context;
drivers/scsi/aacraid/linit.c
871
static void aac_tmf_callback(void *context, struct fib *fibptr)
drivers/scsi/aacraid/linit.c
875
struct aac_hba_map_info *info = context;
drivers/scsi/aic94xx/aic94xx_sas.h
502
__le16 context; /* Clear nexus context */
drivers/scsi/be2iscsi/be_cmds.c
1032
be_dws_cpu_to_le(ctxt, sizeof(req->context));
drivers/scsi/be2iscsi/be_cmds.c
754
AMAP_SET_BITS(struct amap_eq_context, func, req->context,
drivers/scsi/be2iscsi/be_cmds.c
756
AMAP_SET_BITS(struct amap_eq_context, valid, req->context, 1);
drivers/scsi/be2iscsi/be_cmds.c
757
AMAP_SET_BITS(struct amap_eq_context, size, req->context, 0);
drivers/scsi/be2iscsi/be_cmds.c
758
AMAP_SET_BITS(struct amap_eq_context, count, req->context,
drivers/scsi/be2iscsi/be_cmds.c
760
AMAP_SET_BITS(struct amap_eq_context, delaymult, req->context,
drivers/scsi/be2iscsi/be_cmds.c
762
be_dws_cpu_to_le(req->context, sizeof(req->context));
drivers/scsi/be2iscsi/be_cmds.c
784
void *ctxt = &req->context;
drivers/scsi/be2iscsi/be_cmds.c
824
be_dws_cpu_to_le(ctxt, sizeof(req->context));
drivers/scsi/be2iscsi/be_cmds.c
866
ctxt = &req->context;
drivers/scsi/be2iscsi/be_cmds.c
885
be_dws_cpu_to_le(ctxt, sizeof(req->context));
drivers/scsi/be2iscsi/be_cmds.c
984
void *ctxt = &req->context;
drivers/scsi/be2iscsi/be_cmds.h
317
u8 context[sizeof(struct amap_eq_context) / 8]; /* dw[4] */
drivers/scsi/be2iscsi/be_cmds.h
617
u8 context[sizeof(struct amap_cq_context) / 8];
drivers/scsi/be2iscsi/be_cmds.h
652
u8 context[sizeof(struct amap_mcc_context) / 8];
drivers/scsi/be2iscsi/be_cmds.h
911
struct be_default_pdu_context context;
drivers/scsi/bnx2fc/bnx2fc.h
514
void bnx2fc_indicate_kcqe(void *context, struct kcqe *kcq[],
drivers/scsi/bnx2fc/bnx2fc_fcoe.c
858
static void bnx2fc_indicate_netevent(void *context, unsigned long event,
drivers/scsi/bnx2fc/bnx2fc_fcoe.c
861
struct bnx2fc_hba *hba = (struct bnx2fc_hba *)context;
drivers/scsi/bnx2fc/bnx2fc_hwi.c
1342
void bnx2fc_indicate_kcqe(void *context, struct kcqe *kcq[],
drivers/scsi/bnx2fc/bnx2fc_hwi.c
1345
struct bnx2fc_hba *hba = (struct bnx2fc_hba *)context;
drivers/scsi/bnx2i/bnx2i_hwi.c
2475
static void bnx2i_indicate_kcqe(void *context, struct kcqe *kcqe[],
drivers/scsi/bnx2i/bnx2i_hwi.c
2478
struct bnx2i_hba *hba = context;
drivers/scsi/bnx2i/bnx2i_hwi.c
2530
static void bnx2i_indicate_netevent(void *context, unsigned long event,
drivers/scsi/bnx2i/bnx2i_hwi.c
2533
struct bnx2i_hba *hba = context;
drivers/scsi/bnx2i/bnx2i_hwi.c
2571
struct bnx2i_endpoint *ep = (struct bnx2i_endpoint *) cm_sk->context;
drivers/scsi/bnx2i/bnx2i_hwi.c
2593
struct bnx2i_endpoint *ep = (struct bnx2i_endpoint *) cm_sk->context;
drivers/scsi/bnx2i/bnx2i_hwi.c
2609
struct bnx2i_endpoint *ep = (struct bnx2i_endpoint *) cm_sk->context;
drivers/scsi/bnx2i/bnx2i_hwi.c
2625
struct bnx2i_endpoint *ep = (struct bnx2i_endpoint *) cm_sk->context;
drivers/scsi/bnx2i/bnx2i_hwi.c
2641
struct bnx2i_endpoint *ep = (struct bnx2i_endpoint *) cm_sk->context;
drivers/scsi/bnx2i/bnx2i_hwi.c
2653
static int bnx2i_send_nl_mesg(void *context, u32 msg_type,
drivers/scsi/bnx2i/bnx2i_hwi.c
2656
struct bnx2i_hba *hba = context;
drivers/scsi/csiostor/csio_lnode.c
565
csio_ln_fdmi_start(struct csio_lnode *ln, void *context)
drivers/scsi/csiostor/csio_lnode.c
568
struct csio_rnode *fdmi_rn = (struct csio_rnode *)context;
drivers/scsi/elx/libefc_sli/sli4.c
3706
sli_cmd_common_nop(struct sli4 *sli4, void *buf, uint64_t context)
drivers/scsi/elx/libefc_sli/sli4.c
3718
memcpy(&nop->context, &context, sizeof(context));
drivers/scsi/elx/libefc_sli/sli4.h
2969
__le32 context[2];
drivers/scsi/elx/libefc_sli/sli4.h
2974
__le32 context[2];
drivers/scsi/elx/libefc_sli/sli4.h
3942
sli_cmd_common_nop(struct sli4 *sli4, void *buf, uint64_t context);
drivers/scsi/esas2r/esas2r.h
995
void esas2r_adapter_tasklet(unsigned long context);
drivers/scsi/esas2r/esas2r_ioctl.c
1199
struct esas2r_request *rq, void *context)
drivers/scsi/esas2r/esas2r_ioctl.c
1235
bi.context = NULL;
drivers/scsi/esas2r/esas2r_ioctl.c
269
if (!(*bi->callback)(a, rq, &sgc, bi->context)) {
drivers/scsi/esas2r/esas2r_ioctl.c
297
struct esas2r_sg_context *sgc, void *context)
drivers/scsi/esas2r/esas2r_ioctl.c
394
struct esas2r_sg_context *sgc, void *context)
drivers/scsi/esas2r/esas2r_ioctl.c
396
struct atto_csmi *ci = (struct atto_csmi *)context;
drivers/scsi/esas2r/esas2r_ioctl.c
609
struct esas2r_request *rq, void *context)
drivers/scsi/esas2r/esas2r_ioctl.c
611
struct atto_csmi *ci = (struct atto_csmi *)context;
drivers/scsi/esas2r/esas2r_ioctl.c
659
bi.context = ci;
drivers/scsi/esas2r/esas2r_ioctl.c
749
void *context)
drivers/scsi/esas2r/esas2r_ioctl.c
77
void *context;
drivers/scsi/esas2r/esas2r_main.c
1547
void esas2r_adapter_tasklet(unsigned long context)
drivers/scsi/esas2r/esas2r_main.c
1549
struct esas2r_adapter *a = (struct esas2r_adapter *)context;
drivers/scsi/hisi_sas/hisi_sas_v1_hw.c
1322
u32 irq_value, context, port_id, link_rate;
drivers/scsi/hisi_sas/hisi_sas_v1_hw.c
1335
context = hisi_sas_read32(hisi_hba, PHY_CONTEXT);
drivers/scsi/hisi_sas/hisi_sas_v1_hw.c
1336
if (context & 1 << phy_no) {
drivers/scsi/hisi_sas/hisi_sas_v2_hw.c
1404
u32 context;
drivers/scsi/hisi_sas/hisi_sas_v2_hw.c
1406
context = hisi_sas_read32(hisi_hba, PHY_CONTEXT);
drivers/scsi/hisi_sas/hisi_sas_v2_hw.c
1407
if (context & (1 << phy_no))
drivers/scsi/hisi_sas/hisi_sas_v3_hw.c
1586
u32 context, port_id, link_rate;
drivers/scsi/hisi_sas/hisi_sas_v3_hw.c
1607
context = hisi_sas_read32(hisi_hba, PHY_CONTEXT);
drivers/scsi/hisi_sas/hisi_sas_v3_hw.c
1608
if (context & (1 << phy_no)) {
drivers/scsi/hptiop.c
102
if (readl(&p->context))
drivers/scsi/hptiop.c
105
writel(1, &p->context);
drivers/scsi/hptiop.c
305
writel(0, &req->context);
drivers/scsi/hptiop.c
312
if (readl(&req->context))
drivers/scsi/hptiop.c
438
req->header.context = cpu_to_le32(IOP_REQUEST_TYPE_GET_CONFIG<<5);
drivers/scsi/hptiop.c
515
req->header.context = cpu_to_le32(IOP_REQUEST_TYPE_SET_CONFIG<<5);
drivers/scsi/hptiop.c
538
req->header.context = cpu_to_le32(IOP_REQUEST_TYPE_SET_CONFIG<<5);
drivers/scsi/hptiop.c
724
req->header.context, tag);
drivers/scsi/hptiop.c
805
readl(&req->context), tag);
drivers/scsi/hptiop.c
812
(readl(&req->context) |
drivers/scsi/hptiop.c
878
reqhdr->context = cpu_to_le32(IOPMU_QUEUE_ADDR_HOST_BIT |
drivers/scsi/hptiop.c
906
reqhdr->context = cpu_to_le32(_req->index<<8 |
drivers/scsi/hptiop.c
933
reqhdr->context = cpu_to_le32(IOPMU_QUEUE_ADDR_HOST_BIT |
drivers/scsi/hptiop.h
148
__le32 context; /* host context */
drivers/scsi/isci/remote_device.c
912
u32 context;
drivers/scsi/isci/remote_device.c
914
context = request |
drivers/scsi/isci/remote_device.c
919
sci_controller_post_request(iport->owning_controller, context);
drivers/scsi/lpfc/lpfc.h
148
struct lpfc_async_xchg_ctx *context;
drivers/scsi/lpfc/lpfc.h
168
void *context;
drivers/scsi/lpfc/lpfc_ct.c
1905
uint8_t retry, uint32_t context)
drivers/scsi/lpfc/lpfc_ct.c
1958
context);
drivers/scsi/lpfc/lpfc_ct.c
2002
CtReq->un.gid.Fc4Type = context;
drivers/scsi/lpfc/lpfc_ct.c
2014
CtReq->un.gid.PortType = context;
drivers/scsi/lpfc/lpfc_ct.c
2026
CtReq->un.gff.PortId = cpu_to_be32(context);
drivers/scsi/lpfc/lpfc_ct.c
2033
CtReq->un.gft.PortId = cpu_to_be32(context);
drivers/scsi/lpfc/lpfc_ct.c
2136
(context == FC_TYPE_NVME)) {
drivers/scsi/lpfc/lpfc_ct.c
2144
CtReq->un.rff.type_code = context;
drivers/scsi/lpfc/lpfc_ct.c
2148
(context == FC_TYPE_FCP))
drivers/scsi/lpfc/lpfc_ct.c
2149
CtReq->un.rff.type_code = context;
drivers/scsi/lpfc/lpfc_ct.c
2158
(context == FC_TYPE_NVME) ? "NVME" : "FCP",
drivers/scsi/lpfc/lpfc_hw4.h
1238
struct eq_context context;
drivers/scsi/lpfc/lpfc_hw4.h
1280
uint32_t context[2];
drivers/scsi/lpfc/lpfc_hw4.h
1365
struct cq_context context;
drivers/scsi/lpfc/lpfc_hw4.h
1686
struct rq_context context;
drivers/scsi/lpfc/lpfc_hw4.h
1738
struct rq_context context;
drivers/scsi/lpfc/lpfc_hw4.h
1804
struct mq_context context;
drivers/scsi/lpfc/lpfc_hw4.h
1855
struct mq_context context;
drivers/scsi/lpfc/lpfc_init.c
14576
lpfc_write_firmware(const struct firmware *fw, void *context)
drivers/scsi/lpfc/lpfc_init.c
14578
struct lpfc_hba *phba = (struct lpfc_hba *)context;
drivers/scsi/lpfc/lpfc_nvmet.c
1469
kfree(ctx_buf->context);
drivers/scsi/lpfc/lpfc_nvmet.c
1576
ctx_buf->context = kzalloc_obj(*ctx_buf->context);
drivers/scsi/lpfc/lpfc_nvmet.c
1577
if (!ctx_buf->context) {
drivers/scsi/lpfc/lpfc_nvmet.c
1584
ctx_buf->context->ctxbuf = ctx_buf;
drivers/scsi/lpfc/lpfc_nvmet.c
1585
ctx_buf->context->state = LPFC_NVME_STE_FREE;
drivers/scsi/lpfc/lpfc_nvmet.c
1589
kfree(ctx_buf->context);
drivers/scsi/lpfc/lpfc_nvmet.c
1608
kfree(ctx_buf->context);
drivers/scsi/lpfc/lpfc_nvmet.c
2195
struct lpfc_async_xchg_ctx *ctxp = ctx_buf->context;
drivers/scsi/lpfc/lpfc_nvmet.c
2468
ctxp = (struct lpfc_async_xchg_ctx *)ctx_buf->context;
drivers/scsi/lpfc/lpfc_nvmet.c
389
struct lpfc_async_xchg_ctx *ctxp = ctx_buf->context;
drivers/scsi/lpfc/lpfc_nvmet.c
441
ctxp = (struct lpfc_async_xchg_ctx *)ctx_buf->context;
drivers/scsi/lpfc/lpfc_scsi.c
5953
uint64_t lun_id, lpfc_ctx_cmd context)
drivers/scsi/lpfc/lpfc_scsi.c
5959
cnt = lpfc_sli_sum_iocb(vport, tgt_id, lun_id, context);
drivers/scsi/lpfc/lpfc_scsi.c
5963
tgt_id, lun_id, context);
drivers/scsi/lpfc/lpfc_scsi.c
5967
cnt = lpfc_sli_sum_iocb(vport, tgt_id, lun_id, context);
drivers/scsi/lpfc/lpfc_scsi.c
5972
((context == LPFC_CTX_LUN) ? "LUN" :
drivers/scsi/lpfc/lpfc_scsi.c
5973
((context == LPFC_CTX_TGT) ? "TGT" :
drivers/scsi/lpfc/lpfc_scsi.c
5974
((context == LPFC_CTX_HOST) ? "HOST" : "Unknown"))),
drivers/scsi/lpfc/lpfc_sli.c
16171
bf_set(lpfc_eq_context_size, &eq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16173
bf_set(lpfc_eq_context_valid, &eq_create->u.request.context, 1);
drivers/scsi/lpfc/lpfc_sli.c
16179
bf_set(lpfc_eq_context_autovalid, &eq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16185
bf_set(lpfc_eq_context_delay_multi, &eq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16198
bf_set(lpfc_eq_context_count, &eq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16202
bf_set(lpfc_eq_context_count, &eq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16206
bf_set(lpfc_eq_context_count, &eq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16210
bf_set(lpfc_eq_context_count, &eq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16214
bf_set(lpfc_eq_context_count, &eq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16367
bf_set(lpfc_cq_context_event, &cq_create->u.request.context, 1);
drivers/scsi/lpfc/lpfc_sli.c
16368
bf_set(lpfc_cq_context_valid, &cq_create->u.request.context, 1);
drivers/scsi/lpfc/lpfc_sli.c
16374
bf_set(lpfc_cq_eq_id_2, &cq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16376
bf_set(lpfc_cq_context_autovalid, &cq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16379
bf_set(lpfc_cq_eq_id, &cq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16387
cq_create->u.request.context.lpfc_cq_context_count =
drivers/scsi/lpfc/lpfc_sli.c
16390
&cq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16407
bf_set(lpfc_cq_context_count, &cq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16411
bf_set(lpfc_cq_context_count, &cq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16415
bf_set(lpfc_cq_context_count, &cq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16746
bf_set(lpfc_mq_context_cq_id, &mq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16748
bf_set(lpfc_mq_context_valid, &mq_create->u.request.context, 1);
drivers/scsi/lpfc/lpfc_sli.c
16751
bf_set(lpfc_mq_context_ring_size, &mq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16755
bf_set(lpfc_mq_context_ring_size, &mq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16759
bf_set(lpfc_mq_context_ring_size, &mq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16763
bf_set(lpfc_mq_context_ring_size, &mq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16838
bf_set(lpfc_mq_context_valid, &mq_create_ext->u.request.context, 1);
drivers/scsi/lpfc/lpfc_sli.c
16845
bf_set(lpfc_mq_context_cq_id, &mq_create_ext->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16859
&mq_create_ext->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16864
&mq_create_ext->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16869
&mq_create_ext->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
16874
&mq_create_ext->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17250
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17252
rq_create->u.request.context.buffer_size = LPFC_HDR_BUF_SIZE;
drivers/scsi/lpfc/lpfc_sli.c
17254
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17257
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17272
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17277
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17282
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17287
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17291
bf_set(lpfc_rq_context_buf_size, &rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17294
bf_set(lpfc_rq_context_cq_id, &rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17385
&rq_create->u.request.context, hrq->entry_count);
drivers/scsi/lpfc/lpfc_sli.c
17387
rq_create->u.request.context.buffer_size =
drivers/scsi/lpfc/lpfc_sli.c
17390
rq_create->u.request.context.buffer_size =
drivers/scsi/lpfc/lpfc_sli.c
17392
bf_set(lpfc_rq_context_rqe_size, &rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17394
bf_set(lpfc_rq_context_page_size, &rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17409
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17414
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17419
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17424
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17430
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17434
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17437
bf_set(lpfc_rq_context_cq_id, &rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17578
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17581
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17584
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17587
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17590
&rq_create->u.request.context,
drivers/scsi/lpfc/lpfc_sli.c
17593
&rq_create->u.request.context,
drivers/scsi/megaraid/megaraid_sas.h
1742
__le32 context; /*08h */
drivers/scsi/megaraid/megaraid_sas.h
1812
__le32 context; /*08h */
drivers/scsi/megaraid/megaraid_sas.h
1856
__le32 context; /*08h */
drivers/scsi/megaraid/megaraid_sas.h
1885
__le32 context; /*08h */
drivers/scsi/megaraid/megaraid_sas.h
1908
__le32 context; /*08h */
drivers/scsi/megaraid/megaraid_sas.h
1936
__le32 context; /*08h */
drivers/scsi/megaraid/megaraid_sas.h
1963
__le32 context; /*08h */
drivers/scsi/megaraid/megaraid_sas.h
1990
__le32 context; /*08h */
drivers/scsi/megaraid/megaraid_sas.h
2599
} context;
drivers/scsi/megaraid/megaraid_sas_base.c
2284
u32 context;
drivers/scsi/megaraid/megaraid_sas_base.c
2300
context = le32_to_cpu(instance->reply_queue[consumer]);
drivers/scsi/megaraid/megaraid_sas_base.c
2301
if (context >= instance->max_fw_cmds) {
drivers/scsi/megaraid/megaraid_sas_base.c
2303
context);
drivers/scsi/megaraid/megaraid_sas_base.c
2307
cmd = instance->cmd_list[context];
drivers/scsi/megaraid/megaraid_sas_base.c
367
cmd->frame->io.context = cpu_to_le32(cmd->index);
drivers/scsi/megaraid/megaraid_sas_base.c
4406
cmd->frame->io.context = cpu_to_le32(cmd->index);
drivers/scsi/megaraid/megaraid_sas_base.c
5486
__le32 context;
drivers/scsi/megaraid/megaraid_sas_base.c
5510
context = init_frame->context;
drivers/scsi/megaraid/megaraid_sas_base.c
5513
init_frame->context = context;
drivers/scsi/megaraid/megaraid_sas_base.c
8284
cmd->frame->hdr.context = cpu_to_le32(cmd->index);
drivers/scsi/megaraid/megaraid_sas_fusion.c
3904
mfi_cmd->context.smid = cmd->index;
drivers/scsi/megaraid/megaraid_sas_fusion.c
3953
index = cmd->context.smid;
drivers/scsi/megaraid/megaraid_sas_fusion.c
4322
smid = le16_to_cpu(cmd_mfi->context.smid);
drivers/scsi/megaraid/megaraid_sas_fusion.c
4560
cmd_mfi->context.smid = cmd_fusion->index;
drivers/scsi/mpt3sas/mpt3sas_ctl.c
365
event_log[i].context = ioc->event_context++;
drivers/scsi/mpt3sas/mpt3sas_ctl.h
241
uint32_t context;
drivers/scsi/qedf/drv_fcoe_fw_funcs.c
118
struct fcoe_task_context *ctx = task_params->context;
drivers/scsi/qedf/drv_fcoe_fw_funcs.c
25
struct fcoe_task_context *ctx = task_params->context;
drivers/scsi/qedf/drv_fcoe_fw_funcs.h
13
struct fcoe_task_context *context;
drivers/scsi/qedf/qedf_io.c
623
io_req->task_params->context = task_ctx;
drivers/scsi/qedf/qedf_io.c
702
io_req->task_params->context = task_ctx;
drivers/scsi/qedi/qedi_fw.c
1034
task_params.context = fw_task_ctx;
drivers/scsi/qedi/qedi_fw.c
1108
task_params.context = fw_task_ctx;
drivers/scsi/qedi/qedi_fw.c
1482
task_params.context = fw_task_ctx;
drivers/scsi/qedi/qedi_fw.c
1606
task_params.context = fw_task_ctx;
drivers/scsi/qedi/qedi_fw.c
1723
task_params.context = fw_task_ctx;
drivers/scsi/qedi/qedi_fw.c
2083
task_params.context = fw_task_ctx;
drivers/scsi/qedi/qedi_fw_api.c
205
struct iscsi_task_context *context;
drivers/scsi/qedi/qedi_fw_api.c
210
context = task_params->context;
drivers/scsi/qedi/qedi_fw_api.c
211
val_byte = context->mstorm_ag_context.cdu_validation;
drivers/scsi/qedi/qedi_fw_api.c
212
memset(context, 0, sizeof(*context));
drivers/scsi/qedi/qedi_fw_api.c
213
context->mstorm_ag_context.cdu_validation = val_byte;
drivers/scsi/qedi/qedi_fw_api.c
216
ARRAY_SIZE(context->ystorm_st_context.pdu_hdr.data.data);
drivers/scsi/qedi/qedi_fw_api.c
219
context->ystorm_st_context.pdu_hdr.data.data[index] = val;
drivers/scsi/qedi/qedi_fw_api.c
222
context->mstorm_st_context.task_type = task_type;
drivers/scsi/qedi/qedi_fw_api.c
223
context->mstorm_ag_context.task_cid =
drivers/scsi/qedi/qedi_fw_api.c
226
SET_FIELD(context->ustorm_ag_context.flags1,
drivers/scsi/qedi/qedi_fw_api.c
229
context->ustorm_st_context.task_type = task_type;
drivers/scsi/qedi/qedi_fw_api.c
230
context->ustorm_st_context.cq_rss_number = task_params->cq_rss_number;
drivers/scsi/qedi/qedi_fw_api.c
231
context->ustorm_ag_context.icid = cpu_to_le16(task_params->conn_icid);
drivers/scsi/qedi/qedi_fw_api.c
274
void set_rw_exp_data_acked_and_cont_len(struct iscsi_task_context *context,
drivers/scsi/qedi/qedi_fw_api.c
286
SET_FIELD(context->ustorm_st_context.flags2,
drivers/scsi/qedi/qedi_fw_api.c
298
context->ustorm_ag_context.exp_data_acked =
drivers/scsi/qedi/qedi_fw_api.c
307
context->ustorm_ag_context.exp_data_acked = val;
drivers/scsi/qedi/qedi_fw_api.c
310
context->ustorm_ag_context.exp_data_acked =
drivers/scsi/qedi/qedi_fw_api.c
317
context->ustorm_ag_context.exp_cont_len = val;
drivers/scsi/qedi/qedi_fw_api.c
473
static void set_local_completion_context(struct iscsi_task_context *context)
drivers/scsi/qedi/qedi_fw_api.c
475
SET_FIELD(context->ystorm_st_context.state.flags,
drivers/scsi/qedi/qedi_fw_api.c
477
SET_FIELD(context->ustorm_st_context.flags,
drivers/scsi/qedi/qedi_fw_api.c
501
cxt = task_params->context;
drivers/scsi/qedi/qedi_fw_api.c
563
init_ustorm_task_contexts(&task_params->context->ustorm_st_context,
drivers/scsi/qedi/qedi_fw_api.c
564
&task_params->context->ustorm_ag_context,
drivers/scsi/qedi/qedi_fw_api.c
569
set_rw_exp_data_acked_and_cont_len(task_params->context, conn_params,
drivers/scsi/qedi/qedi_fw_api.c
576
init_rtdif_task_context(&task_params->context->rdif_context,
drivers/scsi/qedi/qedi_fw_api.c
577
&task_params->context->tdif_context,
drivers/scsi/qedi/qedi_fw_api.c
620
cxt = task_params->context;
drivers/scsi/qedi/qedi_fw_api.c
662
cxt = task_params->context;
drivers/scsi/qedi/qedi_fw_api.c
669
set_local_completion_context(task_params->context);
drivers/scsi/qedi/qedi_fw_api.c
708
cxt = task_params->context;
drivers/scsi/qedi/qedi_fw_api.c
763
cxt = task_params->context;
drivers/scsi/qedi/qedi_fw_iscsi.h
13
struct iscsi_task_context *context;
drivers/scsi/qedi/qedi_main.c
74
static int qedi_iscsi_event_cb(void *context, u8 fw_event_code, void *fw_handle)
drivers/scsi/qedi/qedi_main.c
81
if (!context || !fw_handle) {
drivers/scsi/qedi/qedi_main.c
86
qedi = (struct qedi_ctx *)context;
drivers/scsi/qla2xxx/qla_bsg.c
1204
ql84_mgmt->mgmt.mgmtp.u.info.context);
drivers/scsi/qla2xxx/qla_bsg.h
120
uint32_t context;
drivers/scsi/smartpqi/smartpqi.h
1266
void *context);
drivers/scsi/smartpqi/smartpqi.h
1267
void *context;
drivers/scsi/smartpqi/smartpqi_init.c
3512
io_request->io_complete_callback(io_request, io_request->context);
drivers/scsi/smartpqi/smartpqi_init.c
4605
void *context)
drivers/scsi/smartpqi/smartpqi_init.c
4607
struct completion *waiting = context;
drivers/scsi/smartpqi/smartpqi_init.c
4683
io_request->context = &wait;
drivers/scsi/smartpqi/smartpqi_init.c
5553
void *context)
drivers/scsi/smartpqi/smartpqi_init.c
5702
void *context)
drivers/scsi/smartpqi/smartpqi_init.c
6305
void *context)
drivers/scsi/smartpqi/smartpqi_init.c
6307
struct completion *waiting = context;
drivers/scsi/smartpqi/smartpqi_init.c
6357
io_request->context = &wait;
drivers/scsi/smartpqi/smartpqi_init.c
9174
io_request->context);
drivers/scsi/storvsc_drv.c
1240
static void storvsc_on_channel_callback(void *context)
drivers/scsi/storvsc_drv.c
1242
struct vmbus_channel *channel = (struct vmbus_channel *)context;
drivers/scsi/storvsc_drv.c
372
static void storvsc_on_channel_callback(void *context);
drivers/scsi/vmw_pvscsi.c
202
pvscsi_get_context(const struct pvscsi_adapter *adapter, u64 context)
drivers/scsi/vmw_pvscsi.c
204
return &adapter->cmd_map[context - 1];
drivers/scsi/vmw_pvscsi.c
264
cmd.context = pvscsi_map_context(adapter, ctx);
drivers/scsi/vmw_pvscsi.c
552
ctx = pvscsi_get_context(adapter, e->context);
drivers/scsi/vmw_pvscsi.c
765
e->context = pvscsi_map_context(adapter, ctx);
drivers/scsi/vmw_pvscsi.h
170
u64 context;
drivers/scsi/vmw_pvscsi.h
322
u64 context;
drivers/scsi/vmw_pvscsi.h
375
u64 context;
drivers/soc/atmel/sfr.c
23
static int atmel_sfr_read(void *context, unsigned int offset,
drivers/soc/atmel/sfr.c
26
struct atmel_sfr_priv *priv = context;
drivers/soc/fsl/qe/qmc.c
1164
xfer_desc->context = NULL;
drivers/soc/fsl/qe/qmc.c
1198
xfer_desc->context = NULL;
drivers/soc/fsl/qe/qmc.c
199
void (*tx_complete)(void *context);
drivers/soc/fsl/qe/qmc.c
200
void (*rx_complete)(void *context, size_t length, unsigned int flags);
drivers/soc/fsl/qe/qmc.c
202
void *context;
drivers/soc/fsl/qe/qmc.c
443
void (*complete)(void *context), void *context)
drivers/soc/fsl/qe/qmc.c
481
xfer_desc->context = context;
drivers/soc/fsl/qe/qmc.c
511
void (*complete)(void *context);
drivers/soc/fsl/qe/qmc.c
513
void *context;
drivers/soc/fsl/qe/qmc.c
535
context = xfer_desc->context;
drivers/soc/fsl/qe/qmc.c
537
xfer_desc->context = NULL;
drivers/soc/fsl/qe/qmc.c
548
complete(context);
drivers/soc/fsl/qe/qmc.c
561
void (*complete)(void *context, size_t length, unsigned int flags),
drivers/soc/fsl/qe/qmc.c
562
void *context)
drivers/soc/fsl/qe/qmc.c
600
xfer_desc->context = context;
drivers/soc/fsl/qe/qmc.c
642
void (*complete)(void *context, size_t size, unsigned int flags);
drivers/soc/fsl/qe/qmc.c
646
void *context;
drivers/soc/fsl/qe/qmc.c
668
context = xfer_desc->context;
drivers/soc/fsl/qe/qmc.c
670
xfer_desc->context = NULL;
drivers/soc/fsl/qe/qmc.c
698
complete(context, datalen,
drivers/soc/hisilicon/kunpeng_hccs.c
110
void *context)
drivers/soc/hisilicon/kunpeng_hccs.c
113
struct hccs_register_ctx *ctx = context;
drivers/soc/mediatek/mtk-pmic-wrap.c
1578
static int pwrap_regmap_read(void *context, u32 adr, u32 *rdata)
drivers/soc/mediatek/mtk-pmic-wrap.c
1580
return pwrap_read(context, adr, rdata);
drivers/soc/mediatek/mtk-pmic-wrap.c
1583
static int pwrap_regmap_write(void *context, u32 adr, u32 wdata)
drivers/soc/mediatek/mtk-pmic-wrap.c
1585
return pwrap_write(context, adr, wdata);
drivers/soc/samsung/exynos-pmu.h
78
int tensor_sec_reg_write(void *context, unsigned int reg, unsigned int val);
drivers/soc/samsung/exynos-pmu.h
79
int tensor_sec_reg_read(void *context, unsigned int reg, unsigned int *val);
drivers/soc/samsung/exynos-pmu.h
80
int tensor_sec_update_bits(void *context, unsigned int reg, unsigned int mask,
drivers/soc/samsung/gs101-pmu.c
343
int tensor_sec_reg_write(void *context, unsigned int reg, unsigned int val)
drivers/soc/samsung/gs101-pmu.c
346
unsigned long pmu_base = (unsigned long)context;
drivers/soc/samsung/gs101-pmu.c
359
static int tensor_sec_reg_rmw(void *context, unsigned int reg,
drivers/soc/samsung/gs101-pmu.c
363
unsigned long pmu_base = (unsigned long)context;
drivers/soc/samsung/gs101-pmu.c
380
int tensor_sec_reg_read(void *context, unsigned int reg, unsigned int *val)
drivers/soc/samsung/gs101-pmu.c
396
static int tensor_set_bits_atomic(void *context, unsigned int offset, u32 val,
drivers/soc/samsung/gs101-pmu.c
413
ret = tensor_sec_reg_write(context, offset, i);
drivers/soc/samsung/gs101-pmu.c
439
int tensor_sec_update_bits(void *context, unsigned int reg, unsigned int mask,
drivers/soc/samsung/gs101-pmu.c
443
return tensor_sec_reg_rmw(context, reg, mask, val);
drivers/soc/samsung/gs101-pmu.c
445
return tensor_set_bits_atomic(context, reg, val, mask);
drivers/soc/tegra/pmc.c
3046
static int tegra_pmc_regmap_readl(void *context, unsigned int offset, unsigned int *value)
drivers/soc/tegra/pmc.c
3048
struct tegra_pmc *pmc = context;
drivers/soc/tegra/pmc.c
3054
static int tegra_pmc_regmap_writel(void *context, unsigned int offset, unsigned int value)
drivers/soc/tegra/pmc.c
3056
struct tegra_pmc *pmc = context;
drivers/soc/ti/wkup_m3_ipc.c
112
static void wkup_m3_scale_data_fw_cb(const struct firmware *fw, void *context)
drivers/soc/ti/wkup_m3_ipc.c
116
struct wkup_m3_ipc *m3_ipc = context;
drivers/spi/spi-altera-dfl.c
49
static int indirect_bus_reg_read(void *context, unsigned int reg,
drivers/spi/spi-altera-dfl.c
52
void __iomem *base = context;
drivers/spi/spi-altera-dfl.c
75
static int indirect_bus_reg_write(void *context, unsigned int reg,
drivers/spi/spi-altera-dfl.c
78
void __iomem *base = context;
drivers/spi/spi-ch341.c
94
struct ch341_spi_dev *ch341 = urb->context;
drivers/spi/spi-mpc512x-psc.c
316
m->complete(m->context);
drivers/spi/spi-mpc52xx.c
243
ms->message->complete(ms->message->context);
drivers/spi/spi-mpc52xx.c
300
ms->message->complete(ms->message->context);
drivers/spi/spi-mux.c
115
priv->child_msg_context = m->context;
drivers/spi/spi-mux.c
119
m->context = priv;
drivers/spi/spi-mux.c
42
void (*child_msg_complete)(void *context);
drivers/spi/spi-mux.c
86
static void spi_mux_complete_cb(void *context)
drivers/spi/spi-mux.c
88
struct spi_mux_priv *priv = (struct spi_mux_priv *)context;
drivers/spi/spi-mux.c
93
m->context = priv->child_msg_context;
drivers/spi/spi-sh.c
319
mesg->complete(mesg->context);
drivers/spi/spi-slave-system-control.c
104
priv->msg.context = priv;
drivers/spi/spi-slave-time.c
78
priv->msg.context = priv;
drivers/spi/spi-topcliff-pch.c
1122
pmsg->complete(pmsg->context);
drivers/spi/spi-topcliff-pch.c
1191
(data->current_msg->context);
drivers/spi/spi-topcliff-pch.c
362
data->current_msg->complete(data->current_msg->context);
drivers/spi/spi-topcliff-pch.c
567
pmsg->complete(pmsg->context);
drivers/spi/spi-topcliff-pch.c
620
data->current_msg->complete(data->current_msg->context);
drivers/spi/spi-topcliff-pch.c
652
pmsg->complete(pmsg->context);
drivers/spi/spi.c
2177
mesg->complete(mesg->context);
drivers/spi/spi.c
4690
message->context = &done;
drivers/spi/spi.c
4701
message->context = NULL;
drivers/staging/media/atomisp/pci/sh_css.c
8228
&sp_pipeline_input_terminal->context.virtual_input_system_stream[i];
drivers/staging/media/atomisp/pci/sh_css.c
943
sp_pipeline_input_terminal->context.virtual_input_system_stream[i].valid = 0;
drivers/staging/media/atomisp/pci/sh_css.c
969
&sp_pipeline_input_terminal->context.virtual_input_system_stream[i],
drivers/staging/media/atomisp/pci/sh_css.c
976
&sp_pipeline_input_terminal->context.virtual_input_system_stream[i],
drivers/staging/media/atomisp/pci/sh_css.c
981
&sp_pipeline_input_terminal->context.virtual_input_system_stream[i]);
drivers/staging/media/atomisp/pci/sh_css_internal.h
394
} context;
drivers/staging/rtl8723bs/core/rtw_cmd.c
373
int rtw_cmd_thread(void *context)
drivers/staging/rtl8723bs/core/rtw_cmd.c
380
struct adapter *padapter = context;
drivers/staging/rtl8723bs/core/rtw_mlme_ext.c
5982
p->func(p->context);
drivers/staging/rtl8723bs/core/rtw_xmit.c
2517
int rtw_xmit_thread(void *context)
drivers/staging/rtl8723bs/core/rtw_xmit.c
2523
padapter = context;
drivers/staging/rtl8723bs/hal/rtl8723bs_xmit.c
403
int rtl8723bs_xmit_thread(void *context)
drivers/staging/rtl8723bs/hal/rtl8723bs_xmit.c
406
struct adapter *padapter = context;
drivers/staging/rtl8723bs/include/rtl8723b_xmit.h
416
int rtl8723bs_xmit_thread(void *context);
drivers/staging/rtl8723bs/include/rtw_cmd.h
115
int rtw_cmd_thread(void *context);
drivers/staging/rtl8723bs/include/rtw_cmd.h
550
void *context;
drivers/staging/rtl8723bs/include/rtw_mlme.h
266
int event_thread(void *context);
drivers/staging/rtl8723bs/include/rtw_xmit.h
481
int rtw_xmit_thread(void *context);
drivers/target/iscsi/cxgbit/cxgbit_cm.c
510
conn->context = csk;
drivers/target/iscsi/cxgbit/cxgbit_cm.c
756
__cxgbit_free_conn(conn->context);
drivers/target/iscsi/cxgbit/cxgbit_ddp.c
233
struct cxgbit_sock *csk = conn->context;
drivers/target/iscsi/cxgbit/cxgbit_ddp.c
271
struct cxgbit_sock *csk = conn->context;
drivers/target/iscsi/cxgbit/cxgbit_target.c
1631
struct cxgbit_sock *csk = conn->context;
drivers/target/iscsi/cxgbit/cxgbit_target.c
1647
struct cxgbit_sock *csk = conn->context;
drivers/target/iscsi/cxgbit/cxgbit_target.c
517
struct cxgbit_sock *csk = conn->context;
drivers/target/iscsi/cxgbit/cxgbit_target.c
536
struct cxgbit_sock *csk = conn->context;
drivers/target/iscsi/cxgbit/cxgbit_target.c
574
struct cxgbit_sock *csk = conn->context;
drivers/target/iscsi/cxgbit/cxgbit_target.c
717
struct cxgbit_sock *csk = conn->context;
drivers/target/iscsi/cxgbit/cxgbit_target.c
777
struct cxgbit_sock *csk = conn->context;
drivers/target/iscsi/cxgbit/cxgbit_target.c
860
struct cxgbit_sock *csk = conn->context;
drivers/thermal/intel/int340x_thermal/int3400_thermal.c
153
struct acpi_osc_context context = {
drivers/thermal/intel/int340x_thermal/int3400_thermal.c
163
status = acpi_run_osc(handle, &context);
drivers/thermal/intel/int340x_thermal/int3400_thermal.c
165
ret = *((u32 *)(context.ret.pointer + 4));
drivers/thermal/intel/int340x_thermal/int3400_thermal.c
169
kfree(context.ret.pointer);
drivers/thunderbolt/test.c
15
static int __ida_init(struct kunit_resource *res, void *context)
drivers/thunderbolt/test.c
17
struct ida *ida = context;
drivers/tty/serial/mxs-auart.c
1076
static irqreturn_t mxs_auart_irq_handle(int irq, void *context)
drivers/tty/serial/mxs-auart.c
1079
struct mxs_auart_port *s = context;
drivers/tty/serial/serial_mctrl_gpio.c
177
static irqreturn_t mctrl_gpio_irq_handle(int irq, void *context)
drivers/tty/serial/serial_mctrl_gpio.c
179
struct mctrl_gpios *gpios = context;
drivers/tty/serial/ucc_uart.c
1140
static void uart_firmware_cont(const struct firmware *fw, void *context)
drivers/tty/serial/ucc_uart.c
1143
struct device *dev = context;
drivers/uio/uio_hv_generic.c
108
static void hv_uio_channel_cb(void *context)
drivers/uio/uio_hv_generic.c
110
struct vmbus_channel *chan = context;
drivers/usb/atm/cxacru.c
575
complete(urb->context);
drivers/usb/atm/speedtch.c
599
struct speedtch_instance_data *instance = int_urb->context;
drivers/usb/atm/ueagle-atm.c
2088
struct uea_softc *sc = urb->context;
drivers/usb/atm/ueagle-atm.c
601
void *context)
drivers/usb/atm/ueagle-atm.c
603
struct usb_device *usb = context;
drivers/usb/atm/usbatm.c
215
struct usbatm_channel *channel = urb->context;
drivers/usb/atm/usbatm.c
245
struct usbatm_channel *channel = urb->context;
drivers/usb/chipidea/udc.c
1225
struct ci_hdrc *ci = req->context;
drivers/usb/chipidea/udc.c
1264
ci->status->context = ci;
drivers/usb/class/cdc-acm.c
1680
acm_start_wb(acm, urb->context);
drivers/usb/class/cdc-acm.c
373
struct acm *acm = urb->context;
drivers/usb/class/cdc-acm.c
518
struct acm_rb *rb = urb->context;
drivers/usb/class/cdc-acm.c
592
struct acm_wb *wb = urb->context;
drivers/usb/class/cdc-acm.c
784
wb = urb->context;
drivers/usb/class/cdc-wdm.c
155
desc = urb->context;
drivers/usb/class/cdc-wdm.c
170
struct wdm_device *desc = urb->context;
drivers/usb/class/cdc-wdm.c
260
desc = urb->context;
drivers/usb/class/cdc-wdm.c
876
struct sk_buff *skb = urb->context;
drivers/usb/class/usblp.c
311
struct usblp *usblp = urb->context;
drivers/usb/class/usblp.c
335
struct usblp *usblp = urb->context;
drivers/usb/class/usbtmc.c
1087
struct usbtmc_file_data *file_data = urb->context;
drivers/usb/class/usbtmc.c
2299
struct usbtmc_device_data *data = urb->context;
drivers/usb/class/usbtmc.c
780
struct usbtmc_file_data *file_data = urb->context;
drivers/usb/core/devio.c
1132
complete((struct completion *) urb->context);
drivers/usb/core/devio.c
1146
urb->context = &ctx;
drivers/usb/core/devio.c
1910
as->urb->context = as;
drivers/usb/core/devio.c
2172
ps->disccontext.sival_int = ds.context;
drivers/usb/core/devio.c
2291
ps->disccontext.sival_ptr = ds.context;
drivers/usb/core/devio.c
625
struct async *as = urb->context;
drivers/usb/core/hcd.c
2130
struct completion *done = urb->context;
drivers/usb/core/hcd.c
2173
urb->context = done;
drivers/usb/core/hub.c
775
struct usb_hub *hub = urb->context;
drivers/usb/core/message.c
37
struct api_context *ctx = urb->context;
drivers/usb/core/message.c
466
struct usb_sg_request *io = urb->context;
drivers/usb/core/message.c
60
urb->context = &ctx;
drivers/usb/core/message.c
611
urb->context = io;
drivers/usb/dwc2/hcd.c
4013
struct dwc2_tt *dwc2_host_get_tt_info(struct dwc2_hsotg *hsotg, void *context,
drivers/usb/dwc2/hcd.c
4016
struct urb *urb = context;
drivers/usb/dwc2/hcd.c
4076
int dwc2_host_get_speed(struct dwc2_hsotg *hsotg, void *context)
drivers/usb/dwc2/hcd.c
4078
struct urb *urb = context;
drivers/usb/dwc2/hcd.c
477
static void dwc2_host_hub_info(struct dwc2_hsotg *hsotg, void *context,
drivers/usb/dwc2/hcd.c
480
struct urb *urb = context;
drivers/usb/dwc2/hcd.h
778
void *context, gfp_t mem_flags,
drivers/usb/dwc2/hcd.h
783
int dwc2_host_get_speed(struct dwc2_hsotg *hsotg, void *context);
drivers/usb/gadget/composite.c
1565
if (!req->context)
drivers/usb/gadget/composite.c
1568
cdev = req->context;
drivers/usb/gadget/composite.c
1785
req->context = cdev;
drivers/usb/gadget/composite.c
2081
req->context = cdev;
drivers/usb/gadget/composite.c
2265
req->context = cdev;
drivers/usb/gadget/composite.c
2425
cdev->req->context = cdev;
drivers/usb/gadget/composite.c
2475
cdev->os_desc_req->context = cdev;
drivers/usb/gadget/composite.c
2744
req->context = cdev;
drivers/usb/gadget/function/f_acm.c
560
struct f_acm *acm = req->context;
drivers/usb/gadget/function/f_acm.c
680
request->context = acm;
drivers/usb/gadget/function/f_ecm.c
437
struct f_ecm *ecm = req->context;
drivers/usb/gadget/function/f_ecm.c
762
request->context = ecm;
drivers/usb/gadget/function/f_eem.c
321
struct in_context *ctx = req->context;
drivers/usb/gadget/function/f_eem.c
474
req->context = ctx;
drivers/usb/gadget/function/f_fs.c
1120
req->context = io_data;
drivers/usb/gadget/function/f_fs.c
1173
req->context = io_data;
drivers/usb/gadget/function/f_fs.c
1417
ffs_dmabuf_signal_done(req->context, req->status);
drivers/usb/gadget/function/f_fs.c
144
u64 context;
drivers/usb/gadget/function/f_fs.c
1534
priv->context = dma_fence_context_alloc(1);
drivers/usb/gadget/function/f_fs.c
1685
&priv->lock, priv->context, seqno);
drivers/usb/gadget/function/f_fs.c
1699
usb_req->context = fence;
drivers/usb/gadget/function/f_fs.c
2294
ffs->ep0req->context = ffs;
drivers/usb/gadget/function/f_fs.c
302
struct ffs_data *ffs = req->context;
drivers/usb/gadget/function/f_fs.c
747
struct ffs_io_data *io_data = req->context;
drivers/usb/gadget/function/f_fs.c
888
struct ffs_io_data *io_data = req->context;
drivers/usb/gadget/function/f_hid.c
1081
req->context = hidg;
drivers/usb/gadget/function/f_hid.c
1159
hidg->get_req->context = hidg;
drivers/usb/gadget/function/f_hid.c
517
req->context = hidg;
drivers/usb/gadget/function/f_hid.c
771
struct f_hidg *hidg = (struct f_hidg *) req->context;
drivers/usb/gadget/function/f_hid.c
806
struct f_hidg *hidg = (struct f_hidg *)req->context;
drivers/usb/gadget/function/f_hid.c
896
req->context = hidg;
drivers/usb/gadget/function/f_loopback.c
246
struct usb_request *in_req = req->context;
drivers/usb/gadget/function/f_loopback.c
257
req = req->context;
drivers/usb/gadget/function/f_loopback.c
288
req->context);
drivers/usb/gadget/function/f_loopback.c
337
in_req->context = out_req;
drivers/usb/gadget/function/f_loopback.c
338
out_req->context = in_req;
drivers/usb/gadget/function/f_mass_storage.c
2390
bh->inreq->context = bh->outreq->context = bh;
drivers/usb/gadget/function/f_mass_storage.c
418
struct fsg_buffhd *bh = req->context;
drivers/usb/gadget/function/f_mass_storage.c
434
struct fsg_buffhd *bh = req->context;
drivers/usb/gadget/function/f_mass_storage.c
471
req->context = NULL;
drivers/usb/gadget/function/f_midi2.c
1034
struct f_midi2_req_ctx *ctx = req->context;
drivers/usb/gadget/function/f_midi2.c
1053
struct f_midi2_req_ctx *ctx = req->context;
drivers/usb/gadget/function/f_midi2.c
1155
usb_ep->reqs[i].req->context = &usb_ep->reqs[i];
drivers/usb/gadget/function/f_midi2.c
408
struct f_midi2_req_ctx *ctx = req->context;
drivers/usb/gadget/function/f_midi2.c
423
struct f_midi2_req_ctx *ctx = req->context;
drivers/usb/gadget/function/f_midi2.c
687
struct f_midi2_req_ctx *ctx = req->context;
drivers/usb/gadget/function/f_midi2.c
749
struct f_midi2_req_ctx *ctx = req->context;
drivers/usb/gadget/function/f_ncm.c
1527
request->context = ncm;
drivers/usb/gadget/function/f_ncm.c
623
struct f_ncm *ncm = req->context;
drivers/usb/gadget/function/f_ncm.c
653
struct usb_function *f = req->context;
drivers/usb/gadget/function/f_ncm.c
657
req->context = NULL;
drivers/usb/gadget/function/f_ncm.c
750
req->context = f;
drivers/usb/gadget/function/f_phonet.c
204
struct sk_buff *skb = req->context;
drivers/usb/gadget/function/f_phonet.c
244
req->context = skb;
drivers/usb/gadget/function/f_phonet.c
306
req->context = page;
drivers/usb/gadget/function/f_phonet.c
318
struct page *page = req->context;
drivers/usb/gadget/function/f_rndis.c
403
struct f_rndis *rndis = req->context;
drivers/usb/gadget/function/f_rndis.c
442
struct f_rndis *rndis = req->context;
drivers/usb/gadget/function/f_rndis.c
480
req->context = rndis;
drivers/usb/gadget/function/f_rndis.c
497
req->context = rndis;
drivers/usb/gadget/function/f_rndis.c
755
request->context = rndis;
drivers/usb/gadget/function/f_tcm.c
1079
struct usbg_cmd *cmd = req->context;
drivers/usb/gadget/function/f_tcm.c
1145
req->context = cmd;
drivers/usb/gadget/function/f_tcm.c
140
req->context = cmd;
drivers/usb/gadget/function/f_tcm.c
168
fu->bot_status.req->context = cmd;
drivers/usb/gadget/function/f_tcm.c
196
struct usbg_cmd *cmd = req->context;
drivers/usb/gadget/function/f_tcm.c
241
fu->bot_req_in->context = cmd;
drivers/usb/gadget/function/f_tcm.c
278
struct f_uas *fu = req->context;
drivers/usb/gadget/function/f_tcm.c
340
fu->cmd[0].req->context = fu;
drivers/usb/gadget/function/f_tcm.c
585
stream->req_in->context = cmd;
drivers/usb/gadget/function/f_tcm.c
608
stream->req_status->context = cmd;
drivers/usb/gadget/function/f_tcm.c
62
struct usbg_cmd *cmd = req->context;
drivers/usb/gadget/function/f_tcm.c
660
stream->req_status->context = cmd;
drivers/usb/gadget/function/f_tcm.c
671
struct usbg_cmd *cmd = req->context;
drivers/usb/gadget/function/f_tcm.c
802
stream->req_status->context = cmd;
drivers/usb/gadget/function/f_tcm.c
844
stream->req_status->context = cmd;
drivers/usb/gadget/function/f_tcm.c
85
fu->bot_status.req->context = cmd;
drivers/usb/gadget/function/f_tcm.c
864
struct f_uas *fu = req->context;
drivers/usb/gadget/function/f_tcm.c
918
fu->cmd[i].req->context = fu;
drivers/usb/gadget/function/f_tcm.c
93
struct usbg_cmd *cmd = req->context;
drivers/usb/gadget/function/f_uac1.c
425
struct g_audio *audio = req->context;
drivers/usb/gadget/function/f_uac1.c
466
req->context = audio;
drivers/usb/gadget/function/f_uac1.c
681
struct g_audio *audio = req->context;
drivers/usb/gadget/function/f_uac1.c
748
req->context = audio;
drivers/usb/gadget/function/f_uac1_legacy.c
323
struct f_audio *audio = req->context;
drivers/usb/gadget/function/f_uac1_legacy.c
362
struct f_audio *audio = req->context;
drivers/usb/gadget/function/f_uac1_legacy.c
441
req->context = audio;
drivers/usb/gadget/function/f_uac1_legacy.c
477
req->context = audio;
drivers/usb/gadget/function/f_uac1_legacy.c
653
req->context = audio;
drivers/usb/gadget/function/f_uac2.c
1360
struct g_audio *agdev = req->context;
drivers/usb/gadget/function/f_uac2.c
1407
req->context = agdev;
drivers/usb/gadget/function/f_uac2.c
1725
struct g_audio *agdev = req->context;
drivers/usb/gadget/function/f_uac2.c
1797
req->context = agdev;
drivers/usb/gadget/function/f_uvc.c
207
struct uvc_device *uvc = req->context;
drivers/usb/gadget/function/f_uvc.c
854
uvc->control_req->context = uvc;
drivers/usb/gadget/function/u_audio.c
156
struct uac_rtd_params *prm = req->context;
drivers/usb/gadget/function/u_audio.c
283
struct uac_rtd_params *prm = req->context;
drivers/usb/gadget/function/u_audio.c
624
req->context = prm;
drivers/usb/gadget/function/u_audio.c
661
req_fback->context = prm;
drivers/usb/gadget/function/u_audio.c
760
req->context = prm;
drivers/usb/gadget/function/u_ether.c
207
req->context = skb;
drivers/usb/gadget/function/u_ether.c
226
struct sk_buff *skb = req->context, *skb2;
drivers/usb/gadget/function/u_ether.c
411
struct sk_buff *skb = req->context;
drivers/usb/gadget/function/u_ether.c
558
req->context = skb;
drivers/usb/gadget/function/u_serial.c
1038
req->context = cons;
drivers/usb/gadget/function/u_serial.c
932
struct gs_console *cons = req->context;
drivers/usb/gadget/function/uvc_video.c
138
struct uvc_request *ureq = req->context;
drivers/usb/gadget/function/uvc_video.c
205
struct uvc_request *ureq = req->context;
drivers/usb/gadget/function/uvc_video.c
291
struct uvc_request *ureq = req->context;
drivers/usb/gadget/function/uvc_video.c
333
struct uvc_request *ureq = req->context;
drivers/usb/gadget/function/uvc_video.c
581
ureq->req->context = ureq;
drivers/usb/gadget/function/uvc_video.c
677
uvc_video_free_request(req->context, video->ep);
drivers/usb/gadget/function/uvc_video.c
733
uvc_video_free_request(req->context, video->ep);
drivers/usb/gadget/function/uvc_video.c
738
uvc_video_free_request(req->context, video->ep);
drivers/usb/gadget/function/uvc_video.c
95
struct uvc_request *ureq = req->context;
drivers/usb/gadget/legacy/inode.c
1381
req->context = NULL;
drivers/usb/gadget/legacy/inode.c
1689
dev->req->context = NULL;
drivers/usb/gadget/legacy/inode.c
283
if (!req->context)
drivers/usb/gadget/legacy/inode.c
289
complete ((struct completion *)req->context);
drivers/usb/gadget/legacy/inode.c
345
req->context = &done;
drivers/usb/gadget/legacy/inode.c
490
struct kiocb *iocb = req->context;
drivers/usb/gadget/legacy/inode.c
562
req->context = iocb;
drivers/usb/gadget/legacy/raw_gadget.c
1055
complete((struct completion *)req->context);
drivers/usb/gadget/legacy/raw_gadget.c
1106
ep->req->context = &done;
drivers/usb/gadget/legacy/raw_gadget.c
251
struct raw_dev *dev = req->context;
drivers/usb/gadget/legacy/raw_gadget.c
303
dev->req->context = dev;
drivers/usb/gadget/udc/dummy_hcd.c
738
req->req.context = dum;
drivers/usb/gadget/udc/renesas_usbf.c
2190
req->req.context = NULL;
drivers/usb/gadget/udc/tegra-xudc.c
1452
if (ep_ctx_read_state(ep->context) == EP_STATE_RUNNING) {
drivers/usb/gadget/udc/tegra-xudc.c
1457
deq_trb = trb_phys_to_virt(ep, ep_ctx_read_deq_ptr(ep->context));
drivers/usb/gadget/udc/tegra-xudc.c
1459
busy = (trb_read_cycle(deq_trb) == ep_ctx_read_dcs(ep->context));
drivers/usb/gadget/udc/tegra-xudc.c
1470
req->usb_req.actual = ep_ctx_read_edtla(ep->context);
drivers/usb/gadget/udc/tegra-xudc.c
1480
ep_ctx_write_edtla(ep->context, 0);
drivers/usb/gadget/udc/tegra-xudc.c
1481
ep_ctx_write_partial_td(ep->context, 0);
drivers/usb/gadget/udc/tegra-xudc.c
1482
ep_ctx_write_data_offset(ep->context, 0);
drivers/usb/gadget/udc/tegra-xudc.c
1490
ep_ctx_write_deq_ptr(ep->context, deq_ptr);
drivers/usb/gadget/udc/tegra-xudc.c
1491
ep_ctx_write_dcs(ep->context, ep->pcs);
drivers/usb/gadget/udc/tegra-xudc.c
1564
ep_ctx_write_state(ep->context, EP_STATE_DISABLED);
drivers/usb/gadget/udc/tegra-xudc.c
1568
ep_ctx_write_state(ep->context, EP_STATE_RUNNING);
drivers/usb/gadget/udc/tegra-xudc.c
1569
ep_ctx_write_rsvd(ep->context, 0);
drivers/usb/gadget/udc/tegra-xudc.c
1570
ep_ctx_write_partial_td(ep->context, 0);
drivers/usb/gadget/udc/tegra-xudc.c
1571
ep_ctx_write_splitxstate(ep->context, 0);
drivers/usb/gadget/udc/tegra-xudc.c
1572
ep_ctx_write_seq_num(ep->context, 0);
drivers/usb/gadget/udc/tegra-xudc.c
1646
memset(ep->context, 0, sizeof(*ep->context));
drivers/usb/gadget/udc/tegra-xudc.c
1648
ep_ctx_write_state(ep->context, EP_STATE_RUNNING);
drivers/usb/gadget/udc/tegra-xudc.c
1649
ep_ctx_write_interval(ep->context, desc->bInterval);
drivers/usb/gadget/udc/tegra-xudc.c
1652
ep_ctx_write_mult(ep->context,
drivers/usb/gadget/udc/tegra-xudc.c
1657
ep_ctx_write_max_pstreams(ep->context,
drivers/usb/gadget/udc/tegra-xudc.c
1660
ep_ctx_write_lsa(ep->context, 1);
drivers/usb/gadget/udc/tegra-xudc.c
1669
ep_ctx_write_type(ep->context, val);
drivers/usb/gadget/udc/tegra-xudc.c
1670
ep_ctx_write_cerr(ep->context, 0x3);
drivers/usb/gadget/udc/tegra-xudc.c
1671
ep_ctx_write_max_packet_size(ep->context, maxpacket);
drivers/usb/gadget/udc/tegra-xudc.c
1672
ep_ctx_write_max_burst_size(ep->context, maxburst);
drivers/usb/gadget/udc/tegra-xudc.c
1674
ep_ctx_write_deq_ptr(ep->context, ep->transfer_ring_phys);
drivers/usb/gadget/udc/tegra-xudc.c
1675
ep_ctx_write_dcs(ep->context, ep->pcs);
drivers/usb/gadget/udc/tegra-xudc.c
1692
ep_ctx_write_avg_trb_len(ep->context, val);
drivers/usb/gadget/udc/tegra-xudc.c
1693
ep_ctx_write_max_esit_payload(ep->context, esit);
drivers/usb/gadget/udc/tegra-xudc.c
1695
ep_ctx_write_cerrcnt(ep->context, 0x3);
drivers/usb/gadget/udc/tegra-xudc.c
1710
if (ep_ctx_read_state(ep->context) == EP_STATE_DISABLED) {
drivers/usb/gadget/udc/tegra-xudc.c
1716
ep_ctx_write_state(ep->context, EP_STATE_DISABLED);
drivers/usb/gadget/udc/tegra-xudc.c
1729
memset(ep->context, 0, sizeof(*ep->context));
drivers/usb/gadget/udc/tegra-xudc.c
1799
if (ep_ctx_read_state(ep->context) != EP_STATE_DISABLED)
drivers/usb/gadget/udc/tegra-xudc.c
2208
xudc->ep0_req->usb_req.context = xudc;
drivers/usb/gadget/udc/tegra-xudc.c
2220
xudc->ep0_req->usb_req.context = xudc;
drivers/usb/gadget/udc/tegra-xudc.c
2256
struct tegra_xudc *xudc = req->context;
drivers/usb/gadget/udc/tegra-xudc.c
2489
struct tegra_xudc *xudc = req->context;
drivers/usb/gadget/udc/tegra-xudc.c
2527
ep_ctx_write_devaddr(ep0->context, addr);
drivers/usb/gadget/udc/tegra-xudc.c
2711
if (ep_ctx_read_state(ep->context) == EP_STATE_DISABLED) {
drivers/usb/gadget/udc/tegra-xudc.c
2830
ep_ctx_write_rsvd(ep0->context, 0);
drivers/usb/gadget/udc/tegra-xudc.c
2831
ep_ctx_write_partial_td(ep0->context, 0);
drivers/usb/gadget/udc/tegra-xudc.c
2832
ep_ctx_write_splitxstate(ep0->context, 0);
drivers/usb/gadget/udc/tegra-xudc.c
2833
ep_ctx_write_seq_num(ep0->context, 0);
drivers/usb/gadget/udc/tegra-xudc.c
2838
ep_ctx_write_deq_ptr(ep0->context, deq_ptr);
drivers/usb/gadget/udc/tegra-xudc.c
2839
ep_ctx_write_dcs(ep0->context, ep0->pcs);
drivers/usb/gadget/udc/tegra-xudc.c
2882
ep_ctx_write_max_packet_size(ep0->context, maxpacket);
drivers/usb/gadget/udc/tegra-xudc.c
3159
ep->context = &xudc->ep_context[index];
drivers/usb/gadget/udc/tegra-xudc.c
401
struct tegra_xudc_ep_context *context;
drivers/usb/host/xhci-tegra.c
2062
const struct tegra_xusb_context_soc *soc = tegra->soc->context;
drivers/usb/host/xhci-tegra.c
2063
struct tegra_xusb_context *ctx = &tegra->context;
drivers/usb/host/xhci-tegra.c
2079
const struct tegra_xusb_context_soc *soc = tegra->soc->context;
drivers/usb/host/xhci-tegra.c
2080
struct tegra_xusb_context *ctx = &tegra->context;
drivers/usb/host/xhci-tegra.c
234
const struct tegra_xusb_context_soc *context;
drivers/usb/host/xhci-tegra.c
2551
.context = &tegra124_xusb_context,
drivers/usb/host/xhci-tegra.c
2589
.context = &tegra124_xusb_context,
drivers/usb/host/xhci-tegra.c
2632
.context = &tegra186_xusb_context,
drivers/usb/host/xhci-tegra.c
2666
.context = &tegra186_xusb_context,
drivers/usb/host/xhci-tegra.c
2699
.context = &tegra186_xusb_context,
drivers/usb/host/xhci-tegra.c
318
struct tegra_xusb_context context;
drivers/usb/host/xhci-tegra.c
934
const struct tegra_xusb_context_soc *soc = tegra->soc->context;
drivers/usb/host/xhci-tegra.c
936
tegra->context.ipfs = devm_kcalloc(tegra->dev, soc->ipfs.num_offsets,
drivers/usb/host/xhci-tegra.c
938
if (!tegra->context.ipfs)
drivers/usb/host/xhci-tegra.c
941
tegra->context.fpci = devm_kcalloc(tegra->dev, soc->fpci.num_offsets,
drivers/usb/host/xhci-tegra.c
943
if (!tegra->context.fpci)
drivers/usb/image/mdc800.c
273
struct mdc800_data* mdc800=urb->context;
drivers/usb/image/mdc800.c
365
struct mdc800_data* mdc800=urb->context;
drivers/usb/image/mdc800.c
383
struct mdc800_data* mdc800=urb->context;
drivers/usb/image/microtek.c
191
MTS_DEBUG("transfer = 0x%x context = 0x%x\n",(int)transfer,(int)context ); \
drivers/usb/image/microtek.c
192
MTS_DEBUG("status = 0x%x data-length = 0x%x sent = 0x%x\n",transfer->status,(int)context->data_length, (int)transfer->actual_length ); \
drivers/usb/image/microtek.c
193
mts_debug_dump(context->instance);\
drivers/usb/image/microtek.c
208
struct mts_transfer_context* context = (struct mts_transfer_context*)transfer->context; \
drivers/usb/image/microtek.c
379
context->instance->usb_dev,
drivers/usb/image/microtek.c
384
context
drivers/usb/image/microtek.c
390
set_host_byte(context->srb, DID_ERROR);
drivers/usb/image/microtek.c
401
if ( likely(context->final_callback != NULL) )
drivers/usb/image/microtek.c
402
context->final_callback(context->srb);
drivers/usb/image/microtek.c
409
context->srb->result &= MTS_SCSI_ERR_MASK;
drivers/usb/image/microtek.c
410
context->srb->result |= (unsigned)(*context->scsi_status)<<1;
drivers/usb/image/microtek.c
422
usb_rcvbulkpipe(context->instance->usb_dev,
drivers/usb/image/microtek.c
423
context->instance->ep_response),
drivers/usb/image/microtek.c
424
context->scsi_status,
drivers/usb/image/microtek.c
435
if ( context->data_length != transfer->actual_length ) {
drivers/usb/image/microtek.c
436
scsi_set_resid(context->srb, context->data_length -
drivers/usb/image/microtek.c
439
set_host_byte(context->srb, (status == -ENOENT ? DID_ABORT : DID_ERROR));
drivers/usb/image/microtek.c
456
set_host_byte(context->srb, DID_ABORT);
drivers/usb/image/microtek.c
461
set_host_byte(context->srb, DID_ERROR);
drivers/usb/image/microtek.c
468
if (context->srb->cmnd[0] == REQUEST_SENSE) {
drivers/usb/image/microtek.c
470
context->data_pipe,
drivers/usb/image/microtek.c
471
context->srb->sense_buffer,
drivers/usb/image/microtek.c
472
context->data_length,
drivers/usb/image/microtek.c
474
} else { if ( context->data ) {
drivers/usb/image/microtek.c
476
context->data_pipe,
drivers/usb/image/microtek.c
477
context->data,
drivers/usb/image/microtek.c
478
context->data_length,
drivers/usb/image/microtek.c
479
scsi_sg_count(context->srb) > 1 ?
drivers/usb/image/microtek.c
492
MTS_DEBUG("Processing fragment %d of %d\n", context->fragment,
drivers/usb/image/microtek.c
493
scsi_sg_count(context->srb));
drivers/usb/image/microtek.c
496
set_host_byte(context->srb, (status == -ENOENT ? DID_ABORT : DID_ERROR));
drivers/usb/image/microtek.c
500
context->curr_sg = sg_next(context->curr_sg);
drivers/usb/image/microtek.c
502
context->data_pipe,
drivers/usb/image/microtek.c
503
sg_virt(context->curr_sg),
drivers/usb/image/microtek.c
504
context->curr_sg->length,
drivers/usb/image/microtek.c
505
sg_is_last(context->curr_sg) ?
drivers/usb/image/microtek.c
528
desc->context.instance = desc;
drivers/usb/image/microtek.c
529
desc->context.srb = srb;
drivers/usb/image/microtek.c
532
desc->context.data = NULL;
drivers/usb/image/microtek.c
533
desc->context.data_length = 0;
drivers/usb/image/microtek.c
536
desc->context.curr_sg = scsi_sglist(srb);
drivers/usb/image/microtek.c
537
desc->context.data = sg_virt(desc->context.curr_sg);
drivers/usb/image/microtek.c
538
desc->context.data_length = desc->context.curr_sg->length;
drivers/usb/image/microtek.c
559
desc->context.data_pipe = pipe;
drivers/usb/image/microtek.c
593
&desc->context
drivers/usb/image/microtek.c
598
desc->context.final_callback = callback;
drivers/usb/image/microtek.c
734
new_desc->context.scsi_status = kmalloc(1, GFP_KERNEL);
drivers/usb/image/microtek.c
735
if (!new_desc->context.scsi_status)
drivers/usb/image/microtek.c
776
kfree(new_desc->context.scsi_status);
drivers/usb/image/microtek.c
796
kfree(desc->context.scsi_status);
drivers/usb/image/microtek.h
45
struct mts_transfer_context context;
drivers/usb/misc/adutux.c
158
struct adu_device *dev = urb->context;
drivers/usb/misc/adutux.c
203
struct adu_device *dev = urb->context;
drivers/usb/misc/appledisplay.c
79
struct appledisplay *pdata = urb->context;
drivers/usb/misc/chaoskey.c
329
struct chaoskey *dev = urb->context;
drivers/usb/misc/iowarrior.c
155
struct iowarrior *dev = urb->context;
drivers/usb/misc/iowarrior.c
228
dev = urb->context;
drivers/usb/misc/ldusb.c
220
struct ld_usb *dev = urb->context;
drivers/usb/misc/ldusb.c
281
struct ld_usb *dev = urb->context;
drivers/usb/misc/legousbtower.c
660
struct lego_usb_tower *dev = urb->context;
drivers/usb/misc/legousbtower.c
715
struct lego_usb_tower *dev = urb->context;
drivers/usb/misc/lvstest.c
387
struct lvs_rh *lvs = urb->context;
drivers/usb/misc/sisusbvga/sisusbvga.c
179
struct sisusb_urb_context *context = urb->context;
drivers/usb/misc/sisusbvga/sisusbvga.c
182
if (!context)
drivers/usb/misc/sisusbvga/sisusbvga.c
185
sisusb = context->sisusb;
drivers/usb/misc/sisusbvga/sisusbvga.c
191
if (context->actual_length)
drivers/usb/misc/sisusbvga/sisusbvga.c
192
*(context->actual_length) += urb->actual_length;
drivers/usb/misc/sisusbvga/sisusbvga.c
195
sisusb->urbstatus[context->urbindex] &= ~SU_URB_BUSY;
drivers/usb/misc/sisusbvga/sisusbvga.c
254
struct sisusb_usb_data *sisusb = urb->context;
drivers/usb/misc/usb-ljca.c
218
client->event_cb(client->context, header->cmd,
drivers/usb/misc/usb-ljca.c
263
struct ljca_adapter *adap = urb->context;
drivers/usb/misc/usb-ljca.c
397
void *context)
drivers/usb/misc/usb-ljca.c
412
client->context = context;
drivers/usb/misc/usb-ljca.c
427
client->context = NULL;
drivers/usb/misc/usbio.c
241
struct usbio_device *usbio = urb->context;
drivers/usb/misc/usblcd.c
200
dev = urb->context;
drivers/usb/misc/usbtest.c
1089
struct ctrl_ctx *ctx = urb->context;
drivers/usb/misc/usbtest.c
1205
struct ctrl_ctx context;
drivers/usb/misc/usbtest.c
1211
spin_lock_init(&context.lock);
drivers/usb/misc/usbtest.c
1212
context.dev = dev;
drivers/usb/misc/usbtest.c
1213
init_completion(&context.complete);
drivers/usb/misc/usbtest.c
1214
context.count = param->sglen * param->iterations;
drivers/usb/misc/usbtest.c
1215
context.pending = 0;
drivers/usb/misc/usbtest.c
1216
context.status = -ENOMEM;
drivers/usb/misc/usbtest.c
1217
context.param = param;
drivers/usb/misc/usbtest.c
1218
context.last = -1;
drivers/usb/misc/usbtest.c
1365
context.status = -EINVAL;
drivers/usb/misc/usbtest.c
1381
u->context = &context;
drivers/usb/misc/usbtest.c
1386
context.urb = urb;
drivers/usb/misc/usbtest.c
1387
spin_lock_irq(&context.lock);
drivers/usb/misc/usbtest.c
1389
context.status = usb_submit_urb(urb[i], GFP_ATOMIC);
drivers/usb/misc/usbtest.c
1390
if (context.status != 0) {
drivers/usb/misc/usbtest.c
1392
i, context.status);
drivers/usb/misc/usbtest.c
1393
context.count = context.pending;
drivers/usb/misc/usbtest.c
1396
context.pending++;
drivers/usb/misc/usbtest.c
1398
spin_unlock_irq(&context.lock);
drivers/usb/misc/usbtest.c
1403
if (context.pending > 0)
drivers/usb/misc/usbtest.c
1404
wait_for_completion(&context.complete);
drivers/usb/misc/usbtest.c
1415
return context.status;
drivers/usb/misc/usbtest.c
1431
complete(urb->context);
drivers/usb/misc/usbtest.c
1445
urb->context = &completion;
drivers/usb/misc/usbtest.c
1537
struct queued_ctx *ctx = urb->context;
drivers/usb/misc/usbtest.c
1928
struct transfer_context *ctx = urb->context;
drivers/usb/misc/usbtest.c
2042
struct transfer_context context;
drivers/usb/misc/usbtest.c
2059
memset(&context, 0, sizeof(context));
drivers/usb/misc/usbtest.c
2060
context.count = param->iterations * param->sglen;
drivers/usb/misc/usbtest.c
2061
context.dev = dev;
drivers/usb/misc/usbtest.c
2062
context.is_iso = !!desc;
drivers/usb/misc/usbtest.c
2063
init_completion(&context.done);
drivers/usb/misc/usbtest.c
2064
spin_lock_init(&context.lock);
drivers/usb/misc/usbtest.c
2069
if (context.is_iso)
drivers/usb/misc/usbtest.c
2081
urbs[i]->context = &context;
drivers/usb/misc/usbtest.c
2085
if (context.is_iso) {
drivers/usb/misc/usbtest.c
2107
spin_lock_irq(&context.lock);
drivers/usb/misc/usbtest.c
2109
++context.pending;
drivers/usb/misc/usbtest.c
2114
spin_unlock_irq(&context.lock);
drivers/usb/misc/usbtest.c
2120
context.pending--;
drivers/usb/misc/usbtest.c
2121
context.submit_error = 1;
drivers/usb/misc/usbtest.c
2125
spin_unlock_irq(&context.lock);
drivers/usb/misc/usbtest.c
2127
wait_for_completion(&context.done);
drivers/usb/misc/usbtest.c
2140
else if (context.submit_error)
drivers/usb/misc/usbtest.c
2142
else if (context.errors >
drivers/usb/misc/usbtest.c
2143
(context.is_iso ? context.packet_count / 10 : 0))
drivers/usb/misc/usbtest.c
254
complete(urb->context);
drivers/usb/misc/usbtest.c
469
urb->context = &completion;
drivers/usb/misc/uss720.c
103
rq = urb->context;
drivers/usb/misc/yurex.c
126
struct usb_yurex *dev = urb->context;
drivers/usb/misc/yurex.c
78
struct usb_yurex *dev = urb->context;
drivers/usb/musb/musb_core.c
2666
musb->context.frame = musb_readw(musb_base, MUSB_FRAME);
drivers/usb/musb/musb_core.c
2667
musb->context.testmode = musb_readb(musb_base, MUSB_TESTMODE);
drivers/usb/musb/musb_core.c
2668
musb->context.busctl = musb_readb(musb_base, MUSB_ULPI_BUSCONTROL);
drivers/usb/musb/musb_core.c
2669
musb->context.power = musb_readb(musb_base, MUSB_POWER);
drivers/usb/musb/musb_core.c
2670
musb->context.intrusbe = musb_readb(musb_base, MUSB_INTRUSBE);
drivers/usb/musb/musb_core.c
2671
musb->context.index = musb_readb(musb_base, MUSB_INDEX);
drivers/usb/musb/musb_core.c
2672
musb->context.devctl = musb_readb(musb_base, MUSB_DEVCTL);
drivers/usb/musb/musb_core.c
2680
musb->context.index_regs[i].txmaxp =
drivers/usb/musb/musb_core.c
2682
musb->context.index_regs[i].txcsr =
drivers/usb/musb/musb_core.c
2684
musb->context.index_regs[i].rxmaxp =
drivers/usb/musb/musb_core.c
2686
musb->context.index_regs[i].rxcsr =
drivers/usb/musb/musb_core.c
2690
musb->context.index_regs[i].txfifoadd =
drivers/usb/musb/musb_core.c
2692
musb->context.index_regs[i].rxfifoadd =
drivers/usb/musb/musb_core.c
2694
musb->context.index_regs[i].txfifosz =
drivers/usb/musb/musb_core.c
2696
musb->context.index_regs[i].rxfifosz =
drivers/usb/musb/musb_core.c
2700
musb->context.index_regs[i].txtype =
drivers/usb/musb/musb_core.c
2702
musb->context.index_regs[i].txinterval =
drivers/usb/musb/musb_core.c
2704
musb->context.index_regs[i].rxtype =
drivers/usb/musb/musb_core.c
2706
musb->context.index_regs[i].rxinterval =
drivers/usb/musb/musb_core.c
2709
musb->context.index_regs[i].txfunaddr =
drivers/usb/musb/musb_core.c
2711
musb->context.index_regs[i].txhubaddr =
drivers/usb/musb/musb_core.c
2713
musb->context.index_regs[i].txhubport =
drivers/usb/musb/musb_core.c
2716
musb->context.index_regs[i].rxfunaddr =
drivers/usb/musb/musb_core.c
2718
musb->context.index_regs[i].rxhubaddr =
drivers/usb/musb/musb_core.c
2720
musb->context.index_regs[i].rxhubport =
drivers/usb/musb/musb_core.c
2732
musb_writew(musb_base, MUSB_FRAME, musb->context.frame);
drivers/usb/musb/musb_core.c
2733
musb_writeb(musb_base, MUSB_TESTMODE, musb->context.testmode);
drivers/usb/musb/musb_core.c
2734
musb_writeb(musb_base, MUSB_ULPI_BUSCONTROL, musb->context.busctl);
drivers/usb/musb/musb_core.c
2739
musb->context.power &= ~(MUSB_POWER_SUSPENDM | MUSB_POWER_RESUME);
drivers/usb/musb/musb_core.c
2740
power |= musb->context.power;
drivers/usb/musb/musb_core.c
2745
musb_writeb(musb_base, MUSB_INTRUSBE, musb->context.intrusbe);
drivers/usb/musb/musb_core.c
2746
if (musb->context.devctl & MUSB_DEVCTL_SESSION)
drivers/usb/musb/musb_core.c
2747
musb_writeb(musb_base, MUSB_DEVCTL, musb->context.devctl);
drivers/usb/musb/musb_core.c
2756
musb->context.index_regs[i].txmaxp);
drivers/usb/musb/musb_core.c
2758
musb->context.index_regs[i].txcsr);
drivers/usb/musb/musb_core.c
2760
musb->context.index_regs[i].rxmaxp);
drivers/usb/musb/musb_core.c
2762
musb->context.index_regs[i].rxcsr);
drivers/usb/musb/musb_core.c
2766
musb->context.index_regs[i].txfifosz);
drivers/usb/musb/musb_core.c
2768
musb->context.index_regs[i].rxfifosz);
drivers/usb/musb/musb_core.c
2770
musb->context.index_regs[i].txfifoadd);
drivers/usb/musb/musb_core.c
2772
musb->context.index_regs[i].rxfifoadd);
drivers/usb/musb/musb_core.c
2776
musb->context.index_regs[i].txtype);
drivers/usb/musb/musb_core.c
2778
musb->context.index_regs[i].txinterval);
drivers/usb/musb/musb_core.c
2780
musb->context.index_regs[i].rxtype);
drivers/usb/musb/musb_core.c
2783
musb->context.index_regs[i].rxinterval);
drivers/usb/musb/musb_core.c
2785
musb->context.index_regs[i].txfunaddr);
drivers/usb/musb/musb_core.c
2787
musb->context.index_regs[i].txhubaddr);
drivers/usb/musb/musb_core.c
2789
musb->context.index_regs[i].txhubport);
drivers/usb/musb/musb_core.c
2792
musb->context.index_regs[i].rxfunaddr);
drivers/usb/musb/musb_core.c
2794
musb->context.index_regs[i].rxhubaddr);
drivers/usb/musb/musb_core.c
2796
musb->context.index_regs[i].rxhubport);
drivers/usb/musb/musb_core.c
2798
musb_writeb(musb_base, MUSB_INDEX, musb->context.index);
drivers/usb/musb/musb_core.c
2866
if ((devctl & mask) != (musb->context.devctl & mask))
drivers/usb/musb/musb_core.h
284
struct musb_context_registers context;
drivers/usb/musb/musb_debugfs.c
293
musb->context.devctl |= MUSB_DEVCTL_SESSION;
drivers/usb/musb/musb_dsps.c
1014
musb_writel(mbase, wrp->control, glue->context.control);
drivers/usb/musb/musb_dsps.c
1015
musb_writel(mbase, wrp->epintr_set, glue->context.epintr);
drivers/usb/musb/musb_dsps.c
1016
musb_writel(mbase, wrp->coreintr_set, glue->context.coreintr);
drivers/usb/musb/musb_dsps.c
1017
musb_writel(mbase, wrp->phy_utmi, glue->context.phy_utmi);
drivers/usb/musb/musb_dsps.c
1018
musb_writel(mbase, wrp->mode, glue->context.mode);
drivers/usb/musb/musb_dsps.c
1019
musb_writel(mbase, wrp->tx_mode, glue->context.tx_mode);
drivers/usb/musb/musb_dsps.c
1020
musb_writel(mbase, wrp->rx_mode, glue->context.rx_mode);
drivers/usb/musb/musb_dsps.c
111
struct dsps_context context;
drivers/usb/musb/musb_dsps.c
988
glue->context.control = musb_readl(mbase, wrp->control);
drivers/usb/musb/musb_dsps.c
989
glue->context.epintr = musb_readl(mbase, wrp->epintr_set);
drivers/usb/musb/musb_dsps.c
990
glue->context.coreintr = musb_readl(mbase, wrp->coreintr_set);
drivers/usb/musb/musb_dsps.c
991
glue->context.phy_utmi = musb_readl(mbase, wrp->phy_utmi);
drivers/usb/musb/musb_dsps.c
992
glue->context.mode = musb_readl(mbase, wrp->mode);
drivers/usb/musb/musb_dsps.c
993
glue->context.tx_mode = musb_readl(mbase, wrp->tx_mode);
drivers/usb/musb/musb_dsps.c
994
glue->context.rx_mode = musb_readl(mbase, wrp->rx_mode);
drivers/usb/musb/omap2430.c
492
musb->context.otg_interfsel = musb_readl(musb->mregs,
drivers/usb/musb/omap2430.c
522
musb->context.otg_interfsel);
drivers/usb/serial/aircable.c
119
struct usb_serial_port *port = urb->context;
drivers/usb/serial/ark3116.c
504
struct usb_serial_port *port = urb->context;
drivers/usb/serial/ark3116.c
567
struct usb_serial_port *port = urb->context;
drivers/usb/serial/belkin_sa.c
171
struct usb_serial_port *port = urb->context;
drivers/usb/serial/belkin_sa.c
236
struct usb_serial_port *port = urb->context;
drivers/usb/serial/ch341.c
776
struct usb_serial_port *port = urb->context;
drivers/usb/serial/cp210x.c
898
struct usb_serial_port *port = urb->context;
drivers/usb/serial/cyberjack.c
250
struct usb_serial_port *port = urb->context;
drivers/usb/serial/cyberjack.c
306
struct usb_serial_port *port = urb->context;
drivers/usb/serial/cyberjack.c
352
struct usb_serial_port *port = urb->context;
drivers/usb/serial/cypress_m8.c
1011
struct usb_serial_port *port = urb->context;
drivers/usb/serial/cypress_m8.c
1163
struct usb_serial_port *port = urb->context;
drivers/usb/serial/digi_acceleport.c
1295
struct usb_serial_port *port = urb->context;
drivers/usb/serial/digi_acceleport.c
1356
struct usb_serial_port *port = urb->context;
drivers/usb/serial/digi_acceleport.c
1447
struct usb_serial_port *port = urb->context;
drivers/usb/serial/digi_acceleport.c
959
struct usb_serial_port *port = urb->context;
drivers/usb/serial/f81232.c
312
struct usb_serial_port *port = urb->context;
drivers/usb/serial/f81232.c
379
struct usb_serial_port *port = urb->context;
drivers/usb/serial/f81232.c
411
struct usb_serial_port *port = urb->context;
drivers/usb/serial/f81534.c
1250
port = urb->context;
drivers/usb/serial/f81534.c
1274
struct usb_serial_port *port = urb->context;
drivers/usb/serial/ftdi_sio.c
2506
struct usb_serial_port *port = urb->context;
drivers/usb/serial/garmin_gps.c
1163
struct usb_serial_port *port = urb->context;
drivers/usb/serial/garmin_gps.c
1211
struct usb_serial_port *port = urb->context;
drivers/usb/serial/garmin_gps.c
951
struct usb_serial_port *port = urb->context;
drivers/usb/serial/generic.c
346
struct usb_serial_port *port = urb->context;
drivers/usb/serial/generic.c
371
struct usb_serial_port *port = urb->context;
drivers/usb/serial/generic.c
436
struct usb_serial_port *port = urb->context;
drivers/usb/serial/io_edgeport.c
572
struct edgeport_serial *edge_serial = urb->context;
drivers/usb/serial/io_edgeport.c
686
struct edgeport_serial *edge_serial = urb->context;
drivers/usb/serial/io_edgeport.c
747
struct edgeport_port *edge_port = urb->context;
drivers/usb/serial/io_edgeport.c
775
struct edgeport_port *edge_port = urb->context;
drivers/usb/serial/io_ti.c
1621
struct edgeport_serial *edge_serial = urb->context;
drivers/usb/serial/io_ti.c
1724
struct edgeport_port *edge_port = urb->context;
drivers/usb/serial/io_ti.c
1804
struct usb_serial_port *port = urb->context;
drivers/usb/serial/io_ti.c
1921
urb->context = edge_serial;
drivers/usb/serial/io_ti.c
1941
urb->context = edge_port;
drivers/usb/serial/ir-usb.c
314
struct usb_serial_port *port = urb->context;
drivers/usb/serial/ir-usb.c
358
struct usb_serial_port *port = urb->context;
drivers/usb/serial/iuu_phoenix.c
158
struct usb_serial_port *port = urb->context;
drivers/usb/serial/iuu_phoenix.c
210
struct usb_serial_port *port = urb->context;
drivers/usb/serial/iuu_phoenix.c
240
struct usb_serial_port *port = urb->context;
drivers/usb/serial/iuu_phoenix.c
349
struct usb_serial_port *port = urb->context;
drivers/usb/serial/iuu_phoenix.c
370
struct usb_serial_port *port = urb->context;
drivers/usb/serial/iuu_phoenix.c
578
struct usb_serial_port *port = urb->context;
drivers/usb/serial/iuu_phoenix.c
642
struct usb_serial_port *port = urb->context;
drivers/usb/serial/iuu_phoenix.c
721
struct usb_serial_port *port = urb->context;
drivers/usb/serial/keyspan.c
1040
serial = urb->context;
drivers/usb/serial/keyspan.c
1069
serial = urb->context;
drivers/usb/serial/keyspan.c
1133
port = urb->context;
drivers/usb/serial/keyspan.c
1177
serial = urb->context;
drivers/usb/serial/keyspan.c
1262
port = urb->context;
drivers/usb/serial/keyspan.c
1327
serial = urb->context;
drivers/usb/serial/keyspan.c
1371
port = urb->context;
drivers/usb/serial/keyspan.c
1393
serial = urb->context;
drivers/usb/serial/keyspan.c
1443
serial = urb->context;
drivers/usb/serial/keyspan.c
783
port = urb->context;
drivers/usb/serial/keyspan.c
832
port = urb->context;
drivers/usb/serial/keyspan.c
848
port = urb->context;
drivers/usb/serial/keyspan.c
868
serial = urb->context;
drivers/usb/serial/keyspan.c
922
port = urb->context;
drivers/usb/serial/keyspan.c
935
port = urb->context;
drivers/usb/serial/keyspan.c
965
port = urb->context;
drivers/usb/serial/keyspan.c
986
serial = urb->context;
drivers/usb/serial/keyspan_pda.c
147
struct usb_serial_port *port = urb->context;
drivers/usb/serial/keyspan_pda.c
526
struct usb_serial_port *port = urb->context;
drivers/usb/serial/kl5kusb105.c
344
struct usb_serial_port *port = urb->context;
drivers/usb/serial/kobil_sct.c
246
struct usb_serial_port *port = urb->context;
drivers/usb/serial/mct_u232.c
394
priv->read_urb->context = port;
drivers/usb/serial/mct_u232.c
508
struct usb_serial_port *port = urb->context;
drivers/usb/serial/metro-usb.c
109
struct usb_serial_port *port = urb->context;
drivers/usb/serial/mos7720.c
746
struct usb_serial_port *port = urb->context;
drivers/usb/serial/mos7720.c
777
port = urb->context;
drivers/usb/serial/mos7720.c
810
mos7720_port = urb->context;
drivers/usb/serial/mos7840.c
421
struct moschip_port *mos7840_port = urb->context;
drivers/usb/serial/mos7840.c
464
struct moschip_port *mos7840_port = urb->context;
drivers/usb/serial/mxuport.c
465
struct usb_serial_port *port = urb->context;
drivers/usb/serial/mxuport.c
512
struct usb_serial_port *port = urb->context;
drivers/usb/serial/mxuport.c
556
struct usb_serial_port *port = urb->context;
drivers/usb/serial/navman.c
29
struct usb_serial_port *port = urb->context;
drivers/usb/serial/omninet.c
139
struct usb_serial_port *port = urb->context;
drivers/usb/serial/opticon.c
166
struct usb_serial_port *port = urb->context;
drivers/usb/serial/opticon.c
73
struct usb_serial_port *port = urb->context;
drivers/usb/serial/option.c
2646
struct usb_serial_port *port = urb->context;
drivers/usb/serial/oti6858.c
641
struct usb_serial_port *port = urb->context;
drivers/usb/serial/oti6858.c
765
struct usb_serial_port *port = urb->context;
drivers/usb/serial/oti6858.c
796
struct usb_serial_port *port = urb->context;
drivers/usb/serial/pl2303.c
1187
struct usb_serial_port *port = urb->context;
drivers/usb/serial/pl2303.c
1226
struct usb_serial_port *port = urb->context;
drivers/usb/serial/quatech2.c
461
serial = urb->context;
drivers/usb/serial/quatech2.c
550
port = urb->context;
drivers/usb/serial/quatech2.c
564
struct usb_serial *serial = urb->context;
drivers/usb/serial/safe_serial.c
187
struct usb_serial_port *port = urb->context;
drivers/usb/serial/sierra.c
389
struct usb_serial_port *port = urb->context;
drivers/usb/serial/sierra.c
530
port = urb->context;
drivers/usb/serial/sierra.c
563
struct usb_serial_port *port = urb->context;
drivers/usb/serial/ssu100.c
466
struct usb_serial_port *port = urb->context;
drivers/usb/serial/symbolserial.c
34
struct usb_serial_port *port = urb->context;
drivers/usb/serial/ti_usb_3410_5052.c
1099
struct ti_device *tdev = urb->context;
drivers/usb/serial/ti_usb_3410_5052.c
1181
struct ti_port *tport = urb->context;
drivers/usb/serial/ti_usb_3410_5052.c
1240
struct ti_port *tport = urb->context;
drivers/usb/serial/ti_usb_3410_5052.c
1453
urb->context = tport;
drivers/usb/serial/ti_usb_3410_5052.c
670
urb->context = tdev;
drivers/usb/serial/ti_usb_3410_5052.c
733
urb->context = tport;
drivers/usb/serial/usb_debug.c
67
struct usb_serial_port *port = urb->context;
drivers/usb/serial/usb_wwan.c
223
port = urb->context;
drivers/usb/serial/usb_wwan.c
263
port = urb->context;
drivers/usb/serial/visor.c
269
struct usb_serial_port *port = urb->context;
drivers/usb/serial/whiteheat.c
473
struct usb_serial_port *command_port = urb->context;
drivers/usb/storage/onetouch.c
90
struct usb_onetouch *onetouch = urb->context;
drivers/usb/storage/transport.c
103
struct completion *urb_done_ptr = urb->context;
drivers/usb/storage/transport.c
129
us->current_urb->context = &urb_done;
drivers/usb/storage/uas.c
301
struct Scsi_Host *shost = urb->context;
drivers/usb/storage/uas.c
400
struct scsi_cmnd *cmnd = urb->context;
drivers/usb/typec/ucsi/ucsi_stm32g0.c
467
static void ucsi_stm32g0_fw_cb(const struct firmware *fw, void *context)
drivers/usb/typec/ucsi/ucsi_stm32g0.c
475
if (!context)
drivers/usb/typec/ucsi/ucsi_stm32g0.c
478
g0 = ucsi_get_drvdata(context);
drivers/usb/usb-skeleton.c
165
dev = urb->context;
drivers/usb/usb-skeleton.c
337
dev = urb->context;
drivers/usb/usbip/stub_rx.c
132
struct stub_priv *priv = (struct stub_priv *) urb->context;
drivers/usb/usbip/stub_rx.c
152
struct stub_priv *priv = (struct stub_priv *) urb->context;
drivers/usb/usbip/stub_rx.c
588
priv->urbs[i]->context = (void *) priv;
drivers/usb/usbip/stub_tx.c
120
struct stub_priv *priv = (struct stub_priv *) urb->context;
drivers/usb/usbip/stub_tx.c
43
struct stub_priv *priv = (struct stub_priv *) urb->context;
drivers/vdpa/mlx5/core/resources.c
322
static void virtqueue_cmd_callback(int status, struct mlx5_async_work *context)
drivers/vdpa/mlx5/core/resources.c
325
container_of(context, struct mlx5_vdpa_async_cmd, cb_work);
drivers/vdpa/mlx5/core/resources.c
327
cmd->err = mlx5_cmd_check(context->ctx->dev, status, cmd->in, cmd->out);
drivers/vfio/pci/mlx5/cmd.c
666
static void mlx5vf_save_callback(int status, struct mlx5_async_work *context)
drivers/vfio/pci/mlx5/cmd.c
668
struct mlx5vf_async_data *async_data = container_of(context,
drivers/video/fbdev/mmp/hw/mmp_spi.c
133
m->complete(m->context);
drivers/video/fbdev/omap2/omapfb/vrfb.c
177
u8 ctx = vrfb->context;
drivers/video/fbdev/omap2/omapfb/vrfb.c
253
int ctx = vrfb->context;
drivers/video/fbdev/omap2/omapfb/vrfb.c
273
vrfb->context = 0xff;
drivers/video/fbdev/omap2/omapfb/vrfb.c
29
#define SMS_ROT_CONTROL(context) (0x0 + 0x10 * context)
drivers/video/fbdev/omap2/omapfb/vrfb.c
30
#define SMS_ROT_SIZE(context) (0x4 + 0x10 * context)
drivers/video/fbdev/omap2/omapfb/vrfb.c
306
vrfb->context = ctx;
drivers/video/fbdev/omap2/omapfb/vrfb.c
31
#define SMS_ROT_PHYSICAL_BA(context) (0x8 + 0x10 * context)
drivers/video/fbdev/smscufx.c
1775
struct urb_node *unode = urb->context;
drivers/video/fbdev/udlfb.c
1775
struct urb_node *unode = urb->context;
drivers/virtio/virtio_ring.c
1302
bool context,
drivers/virtio/virtio_ring.c
1332
!context;
drivers/virtio/virtio_ring.c
1364
bool context,
drivers/virtio/virtio_ring.c
1380
context, notify, callback, name, map);
drivers/virtio/virtio_ring.c
2522
bool context,
drivers/virtio/virtio_ring.c
2552
!context;
drivers/virtio/virtio_ring.c
2584
bool context,
drivers/virtio/virtio_ring.c
2597
context, notify, callback, name, map);
drivers/virtio/virtio_ring.c
3267
bool context,
drivers/virtio/virtio_ring.c
3277
context, notify, callback, name, map);
drivers/virtio/virtio_ring.c
3281
context, notify, callback, name, map);
drivers/virtio/virtio_ring.c
3292
bool context,
drivers/virtio/virtio_ring.c
3302
context, notify, callback, name, map);
drivers/virtio/virtio_ring.c
3306
context, notify, callback, name, map);
drivers/virtio/virtio_ring.c
3406
bool context,
drivers/virtio/virtio_ring.c
3422
context, notify, callback,
drivers/virtio/virtio_ring.c
3428
context, notify, callback, name,
drivers/watchdog/pcwd_usb.c
168
(struct usb_pcwd_private *)urb->context;
drivers/xen/xen-acpi-processor.c
325
read_acpi_id(acpi_handle handle, u32 lvl, void *context, void **rv)
fs/binfmt_elf_fdpic.c
1405
t->prstatus.pr_exec_fdpic_loadmap = p->mm->context.exec_fdpic_loadmap;
fs/binfmt_elf_fdpic.c
1406
t->prstatus.pr_interp_fdpic_loadmap = p->mm->context.interp_fdpic_loadmap;
fs/binfmt_elf_fdpic.c
363
current->mm->context.exec_fdpic_loadmap = 0;
fs/binfmt_elf_fdpic.c
364
current->mm->context.interp_fdpic_loadmap = 0;
fs/binfmt_elf_fdpic.c
432
current->mm->context.end_brk = current->mm->start_brk;
fs/binfmt_elf_fdpic.c
571
current->mm->context.exec_fdpic_loadmap = (unsigned long) sp;
fs/binfmt_elf_fdpic.c
584
current->mm->context.interp_fdpic_loadmap = (unsigned long) sp;
fs/binfmt_flat.c
709
current->mm->context.end_brk = memp + memp_size - stack_len;
fs/binfmt_flat.c
908
((current->mm->context.end_brk + stack_len + 3) & ~3) - 4;
fs/ceph/addr.c
107
snapc = ceph_get_snap_context(capsnap->context);
fs/ceph/addr.c
2231
snapc = ceph_get_snap_context(capsnap->context);
fs/ceph/addr.c
633
capsnap, capsnap->context, capsnap->dirty_pages);
fs/ceph/addr.c
638
if (snapc && capsnap->context != page_snapc)
fs/ceph/addr.c
657
snapc = ceph_get_snap_context(capsnap->context);
fs/ceph/addr.c
693
if (capsnap->context == snapc) {
fs/ceph/caps.c
3208
ceph_put_snap_context(capsnap->context);
fs/ceph/caps.c
3377
if (iter->context == snapc) {
fs/ceph/caps.c
3405
inode, ceph_vinop(inode), capsnap, capsnap->context->seq,
fs/ceph/caps.c
4006
ceph_put_snap_context(capsnap->context);
fs/ceph/caps.c
5000
ceph_put_snap_context(capsnap->context);
fs/ceph/file.c
1425
snapc = ceph_get_snap_context(capsnap->context);
fs/ceph/file.c
2435
snapc = ceph_get_snap_context(capsnap->context);
fs/ceph/file.c
2592
snapc = ceph_get_snap_context(capsnap->context);
fs/ceph/snap.c
623
capsnap->context = old_snapc;
fs/ceph/snap.c
682
ceph_vinop(inode), capsnap, capsnap->context,
fs/ceph/snap.c
683
capsnap->context->seq,
fs/ceph/snap.c
696
ceph_vinop(inode), capsnap, capsnap->context,
fs/ceph/snap.c
697
capsnap->context->seq, ceph_cap_string(capsnap->dirty),
fs/ceph/snap.c
705
inode, ceph_vinop(inode), capsnap, capsnap->context,
fs/ceph/snap.c
706
capsnap->context->seq, ceph_cap_string(capsnap->dirty),
fs/ceph/super.h
256
struct ceph_snap_context *context;
fs/ceph/xattr.c
1433
ceph_pagelist_append(pagelist, as_ctx->lsmctx.context,
fs/crypto/fscrypt_private.h
404
void fscrypt_hkdf_expand(const struct hmac_sha512_key *hkdf, u8 context,
fs/crypto/hkdf.c
72
void fscrypt_hkdf_expand(const struct hmac_sha512_key *hkdf, u8 context,
fs/crypto/hkdf.c
88
hmac_sha512_update(&ctx, &context, 1);
fs/crypto/keysetup.c
279
u8 context, const u8 *info,
fs/crypto/keysetup.c
282
fscrypt_hkdf_expand(&mk->mk_secret.hkdf, context, info, infolen,
fs/exportfs/expfs.c
44
int (*acceptable)(void *context, struct dentry *dentry),
fs/exportfs/expfs.c
442
void *context)
fs/exportfs/expfs.c
45
void *context)
fs/exportfs/expfs.c
490
if (!acceptable(context, result)) {
fs/exportfs/expfs.c
50
if (acceptable(context, result))
fs/exportfs/expfs.c
511
alias = find_acceptable_alias(result, acceptable, context);
fs/exportfs/expfs.c
577
alias = find_acceptable_alias(result, acceptable, context);
fs/exportfs/expfs.c
595
void *context)
fs/exportfs/expfs.c
60
if (dentry != result && acceptable(context, dentry)) {
fs/exportfs/expfs.c
600
acceptable, context);
fs/fhandle.c
199
static int vfs_dentry_acceptable(void *context, struct dentry *dentry)
fs/fhandle.c
201
struct handle_to_path_ctx *ctx = context;
fs/fuse/dir.c
703
memcpy(ptr, lsmctx.context, lsmctx.len);
fs/nfs/filelayout/filelayout.c
211
err = filelayout_async_handle_error(task, hdr->args.context->state,
fs/nfs/filelayout/filelayout.c
272
if (unlikely(test_bit(NFS_CONTEXT_BAD, &hdr->args.context->flags))) {
fs/nfs/filelayout/filelayout.c
289
if (nfs4_set_rw_stateid(&hdr->args.stateid, hdr->args.context,
fs/nfs/filelayout/filelayout.c
321
err = filelayout_async_handle_error(task, hdr->args.context->state,
fs/nfs/filelayout/filelayout.c
370
if (unlikely(test_bit(NFS_CONTEXT_BAD, &hdr->args.context->flags))) {
fs/nfs/filelayout/filelayout.c
385
if (nfs4_set_rw_stateid(&hdr->args.stateid, hdr->args.context,
fs/nfs/flexfilelayout/flexfilelayout.c
1582
hdr->args.context->state,
fs/nfs/flexfilelayout/flexfilelayout.c
1688
if (unlikely(test_bit(NFS_CONTEXT_BAD, &hdr->args.context->flags))) {
fs/nfs/flexfilelayout/flexfilelayout.c
1787
hdr->args.context->state,
fs/nfs/flexfilelayout/flexfilelayout.c
1914
if (unlikely(test_bit(NFS_CONTEXT_BAD, &hdr->args.context->flags))) {
fs/nfs/fs_context.c
1184
memset(data->context, 0, sizeof(data->context));
fs/nfs/fs_context.c
1268
if (data->context[0]){
fs/nfs/fs_context.c
1272
data->context[NFS_MAX_CONTEXT_LEN] = '\0';
fs/nfs/fs_context.c
1273
ret = vfs_parse_fs_string(fc, "context", data->context);
fs/nfs/nfs4proc.c
136
label->label = shim.context;
fs/nfs/nfs4proc.c
146
shim.context = label->label;
fs/nfs/nfs4proc.c
5600
.state = hdr->args.context->state,
fs/nfs/nfs4proc.c
5624
args->context,
fs/nfs/nfs4proc.c
5701
if (nfs4_set_rw_stateid(&hdr->args.stateid, hdr->args.context,
fs/nfs/nfs4proc.c
5705
if (unlikely(test_bit(NFS_CONTEXT_BAD, &hdr->args.context->flags)))
fs/nfs/nfs4proc.c
5719
.state = hdr->args.context->state,
fs/nfs/nfs4proc.c
5745
args->context,
fs/nfs/nfs4trace.h
1654
hdr->args.context->state;
fs/nfs/nfs4trace.h
1726
hdr->args.context->state;
fs/nfs/pagelist.c
657
if (hdr->args.context)
fs/nfs/pagelist.c
658
put_nfs_open_context(hdr->args.context);
fs/nfs/pagelist.c
698
hdr->args.context = get_nfs_open_context(nfs_req_openctx(req));
fs/nfs/pagelist.c
967
&hdr->args.context->nfl,
fs/nfs/pagelist.c
968
hdr->args.context->mode);
fs/nfs/write.c
1620
put_nfs_open_context(data->context);
fs/nfs/write.c
1719
data->context = get_nfs_open_context(ctx);
fs/nfs/write.c
1779
data->args.fh, &data->context->nfl,
fs/nfs/write.c
1780
data->context->mode);
fs/nfsd/nfs4xdr.c
2965
const struct lsm_context *context)
fs/nfsd/nfs4xdr.c
2969
p = xdr_reserve_space(xdr, context->len + 4 + 4 + 4);
fs/nfsd/nfs4xdr.c
2979
p = xdr_encode_opaque(p, context->context, context->len);
fs/nfsd/nfs4xdr.c
2985
struct lsm_context *context)
fs/nfsd/nfs4xdr.c
3151
struct lsm_context context;
fs/nfsd/nfs4xdr.c
3635
return nfsd4_encode_security_label(xdr, args->rqstp, &args->context);
fs/nfsd/nfs4xdr.c
3890
args.context.context = NULL;
fs/nfsd/nfs4xdr.c
3989
&args.context);
fs/nfsd/nfs4xdr.c
4081
if (args.context.context)
fs/nfsd/nfs4xdr.c
4082
security_release_secctx(&args.context);
fs/ocfs2/move_extents.c
1014
context = kzalloc_obj(struct ocfs2_move_extents_context, GFP_NOFS);
fs/ocfs2/move_extents.c
1015
if (!context) {
fs/ocfs2/move_extents.c
1021
context->inode = inode;
fs/ocfs2/move_extents.c
1022
context->file = filp;
fs/ocfs2/move_extents.c
1037
context->range = ⦥
fs/ocfs2/move_extents.c
1057
context->auto_defrag = 1;
fs/ocfs2/move_extents.c
1060
context->partial = 1;
fs/ocfs2/move_extents.c
1074
status = ocfs2_move_extents(context);
fs/ocfs2/move_extents.c
1087
kfree(context);
fs/ocfs2/move_extents.c
114
ret = ocfs2_split_extent(handle, &context->et, path, index,
fs/ocfs2/move_extents.c
115
&replace_rec, context->meta_ac,
fs/ocfs2/move_extents.c
116
&context->dealloc);
fs/ocfs2/move_extents.c
122
context->new_phys_cpos = new_p_cpos;
fs/ocfs2/move_extents.c
132
len, context->meta_ac,
fs/ocfs2/move_extents.c
133
&context->dealloc, 1);
fs/ocfs2/move_extents.c
201
static int ocfs2_defrag_extent(struct ocfs2_move_extents_context *context,
fs/ocfs2/move_extents.c
204
int ret, credits = 0, extra_blocks = 0, partial = context->partial;
fs/ocfs2/move_extents.c
206
struct inode *inode = context->inode;
fs/ocfs2/move_extents.c
216
BUG_ON(!context->refcount_loc);
fs/ocfs2/move_extents.c
218
ret = ocfs2_lock_refcount_tree(osb, context->refcount_loc, 1,
fs/ocfs2/move_extents.c
226
context->refcount_loc,
fs/ocfs2/move_extents.c
237
ret = ocfs2_lock_meta_allocator_move_extents(inode, &context->et,
fs/ocfs2/move_extents.c
239
&context->meta_ac,
fs/ocfs2/move_extents.c
272
ret = ocfs2_reserve_clusters(osb, *len, &context->data_ac);
fs/ocfs2/move_extents.c
285
ret = __ocfs2_claim_clusters(handle, context->data_ac, 1, *len,
fs/ocfs2/move_extents.c
301
context->range->me_flags &= ~OCFS2_MOVE_EXT_FL_COMPLETE;
fs/ocfs2/move_extents.c
311
ret = __ocfs2_move_extent(handle, context, cpos, new_len, phys_cpos,
fs/ocfs2/move_extents.c
323
ret = ocfs2_cow_sync_writeback(inode->i_sb, context->inode, cpos, *len);
fs/ocfs2/move_extents.c
328
if (need_free && context->data_ac) {
fs/ocfs2/move_extents.c
329
struct ocfs2_alloc_context *data_ac = context->data_ac;
fs/ocfs2/move_extents.c
331
if (context->data_ac->ac_which == OCFS2_AC_USE_LOCAL)
fs/ocfs2/move_extents.c
347
if (context->data_ac) {
fs/ocfs2/move_extents.c
348
ocfs2_free_alloc_context(context->data_ac);
fs/ocfs2/move_extents.c
349
context->data_ac = NULL;
fs/ocfs2/move_extents.c
352
if (context->meta_ac) {
fs/ocfs2/move_extents.c
353
ocfs2_free_alloc_context(context->meta_ac);
fs/ocfs2/move_extents.c
354
context->meta_ac = NULL;
fs/ocfs2/move_extents.c
50
struct ocfs2_move_extents_context *context,
fs/ocfs2/move_extents.c
55
struct inode *inode = context->inode;
fs/ocfs2/move_extents.c
568
static int ocfs2_move_extent(struct ocfs2_move_extents_context *context,
fs/ocfs2/move_extents.c
574
struct inode *inode = context->inode;
fs/ocfs2/move_extents.c
583
context->range->me_threshold);
fs/ocfs2/move_extents.c
590
BUG_ON(!context->refcount_loc);
fs/ocfs2/move_extents.c
592
ret = ocfs2_lock_refcount_tree(osb, context->refcount_loc, 1,
fs/ocfs2/move_extents.c
60
u64 ino = ocfs2_metadata_cache_owner(context->et.et_ci);
fs/ocfs2/move_extents.c
600
context->refcount_loc,
fs/ocfs2/move_extents.c
611
ret = ocfs2_lock_meta_allocator_move_extents(inode, &context->et,
fs/ocfs2/move_extents.c
613
&context->meta_ac,
fs/ocfs2/move_extents.c
684
ret = __ocfs2_move_extent(handle, context, cpos, len, phys_cpos,
fs/ocfs2/move_extents.c
710
ret = ocfs2_cow_sync_writeback(inode->i_sb, context->inode, cpos, len);
fs/ocfs2/move_extents.c
727
if (context->meta_ac) {
fs/ocfs2/move_extents.c
728
ocfs2_free_alloc_context(context->meta_ac);
fs/ocfs2/move_extents.c
729
context->meta_ac = NULL;
fs/ocfs2/move_extents.c
76
path = ocfs2_new_path_from_et(&context->et);
fs/ocfs2/move_extents.c
769
struct ocfs2_move_extents_context *context)
fs/ocfs2/move_extents.c
775
struct inode *inode = context->inode;
fs/ocfs2/move_extents.c
777
struct ocfs2_move_extents *range = context->range;
fs/ocfs2/move_extents.c
786
context->refcount_loc = le64_to_cpu(di->i_refcount_loc);
fs/ocfs2/move_extents.c
788
ocfs2_init_dinode_extent_tree(&context->et, INODE_CACHE(inode), di_bh);
fs/ocfs2/move_extents.c
789
ocfs2_init_dealloc_ctxt(&context->dealloc);
fs/ocfs2/move_extents.c
797
do_defrag = context->auto_defrag;
fs/ocfs2/move_extents.c
867
ret = ocfs2_defrag_extent(context, cpos, phys_cpos,
fs/ocfs2/move_extents.c
870
ret = ocfs2_move_extent(context, cpos, phys_cpos,
fs/ocfs2/move_extents.c
887
context->clusters_moved += alloc_size;
fs/ocfs2/move_extents.c
898
context->clusters_moved);
fs/ocfs2/move_extents.c
900
context->new_phys_cpos);
fs/ocfs2/move_extents.c
903
ocfs2_run_deallocs(osb, &context->dealloc);
fs/ocfs2/move_extents.c
908
static int ocfs2_move_extents(struct ocfs2_move_extents_context *context)
fs/ocfs2/move_extents.c
912
struct inode *inode = context->inode;
fs/ocfs2/move_extents.c
942
status = __ocfs2_move_extents_range(di_bh, context);
fs/ocfs2/move_extents.c
995
struct ocfs2_move_extents_context *context;
fs/ocfs2/refcounttree.c
3107
struct ocfs2_cow_context *context,
fs/ocfs2/refcounttree.c
3113
struct ocfs2_caching_info *ci = context->data_et.et_ci;
fs/ocfs2/refcounttree.c
3121
ret = context->cow_duplicate_clusters(handle, context->inode,
fs/ocfs2/refcounttree.c
3129
ret = ocfs2_clear_ext_refcount(handle, &context->data_et,
fs/ocfs2/refcounttree.c
3131
context->meta_ac, &context->dealloc);
fs/ocfs2/refcounttree.c
3158
static int ocfs2_di_get_clusters(struct ocfs2_cow_context *context,
fs/ocfs2/refcounttree.c
3163
return ocfs2_get_clusters(context->inode, v_cluster, p_cluster,
fs/ocfs2/refcounttree.c
3168
struct ocfs2_cow_context *context,
fs/ocfs2/refcounttree.c
3178
struct ocfs2_caching_info *ref_ci = &context->ref_tree->rf_ci;
fs/ocfs2/refcounttree.c
3185
&context->data_et,
fs/ocfs2/refcounttree.c
3187
context->ref_root_bh,
fs/ocfs2/refcounttree.c
3188
&context->meta_ac,
fs/ocfs2/refcounttree.c
3189
&context->data_ac, &credits);
fs/ocfs2/refcounttree.c
3195
if (context->post_refcount)
fs/ocfs2/refcounttree.c
3196
credits += context->post_refcount->credits;
fs/ocfs2/refcounttree.c
3198
credits += context->extra_credits;
fs/ocfs2/refcounttree.c
3209
ret = ocfs2_get_refcount_rec(ref_ci, context->ref_root_bh,
fs/ocfs2/refcounttree.c
3232
&context->data_et,
fs/ocfs2/refcounttree.c
3235
context->meta_ac,
fs/ocfs2/refcounttree.c
3236
&context->dealloc);
fs/ocfs2/refcounttree.c
3245
context->data_ac,
fs/ocfs2/refcounttree.c
3253
ret = ocfs2_replace_clusters(handle, context,
fs/ocfs2/refcounttree.c
3264
context->ref_root_bh,
fs/ocfs2/refcounttree.c
3266
context->meta_ac,
fs/ocfs2/refcounttree.c
3267
&context->dealloc, delete);
fs/ocfs2/refcounttree.c
3281
if (context->post_refcount && context->post_refcount->func) {
fs/ocfs2/refcounttree.c
3282
ret = context->post_refcount->func(context->inode, handle,
fs/ocfs2/refcounttree.c
3283
context->post_refcount->para);
fs/ocfs2/refcounttree.c
3294
if (context->get_clusters == ocfs2_di_get_clusters) {
fs/ocfs2/refcounttree.c
3295
ret = ocfs2_cow_sync_writeback(sb, context->inode, cpos,
fs/ocfs2/refcounttree.c
3305
if (context->data_ac) {
fs/ocfs2/refcounttree.c
3306
ocfs2_free_alloc_context(context->data_ac);
fs/ocfs2/refcounttree.c
3307
context->data_ac = NULL;
fs/ocfs2/refcounttree.c
3309
if (context->meta_ac) {
fs/ocfs2/refcounttree.c
3310
ocfs2_free_alloc_context(context->meta_ac);
fs/ocfs2/refcounttree.c
3311
context->meta_ac = NULL;
fs/ocfs2/refcounttree.c
3318
static int ocfs2_replace_cow(struct ocfs2_cow_context *context)
fs/ocfs2/refcounttree.c
3321
struct inode *inode = context->inode;
fs/ocfs2/refcounttree.c
3322
u32 cow_start = context->cow_start, cow_len = context->cow_len;
fs/ocfs2/refcounttree.c
3332
ocfs2_init_dealloc_ctxt(&context->dealloc);
fs/ocfs2/refcounttree.c
3335
ret = context->get_clusters(context, cow_start, &p_cluster,
fs/ocfs2/refcounttree.c
3347
ret = ocfs2_make_clusters_writable(inode->i_sb, context,
fs/ocfs2/refcounttree.c
3359
if (ocfs2_dealloc_has_cluster(&context->dealloc)) {
fs/ocfs2/refcounttree.c
3361
ocfs2_run_deallocs(osb, &context->dealloc);
fs/ocfs2/refcounttree.c
3382
struct ocfs2_cow_context *context = NULL;
fs/ocfs2/refcounttree.c
3400
context = kzalloc_obj(struct ocfs2_cow_context, GFP_NOFS);
fs/ocfs2/refcounttree.c
3401
if (!context) {
fs/ocfs2/refcounttree.c
3414
context->inode = inode;
fs/ocfs2/refcounttree.c
3415
context->cow_start = cow_start;
fs/ocfs2/refcounttree.c
3416
context->cow_len = cow_len;
fs/ocfs2/refcounttree.c
3417
context->ref_tree = ref_tree;
fs/ocfs2/refcounttree.c
3418
context->ref_root_bh = ref_root_bh;
fs/ocfs2/refcounttree.c
3419
context->cow_duplicate_clusters = ocfs2_duplicate_clusters_by_page;
fs/ocfs2/refcounttree.c
3420
context->get_clusters = ocfs2_di_get_clusters;
fs/ocfs2/refcounttree.c
3422
ocfs2_init_dinode_extent_tree(&context->data_et,
fs/ocfs2/refcounttree.c
3425
ret = ocfs2_replace_cow(context);
fs/ocfs2/refcounttree.c
3439
kfree(context);
fs/ocfs2/refcounttree.c
3483
static int ocfs2_xattr_value_get_clusters(struct ocfs2_cow_context *context,
fs/ocfs2/refcounttree.c
3488
struct inode *inode = context->inode;
fs/ocfs2/refcounttree.c
3489
struct ocfs2_xattr_value_root *xv = context->cow_object;
fs/ocfs2/refcounttree.c
3594
struct ocfs2_cow_context *context = NULL;
fs/ocfs2/refcounttree.c
3609
context = kzalloc_obj(struct ocfs2_cow_context, GFP_NOFS);
fs/ocfs2/refcounttree.c
3610
if (!context) {
fs/ocfs2/refcounttree.c
3616
context->inode = inode;
fs/ocfs2/refcounttree.c
3617
context->cow_start = cow_start;
fs/ocfs2/refcounttree.c
3618
context->cow_len = cow_len;
fs/ocfs2/refcounttree.c
3619
context->ref_tree = ref_tree;
fs/ocfs2/refcounttree.c
3620
context->ref_root_bh = ref_root_bh;
fs/ocfs2/refcounttree.c
3621
context->cow_object = xv;
fs/ocfs2/refcounttree.c
3623
context->cow_duplicate_clusters = ocfs2_duplicate_clusters_by_jbd;
fs/ocfs2/refcounttree.c
3625
context->extra_credits =
fs/ocfs2/refcounttree.c
3627
context->get_clusters = ocfs2_xattr_value_get_clusters;
fs/ocfs2/refcounttree.c
3628
context->post_refcount = post;
fs/ocfs2/refcounttree.c
3630
ocfs2_init_xattr_value_extent_tree(&context->data_et,
fs/ocfs2/refcounttree.c
3633
ret = ocfs2_replace_cow(context);
fs/ocfs2/refcounttree.c
3638
kfree(context);
fs/ocfs2/refcounttree.c
57
int (*get_clusters)(struct ocfs2_cow_context *context,
fs/smb/client/asn1.c
22
int cifs_gssapi_this_mech(void *context, size_t hdrlen,
fs/smb/client/asn1.c
39
int cifs_neg_token_init_mech_type(void *context, size_t hdrlen,
fs/smb/client/asn1.c
43
struct TCP_Server_Info *server = context;
fs/smb/client/smb2transport.c
262
struct kvec context, __u8 *key, unsigned int key_size)
fs/smb/client/smb2transport.c
287
hmac_sha256_update(&hmac_ctx, context.iov_base, context.iov_len);
fs/smb/client/smb2transport.c
303
struct kvec context;
fs/smb/client/smb2transport.c
349
ptriplet->signing.context,
fs/smb/client/smb2transport.c
356
ptriplet->signing.context,
fs/smb/client/smb2transport.c
369
ptriplet->encryption.context,
fs/smb/client/smb2transport.c
375
ptriplet->decryption.context,
fs/smb/client/smb2transport.c
422
d->context.iov_base = "SmbSign";
fs/smb/client/smb2transport.c
423
d->context.iov_len = 8;
fs/smb/client/smb2transport.c
428
d->context.iov_base = "ServerIn ";
fs/smb/client/smb2transport.c
429
d->context.iov_len = 10;
fs/smb/client/smb2transport.c
434
d->context.iov_base = "ServerOut";
fs/smb/client/smb2transport.c
435
d->context.iov_len = 10;
fs/smb/client/smb2transport.c
451
d->context.iov_base = ses->preauth_sha_hash;
fs/smb/client/smb2transport.c
452
d->context.iov_len = 64;
fs/smb/client/smb2transport.c
457
d->context.iov_base = ses->preauth_sha_hash;
fs/smb/client/smb2transport.c
458
d->context.iov_len = 64;
fs/smb/client/smb2transport.c
463
d->context.iov_base = ses->preauth_sha_hash;
fs/smb/client/smb2transport.c
464
d->context.iov_len = 64;
fs/smb/client/smbdirect.c
334
struct smbdirect_socket *sc = id->context;
fs/smb/client/smbdirect.c
487
smbd_qp_async_error_upcall(struct ib_event *event, void *context)
fs/smb/client/smbdirect.c
489
struct smbdirect_socket *sc = context;
fs/smb/server/asn1.c
162
int ksmbd_gssapi_this_mech(void *context, size_t hdrlen, unsigned char tag,
fs/smb/server/asn1.c
179
int ksmbd_neg_token_init_mech_type(void *context, size_t hdrlen,
fs/smb/server/asn1.c
183
struct ksmbd_conn *conn = context;
fs/smb/server/asn1.c
211
static int ksmbd_neg_token_alloc(void *context, size_t hdrlen,
fs/smb/server/asn1.c
215
struct ksmbd_conn *conn = context;
fs/smb/server/asn1.c
229
int ksmbd_neg_token_init_mech_token(void *context, size_t hdrlen,
fs/smb/server/asn1.c
233
return ksmbd_neg_token_alloc(context, hdrlen, tag, value, vlen);
fs/smb/server/asn1.c
236
int ksmbd_neg_token_targ_resp_token(void *context, size_t hdrlen,
fs/smb/server/asn1.c
240
return ksmbd_neg_token_alloc(context, hdrlen, tag, value, vlen);
fs/smb/server/auth.c
537
struct kvec context;
fs/smb/server/auth.c
542
struct kvec label, struct kvec context, __u8 *key,
fs/smb/server/auth.c
557
hmac_sha256_update(&ctx, context.iov_base, context.iov_len);
fs/smb/server/auth.c
586
generate_key(conn, sess, signing->label, signing->context, key,
fs/smb/server/auth.c
604
d.context.iov_base = "SmbSign";
fs/smb/server/auth.c
605
d.context.iov_len = 8;
fs/smb/server/auth.c
624
d.context.iov_base = preauth_sess->Preauth_HashValue;
fs/smb/server/auth.c
626
d.context.iov_base = sess->Preauth_HashValue;
fs/smb/server/auth.c
628
d.context.iov_len = 64;
fs/smb/server/auth.c
644
ptwin->encryption.context, sess->smb3encryptionkey,
fs/smb/server/auth.c
648
ptwin->decryption.context,
fs/smb/server/auth.c
665
d->context.iov_base = "ServerOut";
fs/smb/server/auth.c
666
d->context.iov_len = 10;
fs/smb/server/auth.c
671
d->context.iov_base = "ServerIn ";
fs/smb/server/auth.c
672
d->context.iov_len = 10;
fs/smb/server/auth.c
686
d->context.iov_base = sess->Preauth_HashValue;
fs/smb/server/auth.c
687
d->context.iov_len = 64;
fs/smb/server/auth.c
692
d->context.iov_base = sess->Preauth_HashValue;
fs/smb/server/auth.c
693
d->context.iov_len = 64;
fs/smb/server/smb2pdu.c
2654
struct create_context *context;
fs/smb/server/smb2pdu.c
2661
context = smb2_find_context_vals(req, SMB2_CREATE_SD_BUFFER, 4);
fs/smb/server/smb2pdu.c
2662
if (!context)
fs/smb/server/smb2pdu.c
2664
else if (IS_ERR(context))
fs/smb/server/smb2pdu.c
2665
return PTR_ERR(context);
fs/smb/server/smb2pdu.c
2669
sd_buf = (struct create_sd_buf_req *)context;
fs/smb/server/smb2pdu.c
2670
if (le16_to_cpu(context->DataOffset) +
fs/smb/server/smb2pdu.c
2671
le32_to_cpu(context->DataLength) <
fs/smb/server/smb2pdu.c
2720
struct create_context *context;
fs/smb/server/smb2pdu.c
2729
context = smb2_find_context_vals(req, durable_arr[dh_idx - 1], 4);
fs/smb/server/smb2pdu.c
2730
if (IS_ERR(context)) {
fs/smb/server/smb2pdu.c
2731
err = PTR_ERR(context);
fs/smb/server/smb2pdu.c
2734
if (!context)
fs/smb/server/smb2pdu.c
2748
if (le16_to_cpu(context->DataOffset) +
fs/smb/server/smb2pdu.c
2749
le32_to_cpu(context->DataLength) <
fs/smb/server/smb2pdu.c
2755
recon_v2 = (struct create_durable_handle_reconnect_v2 *)context;
fs/smb/server/smb2pdu.c
2788
if (le16_to_cpu(context->DataOffset) +
fs/smb/server/smb2pdu.c
2789
le32_to_cpu(context->DataLength) <
fs/smb/server/smb2pdu.c
2795
recon = (create_durable_reconn_t *)context;
fs/smb/server/smb2pdu.c
2820
if (le16_to_cpu(context->DataOffset) +
fs/smb/server/smb2pdu.c
2821
le32_to_cpu(context->DataLength) <
fs/smb/server/smb2pdu.c
2828
(struct create_durable_req_v2 *)context;
fs/smb/server/smb2pdu.c
2902
struct create_context *context;
fs/smb/server/smb2pdu.c
2941
context = smb2_find_context_vals(req, SMB2_CREATE_TAG_POSIX, 16);
fs/smb/server/smb2pdu.c
2942
if (IS_ERR(context)) {
fs/smb/server/smb2pdu.c
2943
rc = PTR_ERR(context);
fs/smb/server/smb2pdu.c
2945
} else if (context) {
fs/smb/server/smb2pdu.c
2946
struct create_posix *posix = (struct create_posix *)context;
fs/smb/server/smb2pdu.c
2948
if (le16_to_cpu(context->DataOffset) +
fs/smb/server/smb2pdu.c
2949
le32_to_cpu(context->DataLength) <
fs/smb/server/smb2pdu.c
3107
context = smb2_find_context_vals(req, SMB2_CREATE_EA_BUFFER, 4);
fs/smb/server/smb2pdu.c
3108
if (IS_ERR(context)) {
fs/smb/server/smb2pdu.c
3109
rc = PTR_ERR(context);
fs/smb/server/smb2pdu.c
3111
} else if (context) {
fs/smb/server/smb2pdu.c
3112
ea_buf = (struct create_ea_buf_req *)context;
fs/smb/server/smb2pdu.c
3113
if (le16_to_cpu(context->DataOffset) +
fs/smb/server/smb2pdu.c
3114
le32_to_cpu(context->DataLength) <
fs/smb/server/smb2pdu.c
3126
context = smb2_find_context_vals(req,
fs/smb/server/smb2pdu.c
3128
if (IS_ERR(context)) {
fs/smb/server/smb2pdu.c
3129
rc = PTR_ERR(context);
fs/smb/server/smb2pdu.c
3131
} else if (context) {
fs/smb/server/smb2pdu.c
3137
context = smb2_find_context_vals(req,
fs/smb/server/smb2pdu.c
3139
if (IS_ERR(context)) {
fs/smb/server/smb2pdu.c
3140
rc = PTR_ERR(context);
fs/smb/server/smb2pdu.c
3142
} else if (context) {
fs/smb/server/smb2pdu.c
3577
context = smb2_find_context_vals(req, SMB2_CREATE_QUERY_ON_DISK_ID, 4);
fs/smb/server/smb2pdu.c
3578
if (IS_ERR(context)) {
fs/smb/server/smb2pdu.c
3579
rc = PTR_ERR(context);
fs/smb/server/smb2pdu.c
3581
} else if (context) {
fs/smb/server/smb2pdu.c
3587
context = smb2_find_context_vals(req, SMB2_CREATE_AAPL, 4);
fs/smb/server/smb2pdu.c
3588
if (IS_ERR(context)) {
fs/smb/server/smb2pdu.c
3589
rc = PTR_ERR(context);
fs/smb/server/smb2pdu.c
3591
} else if (context)
fs/smb/server/transport_rdma.c
1972
struct smbdirect_socket *sc = cm_id->context;
fs/smb/server/transport_rdma.c
2027
static void smb_direct_qpair_handler(struct ib_event *event, void *context)
fs/smb/server/transport_rdma.c
2029
struct smbdirect_socket *sc = context;
fs/smb/server/transport_rdma.c
2659
struct smb_direct_listener *listener = new_cm_id->context;
fs/smb/server/transport_rdma.c
438
cm_id->context = sc;
fs/xfs/libxfs/xfs_attr.h
50
typedef void (*put_listent_func_t)(struct xfs_attr_list_context *context,
fs/xfs/libxfs/xfs_attr_leaf.h
85
struct xfs_attr_list_context *context);
fs/xfs/xfs_attr_list.c
101
if (context->seen_enough)
fs/xfs/xfs_attr_list.c
105
trace_xfs_attr_list_sf_all(context);
fs/xfs/xfs_attr_list.c
110
if (context->bufsize == 0)
fs/xfs/xfs_attr_list.c
132
context->dp->i_mount, sfe,
fs/xfs/xfs_attr_list.c
187
if (XFS_IS_CORRUPT(context->dp->i_mount,
fs/xfs/xfs_attr_list.c
190
xfs_dirattr_mark_sick(context->dp, XFS_ATTR_FORK);
fs/xfs/xfs_attr_list.c
194
context->put_listent(context,
fs/xfs/xfs_attr_list.c
200
if (context->seen_enough)
fs/xfs/xfs_attr_list.c
215
struct xfs_attr_list_context *context,
fs/xfs/xfs_attr_list.c
222
struct xfs_inode *dp = context->dp;
fs/xfs/xfs_attr_list.c
224
struct xfs_trans *tp = context->tp;
fs/xfs/xfs_attr_list.c
273
trace_xfs_attr_list_node_descend(context,
fs/xfs/xfs_attr_list.c
312
struct xfs_attr_list_context *context)
fs/xfs/xfs_attr_list.c
314
struct xfs_attrlist_cursor_kern *cursor = &context->cursor;
fs/xfs/xfs_attr_list.c
319
struct xfs_inode *dp = context->dp;
fs/xfs/xfs_attr_list.c
324
trace_xfs_attr_node_list(context);
fs/xfs/xfs_attr_list.c
337
error = xfs_da3_node_read(context->tp, dp, cursor->blkno, &bp,
fs/xfs/xfs_attr_list.c
350
trace_xfs_attr_list_wrong_blk(context);
fs/xfs/xfs_attr_list.c
356
xfs_trans_brelse(context->tp, bp);
fs/xfs/xfs_attr_list.c
365
xfs_trans_brelse(context->tp, bp);
fs/xfs/xfs_attr_list.c
375
trace_xfs_attr_list_wrong_blk(context);
fs/xfs/xfs_attr_list.c
376
xfs_trans_brelse(context->tp, bp);
fs/xfs/xfs_attr_list.c
380
trace_xfs_attr_list_wrong_blk(context);
fs/xfs/xfs_attr_list.c
381
xfs_trans_brelse(context->tp, bp);
fs/xfs/xfs_attr_list.c
386
trace_xfs_attr_list_wrong_blk(context);
fs/xfs/xfs_attr_list.c
387
xfs_trans_brelse(context->tp, bp);
fs/xfs/xfs_attr_list.c
399
error = xfs_attr_node_list_lookup(context, cursor, &bp);
fs/xfs/xfs_attr_list.c
412
error = xfs_attr3_leaf_list_int(bp, context);
fs/xfs/xfs_attr_list.c
416
if (context->seen_enough || leafhdr.forw == 0)
fs/xfs/xfs_attr_list.c
419
xfs_trans_brelse(context->tp, bp);
fs/xfs/xfs_attr_list.c
420
error = xfs_attr3_leaf_read(context->tp, dp, dp->i_ino,
fs/xfs/xfs_attr_list.c
425
xfs_trans_brelse(context->tp, bp);
fs/xfs/xfs_attr_list.c
435
struct xfs_attr_list_context *context)
fs/xfs/xfs_attr_list.c
437
struct xfs_attrlist_cursor_kern *cursor = &context->cursor;
fs/xfs/xfs_attr_list.c
443
struct xfs_mount *mp = context->dp->i_mount;
fs/xfs/xfs_attr_list.c
445
trace_xfs_attr_list_leaf(context);
fs/xfs/xfs_attr_list.c
456
if (context->resynch) {
fs/xfs/xfs_attr_list.c
460
if (cursor->offset == context->dupcnt) {
fs/xfs/xfs_attr_list.c
461
context->dupcnt = 0;
fs/xfs/xfs_attr_list.c
464
context->dupcnt++;
fs/xfs/xfs_attr_list.c
467
context->dupcnt = 0;
fs/xfs/xfs_attr_list.c
472
trace_xfs_attr_list_notfound(context);
fs/xfs/xfs_attr_list.c
479
context->resynch = 0;
fs/xfs/xfs_attr_list.c
495
!context->allow_incomplete)
fs/xfs/xfs_attr_list.c
516
if (XFS_IS_CORRUPT(context->dp->i_mount,
fs/xfs/xfs_attr_list.c
519
xfs_dirattr_mark_sick(context->dp, XFS_ATTR_FORK);
fs/xfs/xfs_attr_list.c
522
context->put_listent(context, entry->flags,
fs/xfs/xfs_attr_list.c
524
if (context->seen_enough)
fs/xfs/xfs_attr_list.c
528
trace_xfs_attr_list_leaf_end(context);
fs/xfs/xfs_attr_list.c
537
struct xfs_attr_list_context *context)
fs/xfs/xfs_attr_list.c
542
trace_xfs_attr_leaf_list(context);
fs/xfs/xfs_attr_list.c
544
context->cursor.blkno = 0;
fs/xfs/xfs_attr_list.c
545
error = xfs_attr3_leaf_read(context->tp, context->dp,
fs/xfs/xfs_attr_list.c
546
context->dp->i_ino, 0, &bp);
fs/xfs/xfs_attr_list.c
55
struct xfs_attr_list_context *context)
fs/xfs/xfs_attr_list.c
550
error = xfs_attr3_leaf_list_int(bp, context);
fs/xfs/xfs_attr_list.c
551
xfs_trans_brelse(context->tp, bp);
fs/xfs/xfs_attr_list.c
557
struct xfs_attr_list_context *context)
fs/xfs/xfs_attr_list.c
559
struct xfs_inode *dp = context->dp;
fs/xfs/xfs_attr_list.c
57
struct xfs_attrlist_cursor_kern *cursor = &context->cursor;
fs/xfs/xfs_attr_list.c
570
return xfs_attr_shortform_list(context);
fs/xfs/xfs_attr_list.c
578
return xfs_attr_leaf_list(context);
fs/xfs/xfs_attr_list.c
579
return xfs_attr_node_list(context);
fs/xfs/xfs_attr_list.c
58
struct xfs_inode *dp = context->dp;
fs/xfs/xfs_attr_list.c
584
struct xfs_attr_list_context *context)
fs/xfs/xfs_attr_list.c
586
struct xfs_inode *dp = context->dp;
fs/xfs/xfs_attr_list.c
596
error = xfs_attr_list_ilocked(context);
fs/xfs/xfs_attr_list.c
69
trace_xfs_attr_list_sf(context);
fs/xfs/xfs_attr_list.c
80
if (context->bufsize == 0 ||
fs/xfs/xfs_attr_list.c
82
(dp->i_af.if_bytes + sf->count * 16) < context->bufsize)) {
fs/xfs/xfs_attr_list.c
84
if (XFS_IS_CORRUPT(context->dp->i_mount,
fs/xfs/xfs_attr_list.c
88
xfs_dirattr_mark_sick(context->dp, XFS_ATTR_FORK);
fs/xfs/xfs_attr_list.c
91
context->put_listent(context,
fs/xfs/xfs_handle.c
148
void *context,
fs/xfs/xfs_handle.c
329
struct xfs_attr_list_context *context,
fs/xfs/xfs_handle.c
336
struct xfs_attrlist *alist = context->buffer;
fs/xfs/xfs_handle.c
340
ASSERT(!context->seen_enough);
fs/xfs/xfs_handle.c
341
ASSERT(context->count >= 0);
fs/xfs/xfs_handle.c
342
ASSERT(context->count < (ATTR_MAX_VALUELEN/8));
fs/xfs/xfs_handle.c
343
ASSERT(context->firstu >= sizeof(*alist));
fs/xfs/xfs_handle.c
344
ASSERT(context->firstu <= context->bufsize);
fs/xfs/xfs_handle.c
349
if (context->attr_filter != (flags & XFS_ATTR_NSP_ONDISK_MASK))
fs/xfs/xfs_handle.c
353
context->count * sizeof(alist->al_offset[0]);
fs/xfs/xfs_handle.c
356
context->firstu -= round_up(offsetof(struct xfs_attrlist_ent, a_name) +
fs/xfs/xfs_handle.c
358
if (context->firstu < arraytop) {
fs/xfs/xfs_handle.c
359
trace_xfs_attr_list_full(context);
fs/xfs/xfs_handle.c
361
context->seen_enough = 1;
fs/xfs/xfs_handle.c
365
aep = context->buffer + context->firstu;
fs/xfs/xfs_handle.c
369
alist->al_offset[context->count++] = context->firstu;
fs/xfs/xfs_handle.c
370
alist->al_count = context->count;
fs/xfs/xfs_handle.c
371
trace_xfs_attr_list_add(context);
fs/xfs/xfs_handle.c
407
struct xfs_attr_list_context context = { };
fs/xfs/xfs_handle.c
427
if (copy_from_user(&context.cursor, ucursor, sizeof(context.cursor)))
fs/xfs/xfs_handle.c
429
if (context.cursor.pad1 || context.cursor.pad2)
fs/xfs/xfs_handle.c
431
if (!context.cursor.initted &&
fs/xfs/xfs_handle.c
432
(context.cursor.hashval || context.cursor.blkno ||
fs/xfs/xfs_handle.c
433
context.cursor.offset))
fs/xfs/xfs_handle.c
443
context.dp = dp;
fs/xfs/xfs_handle.c
444
context.resynch = 1;
fs/xfs/xfs_handle.c
445
context.attr_filter = xfs_attr_filter(flags);
fs/xfs/xfs_handle.c
446
context.bufsize = round_down(bufsize, sizeof(uint32_t));
fs/xfs/xfs_handle.c
447
context.buffer = buffer;
fs/xfs/xfs_handle.c
448
context.firstu = context.bufsize;
fs/xfs/xfs_handle.c
449
context.put_listent = xfs_ioc_attr_put_listent;
fs/xfs/xfs_handle.c
451
alist = context.buffer;
fs/xfs/xfs_handle.c
454
alist->al_offset[0] = context.bufsize;
fs/xfs/xfs_handle.c
456
error = xfs_attr_list(&context);
fs/xfs/xfs_handle.c
461
copy_to_user(ucursor, &context.cursor, sizeof(context.cursor)))
fs/xfs/xfs_handle.c
657
struct xfs_attr_list_context context;
fs/xfs/xfs_handle.c
682
struct xfs_attr_list_context *context,
fs/xfs/xfs_handle.c
690
container_of(context, struct xfs_getparents_ctx, context);
fs/xfs/xfs_handle.c
691
struct xfs_inode *ip = context->dp;
fs/xfs/xfs_handle.c
694
struct xfs_getparents_rec *gpr = gpx->krecords + context->firstu;
fs/xfs/xfs_handle.c
708
context->seen_enough = -EFSCORRUPTED;
fs/xfs/xfs_handle.c
717
if (context->firstu > context->bufsize - reclen) {
fs/xfs/xfs_handle.c
718
context->seen_enough = 1;
fs/xfs/xfs_handle.c
728
trace_xfs_getparents_put_listent(ip, gp, context, gpr);
fs/xfs/xfs_handle.c
730
context->firstu += reclen;
fs/xfs/xfs_handle.c
748
trace_xfs_getparents_expand_lastrec(gpx->ip, gp, &gpx->context, gpr);
fs/xfs/xfs_handle.c
782
gpx->context.dp = ip;
fs/xfs/xfs_handle.c
783
gpx->context.resynch = 1;
fs/xfs/xfs_handle.c
784
gpx->context.put_listent = xfs_getparents_put_listent;
fs/xfs/xfs_handle.c
785
gpx->context.bufsize = bufsize;
fs/xfs/xfs_handle.c
787
gpx->context.firstu = 0;
fs/xfs/xfs_handle.c
790
memcpy(&gpx->context.cursor, &gp->gp_cursor,
fs/xfs/xfs_handle.c
795
trace_xfs_getparents_begin(ip, gp, &gpx->context.cursor);
fs/xfs/xfs_handle.c
797
error = xfs_attr_list(&gpx->context);
fs/xfs/xfs_handle.c
800
if (gpx->context.seen_enough < 0) {
fs/xfs/xfs_handle.c
801
error = gpx->context.seen_enough;
fs/xfs/xfs_handle.c
807
memcpy(&gp->gp_cursor, &gpx->context.cursor,
fs/xfs/xfs_handle.c
814
if (gpx->context.seen_enough == 0) {
fs/xfs/xfs_handle.c
830
trace_xfs_getparents_end(ip, gp, &gpx->context.cursor);
fs/xfs/xfs_handle.c
832
ASSERT(gpx->context.firstu <= gpx->gph.gph_request.gp_bufsize);
fs/xfs/xfs_handle.c
836
gpx->krecords, gpx->context.firstu))
fs/xfs/xfs_trace.h
5645
const struct xfs_attr_list_context *context,
fs/xfs/xfs_trace.h
5647
TP_ARGS(ip, ppi, context, pptr),
fs/xfs/xfs_trace.h
5661
__entry->firstu = context->firstu;
fs/xfs/xfs_trace.h
5681
const struct xfs_attr_list_context *context, \
fs/xfs/xfs_trace.h
5683
TP_ARGS(ip, ppi, context, pptr))
fs/xfs/xfs_xattr.c
222
struct xfs_attr_list_context *context,
fs/xfs/xfs_xattr.c
231
if (context->count < 0 || context->seen_enough)
fs/xfs/xfs_xattr.c
234
if (!context->buffer)
fs/xfs/xfs_xattr.c
237
arraytop = context->count + prefix_len + namelen + 1;
fs/xfs/xfs_xattr.c
238
if (arraytop > context->firstu) {
fs/xfs/xfs_xattr.c
239
context->count = -1; /* insufficient space */
fs/xfs/xfs_xattr.c
240
context->seen_enough = 1;
fs/xfs/xfs_xattr.c
243
offset = context->buffer + context->count;
fs/xfs/xfs_xattr.c
251
context->count += prefix_len + namelen + 1;
fs/xfs/xfs_xattr.c
257
struct xfs_attr_list_context *context,
fs/xfs/xfs_xattr.c
267
ASSERT(context->count >= 0);
fs/xfs/xfs_xattr.c
279
context, XATTR_SYSTEM_PREFIX,
fs/xfs/xfs_xattr.c
287
context, XATTR_SYSTEM_PREFIX,
fs/xfs/xfs_xattr.c
311
__xfs_xattr_put_listent(context, prefix, prefix_len, name,
fs/xfs/xfs_xattr.c
322
struct xfs_attr_list_context context;
fs/xfs/xfs_xattr.c
332
memset(&context, 0, sizeof(context));
fs/xfs/xfs_xattr.c
333
context.dp = XFS_I(inode);
fs/xfs/xfs_xattr.c
334
context.resynch = 1;
fs/xfs/xfs_xattr.c
335
context.bufsize = size;
fs/xfs/xfs_xattr.c
336
context.buffer = size ? data : NULL;
fs/xfs/xfs_xattr.c
337
context.firstu = context.bufsize;
fs/xfs/xfs_xattr.c
338
context.put_listent = xfs_xattr_put_listent;
fs/xfs/xfs_xattr.c
340
error = xfs_attr_list(&context);
fs/xfs/xfs_xattr.c
343
if (context.count < 0)
fs/xfs/xfs_xattr.c
346
return context.count;
include/acpi/acpi_bus.h
338
void (*func)(struct acpi_device_wakeup_context *context);
include/acpi/acpi_bus.h
348
struct acpi_device_wakeup_context context;
include/acpi/acpi_bus.h
624
acpi_notify_handler handler, void *context);
include/acpi/acpi_bus.h
808
void (*func)(struct acpi_device_wakeup_context *context));
include/acpi/acpi_bus.h
819
void (*func)(struct acpi_device_wakeup_context *context))
include/acpi/acpiosxf.h
229
void *context);
include/acpi/acpiosxf.h
248
acpi_osd_exec_callback function, void *context);
include/acpi/acpixf.h
507
handler, void *context))
include/acpi/acpixf.h
523
void *context,
include/acpi/acpixf.h
528
void *context,
include/acpi/acpixf.h
600
void *context))
include/acpi/acpixf.h
607
void *context))
include/acpi/acpixf.h
614
*context))
include/acpi/acpixf.h
626
void *context))
include/acpi/acpixf.h
634
void *context))
include/acpi/acpixf.h
645
void *context))
include/acpi/acpixf.h
660
void *context))
include/acpi/acpixf.h
666
void *context))
include/acpi/acpixf.h
798
void *context);
include/acpi/acpixf.h
824
void *context))
include/acpi/acpixf.h
828
user_function, void *context))
include/acpi/actypes.h
1035
(ACPI_SYSTEM_XFACE * acpi_osd_handler) (void *context);
include/acpi/actypes.h
1038
(ACPI_SYSTEM_XFACE * acpi_osd_exec_callback) (void *context);
include/acpi/actypes.h
1044
u32 (*acpi_sci_handler) (void *context);
include/acpi/actypes.h
1049
u32 event_number, void *context);
include/acpi/actypes.h
1055
u32(*acpi_event_handler) (void *context);
include/acpi/actypes.h
1058
u32 (*acpi_gpe_handler) (acpi_handle gpe_device, u32 gpe_number, void *context);
include/acpi/actypes.h
1061
void (*acpi_notify_handler) (acpi_handle device, u32 value, void *context);
include/acpi/actypes.h
1075
u32 aml_offset, void *context);
include/acpi/actypes.h
1080
acpi_status (*acpi_table_handler) (u32 event, void *table, void *context);
include/acpi/actypes.h
1137
void *context, void **return_value);
include/drm/drm_edid.h
476
int (*read_block)(void *context, u8 *buf, unsigned int block, size_t len),
include/drm/drm_edid.h
477
void *context);
include/hyperv/hvgdk_mini.h
1242
u64 context;
include/kunit/resource.h
241
void *context)
include/kunit/resource.h
252
ret = __kunit_add_resource(test, init, free, res, context);
include/kunit/resource.h
284
void *context)
include/kunit/resource.h
293
if (!__kunit_add_resource(test, init, free, res, context))
include/kunit/try-catch.h
51
void *context;
include/kunit/try-catch.h
54
void kunit_try_catch_run(struct kunit_try_catch *try_catch, void *context);
include/linux/acpi.h
1085
bool (*wakeup)(void *context), void *context)
include/linux/acpi.h
1091
bool (*wakeup)(void *context), void *context) { }
include/linux/acpi.h
1094
static inline u32 acpi_osc_ctx_get_pci_control(struct acpi_osc_context *context)
include/linux/acpi.h
1099
static inline u32 acpi_osc_ctx_get_cxl_control(struct acpi_osc_context *context)
include/linux/acpi.h
142
int (*create_thread)(acpi_osd_exec_callback function, void *context);
include/linux/acpi.h
160
int acpi_debugger_create_thread(acpi_osd_exec_callback function, void *context);
include/linux/acpi.h
182
void *context)
include/linux/acpi.h
406
typedef void (*wmi_notify_handler) (union acpi_object *data, void *context);
include/linux/acpi.h
552
int wake_irq, bool (*wakeup)(void *context), void *context);
include/linux/acpi.h
554
bool (*wakeup)(void *context), void *context);
include/linux/acpi.h
563
acpi_status acpi_run_osc(acpi_handle handle, struct acpi_osc_context *context);
include/linux/acpi.h
646
static inline u32 acpi_osc_ctx_get_pci_control(struct acpi_osc_context *context)
include/linux/acpi.h
648
u32 *ret = context->ret.pointer;
include/linux/acpi.h
653
static inline u32 acpi_osc_ctx_get_cxl_control(struct acpi_osc_context *context)
include/linux/acpi.h
655
u32 *ret = context->ret.pointer;
include/linux/asn1_ber_bytecode.h
16
typedef int (*asn1_action_t)(void *context,
include/linux/asn1_decoder.h
17
void *context,
include/linux/clk/ti.h
179
u32 context;
include/linux/comedi/comedi_8254.h
96
unsigned long context;
include/linux/comedi/comedi_8255.h
49
int data, unsigned long context),
include/linux/comedi/comedi_8255.h
50
unsigned long context);
include/linux/comedi/comedi_pci.h
34
unsigned long context);
include/linux/comedi/comedi_usb.h
19
struct comedi_driver *driver, unsigned long context);
include/linux/comedi/comedidev.h
1026
unsigned long context),
include/linux/comedi/comedidev.h
1027
unsigned long context);
include/linux/comedi/comedidev.h
1036
struct comedi_driver *driver, unsigned long context);
include/linux/comedi/comedidev.h
443
int (*auto_attach)(struct comedi_device *dev, unsigned long context);
include/linux/comedi/comedidev.h
992
struct comedi_insn *insn, unsigned long context),
include/linux/comedi/comedidev.h
993
unsigned long context);
include/linux/damon.h
645
void (*init)(struct damon_ctx *context);
include/linux/damon.h
646
void (*update)(struct damon_ctx *context);
include/linux/damon.h
647
void (*prepare_access_checks)(struct damon_ctx *context);
include/linux/damon.h
648
unsigned int (*check_accesses)(struct damon_ctx *context);
include/linux/damon.h
649
int (*get_scheme_score)(struct damon_ctx *context,
include/linux/damon.h
652
unsigned long (*apply_scheme)(struct damon_ctx *context,
include/linux/dm-dirty-log.h
26
void *context;
include/linux/dm-io.h
30
typedef void (*io_notify_fn)(unsigned int long error, void *context);
include/linux/dm-io.h
54
void *context; /* Passed to callback */
include/linux/dm-kcopyd.h
65
void *context);
include/linux/dm-kcopyd.h
69
unsigned int flags, dm_kcopyd_notify_fn fn, void *context);
include/linux/dm-kcopyd.h
83
dm_kcopyd_notify_fn fn, void *context);
include/linux/dm-kcopyd.h
88
unsigned int flags, dm_kcopyd_notify_fn fn, void *context);
include/linux/dm-region-hash.h
39
void *context, void (*dispatch_bios)(void *context,
include/linux/dm-region-hash.h
41
void (*wakeup_workers)(void *context),
include/linux/dm-region-hash.h
42
void (*wakeup_all_recovery_waiters)(void *context),
include/linux/dma-fence-array.h
85
u64 context, unsigned seqno,
include/linux/dma-fence-array.h
90
u64 context, unsigned seqno,
include/linux/dma-fence-array.h
93
bool dma_fence_match_context(struct dma_fence *fence, u64 context);
include/linux/dma-fence.h
257
spinlock_t *lock, u64 context, u64 seqno);
include/linux/dma-fence.h
260
spinlock_t *lock, u64 context, u64 seqno);
include/linux/dma-fence.h
515
if (WARN_ON(f1->context != f2->context))
include/linux/dma-fence.h
548
if (WARN_ON(f1->context != f2->context))
include/linux/dma-fence.h
93
u64 context;
include/linux/dma-resv.h
472
void dma_resv_replace_fences(struct dma_resv *obj, uint64_t context,
include/linux/dmaengine.h
936
unsigned long flags, void *context);
include/linux/efi.h
1232
u64 param_buffer_addr, void *context);
include/linux/exportfs.h
390
void *context);
include/linux/exportfs.h
393
void *context);
include/linux/firewire.h
538
typedef void (*fw_iso_callback_t)(struct fw_iso_context *context,
include/linux/firewire.h
541
typedef void (*fw_iso_mc_callback_t)(struct fw_iso_context *context,
include/linux/firmware.h
103
struct device *device, gfp_t gfp, void *context,
include/linux/firmware.h
104
void (*cont)(const struct firmware *fw, void *context));
include/linux/firmware.h
111
const char *name, struct device *device, gfp_t gfp, void *context,
include/linux/firmware.h
112
void (*cont)(const struct firmware *fw, void *context));
include/linux/firmware.h
132
struct device *device, gfp_t gfp, void *context,
include/linux/firmware.h
133
void (*cont)(const struct firmware *fw, void *context))
include/linux/firmware.h
154
const char *name, struct device *device, gfp_t gfp, void *context,
include/linux/firmware.h
155
void (*cont)(const struct firmware *fw, void *context))
include/linux/fsl/bestcomm/bestcomm_priv.h
59
u32 context;
include/linux/hsi/hsi.h
189
void *context;
include/linux/hw_breakpoint.h
108
void *context,
include/linux/hw_breakpoint.h
120
void *context,
include/linux/hw_breakpoint.h
125
void *context) { return NULL; }
include/linux/hw_breakpoint.h
60
void *context,
include/linux/hw_breakpoint.h
76
void *context,
include/linux/hw_breakpoint.h
82
void *context);
include/linux/hyperv.h
1168
void (*onchannel_callback)(void *context),
include/linux/hyperv.h
1169
void *context);
include/linux/hyperv.h
1177
void (*onchannel_callback)(void *context),
include/linux/hyperv.h
1178
void *context);
include/linux/hyperv.h
1608
void (*callback)(void *context);
include/linux/hyperv.h
1735
int hyperv_reg_block_invalidate(struct pci_dev *dev, void *context,
include/linux/hyperv.h
1736
void (*block_invalidate)(void *context,
include/linux/hyperv.h
1744
int (*reg_blk_invalidate)(struct pci_dev *dev, void *context,
include/linux/hyperv.h
1745
void (*block_invalidate)(void *context,
include/linux/hyperv.h
836
void (*onchannel_callback)(void *context);
include/linux/i8042.h
71
void *context);
include/linux/i8042.h
78
int i8042_install_filter(i8042_filter_t filter, void *context);
include/linux/i8042.h
96
static inline int i8042_install_filter(i8042_filter_t filter, void *context)
include/linux/mfd/gsc.h
61
int gsc_read(void *context, unsigned int reg, unsigned int *val);
include/linux/mfd/gsc.h
62
int gsc_write(void *context, unsigned int reg, unsigned int val);
include/linux/mlx4/qp.h
480
struct mlx4_qp_context *context, enum mlx4_qp_optpar optpar,
include/linux/mlx4/qp.h
484
struct mlx4_qp_context *context);
include/linux/mlx4/qp.h
487
struct mlx4_qp_context *context,
include/linux/mlx5/driver.h
824
typedef void (*mlx5_cmd_cbk_t)(int status, void *context);
include/linux/mlx5/driver.h
839
void *context;
include/linux/mlx5/driver.h
978
typedef void (*mlx5_async_cbk_t)(int status, struct mlx5_async_work *context);
include/linux/mm_types.h
1271
mm_context_t context;
include/linux/mod_devicetable.h
851
const void *context;
include/linux/most.h
177
void *context;
include/linux/nfs_xdr.h
1721
struct nfs_open_context *context;
include/linux/nfs_xdr.h
674
struct nfs_open_context *context;
include/linux/objpool.h
122
gfp_t gfp, void *context, objpool_init_obj_cb objinit,
include/linux/objpool.h
67
typedef int (*objpool_init_obj_cb)(void *obj, void *context);
include/linux/objpool.h
70
typedef int (*objpool_fini_cb)(struct objpool_head *head, void *context);
include/linux/objpool.h
95
void *context;
include/linux/perf_event.h
1255
void *context);
include/linux/pldmfw.h
141
bool pldmfw_op_pci_match_record(struct pldmfw *context, struct pldmfw_record *record);
include/linux/pldmfw.h
163
bool (*match_record)(struct pldmfw *context, struct pldmfw_record *record);
include/linux/pldmfw.h
164
int (*send_package_data)(struct pldmfw *context, const u8 *data, u16 length);
include/linux/pldmfw.h
165
int (*send_component_table)(struct pldmfw *context, struct pldmfw_component *component,
include/linux/pldmfw.h
167
int (*flash_component)(struct pldmfw *context, struct pldmfw_component *component);
include/linux/pldmfw.h
168
int (*finalize_update)(struct pldmfw *context);
include/linux/pldmfw.h
171
int pldmfw_flash_image(struct pldmfw *context, const struct firmware *fw);
include/linux/qed/qed_iscsi_if.h
12
typedef int (*iscsi_event_cb_t) (void *context,
include/linux/qed/qed_nvmetcp_if.h
16
typedef int (*nvmetcp_event_cb_t) (void *context,
include/linux/qed/qed_nvmetcp_if.h
97
void *context; /* Output parameter - set/filled by the HSI function */
include/linux/qed/qed_rdma_if.h
45
void *context;
include/linux/qed/qed_rdma_if.h
46
void (*affiliated_event)(void *context, u8 fw_event_code,
include/linux/qed/qed_rdma_if.h
48
void (*unaffiliated_event)(void *context, u8 event_code);
include/linux/qed/qed_rdma_if.h
499
typedef int (*iwarp_event_handler) (void *context,
include/linux/raspberrypi/vchiq_core.h
601
ssize_t (*copy_callback)(void *context, void *dest,
include/linux/raspberrypi/vchiq_core.h
603
void *context,
include/linux/regmap.h
425
int (*reg_read)(void *context, unsigned int reg, unsigned int *val);
include/linux/regmap.h
426
int (*reg_write)(void *context, unsigned int reg, unsigned int val);
include/linux/regmap.h
427
int (*reg_update_bits)(void *context, unsigned int reg,
include/linux/regmap.h
430
int (*read)(void *context, const void *reg_buf, size_t reg_size,
include/linux/regmap.h
432
int (*write)(void *context, const void *data, size_t count);
include/linux/regmap.h
548
typedef int (*regmap_hw_write)(void *context, const void *data,
include/linux/regmap.h
550
typedef int (*regmap_hw_gather_write)(void *context,
include/linux/regmap.h
553
typedef int (*regmap_hw_async_write)(void *context,
include/linux/regmap.h
557
typedef int (*regmap_hw_read)(void *context,
include/linux/regmap.h
560
typedef int (*regmap_hw_reg_read)(void *context, unsigned int reg,
include/linux/regmap.h
562
typedef int (*regmap_hw_reg_noinc_read)(void *context, unsigned int reg,
include/linux/regmap.h
564
typedef int (*regmap_hw_reg_write)(void *context, unsigned int reg,
include/linux/regmap.h
566
typedef int (*regmap_hw_reg_noinc_write)(void *context, unsigned int reg,
include/linux/regmap.h
568
typedef int (*regmap_hw_reg_update_bits)(void *context, unsigned int reg,
include/linux/regmap.h
571
typedef void (*regmap_hw_free_context)(void *context);
include/linux/security.h
233
char *context; /* Provided by the module */
include/linux/spi/eeprom.h
34
void *context;
include/linux/spi/spi.h
1217
void (*complete)(void *context);
include/linux/spi/spi.h
1218
void *context;
include/linux/ssbi.h
16
ssbi_reg_read(void *context, unsigned int reg, unsigned int *val)
include/linux/ssbi.h
21
ret = ssbi_read(context, reg, &v, 1);
include/linux/ssbi.h
29
ssbi_reg_write(void *context, unsigned int reg, unsigned int val)
include/linux/ssbi.h
32
return ssbi_write(context, reg, &v, 1);
include/linux/usb.h
1663
void *context; /* (in) context for completion */
include/linux/usb.h
1703
void *context)
include/linux/usb.h
1711
urb->context = context;
include/linux/usb.h
1737
void *context)
include/linux/usb.h
1744
urb->context = context;
include/linux/usb.h
1777
void *context,
include/linux/usb.h
1785
urb->context = context;
include/linux/usb/gadget.h
122
void *context;
include/linux/usb/ljca.h
104
int ljca_register_event_cb(struct ljca_client *client, ljca_event_cb_t event_cb, void *context);
include/linux/usb/ljca.h
32
typedef void (*ljca_event_cb_t)(void *context, u8 cmd, const void *evt_data, int len);
include/linux/usb/ljca.h
54
void *context;
include/linux/usbdevice_fs.h
56
compat_caddr_t context;
include/linux/vmw_vmci_defs.h
194
u32 context;
include/linux/vmw_vmci_defs.h
199
(struct vmci_handle){ .context = _cid, .resource = _rid }
include/linux/vmw_vmci_defs.h
204
return h1.context == h2.context && h1.resource == h2.resource;
include/linux/vmw_vmci_defs.h
209
.context = VMCI_INVALID_ID,
include/linux/vmw_vmci_defs.h
225
.context = VMCI_ANON_SRC_CONTEXT_ID,
include/linux/wmi.h
107
int (*probe)(struct wmi_device *wdev, const void *context);
include/media/v4l2-common.h
417
func, context) \
include/media/v4l2-common.h
425
width, height, func, context); \
include/media/v4l2-common.h
434
const void *context),
include/media/v4l2-common.h
435
const void *context);
include/misc/ocxl.h
130
int ocxl_context_alloc(struct ocxl_context **context, struct ocxl_afu *afu,
include/net/mana/gdma.h
294
typedef void gdma_eq_callback(void *context, struct gdma_queue *q,
include/net/mana/gdma.h
297
typedef void gdma_cq_callback(void *context, struct gdma_queue *q);
include/net/mana/gdma.h
336
void *context;
include/net/mana/gdma.h
345
void *context;
include/net/mana/gdma.h
361
void *context;
include/net/mana/gdma.h
369
void *context;
include/net/nfc/nci_core.h
375
void nci_hci_data_received_cb(void *context, struct sk_buff *skb, int err);
include/net/nfc/nfc.h
41
typedef void (*data_exchange_cb_t)(void *context, struct sk_buff *skb,
include/net/nfc/nfc.h
44
typedef void (*se_io_cb_t)(void *context, u8 *apdu, size_t apdu_len, int err);
include/net/nsh.h
198
__be32 context[4];
include/net/rsi_91x.h
46
void (*set_bt_context)(void *priv, void *context);
include/net/tls.h
336
struct tls_record_info *tls_get_record(struct tls_offload_context_tx *context,
include/rdma/ib_addr.h
78
struct rdma_dev_addr *addr, void *context),
include/rdma/ib_addr.h
79
bool resolve_by_gid_attr, void *context);
include/rdma/ib_cache.h
29
void *context);
include/rdma/ib_cm.h
294
void *context;
include/rdma/ib_cm.h
317
void *context);
include/rdma/ib_mad.h
471
void *context[2];
include/rdma/ib_mad.h
576
void *context;
include/rdma/ib_mad.h
681
void *context,
include/rdma/ib_sa.h
431
unsigned int num_prs, void *context),
include/rdma/ib_sa.h
432
void *context, struct ib_sa_query **query);
include/rdma/ib_sa.h
442
void *context),
include/rdma/ib_sa.h
443
void *context, struct ib_sa_query **sa_query);
include/rdma/ib_sa.h
450
void *context;
include/rdma/ib_sa.h
489
void *context);
include/rdma/ib_sa.h
562
void *context),
include/rdma/ib_sa.h
563
void *context, struct ib_sa_query **sa_query);
include/rdma/ib_verbs.h
1567
struct ib_ucontext *context; /* associated user context */
include/rdma/ib_verbs.h
2485
int (*add_gid)(const struct ib_gid_attr *attr, void **context);
include/rdma/ib_verbs.h
2494
int (*del_gid)(const struct ib_gid_attr *attr, void **context);
include/rdma/ib_verbs.h
2497
int (*alloc_ucontext)(struct ib_ucontext *context,
include/rdma/ib_verbs.h
2499
void (*dealloc_ucontext)(struct ib_ucontext *context);
include/rdma/ib_verbs.h
2500
int (*mmap)(struct ib_ucontext *context, struct vm_area_struct *vma);
include/rdma/ib_verbs.h
2625
struct ib_ucontext *context,
include/rdma/ib_verbs.h
2752
int (*query_ucontext)(struct ib_ucontext *context,
include/rdma/iw_cm.h
105
iw_cm_handler cm_handler, void *context);
include/rdma/iw_cm.h
58
void *context; /* client cb context */
include/rdma/rdma_cm.h
122
void *context;
include/rdma/rdma_cm.h
134
void *context, enum rdma_ucm_port_space ps,
include/rdma/rdma_cm.h
137
void *context,
include/rdma/rdma_cm.h
157
#define rdma_create_id(net, event_handler, context, ps, qp_type) \
include/rdma/rdma_cm.h
158
__rdma_create_kernel_id(net, event_handler, context, ps, qp_type, \
include/rdma/rdma_cm.h
338
u8 join_state, void *context);
include/rdma/rdma_vt.h
175
struct ib_ucontext *context;
include/rdma/uverbs_ioctl.h
636
struct ib_ucontext *context;
include/rdma/uverbs_ioctl.h
667
(udata ? container_of(rdma_udata_to_uverbs_attr_bundle(udata)->context, \
include/rdma/uverbs_std_types.h
107
*ib_dev = attrs->context->device;
include/soc/fsl/qe/qmc.h
105
void (*complete)(void *context, size_t length,
include/soc/fsl/qe/qmc.h
107
void *context);
include/soc/fsl/qe/qmc.h
91
void (*complete)(void *context), void *context);
include/sound/tas2781-comlib-i2c.h
21
void (*cont)(const struct firmware *fw, void *context));
include/sound/tas2781-dsp.h
214
void tasdevice_select_cfg_blk(void *context, int conf_no,
include/sound/tas2781-dsp.h
216
void tasdevice_config_info_remove(void *context);
include/sound/tas2781-dsp.h
217
void tasdevice_dsp_remove(void *context);
include/sound/tas2781-dsp.h
218
int tasdevice_dsp_parser(void *context);
include/sound/tas2781-dsp.h
219
int tasdevice_rca_parser(void *context, const struct firmware *fmw);
include/sound/tas2781-dsp.h
220
void tasdevice_dsp_remove(void *context);
include/sound/tas2781-dsp.h
221
void tasdevice_calbin_remove(void *context);
include/sound/tas2781-dsp.h
222
int tasdevice_select_tuningprm_cfg(void *context, int prm,
include/sound/tas2781-dsp.h
224
int tasdevice_prmg_load(void *context, int prm_no);
include/sound/tas2781-dsp.h
225
void tasdevice_tuning_switch(void *context, int state);
include/sound/tas2781-dsp.h
226
int tas2781_load_calibration(void *context, char *file_name,
include/target/iscsi/iscsi_target_core.h
594
void *context;
include/trace/events/amdxdna.h
45
__entry->fence_context = sched_job->s_fence->finished.context;
include/trace/events/dma_fence.h
21
__field(unsigned int, context)
include/trace/events/dma_fence.h
28
__entry->context = fence->context;
include/trace/events/dma_fence.h
33
__get_str(driver), __get_str(timeline), __entry->context,
include/trace/events/dma_fence.h
51
__field(unsigned int, context)
include/trace/events/dma_fence.h
58
__entry->context = fence->context;
include/trace/events/dma_fence.h
63
__get_str(driver), __get_str(timeline), __entry->context,
include/trace/events/firewire.h
450
__field(u64, context)
include/trace/events/firewire.h
456
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
463
__entry->context,
include/trace/events/firewire.h
475
__field(u64, context)
include/trace/events/firewire.h
481
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
488
__entry->context,
include/trace/events/firewire.h
500
__field(u64, context)
include/trace/events/firewire.h
504
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
509
__entry->context,
include/trace/events/firewire.h
518
__field(u64, context)
include/trace/events/firewire.h
522
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
527
__entry->context,
include/trace/events/firewire.h
554
__field(u64, context)
include/trace/events/firewire.h
559
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
565
__entry->context,
include/trace/events/firewire.h
576
__field(u64, context)
include/trace/events/firewire.h
582
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
589
__entry->context,
include/trace/events/firewire.h
600
__field(u64, context)
include/trace/events/firewire.h
608
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
617
__entry->context,
include/trace/events/firewire.h
647
__field(u64, context)
include/trace/events/firewire.h
651
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
656
__entry->context,
include/trace/events/firewire.h
683
__field(u64, context)
include/trace/events/firewire.h
687
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
692
__entry->context,
include/trace/events/firewire.h
719
__field(u64, context)
include/trace/events/firewire.h
723
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
728
__entry->context,
include/trace/events/firewire.h
753
__field(u64, context) \
include/trace/events/firewire.h
765
__entry->context = (uintptr_t)ctx; \
include/trace/events/firewire.h
784
__entry->context,
include/trace/events/firewire.h
804
__entry->context,
include/trace/events/firewire.h
820
__entry->context,
include/trace/events/firewire.h
848
__field(u64, context)
include/trace/events/firewire.h
855
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
863
__entry->context,
include/trace/events/firewire.h
888
__field(u64, context)
include/trace/events/firewire.h
894
__entry->context = (uintptr_t)ctx;
include/trace/events/firewire.h
901
__entry->context,
include/trace/events/osnoise.h
101
__entry->context)
include/trace/events/osnoise.h
32
int context; /* timer context */
include/trace/events/osnoise.h
89
__field( int, context )
include/trace/events/osnoise.h
95
__entry->context = s->context;
include/uapi/drm/drm.h
297
int context;
include/uapi/drm/drm.h
402
int context; /**< Context handle */
include/uapi/drm/i915_drm.h
1568
#define i915_execbuffer2_set_context_id(eb2, context) \
include/uapi/drm/i915_drm.h
1569
(eb2).rsvd1 = context & I915_EXEC_CONTEXT_ID_MASK
include/uapi/drm/radeon_drm.h
428
drm_radeon_context_regs_t context;
include/uapi/drm/tegra_drm.h
178
__u64 context;
include/uapi/drm/tegra_drm.h
191
__u64 context;
include/uapi/drm/tegra_drm.h
204
__u64 context;
include/uapi/drm/tegra_drm.h
232
__u64 context;
include/uapi/drm/tegra_drm.h
401
__u64 context;
include/uapi/drm/tegra_drm.h
691
__u32 context;
include/uapi/drm/tegra_drm.h
716
__u32 context;
include/uapi/drm/tegra_drm.h
737
__u32 context;
include/uapi/drm/tegra_drm.h
768
__u32 context;
include/uapi/drm/tegra_drm.h
925
__u32 context;
include/uapi/linux/nfs_mount.h
46
char context[NFS_MAX_CONTEXT_LEN + 1]; /* 6 */
include/uapi/linux/openvswitch.h
574
__be32 context[NSH_MD1_CONTEXT_SIZE];
include/uapi/linux/usbdevice_fs.h
64
void __user *context;
include/video/omapvrfb.h
15
u8 context;
kernel/audit.c
1127
static void audit_log_common_recv_msg(struct audit_context *context,
kernel/audit.c
1138
*ab = audit_log_start(context, GFP_KERNEL, msg_type);
kernel/audit.c
1529
memcpy(sig_data->ctx, lsmctx.context, lsmctx.len);
kernel/audit.c
2303
audit_log_format(ab, " subj=%s", ctx.context);
kernel/audit.c
2328
audit_subj_lsms[i]->name, ctx.context);
kernel/audit.c
2366
audit_log_format(ab, " obj=%s", ctx.context);
kernel/audit.c
2386
audit_obj_lsms[i]->name, ctx.context);
kernel/audit.h
115
} context;
kernel/audit.h
305
extern void audit_kill_trees(struct audit_context *context);
kernel/audit.h
336
#define audit_kill_trees(context) BUG()
kernel/audit_tree.c
527
static void audit_tree_log_remove_rule(struct audit_context *context,
kernel/audit_tree.c
534
ab = audit_log_start(context, GFP_KERNEL, AUDIT_CONFIG_CHANGE);
kernel/audit_tree.c
544
static void kill_rules(struct audit_context *context, struct audit_tree *tree)
kernel/audit_tree.c
555
audit_tree_log_remove_rule(context, rule);
kernel/audit_tree.c
979
void audit_kill_trees(struct audit_context *context)
kernel/audit_tree.c
981
struct list_head *list = &context->killed_trees;
kernel/audit_tree.c
990
kill_rules(context, victim);
kernel/auditsc.c
1033
struct audit_context *context;
kernel/auditsc.c
1035
context = kzalloc_obj(*context);
kernel/auditsc.c
1036
if (!context)
kernel/auditsc.c
1038
context->context = AUDIT_CTX_UNUSED;
kernel/auditsc.c
1039
context->state = state;
kernel/auditsc.c
1040
context->prio = state == AUDIT_STATE_RECORD ? ~0ULL : 0;
kernel/auditsc.c
1041
INIT_LIST_HEAD(&context->killed_trees);
kernel/auditsc.c
1042
INIT_LIST_HEAD(&context->names_list);
kernel/auditsc.c
1043
context->fds[0] = -1;
kernel/auditsc.c
1044
context->return_valid = AUDITSC_INVALID;
kernel/auditsc.c
1045
return context;
kernel/auditsc.c
1059
struct audit_context *context;
kernel/auditsc.c
1072
context = audit_alloc_context(state);
kernel/auditsc.c
1073
if (!context) {
kernel/auditsc.c
1078
context->filterkey = key;
kernel/auditsc.c
1080
audit_set_context(tsk, context);
kernel/auditsc.c
1085
static inline void audit_free_context(struct audit_context *context)
kernel/auditsc.c
1088
audit_reset_context(context);
kernel/auditsc.c
1089
audit_proctitle_free(context);
kernel/auditsc.c
1090
free_tree_refs(context);
kernel/auditsc.c
1091
kfree(context->filterkey);
kernel/auditsc.c
1092
kfree(context);
kernel/auditsc.c
1095
static int audit_log_pid_context(struct audit_context *context, pid_t pid,
kernel/auditsc.c
1103
ab = audit_log_start(context, GFP_KERNEL, AUDIT_OBJ_PID);
kernel/auditsc.c
1120
static void audit_log_execve_info(struct audit_context *context,
kernel/auditsc.c
1157
audit_log_format(*ab, "argc=%d", context->execve.argc);
kernel/auditsc.c
1229
*ab = audit_log_start(context,
kernel/auditsc.c
1286
} while (arg < context->execve.argc);
kernel/auditsc.c
1317
static void audit_log_time(struct audit_context *context, struct audit_buffer **ab)
kernel/auditsc.c
1319
const struct audit_ntp_data *ntp = &context->time.ntp_data;
kernel/auditsc.c
1320
const struct timespec64 *tk = &context->time.tk_injoffset;
kernel/auditsc.c
1331
if (context->type == AUDIT_TIME_ADJNTPVAL) {
kernel/auditsc.c
1335
*ab = audit_log_start(context,
kernel/auditsc.c
1352
*ab = audit_log_start(context, GFP_KERNEL,
kernel/auditsc.c
1364
static void show_special(struct audit_context *context, int *call_panic)
kernel/auditsc.c
1369
ab = audit_log_start(context, GFP_KERNEL, context->type);
kernel/auditsc.c
1373
switch (context->type) {
kernel/auditsc.c
1375
int nargs = context->socketcall.nargs;
kernel/auditsc.c
1380
context->socketcall.args[i]);
kernel/auditsc.c
1384
from_kuid(&init_user_ns, context->ipc.uid),
kernel/auditsc.c
1385
from_kgid(&init_user_ns, context->ipc.gid),
kernel/auditsc.c
1386
context->ipc.mode);
kernel/auditsc.c
1387
if (lsmprop_is_set(&context->ipc.oprop)) {
kernel/auditsc.c
1388
if (audit_log_obj_ctx(ab, &context->ipc.oprop))
kernel/auditsc.c
1391
if (context->ipc.has_perm) {
kernel/auditsc.c
1393
ab = audit_log_start(context, GFP_KERNEL,
kernel/auditsc.c
1399
context->ipc.qbytes,
kernel/auditsc.c
1400
context->ipc.perm_uid,
kernel/auditsc.c
1401
context->ipc.perm_gid,
kernel/auditsc.c
1402
context->ipc.perm_mode);
kernel/auditsc.c
1409
context->mq_open.oflag, context->mq_open.mode,
kernel/auditsc.c
1410
context->mq_open.attr.mq_flags,
kernel/auditsc.c
1411
context->mq_open.attr.mq_maxmsg,
kernel/auditsc.c
1412
context->mq_open.attr.mq_msgsize,
kernel/auditsc.c
1413
context->mq_open.attr.mq_curmsgs);
kernel/auditsc.c
1419
context->mq_sendrecv.mqdes,
kernel/auditsc.c
1420
context->mq_sendrecv.msg_len,
kernel/auditsc.c
1421
context->mq_sendrecv.msg_prio,
kernel/auditsc.c
1422
(long long) context->mq_sendrecv.abs_timeout.tv_sec,
kernel/auditsc.c
1423
context->mq_sendrecv.abs_timeout.tv_nsec);
kernel/auditsc.c
1427
context->mq_notify.mqdes,
kernel/auditsc.c
1428
context->mq_notify.sigev_signo);
kernel/auditsc.c
1431
struct mq_attr *attr = &context->mq_getsetattr.mqstat;
kernel/auditsc.c
1436
context->mq_getsetattr.mqdes,
kernel/auditsc.c
1441
audit_log_format(ab, "pid=%d", context->capset.pid);
kernel/auditsc.c
1442
audit_log_cap(ab, "cap_pi", &context->capset.cap.inheritable);
kernel/auditsc.c
1443
audit_log_cap(ab, "cap_pp", &context->capset.cap.permitted);
kernel/auditsc.c
1444
audit_log_cap(ab, "cap_pe", &context->capset.cap.effective);
kernel/auditsc.c
1445
audit_log_cap(ab, "cap_pa", &context->capset.cap.ambient);
kernel/auditsc.c
1448
audit_log_format(ab, "fd=%d flags=0x%x", context->mmap.fd,
kernel/auditsc.c
1449
context->mmap.flags);
kernel/auditsc.c
1453
context->openat2.flags,
kernel/auditsc.c
1454
context->openat2.mode,
kernel/auditsc.c
1455
context->openat2.resolve);
kernel/auditsc.c
1458
audit_log_execve_info(context, &ab);
kernel/auditsc.c
1462
if (context->module.name) {
kernel/auditsc.c
1463
audit_log_untrustedstring(ab, context->module.name);
kernel/auditsc.c
1471
audit_log_time(context, &ab);
kernel/auditsc.c
1498
static void audit_log_name(struct audit_context *context, struct audit_names *n,
kernel/auditsc.c
1503
ab = audit_log_start(context, GFP_KERNEL, AUDIT_PATH);
kernel/auditsc.c
1522
if (context->pwd.dentry && context->pwd.mnt)
kernel/auditsc.c
1523
audit_log_d_path(ab, " name=", &context->pwd);
kernel/auditsc.c
1579
struct audit_context *context = audit_context();
kernel/auditsc.c
1582
ab = audit_log_start(context, GFP_KERNEL, AUDIT_PROCTITLE);
kernel/auditsc.c
1589
if (!context->proctitle.value) {
kernel/auditsc.c
1604
context->proctitle.value = buf;
kernel/auditsc.c
1605
context->proctitle.len = res;
kernel/auditsc.c
1607
msg = context->proctitle.value;
kernel/auditsc.c
1608
len = context->proctitle.len;
kernel/auditsc.c
1655
struct audit_context *context = audit_context();
kernel/auditsc.c
1660
context->personality = current->personality;
kernel/auditsc.c
1662
switch (context->context) {
kernel/auditsc.c
1664
ab = audit_log_start(context, GFP_KERNEL, AUDIT_SYSCALL);
kernel/auditsc.c
1668
context->arch, context->major);
kernel/auditsc.c
1669
if (context->personality != PER_LINUX)
kernel/auditsc.c
1670
audit_log_format(ab, " per=%lx", context->personality);
kernel/auditsc.c
1671
if (context->return_valid != AUDITSC_INVALID)
kernel/auditsc.c
1673
str_yes_no(context->return_valid ==
kernel/auditsc.c
1675
context->return_code);
kernel/auditsc.c
1678
context->argv[0],
kernel/auditsc.c
1679
context->argv[1],
kernel/auditsc.c
1680
context->argv[2],
kernel/auditsc.c
1681
context->argv[3],
kernel/auditsc.c
1682
context->name_count);
kernel/auditsc.c
1684
audit_log_key(ab, context->filterkey);
kernel/auditsc.c
1688
audit_log_uring(context);
kernel/auditsc.c
1695
for (aux = context->aux; aux; aux = aux->next) {
kernel/auditsc.c
1697
ab = audit_log_start(context, GFP_KERNEL, aux->type);
kernel/auditsc.c
1727
if (context->type)
kernel/auditsc.c
1728
show_special(context, &call_panic);
kernel/auditsc.c
1730
if (context->fds[0] >= 0) {
kernel/auditsc.c
1731
ab = audit_log_start(context, GFP_KERNEL, AUDIT_FD_PAIR);
kernel/auditsc.c
1734
context->fds[0], context->fds[1]);
kernel/auditsc.c
1739
if (context->sockaddr_len) {
kernel/auditsc.c
1740
ab = audit_log_start(context, GFP_KERNEL, AUDIT_SOCKADDR);
kernel/auditsc.c
1743
audit_log_n_hex(ab, (void *)context->sockaddr,
kernel/auditsc.c
1744
context->sockaddr_len);
kernel/auditsc.c
1749
for (aux = context->aux_pids; aux; aux = aux->next) {
kernel/auditsc.c
1753
if (audit_log_pid_context(context, axs->target_pid[i],
kernel/auditsc.c
1762
if (context->target_pid &&
kernel/auditsc.c
1763
audit_log_pid_context(context, context->target_pid,
kernel/auditsc.c
1764
context->target_auid, context->target_uid,
kernel/auditsc.c
1765
context->target_sessionid,
kernel/auditsc.c
1766
&context->target_ref,
kernel/auditsc.c
1767
context->target_comm))
kernel/auditsc.c
1770
if (context->pwd.dentry && context->pwd.mnt) {
kernel/auditsc.c
1771
ab = audit_log_start(context, GFP_KERNEL, AUDIT_CWD);
kernel/auditsc.c
1773
audit_log_d_path(ab, "cwd=", &context->pwd);
kernel/auditsc.c
1779
list_for_each_entry(n, &context->names_list, list) {
kernel/auditsc.c
1782
audit_log_name(context, n, NULL, i++, &call_panic);
kernel/auditsc.c
1785
if (context->context == AUDIT_CTX_SYSCALL)
kernel/auditsc.c
1789
ab = audit_log_start(context, GFP_KERNEL, AUDIT_EOE);
kernel/auditsc.c
1804
struct audit_context *context = tsk->audit_context;
kernel/auditsc.c
1806
if (!context)
kernel/auditsc.c
1810
if (!list_empty(&context->killed_trees))
kernel/auditsc.c
1811
audit_kill_trees(context);
kernel/auditsc.c
1818
if (tsk == current && !context->dummy) {
kernel/auditsc.c
1819
context->return_valid = AUDITSC_INVALID;
kernel/auditsc.c
1820
context->return_code = 0;
kernel/auditsc.c
1821
if (context->context == AUDIT_CTX_SYSCALL) {
kernel/auditsc.c
1822
audit_filter_syscall(tsk, context);
kernel/auditsc.c
1823
audit_filter_inodes(tsk, context);
kernel/auditsc.c
1824
if (context->current_state == AUDIT_STATE_RECORD)
kernel/auditsc.c
1826
} else if (context->context == AUDIT_CTX_URING) {
kernel/auditsc.c
1828
audit_filter_uring(tsk, context);
kernel/auditsc.c
1829
audit_filter_inodes(tsk, context);
kernel/auditsc.c
1830
if (context->current_state == AUDIT_STATE_RECORD)
kernel/auditsc.c
1831
audit_log_uring(context);
kernel/auditsc.c
1836
audit_free_context(context);
kernel/auditsc.c
1890
if (ctx->context == AUDIT_CTX_SYSCALL)
kernel/auditsc.c
1897
ctx->context = AUDIT_CTX_URING;
kernel/auditsc.c
1917
if (ctx->context != AUDIT_CTX_URING)
kernel/auditsc.c
1923
if (ctx->context == AUDIT_CTX_SYSCALL) {
kernel/auditsc.c
1989
struct audit_context *context = audit_context();
kernel/auditsc.c
1992
if (!audit_enabled || !context)
kernel/auditsc.c
1995
WARN_ON(context->context != AUDIT_CTX_UNUSED);
kernel/auditsc.c
1996
WARN_ON(context->name_count);
kernel/auditsc.c
1997
if (context->context != AUDIT_CTX_UNUSED || context->name_count) {
kernel/auditsc.c
2002
state = context->state;
kernel/auditsc.c
2006
context->dummy = !audit_n_rules;
kernel/auditsc.c
2007
if (!context->dummy && state == AUDIT_STATE_BUILD) {
kernel/auditsc.c
2008
context->prio = 0;
kernel/auditsc.c
2013
context->arch = syscall_get_arch(current);
kernel/auditsc.c
2014
context->major = major;
kernel/auditsc.c
2015
context->argv[0] = a1;
kernel/auditsc.c
2016
context->argv[1] = a2;
kernel/auditsc.c
2017
context->argv[2] = a3;
kernel/auditsc.c
2018
context->argv[3] = a4;
kernel/auditsc.c
2019
context->context = AUDIT_CTX_SYSCALL;
kernel/auditsc.c
2020
context->current_state = state;
kernel/auditsc.c
2021
ktime_get_coarse_real_ts64(&context->stamp.ctime);
kernel/auditsc.c
2037
struct audit_context *context = audit_context();
kernel/auditsc.c
2039
if (!context || context->dummy ||
kernel/auditsc.c
2040
context->context != AUDIT_CTX_SYSCALL)
kernel/auditsc.c
2044
if (!list_empty(&context->killed_trees))
kernel/auditsc.c
2045
audit_kill_trees(context);
kernel/auditsc.c
2047
audit_return_fixup(context, success, return_code);
kernel/auditsc.c
2049
audit_filter_syscall(current, context);
kernel/auditsc.c
2050
audit_filter_inodes(current, context);
kernel/auditsc.c
2051
if (context->current_state != AUDIT_STATE_RECORD)
kernel/auditsc.c
2057
audit_reset_context(context);
kernel/auditsc.c
2062
struct audit_context *context;
kernel/auditsc.c
2069
context = audit_context();
kernel/auditsc.c
2070
p = context->trees;
kernel/auditsc.c
2071
count = context->tree_count;
kernel/auditsc.c
2077
if (likely(put_tree_ref(context, chunk)))
kernel/auditsc.c
2079
if (unlikely(!grow_tree_refs(context))) {
kernel/auditsc.c
2081
audit_set_auditable(context);
kernel/auditsc.c
2083
unroll_tree_refs(context, p, count);
kernel/auditsc.c
2086
put_tree_ref(context, chunk);
kernel/auditsc.c
2091
struct audit_context *context;
kernel/auditsc.c
2098
context = audit_context();
kernel/auditsc.c
2099
p = context->trees;
kernel/auditsc.c
2100
count = context->tree_count;
kernel/auditsc.c
2114
if (unlikely(!put_tree_ref(context, chunk))) {
kernel/auditsc.c
2129
unroll_tree_refs(context, p, count);
kernel/auditsc.c
2133
if (grow_tree_refs(context)) {
kernel/auditsc.c
2135
unroll_tree_refs(context, p, count);
kernel/auditsc.c
2140
unroll_tree_refs(context, p, count);
kernel/auditsc.c
2141
audit_set_auditable(context);
kernel/auditsc.c
2147
static struct audit_names *audit_alloc_name(struct audit_context *context,
kernel/auditsc.c
2152
if (context->name_count < AUDIT_NAMES) {
kernel/auditsc.c
2153
aname = &context->preallocated_names[context->name_count];
kernel/auditsc.c
2164
list_add_tail(&aname->list, &context->names_list);
kernel/auditsc.c
2166
context->name_count++;
kernel/auditsc.c
2167
if (!context->pwd.dentry)
kernel/auditsc.c
2168
get_fs_pwd(current->fs, &context->pwd);
kernel/auditsc.c
2181
struct audit_context *context = audit_context();
kernel/auditsc.c
2184
if (context->context == AUDIT_CTX_UNUSED)
kernel/auditsc.c
2187
n = audit_alloc_name(context, AUDIT_TYPE_UNKNOWN);
kernel/auditsc.c
2248
struct audit_context *context = audit_context();
kernel/auditsc.c
2256
if (context->context == AUDIT_CTX_UNUSED)
kernel/auditsc.c
2294
list_for_each_entry_reverse(n, &context->names_list, list) {
kernel/auditsc.c
2321
n = audit_alloc_name(context, AUDIT_TYPE_UNKNOWN);
kernel/auditsc.c
2366
struct audit_context *context = audit_context();
kernel/auditsc.c
2374
if (context->context == AUDIT_CTX_UNUSED)
kernel/auditsc.c
2396
list_for_each_entry(n, &context->names_list, list) {
kernel/auditsc.c
2431
n = audit_alloc_name(context, AUDIT_TYPE_PARENT);
kernel/auditsc.c
2438
found_child = audit_alloc_name(context, type);
kernel/auditsc.c
2468
if (ctx->context == AUDIT_CTX_UNUSED)
kernel/auditsc.c
2489
struct audit_context *context = audit_context();
kernel/auditsc.c
2492
memcpy(&context->mq_open.attr, attr, sizeof(struct mq_attr));
kernel/auditsc.c
2494
memset(&context->mq_open.attr, 0, sizeof(struct mq_attr));
kernel/auditsc.c
2496
context->mq_open.oflag = oflag;
kernel/auditsc.c
2497
context->mq_open.mode = mode;
kernel/auditsc.c
2499
context->type = AUDIT_MQ_OPEN;
kernel/auditsc.c
2513
struct audit_context *context = audit_context();
kernel/auditsc.c
2514
struct timespec64 *p = &context->mq_sendrecv.abs_timeout;
kernel/auditsc.c
2521
context->mq_sendrecv.mqdes = mqdes;
kernel/auditsc.c
2522
context->mq_sendrecv.msg_len = msg_len;
kernel/auditsc.c
2523
context->mq_sendrecv.msg_prio = msg_prio;
kernel/auditsc.c
2525
context->type = AUDIT_MQ_SENDRECV;
kernel/auditsc.c
2537
struct audit_context *context = audit_context();
kernel/auditsc.c
2540
context->mq_notify.sigev_signo = notification->sigev_signo;
kernel/auditsc.c
2542
context->mq_notify.sigev_signo = 0;
kernel/auditsc.c
2544
context->mq_notify.mqdes = mqdes;
kernel/auditsc.c
2545
context->type = AUDIT_MQ_NOTIFY;
kernel/auditsc.c
2556
struct audit_context *context = audit_context();
kernel/auditsc.c
2558
context->mq_getsetattr.mqdes = mqdes;
kernel/auditsc.c
2559
context->mq_getsetattr.mqstat = *mqstat;
kernel/auditsc.c
2560
context->type = AUDIT_MQ_GETSETATTR;
kernel/auditsc.c
2570
struct audit_context *context = audit_context();
kernel/auditsc.c
2572
context->ipc.uid = ipcp->uid;
kernel/auditsc.c
2573
context->ipc.gid = ipcp->gid;
kernel/auditsc.c
2574
context->ipc.mode = ipcp->mode;
kernel/auditsc.c
2575
context->ipc.has_perm = 0;
kernel/auditsc.c
2576
security_ipc_getlsmprop(ipcp, &context->ipc.oprop);
kernel/auditsc.c
2577
context->type = AUDIT_IPC;
kernel/auditsc.c
2591
struct audit_context *context = audit_context();
kernel/auditsc.c
2593
context->ipc.qbytes = qbytes;
kernel/auditsc.c
2594
context->ipc.perm_uid = uid;
kernel/auditsc.c
2595
context->ipc.perm_gid = gid;
kernel/auditsc.c
2596
context->ipc.perm_mode = mode;
kernel/auditsc.c
2597
context->ipc.has_perm = 1;
kernel/auditsc.c
2602
struct audit_context *context = audit_context();
kernel/auditsc.c
2604
context->type = AUDIT_EXECVE;
kernel/auditsc.c
2605
context->execve.argc = bprm->argc;
kernel/auditsc.c
2617
struct audit_context *context = audit_context();
kernel/auditsc.c
2621
context->type = AUDIT_SOCKETCALL;
kernel/auditsc.c
2622
context->socketcall.nargs = nargs;
kernel/auditsc.c
2623
memcpy(context->socketcall.args, args, nargs * sizeof(unsigned long));
kernel/auditsc.c
2635
struct audit_context *context = audit_context();
kernel/auditsc.c
2637
context->fds[0] = fd1;
kernel/auditsc.c
2638
context->fds[1] = fd2;
kernel/auditsc.c
2650
struct audit_context *context = audit_context();
kernel/auditsc.c
2652
if (!context->sockaddr) {
kernel/auditsc.c
2657
context->sockaddr = p;
kernel/auditsc.c
2660
context->sockaddr_len = len;
kernel/auditsc.c
2661
memcpy(context->sockaddr, a, len);
kernel/auditsc.c
2667
struct audit_context *context = audit_context();
kernel/auditsc.c
2669
context->target_pid = task_tgid_nr(t);
kernel/auditsc.c
2670
context->target_auid = audit_get_loginuid(t);
kernel/auditsc.c
2671
context->target_uid = task_uid(t);
kernel/auditsc.c
2672
context->target_sessionid = audit_get_sessionid(t);
kernel/auditsc.c
2673
strscpy(context->target_comm, t->comm);
kernel/auditsc.c
2674
security_task_getlsmprop_obj(t, &context->target_ref);
kernel/auditsc.c
2743
struct audit_context *context = audit_context();
kernel/auditsc.c
2751
ax->d.next = context->aux;
kernel/auditsc.c
2752
context->aux = (void *)ax;
kernel/auditsc.c
2785
struct audit_context *context = audit_context();
kernel/auditsc.c
2787
context->capset.pid = task_tgid_nr(current);
kernel/auditsc.c
2788
context->capset.cap.effective = new->cap_effective;
kernel/auditsc.c
2789
context->capset.cap.inheritable = new->cap_effective;
kernel/auditsc.c
2790
context->capset.cap.permitted = new->cap_permitted;
kernel/auditsc.c
2791
context->capset.cap.ambient = new->cap_ambient;
kernel/auditsc.c
2792
context->type = AUDIT_CAPSET;
kernel/auditsc.c
2797
struct audit_context *context = audit_context();
kernel/auditsc.c
2799
context->mmap.fd = fd;
kernel/auditsc.c
2800
context->mmap.flags = flags;
kernel/auditsc.c
2801
context->type = AUDIT_MMAP;
kernel/auditsc.c
2806
struct audit_context *context = audit_context();
kernel/auditsc.c
2808
context->openat2.flags = how->flags;
kernel/auditsc.c
2809
context->openat2.mode = how->mode;
kernel/auditsc.c
2810
context->openat2.resolve = how->resolve;
kernel/auditsc.c
2811
context->type = AUDIT_OPENAT2;
kernel/auditsc.c
2816
struct audit_context *context = audit_context();
kernel/auditsc.c
2818
context->module.name = kstrdup(name, GFP_KERNEL);
kernel/auditsc.c
2819
if (!context->module.name)
kernel/auditsc.c
2821
context->type = AUDIT_KERN_MODULE;
kernel/auditsc.c
2843
struct audit_context *context = audit_context();
kernel/auditsc.c
2846
if (!context->type)
kernel/auditsc.c
2847
context->type = AUDIT_TIME_INJOFFSET;
kernel/auditsc.c
2848
memcpy(&context->time.tk_injoffset, &offset, sizeof(offset));
kernel/auditsc.c
2853
struct audit_context *context = audit_context();
kernel/auditsc.c
2859
context->type = AUDIT_TIME_ADJNTPVAL;
kernel/auditsc.c
2860
memcpy(&context->time.ntp_data, ad, sizeof(*ad));
kernel/auditsc.c
2980
if (likely(!ctx || ctx->context == AUDIT_CTX_UNUSED))
kernel/auditsc.c
916
static inline void audit_proctitle_free(struct audit_context *context)
kernel/auditsc.c
918
kfree(context->proctitle.value);
kernel/auditsc.c
919
context->proctitle.value = NULL;
kernel/auditsc.c
920
context->proctitle.len = 0;
kernel/auditsc.c
923
static inline void audit_free_module(struct audit_context *context)
kernel/auditsc.c
925
if (context->type == AUDIT_KERN_MODULE) {
kernel/auditsc.c
926
kfree(context->module.name);
kernel/auditsc.c
927
context->module.name = NULL;
kernel/auditsc.c
930
static inline void audit_free_names(struct audit_context *context)
kernel/auditsc.c
934
list_for_each_entry_safe(n, next, &context->names_list, list) {
kernel/auditsc.c
941
context->name_count = 0;
kernel/auditsc.c
942
path_put(&context->pwd);
kernel/auditsc.c
943
context->pwd.dentry = NULL;
kernel/auditsc.c
944
context->pwd.mnt = NULL;
kernel/auditsc.c
947
static inline void audit_free_aux(struct audit_context *context)
kernel/auditsc.c
951
while ((aux = context->aux)) {
kernel/auditsc.c
952
context->aux = aux->next;
kernel/auditsc.c
955
context->aux = NULL;
kernel/auditsc.c
956
while ((aux = context->aux_pids)) {
kernel/auditsc.c
957
context->aux_pids = aux->next;
kernel/auditsc.c
960
context->aux_pids = NULL;
kernel/auditsc.c
978
ctx->context = AUDIT_CTX_UNUSED;
kernel/events/core.c
13272
void *context, int cgroup_fd)
kernel/events/core.c
13356
context = parent_event->overflow_handler_context;
kernel/events/core.c
13369
event->overflow_handler_context = context;
kernel/events/core.c
14219
void *context)
kernel/events/core.c
14240
overflow_handler, context, -1);
kernel/events/hw_breakpoint.c
744
void *context,
kernel/events/hw_breakpoint.c
748
context);
kernel/events/hw_breakpoint.c
845
void *context)
kernel/events/hw_breakpoint.c
858
triggered, context);
kernel/kprobes.c
1940
static int kretprobe_init_inst(void *nod, void *context)
kernel/kprobes.c
1944
ri->rph = context;
kernel/kprobes.c
1947
static int kretprobe_fini_pool(struct objpool_head *head, void *context)
kernel/kprobes.c
1949
kfree(context);
kernel/ptrace.c
1299
tmp = mm->context.exec_fdpic_loadmap;
kernel/ptrace.c
1302
tmp = mm->context.interp_fdpic_loadmap;
kernel/trace/rethook.c
71
static int rethook_init_node(void *nod, void *context)
kernel/trace/rethook.c
75
node->rethook = context;
kernel/trace/rethook.c
79
static int rethook_fini_pool(struct objpool_head *head, void *context)
kernel/trace/rethook.c
81
kfree(context);
kernel/trace/trace_entries.h
451
__field( int, context )
kernel/trace/trace_entries.h
457
__entry->context,
kernel/trace/trace_events_hist.c
3425
struct snapshot_context *context = cond_data;
kernel/trace/trace_events_hist.c
3434
track_val = get_track_val(track_data->hist_data, context->elt,
kernel/trace/trace_events_hist.c
3441
memcpy(track_data->key, context->key, track_data->key_len);
kernel/trace/trace_events_hist.c
3443
elt_data = context->elt->private_data;
kernel/trace/trace_events_hist.c
3461
struct snapshot_context context;
kernel/trace/trace_events_hist.c
3463
context.elt = elt;
kernel/trace/trace_events_hist.c
3464
context.key = key;
kernel/trace/trace_events_hist.c
3466
tracing_snapshot_cond(file->tr, &context);
kernel/trace/trace_events_user.c
1632
int context;
kernel/trace/trace_events_user.c
1635
&regs, &context);
kernel/trace/trace_events_user.c
1649
perf_trace_buf_submit(perf_entry, size, context,
kernel/trace/trace_events_user.c
1656
perf_swevent_put_recursion_context(context);
kernel/trace/trace_osnoise.c
1780
s.context = IRQ_CONTEXT;
kernel/trace/trace_osnoise.c
1899
s.context = THREAD_CONTEXT;
kernel/trace/trace_osnoise.c
2504
s.context = THREAD_URET;
kernel/trace/trace_osnoise.c
2539
s.context = THREAD_CONTEXT;
kernel/trace/trace_osnoise.c
564
entry->context = sample->context;
kernel/trace/trace_output.c
1655
timerlat_lat_context[field->context],
kernel/trace/trace_output.c
1672
field->context,
lib/asn1_decoder.c
168
void *context,
lib/asn1_decoder.c
346
ret = actions[act](context, hdr, tag, data + dp, len);
lib/asn1_decoder.c
439
ret = actions[act](context, hdr, 0, data + tdp, len);
lib/asn1_decoder.c
454
ret = actions[machine[pc + 1]](context, hdr, tag, data + tdp, len);
lib/kunit/kunit-example-test.c
284
static int example_resource_init(struct kunit_resource *res, void *context)
lib/kunit/kunit-example-test.c
290
*info = *(int *)context;
lib/kunit/kunit-test.c
169
static int fake_resource_init(struct kunit_resource *res, void *context)
lib/kunit/kunit-test.c
171
struct kunit_test_resource_context *ctx = context;
lib/kunit/kunit-test.c
323
static int fake_resource_2_init(struct kunit_resource *res, void *context)
lib/kunit/kunit-test.c
325
struct kunit_test_resource_context *ctx = context;
lib/kunit/kunit-test.c
341
static int fake_resource_1_init(struct kunit_resource *res, void *context)
lib/kunit/kunit-test.c
343
struct kunit_test_resource_context *ctx = context;
lib/kunit/platform.c
19
static int kunit_platform_device_alloc_init(struct kunit_resource *res, void *context)
lib/kunit/platform.c
21
struct kunit_platform_device_alloc_params *params = context;
lib/kunit/test.c
572
struct kunit_try_catch_context context;
lib/kunit/test.c
582
context.test = test;
lib/kunit/test.c
583
context.suite = suite;
lib/kunit/test.c
584
context.test_case = test_case;
lib/kunit/test.c
585
kunit_try_catch_run(try_catch, &context);
lib/kunit/test.c
593
kunit_try_catch_run(try_catch, &context);
lib/kunit/try-catch.c
30
try_catch->try(try_catch->context);
lib/kunit/try-catch.c
37
void kunit_try_catch_run(struct kunit_try_catch *try_catch, void *context)
lib/kunit/try-catch.c
44
try_catch->context = context;
lib/kunit/try-catch.c
50
try_catch->catch(try_catch->context);
lib/kunit/try-catch.c
89
try_catch->catch(try_catch->context);
lib/kunit/user_alloc.c
53
static int kunit_vm_mmap_init(struct kunit_resource *res, void *context)
lib/kunit/user_alloc.c
55
struct kunit_vm_mmap_params *p = context;
lib/objpool.c
119
gfp_t gfp, void *context, objpool_init_obj_cb objinit,
lib/objpool.c
143
pool->context = context;
lib/objpool.c
151
rc = objpool_init_percpu_slots(pool, nr_objs, context, objinit);
lib/objpool.c
172
pool->release(pool, pool->context);
lib/objpool.c
21
int nodes, void *context,
lib/objpool.c
32
int rc = objinit(obj, context);
lib/objpool.c
49
void *context, objpool_init_obj_cb objinit)
lib/objpool.c
96
rc = objpool_init_percpu_slot(pool, slot, nodes, context, objinit);
lib/pldmfw/pldmfw.c
126
struct device *dev = data->context->dev;
lib/pldmfw/pldmfw.c
205
struct device *dev = data->context->dev;
lib/pldmfw/pldmfw.c
21
struct pldmfw *context;
lib/pldmfw/pldmfw.c
383
dev_dbg(data->context->dev, "Unexpected record length. Measured record length is %zu bytes, expected length is %u bytes\n",
lib/pldmfw/pldmfw.c
444
struct device *dev = data->context->dev;
lib/pldmfw/pldmfw.c
484
if (data->context->mode == PLDMFW_UPDATE_MODE_SINGLE_COMPONENT &&
lib/pldmfw/pldmfw.c
485
data->context->component_identifier != component->identifier)
lib/pldmfw/pldmfw.c
491
if (data->context->mode == PLDMFW_UPDATE_MODE_SINGLE_COMPONENT &&
lib/pldmfw/pldmfw.c
524
struct device *dev = data->context->dev;
lib/pldmfw/pldmfw.c
596
if (WARN_ON(!(data->context->dev && data->fw->data && data->fw->size)))
lib/pldmfw/pldmfw.c
636
bool pldmfw_op_pci_match_record(struct pldmfw *context, struct pldmfw_record *record)
lib/pldmfw/pldmfw.c
638
struct pci_dev *pdev = to_pci_dev(context->dev);
lib/pldmfw/pldmfw.c
68
struct device *dev = data->context->dev;
lib/pldmfw/pldmfw.c
707
if (data->context->ops->match_record(data->context, record)) {
lib/pldmfw/pldmfw.c
729
const struct pldmfw_ops *ops = data->context->ops;
lib/pldmfw/pldmfw.c
734
return ops->send_package_data(data->context, record->package_data,
lib/pldmfw/pldmfw.c
761
if (!data->context->ops->send_component_table)
lib/pldmfw/pldmfw.c
774
err = data->context->ops->send_component_table(data->context,
lib/pldmfw/pldmfw.c
806
err = data->context->ops->flash_component(data->context, component);
lib/pldmfw/pldmfw.c
826
if (data->context->ops->finalize_update)
lib/pldmfw/pldmfw.c
827
return data->context->ops->finalize_update(data->context);
lib/pldmfw/pldmfw.c
846
int pldmfw_flash_image(struct pldmfw *context, const struct firmware *fw)
lib/pldmfw/pldmfw.c
859
data->context = context;
lib/test_firmware.c
761
static void trigger_async_request_cb(const struct firmware *fw, void *context)
lib/test_firmware.c
997
static void trigger_batched_cb(const struct firmware *fw, void *context)
lib/test_firmware.c
999
struct test_batched_req *req = context;
lib/test_objpool.c
147
static int ot_init_node(void *nod, void *context)
lib/test_objpool.c
149
struct ot_context *sop = context;
lib/test_objpool.c
437
static int ot_objpool_release(struct objpool_head *head, void *context)
lib/test_objpool.c
439
struct ot_context *sop = context;
lib/test_objpool.c
494
WARN_ON(sop != pool->context);
lib/tests/bitfield_kunit.c
103
static void __init test_bitfields_variables(struct kunit *context)
lib/tests/bitfield_kunit.c
129
static void __init test_bitfields_compile(struct kunit *context)
lib/tests/bitfield_kunit.c
16
KUNIT_ASSERT_FALSE_MSG(context, _res != res, \
lib/tests/bitfield_kunit.c
19
KUNIT_ASSERT_FALSE(context, \
lib/tests/bitfield_kunit.c
29
KUNIT_ASSERT_FALSE_MSG(context, \
lib/tests/bitfield_kunit.c
34
KUNIT_ASSERT_FALSE(context, \
lib/tests/bitfield_kunit.c
44
KUNIT_ASSERT_FALSE_MSG(context, \
lib/tests/bitfield_kunit.c
49
KUNIT_ASSERT_FALSE(context, \
lib/tests/bitfield_kunit.c
60
static void __init test_bitfields_constants(struct kunit *context)
lib/tests/bitfield_kunit.c
99
KUNIT_ASSERT_FALSE(context, \
mm/damon/paddr.c
345
static int damon_pa_scheme_score(struct damon_ctx *context,
mm/damon/paddr.c
351
return damon_cold_score(context, r, scheme);
mm/damon/paddr.c
353
return damon_hot_score(context, r, scheme);
mm/damon/paddr.c
355
return damon_cold_score(context, r, scheme);
mm/damon/paddr.c
357
return damon_hot_score(context, r, scheme);
mm/damon/paddr.c
359
return damon_cold_score(context, r, scheme);
mm/damon/sysfs.c
1005
if (ops_name->ops_id == context->ops_id)
mm/damon/sysfs.c
1014
struct damon_sysfs_context *context = container_of(kobj,
mm/damon/sysfs.c
1023
context->ops_id = ops_name->ops_id;
mm/damon/sysfs.c
1033
struct damon_sysfs_context *context = container_of(kobj,
mm/damon/sysfs.c
1036
return sysfs_emit(buf, "%lu\n", context->addr_unit);
mm/damon/sysfs.c
1042
struct damon_sysfs_context *context = container_of(kobj,
mm/damon/sysfs.c
1052
context->addr_unit = input_addr_unit;
mm/damon/sysfs.c
1116
struct damon_sysfs_context **contexts_arr, *context;
mm/damon/sysfs.c
1130
context = damon_sysfs_context_alloc(DAMON_OPS_VADDR);
mm/damon/sysfs.c
1131
if (!context) {
mm/damon/sysfs.c
1136
err = kobject_init_and_add(&context->kobj,
mm/damon/sysfs.c
1142
err = damon_sysfs_context_add_dirs(context);
mm/damon/sysfs.c
1146
contexts_arr[i] = context;
mm/damon/sysfs.c
1153
kobject_put(&context->kobj);
mm/damon/sysfs.c
874
struct damon_sysfs_context *context = kmalloc_obj(*context);
mm/damon/sysfs.c
876
if (!context)
mm/damon/sysfs.c
878
context->kobj = (struct kobject){};
mm/damon/sysfs.c
879
context->ops_id = ops_id;
mm/damon/sysfs.c
880
context->addr_unit = 1;
mm/damon/sysfs.c
881
return context;
mm/damon/sysfs.c
884
static int damon_sysfs_context_set_attrs(struct damon_sysfs_context *context)
mm/damon/sysfs.c
892
&context->kobj, "monitoring_attrs");
mm/damon/sysfs.c
898
context->attrs = attrs;
mm/damon/sysfs.c
906
static int damon_sysfs_context_set_targets(struct damon_sysfs_context *context)
mm/damon/sysfs.c
914
&context->kobj, "targets");
mm/damon/sysfs.c
919
context->targets = targets;
mm/damon/sysfs.c
923
static int damon_sysfs_context_set_schemes(struct damon_sysfs_context *context)
mm/damon/sysfs.c
931
&context->kobj, "schemes");
mm/damon/sysfs.c
936
context->schemes = schemes;
mm/damon/sysfs.c
940
static int damon_sysfs_context_add_dirs(struct damon_sysfs_context *context)
mm/damon/sysfs.c
944
err = damon_sysfs_context_set_attrs(context);
mm/damon/sysfs.c
948
err = damon_sysfs_context_set_targets(context);
mm/damon/sysfs.c
952
err = damon_sysfs_context_set_schemes(context);
mm/damon/sysfs.c
958
kobject_put(&context->targets->kobj);
mm/damon/sysfs.c
959
context->targets = NULL;
mm/damon/sysfs.c
961
damon_sysfs_attrs_rm_dirs(context->attrs);
mm/damon/sysfs.c
962
kobject_put(&context->attrs->kobj);
mm/damon/sysfs.c
963
context->attrs = NULL;
mm/damon/sysfs.c
967
static void damon_sysfs_context_rm_dirs(struct damon_sysfs_context *context)
mm/damon/sysfs.c
969
damon_sysfs_attrs_rm_dirs(context->attrs);
mm/damon/sysfs.c
970
kobject_put(&context->attrs->kobj);
mm/damon/sysfs.c
971
damon_sysfs_targets_rm_dirs(context->targets);
mm/damon/sysfs.c
972
kobject_put(&context->targets->kobj);
mm/damon/sysfs.c
973
damon_sysfs_schemes_rm_dirs(context->schemes);
mm/damon/sysfs.c
974
kobject_put(&context->schemes->kobj);
mm/damon/sysfs.c
997
struct damon_sysfs_context *context = container_of(kobj,
mm/damon/vaddr.c
987
static int damon_va_scheme_score(struct damon_ctx *context,
mm/damon/vaddr.c
994
return damon_cold_score(context, r, scheme);
mm/damon/vaddr.c
996
return damon_hot_score(context, r, scheme);
mm/damon/vaddr.c
998
return damon_cold_score(context, r, scheme);
mm/internal.h
816
enum meminit_context context);
mm/mm_init.c
874
enum meminit_context context,
mm/mm_init.c
907
if (context == MEMINIT_EARLY) {
mm/mm_init.c
918
if (context == MEMINIT_HOTPLUG) {
mm/nommu.c
384
if (brk < mm->start_brk || brk > mm->context.end_brk)
mm/page_alloc.c
1619
enum meminit_context context)
mm/page_alloc.c
1634
unlikely(context == MEMINIT_HOTPLUG)) {
net/9p/trans_rdma.c
129
struct p9_client *c = id->context;
net/9p/trans_rdma.c
244
static void qp_event_handler(struct ib_event *event, void *context)
net/9p/trans_rdma.c
247
event->event, context);
net/9p/trans_usbg.c
104
req->context = p9_tx_req;
net/9p/trans_usbg.c
111
req->context = NULL;
net/9p/trans_usbg.c
152
struct p9_req_t *p9_tx_req = req->context;
net/9p/trans_usbg.c
172
req->context = NULL;
net/9p/trans_usbg.c
318
usb9pfs->in_req->context = usb9pfs;
net/9p/trans_usbg.c
319
usb9pfs->out_req->context = usb9pfs;
net/9p/trans_usbg.c
434
req = usb9pfs->in_req->context;
net/core/dev.c
12273
unsigned long context;
net/core/dev.c
12276
xa_for_each(&dev->ethtool->rss_ctx, context, ctx) {
net/core/dev.c
12277
xa_erase(&dev->ethtool->rss_ctx, context);
net/core/dev.c
12278
dev->ethtool_ops->remove_rxfh_context(dev, ctx, context, NULL);
net/core/scm.c
439
ctx.context);
net/ethtool/common.c
739
unsigned long context;
net/ethtool/common.c
743
xa_for_each(&dev->ethtool->rss_ctx, context, ctx)
net/ipv4/ip_sockglue.c
143
put_cmsg(msg, SOL_IP, SCM_SECURITY, ctx.len, ctx.context);
net/ipv4/netfilter/nf_nat_snmp_basic_main.c
105
int snmp_helper(void *context, size_t hdrlen, unsigned char tag,
net/ipv4/netfilter/nf_nat_snmp_basic_main.c
108
struct snmp_ctx *ctx = (struct snmp_ctx *)context;
net/ipv4/netfilter/nf_nat_snmp_basic_main.c
95
int snmp_version(void *context, size_t hdrlen, unsigned char tag,
net/netfilter/nf_conntrack_netlink.c
373
if (nla_put_string(skb, CTA_SECCTX_NAME, ctx.context))
net/netfilter/nf_conntrack_standalone.c
196
seq_printf(s, "secctx=%s ", ctx.context);
net/netfilter/nfnetlink_queue.c
898
if (seclen > 0 && nla_put(skb, NFQA_SECCTX, ctx.len, ctx.context))
net/netlabel/netlabel_unlabeled.c
1128
ctx.context);
net/netlabel/netlabel_unlabeled.c
441
audit_log_format(audit_buf, " sec_obj=%s", ctx.context);
net/netlabel/netlabel_unlabeled.c
494
audit_log_format(audit_buf, " sec_obj=%s", ctx.context);
net/netlabel/netlabel_unlabeled.c
552
audit_log_format(audit_buf, " sec_obj=%s", ctx.context);
net/nfc/hci/command.c
39
static void nfc_hci_execute_cb(void *context, struct sk_buff *skb, int err)
net/nfc/hci/command.c
41
struct hcp_exec_waiter *hcp_ew = (struct hcp_exec_waiter *)context;
net/nfc/hci/core.c
699
static void hci_transceive_cb(void *context, struct sk_buff *skb, int err)
net/nfc/hci/core.c
701
struct nfc_hci_dev *hdev = context;
net/nfc/nci/core.c
409
static void nci_nfcc_loopback_cb(void *context, struct sk_buff *skb, int err)
net/nfc/nci/core.c
411
struct nci_dev *ndev = (struct nci_dev *)context;
net/nfc/nci/hci.c
427
void nci_hci_data_received_cb(void *context,
net/nfc/nci/hci.c
430
struct nci_dev *ndev = (struct nci_dev *)context;
net/nfc/netlink.c
1472
static void se_io_cb(void *context, u8 *apdu, size_t apdu_len, int err)
net/nfc/netlink.c
1474
struct se_io_ctx *ctx = context;
net/nfc/rawsock.c
150
static void rawsock_data_exchange_complete(void *context, struct sk_buff *skb,
net/nfc/rawsock.c
153
struct sock *sk = (struct sock *) context;
net/openvswitch/flow.c
657
memcpy(key->nsh.context, nh->md1.context,
net/openvswitch/flow.c
661
memset(key->nsh.context, 0,
net/openvswitch/flow.h
72
__be32 context[NSH_MD1_CONTEXT_SIZE];
net/openvswitch/flow_netlink.c
1425
SW_FLOW_KEY_PUT(match, nsh.context[i],
net/openvswitch/flow_netlink.c
1426
md1->context[i], is_mask);
net/openvswitch/flow_netlink.c
1973
sizeof(nsh->context), nsh->context))
net/rds/ib_cm.c
247
static void rds_ib_cq_comp_handler_recv(struct ib_cq *cq, void *context)
net/rds/ib_cm.c
249
struct rds_connection *conn = context;
net/rds/ib_cm.c
375
static void rds_ib_cq_comp_handler_send(struct ib_cq *cq, void *context)
net/rds/ib_cm.c
377
struct rds_connection *conn = context;
net/rds/ib_cm.c
896
BUG_ON(cm_id->context);
net/rds/ib_cm.c
900
cm_id->context = conn;
net/rds/ib_cm.c
933
struct rds_connection *conn = cm_id->context;
net/rds/ib_send.c
379
struct rds_connection *conn = ic->i_cm_id->context;
net/rds/rdma_transport.c
54
struct rds_connection *conn = cm_id->context;
net/sunrpc/xprtrdma/svc_rdma_transport.c
102
static void qp_event_handler(struct ib_event *event, void *context)
net/sunrpc/xprtrdma/svc_rdma_transport.c
104
struct svc_xprt *xprt = context;
net/sunrpc/xprtrdma/svc_rdma_transport.c
129
void *context)
net/sunrpc/xprtrdma/svc_rdma_transport.c
134
listen_id = rdma_create_id(net, svc_rdma_listen_handler, context,
net/sunrpc/xprtrdma/svc_rdma_transport.c
236
struct svcxprt_rdma *listen_xprt = new_cma_id->context;
net/sunrpc/xprtrdma/svc_rdma_transport.c
246
new_cma_id->context = newxprt;
net/sunrpc/xprtrdma/svc_rdma_transport.c
295
struct svcxprt_rdma *cma_xprt = cma_id->context;
net/sunrpc/xprtrdma/svc_rdma_transport.c
330
struct svcxprt_rdma *rdma = cma_id->context;
net/sunrpc/xprtrdma/verbs.c
227
struct rpcrdma_ep *ep = id->context;
net/tls/tls_device.c
1195
struct tls_offload_context_rx *context;
net/tls/tls_device.c
1227
context = kzalloc_obj(*context);
net/tls/tls_device.c
1228
if (!context) {
net/tls/tls_device.c
1232
context->resync_nh_reset = 1;
net/tls/tls_device.c
1234
ctx->priv_ctx_rx = context;
net/tls/tls_device.c
613
struct tls_record_info *tls_get_record(struct tls_offload_context_tx *context,
net/tls/tls_device.c
616
u64 record_sn = context->hint_record_sn;
net/tls/tls_device.c
619
info = context->retransmit_hint;
net/tls/tls_device.c
625
info = list_first_entry_or_null(&context->records_list,
net/tls/tls_device.c
640
last = list_last_entry(&context->records_list,
net/tls/tls_device.c
647
record_sn = context->unacked_record_sn;
net/tls/tls_device.c
652
list_for_each_entry_from_rcu(info, &context->records_list, list) {
net/tls/tls_device.c
654
if (!context->retransmit_hint ||
net/tls/tls_device.c
656
context->retransmit_hint->end_seq)) {
net/tls/tls_device.c
657
context->hint_record_sn = record_sn;
net/tls/tls_device.c
658
context->retransmit_hint = info;
net/vmw_vsock/vmci_transport.c
1024
vsock_addr_init(&vpending->local_addr, pkt->dg.dst.context,
net/vmw_vsock/vmci_transport.c
1026
vsock_addr_init(&vpending->remote_addr, pkt->dg.src.context,
net/vmw_vsock/vmci_transport.c
1210
pkt->dg.src.context,
net/vmw_vsock/vmci_transport.c
1318
|| pkt->dg.src.context != vsk->remote_addr.svm_cid
net/vmw_vsock/vmci_transport.c
1418
vsk->local_addr.svm_cid = pkt->dg.dst.context;
net/vmw_vsock/vmci_transport.c
1785
vsock_addr_init(vm_addr, dg->src.context, dg->src.resource);
net/vmw_vsock/vmci_transport.c
180
vsock_addr_init(local, pkt->dg.dst.context, pkt->dst_port);
net/vmw_vsock/vmci_transport.c
181
vsock_addr_init(remote, pkt->dg.src.context, pkt->src_port);
net/vmw_vsock/vmci_transport.c
332
vsock_addr_init(&dst, pkt->dg.src.context,
net/vmw_vsock/vmci_transport.c
487
vsock_addr_init(&src, pkt->dg.src.context, pkt->src_port);
net/vmw_vsock/vmci_transport.c
630
if (!vmci_transport_allow_dgram(vsk, dg->src.context))
net/vmw_vsock/vmci_transport.c
692
if (vmci_transport_peer_rid(dg->src.context) != dg->src.resource)
net/vmw_vsock/vmci_transport.c
705
vsock_addr_init(&src, pkt->dg.src.context, pkt->src_port);
net/vmw_vsock/vmci_transport.c
706
vsock_addr_init(&dst, pkt->dg.dst.context, pkt->dst_port);
net/vmw_vsock/vmci_transport.c
749
if (!vmci_transport_allow_dgram(vsk, pkt->dg.src.context)) {
net/vmw_vsock/vmci_transport.c
755
if (!vmci_transport_stream_allow(vsk, dg->src.context, -1)) {
net/vmw_vsock/vmci_transport.c
913
vsock_sk(sk)->local_addr.svm_cid = pkt->dg.dst.context;
net/vmw_vsock/vmci_transport.c
970
vsock_sk(pending)->local_addr.svm_cid = pkt->dg.dst.context;
net/wireless/reg.c
1002
static void regdb_fw_cb(const struct firmware *fw, void *context)
net/wireless/reg.c
1030
restore = context && query_regdb(context);
net/wireless/reg.c
1041
kfree(context);
security/apparmor/secid.c
112
kfree(cp->context);
security/apparmor/secid.c
113
cp->context = NULL;
security/apparmor/secid.c
64
len = aa_label_asxprint(&cp->context, root_ns, label,
security/keys/keyctl.c
1567
char *context;
security/keys/keyctl.c
1589
ret = security_key_getsecurity(key, &context);
security/keys/keyctl.c
1603
if (copy_to_user(buffer, context, buflen) != 0)
security/keys/keyctl.c
1607
kfree(context);
security/keys/trusted-keys/trusted_tpm2.c
134
int tpm2_key_parent(void *context, size_t hdrlen,
security/keys/trusted-keys/trusted_tpm2.c
138
struct tpm2_key_context *ctx = context;
security/keys/trusted-keys/trusted_tpm2.c
151
int tpm2_key_type(void *context, size_t hdrlen,
security/keys/trusted-keys/trusted_tpm2.c
169
int tpm2_key_pub(void *context, size_t hdrlen,
security/keys/trusted-keys/trusted_tpm2.c
173
struct tpm2_key_context *ctx = context;
security/keys/trusted-keys/trusted_tpm2.c
181
int tpm2_key_priv(void *context, size_t hdrlen,
security/keys/trusted-keys/trusted_tpm2.c
185
struct tpm2_key_context *ctx = context;
security/selinux/hooks.c
1077
char *context = NULL;
security/selinux/hooks.c
1081
rc = security_sid_to_context(sid, &context, &len);
security/selinux/hooks.c
1083
bool has_comma = strchr(context, ',');
security/selinux/hooks.c
1088
seq_escape(m, context, "\"\n\\");
security/selinux/hooks.c
1092
kfree(context);
security/selinux/hooks.c
1372
char *context;
security/selinux/hooks.c
1377
context = kmalloc(len + 1, GFP_NOFS);
security/selinux/hooks.c
1378
if (!context)
security/selinux/hooks.c
1381
context[len] = '\0';
security/selinux/hooks.c
1382
rc = __vfs_getxattr(dentry, inode, XATTR_NAME_SELINUX, context, len);
security/selinux/hooks.c
1384
kfree(context);
security/selinux/hooks.c
1392
context = kmalloc(len + 1, GFP_NOFS);
security/selinux/hooks.c
1393
if (!context)
security/selinux/hooks.c
1396
context[len] = '\0';
security/selinux/hooks.c
1398
context, len);
security/selinux/hooks.c
1401
kfree(context);
security/selinux/hooks.c
1411
rc = security_context_to_sid_default(context, rc, sid,
security/selinux/hooks.c
1419
ino, dev, context);
security/selinux/hooks.c
1422
__func__, context, -rc, dev, ino);
security/selinux/hooks.c
1425
kfree(context);
security/selinux/hooks.c
2915
return security_sid_to_context(newsid, &cp->context, &cp->len);
security/selinux/hooks.c
2949
char *context;
security/selinux/hooks.c
2973
&context, &clen);
security/selinux/hooks.c
2976
xattr->value = context;
security/selinux/hooks.c
3593
char *context = NULL;
security/selinux/hooks.c
3615
error = security_sid_to_context_force(isec->sid, &context,
security/selinux/hooks.c
3619
&context, &size);
security/selinux/hooks.c
3624
*buffer = context;
security/selinux/hooks.c
3627
kfree(context);
security/selinux/hooks.c
3726
char *context;
security/selinux/hooks.c
3735
context = kmalloc(clen, GFP_KERNEL);
security/selinux/hooks.c
3736
if (!context)
security/selinux/hooks.c
3739
rc = kernfs_xattr_get(kn_dir, XATTR_NAME_SELINUX, context, clen);
security/selinux/hooks.c
3741
kfree(context);
security/selinux/hooks.c
3745
rc = security_context_to_sid(context, clen, &parent_sid,
security/selinux/hooks.c
3747
kfree(context);
security/selinux/hooks.c
3771
&context, &clen);
security/selinux/hooks.c
3775
rc = kernfs_xattr_set(kn, XATTR_NAME_SELINUX, context, clen,
security/selinux/hooks.c
3777
kfree(context);
security/selinux/hooks.c
409
A(context, true),
security/selinux/hooks.c
6828
ret = security_sid_to_context(secid, &cp->context, &cp->len);
security/selinux/hooks.c
6854
kfree(cp->context);
security/selinux/hooks.c
6855
cp->context = NULL;
security/selinux/hooks.c
6894
(void **)&cp->context, true);
security/selinux/hooks.c
6966
char *context = NULL;
security/selinux/hooks.c
6971
&context, &len);
security/selinux/hooks.c
6974
*_buffer = context;
security/selinux/ss/context.c
14
u32 context_compute_hash(const struct context *c)
security/selinux/ss/context.h
102
static inline int mls_context_glblub(struct context *dst,
security/selinux/ss/context.h
103
const struct context *c1,
security/selinux/ss/context.h
104
const struct context *c2)
security/selinux/ss/context.h
135
static inline bool mls_context_equal(const struct context *c1,
security/selinux/ss/context.h
136
const struct context *c2)
security/selinux/ss/context.h
144
static inline void mls_context_destroy(struct context *c)
security/selinux/ss/context.h
151
static inline void context_init(struct context *c)
security/selinux/ss/context.h
156
static inline int context_cpy(struct context *dst, const struct context *src)
security/selinux/ss/context.h
182
static inline void context_destroy(struct context *c)
security/selinux/ss/context.h
191
static inline bool context_equal(const struct context *c1,
security/selinux/ss/context.h
192
const struct context *c2)
security/selinux/ss/context.h
202
u32 context_compute_hash(const struct context *c);
security/selinux/ss/context.h
37
static inline void mls_context_init(struct context *c)
security/selinux/ss/context.h
42
static inline int mls_context_cpy(struct context *dst,
security/selinux/ss/context.h
43
const struct context *src)
security/selinux/ss/context.h
63
static inline int mls_context_cpy_low(struct context *dst,
security/selinux/ss/context.h
64
const struct context *src)
security/selinux/ss/context.h
84
static inline int mls_context_cpy_high(struct context *dst,
security/selinux/ss/context.h
85
const struct context *src)
security/selinux/ss/mls.c
104
context->range.level[l].sens - 1));
security/selinux/ss/mls.c
110
e = &context->range.level[l].cat;
security/selinux/ss/mls.c
147
if (mls_level_eq(&context->range.level[0],
security/selinux/ss/mls.c
148
&context->range.level[1]))
security/selinux/ss/mls.c
189
int mls_context_isvalid(struct policydb *p, struct context *c)
security/selinux/ss/mls.c
231
struct context *context, struct sidtab *s, u32 def_sid)
security/selinux/ss/mls.c
255
struct context *defcon;
security/selinux/ss/mls.c
264
return mls_context_cpy(context, defcon);
security/selinux/ss/mls.c
292
context->range.level[l].sens = levdatum->level.sens;
security/selinux/ss/mls.c
312
rc = ebitmap_set_bit(&context->range.level[l].cat,
security/selinux/ss/mls.c
32
int mls_compute_context_len(struct policydb *p, struct context *context)
security/selinux/ss/mls.c
330
&context->range.level[l].cat, i, 1);
security/selinux/ss/mls.c
339
context->range.level[1].sens = context->range.level[0].sens;
security/selinux/ss/mls.c
340
rc = ebitmap_cpy(&context->range.level[1].cat,
security/selinux/ss/mls.c
341
&context->range.level[0].cat);
security/selinux/ss/mls.c
355
int mls_from_string(struct policydb *p, char *str, struct context *context,
security/selinux/ss/mls.c
368
rc = mls_context_to_sid(p, ':', tmpstr, context, NULL,
security/selinux/ss/mls.c
379
int mls_range_set(struct context *context, struct mls_range *range)
security/selinux/ss/mls.c
385
context->range.level[l].sens = range->level[l].sens;
security/selinux/ss/mls.c
386
rc = ebitmap_cpy(&context->range.level[l].cat,
security/selinux/ss/mls.c
395
int mls_setup_user_range(struct policydb *p, struct context *fromcon,
security/selinux/ss/mls.c
396
struct user_datum *user, struct context *usercon)
security/selinux/ss/mls.c
44
u32 index_sens = context->range.level[l].sens;
security/selinux/ss/mls.c
440
struct context *oldc, struct context *newc)
security/selinux/ss/mls.c
480
int mls_compute_sid(struct policydb *p, struct context *scontext,
security/selinux/ss/mls.c
481
struct context *tcontext, u16 tclass, u32 specified,
security/selinux/ss/mls.c
482
struct context *newcontext, bool sock)
security/selinux/ss/mls.c
50
e = &context->range.level[l].cat;
security/selinux/ss/mls.c
553
void mls_export_netlbl_lvl(struct policydb *p, struct context *context,
security/selinux/ss/mls.c
559
secattr->attr.mls.lvl = context->range.level[0].sens - 1;
security/selinux/ss/mls.c
574
void mls_import_netlbl_lvl(struct policydb *p, struct context *context,
security/selinux/ss/mls.c
580
context->range.level[0].sens = secattr->attr.mls.lvl + 1;
security/selinux/ss/mls.c
581
context->range.level[1].sens = context->range.level[0].sens;
security/selinux/ss/mls.c
595
int mls_export_netlbl_cat(struct policydb *p, struct context *context,
security/selinux/ss/mls.c
603
rc = ebitmap_netlbl_export(&context->range.level[0].cat,
security/selinux/ss/mls.c
624
int mls_import_netlbl_cat(struct policydb *p, struct context *context,
security/selinux/ss/mls.c
632
rc = ebitmap_netlbl_import(&context->range.level[0].cat,
security/selinux/ss/mls.c
636
memcpy(&context->range.level[1].cat, &context->range.level[0].cat,
security/selinux/ss/mls.c
637
sizeof(context->range.level[0].cat));
security/selinux/ss/mls.c
642
ebitmap_destroy(&context->range.level[0].cat);
security/selinux/ss/mls.c
70
if (mls_level_eq(&context->range.level[0],
security/selinux/ss/mls.c
71
&context->range.level[1]))
security/selinux/ss/mls.c
86
void mls_sid_to_context(struct policydb *p, struct context *context,
security/selinux/ss/mls.h
27
int mls_compute_context_len(struct policydb *p, struct context *context);
security/selinux/ss/mls.h
28
void mls_sid_to_context(struct policydb *p, struct context *context,
security/selinux/ss/mls.h
30
int mls_context_isvalid(struct policydb *p, struct context *c);
security/selinux/ss/mls.h
35
struct context *context, struct sidtab *s, u32 def_sid);
security/selinux/ss/mls.h
37
int mls_from_string(struct policydb *p, char *str, struct context *context,
security/selinux/ss/mls.h
40
int mls_range_set(struct context *context, struct mls_range *range);
security/selinux/ss/mls.h
43
struct context *oldc, struct context *newc);
security/selinux/ss/mls.h
45
int mls_compute_sid(struct policydb *p, struct context *scontext,
security/selinux/ss/mls.h
46
struct context *tcontext, u16 tclass, u32 specified,
security/selinux/ss/mls.h
47
struct context *newcontext, bool sock);
security/selinux/ss/mls.h
49
int mls_setup_user_range(struct policydb *p, struct context *fromcon,
security/selinux/ss/mls.h
50
struct user_datum *user, struct context *usercon);
security/selinux/ss/mls.h
53
void mls_export_netlbl_lvl(struct policydb *p, struct context *context,
security/selinux/ss/mls.h
55
void mls_import_netlbl_lvl(struct policydb *p, struct context *context,
security/selinux/ss/mls.h
57
int mls_export_netlbl_cat(struct policydb *p, struct context *context,
security/selinux/ss/mls.h
59
int mls_import_netlbl_cat(struct policydb *p, struct context *context,
security/selinux/ss/mls.h
63
struct context *context,
security/selinux/ss/mls.h
69
struct context *context,
security/selinux/ss/mls.h
75
struct context *context,
security/selinux/ss/mls.h
81
struct context *context,
security/selinux/ss/policydb.c
1063
static int context_read_and_validate(struct context *c, struct policydb *p,
security/selinux/ss/policydb.c
2207
rc = context_read_and_validate(&newc->context[0], p,
security/selinux/ss/policydb.c
2282
rc = context_read_and_validate(&c->context[0],
security/selinux/ss/policydb.c
2302
rc = context_read_and_validate(&c->context[0],
security/selinux/ss/policydb.c
2306
rc = context_read_and_validate(&c->context[1],
security/selinux/ss/policydb.c
2318
rc = context_read_and_validate(&c->context[0],
security/selinux/ss/policydb.c
2329
rc = context_read_and_validate(&c->context[0],
security/selinux/ss/policydb.c
2352
rc = context_read_and_validate(&c->context[0],
security/selinux/ss/policydb.c
2367
rc = context_read_and_validate(&c->context[0],
security/selinux/ss/policydb.c
2399
rc = context_read_and_validate(&c->context[0],
security/selinux/ss/policydb.c
2426
rc = context_read_and_validate(&c->context[0],
security/selinux/ss/policydb.c
2943
static int context_write(struct policydb *p, struct context *c, struct policy_file *fp)
security/selinux/ss/policydb.c
3331
rc = context_write(p, &c->context[0], fp);
security/selinux/ss/policydb.c
3345
rc = context_write(p, &c->context[0], fp);
security/selinux/ss/policydb.c
3348
rc = context_write(p, &c->context[1], fp);
security/selinux/ss/policydb.c
3359
rc = context_write(p, &c->context[0], fp);
security/selinux/ss/policydb.c
3369
rc = context_write(p, &c->context[0], fp);
security/selinux/ss/policydb.c
3383
rc = context_write(p, &c->context[0], fp);
security/selinux/ss/policydb.c
3399
rc = context_write(p, &c->context[0], fp);
security/selinux/ss/policydb.c
3418
rc = context_write(p, &c->context[0], fp);
security/selinux/ss/policydb.c
3433
rc = context_write(p, &c->context[0], fp);
security/selinux/ss/policydb.c
3487
rc = context_write(p, &c->context[0], fp);
security/selinux/ss/policydb.c
376
context_destroy(&c->context[0]);
security/selinux/ss/policydb.c
377
context_destroy(&c->context[1]);
security/selinux/ss/policydb.c
904
rc = sidtab_set_initial(s, sid, &c->context[0]);
security/selinux/ss/policydb.c
922
&c->context[0]);
security/selinux/ss/policydb.c
959
int policydb_context_isvalid(struct policydb *p, struct context *c)
security/selinux/ss/policydb.h
201
struct context context[2]; /* security context(s) */
security/selinux/ss/policydb.h
324
extern int policydb_context_isvalid(struct policydb *p, struct context *c);
security/selinux/ss/services.c
1037
struct context *scontext, *tcontext;
security/selinux/ss/services.c
1133
struct context *scontext = NULL, *tcontext = NULL;
security/selinux/ss/services.c
1199
struct context *scontext = NULL, *tcontext = NULL;
security/selinux/ss/services.c
1262
struct context *context,
security/selinux/ss/services.c
1271
if (context->len) {
security/selinux/ss/services.c
1272
*scontext_len = context->len;
security/selinux/ss/services.c
1274
*scontext = kstrdup(context->str, GFP_ATOMIC);
security/selinux/ss/services.c
1282
*scontext_len += strlen(sym_name(p, SYM_USERS, context->user - 1)) + 1;
security/selinux/ss/services.c
1283
*scontext_len += strlen(sym_name(p, SYM_ROLES, context->role - 1)) + 1;
security/selinux/ss/services.c
1284
*scontext_len += strlen(sym_name(p, SYM_TYPES, context->type - 1)) + 1;
security/selinux/ss/services.c
1285
*scontext_len += mls_compute_context_len(p, context);
security/selinux/ss/services.c
1300
sym_name(p, SYM_USERS, context->user - 1),
security/selinux/ss/services.c
1301
sym_name(p, SYM_ROLES, context->role - 1),
security/selinux/ss/services.c
1302
sym_name(p, SYM_TYPES, context->type - 1));
security/selinux/ss/services.c
1304
mls_sid_to_context(p, context, &scontextp);
security/selinux/ss/services.c
1321
rc = context_struct_to_string(p, &entry->context, scontext,
security/selinux/ss/services.c
1416
if (only_invalid && !entry->context.len)
security/selinux/ss/services.c
1477
struct context *ctx,
security/selinux/ss/services.c
1560
struct context context;
security/selinux/ss/services.c
1601
&context, def_sid);
security/selinux/ss/services.c
1603
context.str = str;
security/selinux/ss/services.c
1604
context.len = strlen(str) + 1;
security/selinux/ss/services.c
1608
rc = sidtab_context_to_sid(sidtab, &context, sid);
security/selinux/ss/services.c
1611
if (context.str) {
security/selinux/ss/services.c
1612
str = context.str;
security/selinux/ss/services.c
1613
context.str = NULL;
security/selinux/ss/services.c
1615
context_destroy(&context);
security/selinux/ss/services.c
1618
context_destroy(&context);
security/selinux/ss/services.c
1690
struct context *newcontext)
security/selinux/ss/services.c
1724
struct context *newcontext,
security/selinux/ss/services.c
1765
struct context *scontext, *tcontext, newcontext;
security/selinux/ss/services.c
1820
scontext = &sentry->context;
security/selinux/ss/services.c
1821
tcontext = &tentry->context;
security/selinux/ss/services.c
2028
struct context *context)
security/selinux/ss/services.c
2036
if (!context_struct_to_string(policydb, context, &s, &len)) {
security/selinux/ss/services.c
2057
struct context *oldc, struct context *newc,
security/selinux/ss/services.c
2144
rc = mls_range_set(newc, &oc->context[0].range);
security/selinux/ss/services.c
2426
rc = sidtab_context_to_sid(sidtab, &c->context[index], &sid);
security/selinux/ss/services.c
266
struct context *scontext,
security/selinux/ss/services.c
267
struct context *tcontext,
security/selinux/ss/services.c
268
struct context *xcontext,
security/selinux/ss/services.c
272
struct context *c;
security/selinux/ss/services.c
2773
struct context *fromcon, usercon;
security/selinux/ss/services.c
3233
struct context *context1;
security/selinux/ss/services.c
3234
struct context *context2;
security/selinux/ss/services.c
3235
struct context newcon;
security/selinux/ss/services.c
3345
struct context *nlbl_ctx;
security/selinux/ss/services.c
3346
struct context *xfrm_ctx;
security/selinux/ss/services.c
3570
struct context au_ctxt;
security/selinux/ss/services.c
3720
struct context *ctxt;
security/selinux/ss/services.c
3884
struct context *ctx;
security/selinux/ss/services.c
3885
struct context ctx_new;
security/selinux/ss/services.c
3958
struct context *ctx;
security/selinux/ss/services.c
457
struct context *scontext,
security/selinux/ss/services.c
458
struct context *tcontext,
security/selinux/ss/services.c
534
struct context *scontext,
security/selinux/ss/services.c
535
struct context *tcontext,
security/selinux/ss/services.c
539
struct context lo_scontext;
security/selinux/ss/services.c
540
struct context lo_tcontext, *tcontextp = tcontext;
security/selinux/ss/services.c
623
struct context *scontext,
security/selinux/ss/services.c
624
struct context *tcontext,
security/selinux/ss/services.c
79
struct context *context,
security/selinux/ss/services.c
817
if (!constraint_expr_eval(policydb, &oentry->context,
security/selinux/ss/services.c
818
&nentry->context, &tentry->context,
security/selinux/ss/services.c
897
if (old_entry->context.type == new_entry->context.type)
security/selinux/ss/services.c
90
struct context *scontext,
security/selinux/ss/services.c
900
index = new_entry->context.type;
security/selinux/ss/services.c
91
struct context *tcontext,
security/selinux/ss/services.c
912
if (type->bounds == old_entry->context.type)
security/selinux/ss/services.h
44
struct context *oldc, struct context *newc,
security/selinux/ss/sidtab.c
106
if (!context_to_sid(s, context, hash)) {
security/selinux/ss/sidtab.c
251
if (entry && (!entry->context.len || force))
security/selinux/ss/sidtab.c
268
int sidtab_context_to_sid(struct sidtab *s, struct context *context, u32 *sid)
security/selinux/ss/sidtab.c
271
u32 count, hash = context_compute_hash(context);
security/selinux/ss/sidtab.c
276
*sid = context_to_sid(s, context, hash);
security/selinux/ss/sidtab.c
284
*sid = context_to_sid(s, context, hash);
security/selinux/ss/sidtab.c
313
rc = context_cpy(&dst->context, context);
security/selinux/ss/sidtab.c
328
context_destroy(&dst->context);
security/selinux/ss/sidtab.c
332
rc = services_convert_context(convert->args, context,
security/selinux/ss/sidtab.c
333
&dst_convert->context,
security/selinux/ss/sidtab.c
336
context_destroy(&dst->context);
security/selinux/ss/sidtab.c
340
dst_convert->hash = context_compute_hash(&dst_convert->context);
security/selinux/ss/sidtab.c
347
if (context->len)
security/selinux/ss/sidtab.c
349
context->str);
security/selinux/ss/sidtab.c
371
entry->hash = context_compute_hash(&entry->context);
security/selinux/ss/sidtab.c
413
&esrc->ptr_leaf->entries[i].context,
security/selinux/ss/sidtab.c
414
&edst->ptr_leaf->entries[i].context,
security/selinux/ss/sidtab.c
510
context_destroy(&entry->context);
security/selinux/ss/sidtab.c
570
if (entry->context.len)
security/selinux/ss/sidtab.c
60
static u32 context_to_sid(struct sidtab *s, struct context *context, u32 hash)
security/selinux/ss/sidtab.c
614
if (entry->context.len)
security/selinux/ss/sidtab.c
69
if (context_equal(&entry->context, context)) {
security/selinux/ss/sidtab.c
78
int sidtab_set_initial(struct sidtab *s, u32 sid, struct context *context)
security/selinux/ss/sidtab.c
89
rc = context_cpy(&isid->entry.context, context);
security/selinux/ss/sidtab.c
98
hash = context_compute_hash(context);
security/selinux/ss/sidtab.h
108
int sidtab_set_initial(struct sidtab *s, u32 sid, struct context *context);
security/selinux/ss/sidtab.h
112
static inline struct context *sidtab_search(struct sidtab *s, u32 sid)
security/selinux/ss/sidtab.h
116
return entry ? &entry->context : NULL;
security/selinux/ss/sidtab.h
119
static inline struct context *sidtab_search_force(struct sidtab *s, u32 sid)
security/selinux/ss/sidtab.h
123
return entry ? &entry->context : NULL;
security/selinux/ss/sidtab.h
135
int sidtab_context_to_sid(struct sidtab *s, struct context *context, u32 *sid);
security/selinux/ss/sidtab.h
24
struct context context;
security/smack/smack_lsm.c
4887
cp->context = skp->smk_known;
security/smack/smack_lsm.c
4965
cp->context = skp->smk_known;
sound/firewire/amdtp-stream-trace.h
38
__entry->channel = s->context->channel;
sound/firewire/amdtp-stream.c
108
s->context = ERR_PTR(-1);
sound/firewire/amdtp-stream.c
1156
static void process_rx_packets(struct fw_iso_context *context, u32 tstamp, size_t header_length,
sound/firewire/amdtp-stream.c
1227
static void skip_rx_packets(struct fw_iso_context *context, u32 tstamp, size_t header_length,
sound/firewire/amdtp-stream.c
1259
static void irq_target_callback(struct fw_iso_context *context, u32 tstamp, size_t header_length,
sound/firewire/amdtp-stream.c
1262
static void process_rx_packets_intermediately(struct fw_iso_context *context, u32 tstamp,
sound/firewire/amdtp-stream.c
1290
skip_rx_packets(context, tstamp, length, ctx_header, private_data);
sound/firewire/amdtp-stream.c
1305
process_rx_packets(context, tstamp, header_length, ctx_header, private_data);
sound/firewire/amdtp-stream.c
1310
s->context->callback.sc = irq_target_callback;
sound/firewire/amdtp-stream.c
1312
s->context->callback.sc = process_rx_packets;
sound/firewire/amdtp-stream.c
1316
static void process_tx_packets(struct fw_iso_context *context, u32 tstamp, size_t header_length,
sound/firewire/amdtp-stream.c
1363
static void drop_tx_packets(struct fw_iso_context *context, u32 tstamp, size_t header_length,
sound/firewire/amdtp-stream.c
1391
static void process_tx_packets_intermediately(struct fw_iso_context *context, u32 tstamp,
sound/firewire/amdtp-stream.c
1422
drop_tx_packets(context, tstamp, length, ctx_header, s);
sound/firewire/amdtp-stream.c
1434
process_tx_packets(context, tstamp, header_length, ctx_header, s);
sound/firewire/amdtp-stream.c
1438
context->callback.sc = process_tx_packets;
sound/firewire/amdtp-stream.c
1442
static void drop_tx_packets_initially(struct fw_iso_context *context, u32 tstamp,
sound/firewire/amdtp-stream.c
1495
drop_tx_packets(context, tstamp, header_length, header, s);
sound/firewire/amdtp-stream.c
1527
s->context->callback.sc = process_tx_packets_intermediately;
sound/firewire/amdtp-stream.c
1541
fw_iso_context_flush_completions(s->context);
sound/firewire/amdtp-stream.c
1558
static void irq_target_callback(struct fw_iso_context *context, u32 tstamp, size_t header_length,
sound/firewire/amdtp-stream.c
1564
process_rx_packets(context, tstamp, header_length, header, private_data);
sound/firewire/amdtp-stream.c
1568
static void irq_target_callback_intermediately(struct fw_iso_context *context, u32 tstamp,
sound/firewire/amdtp-stream.c
1574
process_rx_packets_intermediately(context, tstamp, header_length, header, private_data);
sound/firewire/amdtp-stream.c
1578
static void irq_target_callback_skip(struct fw_iso_context *context, u32 tstamp,
sound/firewire/amdtp-stream.c
1585
skip_rx_packets(context, tstamp, header_length, header, private_data);
sound/firewire/amdtp-stream.c
1624
s->context->callback.sc = irq_target_callback_intermediately;
sound/firewire/amdtp-stream.c
1626
s->context->callback.sc = process_rx_packets_intermediately;
sound/firewire/amdtp-stream.c
1635
static void amdtp_stream_first_callback(struct fw_iso_context *context,
sound/firewire/amdtp-stream.c
1643
context->callback.sc = drop_tx_packets_initially;
sound/firewire/amdtp-stream.c
1646
context->callback.sc = irq_target_callback_skip;
sound/firewire/amdtp-stream.c
1648
context->callback.sc = skip_rx_packets;
sound/firewire/amdtp-stream.c
1651
context->callback.sc(context, tstamp, header_length, header, s);
sound/firewire/amdtp-stream.c
1714
s->context = fw_iso_context_create_with_header_storage_size(
sound/firewire/amdtp-stream.c
1717
if (IS_ERR(s->context)) {
sound/firewire/amdtp-stream.c
1718
err = PTR_ERR(s->context);
sound/firewire/amdtp-stream.c
1829
err = fw_iso_context_start(s->context, -1, 0, tag);
sound/firewire/amdtp-stream.c
1844
fw_iso_context_destroy(s->context);
sound/firewire/amdtp-stream.c
1845
s->context = ERR_PTR(-1);
sound/firewire/amdtp-stream.c
1869
fw_iso_context_flush_completions(irq_target->context);
sound/firewire/amdtp-stream.c
1890
fw_iso_context_flush_completions(irq_target->context);
sound/firewire/amdtp-stream.c
1923
fw_iso_context_stop(s->context);
sound/firewire/amdtp-stream.c
1924
fw_iso_context_destroy(s->context);
sound/firewire/amdtp-stream.c
1925
s->context = ERR_PTR(-1);
sound/firewire/amdtp-stream.c
646
err = fw_iso_context_queue(s->context, params, &s->buffer.iso_buffer,
sound/firewire/amdtp-stream.h
128
struct fw_iso_context *context;
sound/firewire/amdtp-stream.h
246
return !IS_ERR(s->context);
sound/firewire/bebob/bebob_maudio.c
274
bebob->rx_stream.context = ERR_PTR(-1);
sound/firewire/bebob/bebob_maudio.c
275
bebob->tx_stream.context = ERR_PTR(-1);
sound/firewire/isight.c
165
static void isight_packet(struct fw_iso_context *context, u32 cycle,
sound/firewire/isight.c
196
err = fw_iso_context_queue(isight->context, &audio_packet,
sound/firewire/isight.c
205
fw_iso_context_queue_flush(isight->context);
sound/firewire/isight.c
311
if (!isight->context)
sound/firewire/isight.c
314
fw_iso_context_stop(isight->context);
sound/firewire/isight.c
315
fw_iso_context_destroy(isight->context);
sound/firewire/isight.c
316
isight->context = NULL;
sound/firewire/isight.c
341
if (isight->context) {
sound/firewire/isight.c
360
isight->context = fw_iso_context_create(isight->device->card,
sound/firewire/isight.c
365
if (IS_ERR(isight->context)) {
sound/firewire/isight.c
366
err = PTR_ERR(isight->context);
sound/firewire/isight.c
367
isight->context = NULL;
sound/firewire/isight.c
372
err = fw_iso_context_queue(isight->context, &audio_packet,
sound/firewire/isight.c
382
err = fw_iso_context_start(isight->context, -1, 0,
sound/firewire/isight.c
390
fw_iso_context_destroy(isight->context);
sound/firewire/isight.c
391
isight->context = NULL;
sound/firewire/isight.c
58
struct fw_iso_context *context;
sound/hda/codecs/side-codecs/tas2781_hda_i2c.c
431
static void tasdevice_dspfw_init(void *context)
sound/hda/codecs/side-codecs/tas2781_hda_i2c.c
433
struct tasdevice_priv *tas_priv = context;
sound/hda/codecs/side-codecs/tas2781_hda_i2c.c
484
static void tasdev_fw_ready(const struct firmware *fmw, void *context)
sound/hda/codecs/side-codecs/tas2781_hda_i2c.c
486
struct tasdevice_priv *tas_priv = context;
sound/hda/codecs/side-codecs/tas2781_hda_i2c.c
513
tasdevice_dspfw_init(context);
sound/hda/codecs/side-codecs/tas2781_hda_i2c.c
519
tasdevice_dspfw_init(context);
sound/hda/codecs/side-codecs/tas2781_hda_i2c.c
522
tasdevice_dspfw_init(context);
sound/hda/codecs/side-codecs/tas2781_hda_spi.c
198
void (*cont)(const struct firmware *fw, void *context))
sound/hda/codecs/side-codecs/tas2781_hda_spi.c
629
static void tasdev_fw_ready(const struct firmware *fmw, void *context)
sound/hda/codecs/side-codecs/tas2781_hda_spi.c
631
struct tasdevice_priv *tas_priv = context;
sound/hda/controllers/intel.c
2039
static void azx_firmware_cb(const struct firmware *fw, void *context)
sound/hda/controllers/intel.c
2041
struct snd_card *card = context;
sound/hda/core/regmap.c
240
static int hda_reg_read(void *context, unsigned int reg, unsigned int *val)
sound/hda/core/regmap.c
242
struct hdac_device *codec = context;
sound/hda/core/regmap.c
279
static int hda_reg_write(void *context, unsigned int reg, unsigned int val)
sound/hda/core/regmap.c
281
struct hdac_device *codec = context;
sound/pci/cs46xx/cs46xx_dsp_task_types.h
229
struct dsp_hf_save_area context;
sound/soc/amd/ps/pci-ps.c
183
static irqreturn_t acp63_irq_thread(int irq, void *context)
sound/soc/amd/ps/pci-ps.c
185
struct acp63_dev_data *adata = context;
sound/soc/codecs/ab8500-codec.c
158
static int ab8500_codec_read_reg(void *context, unsigned int reg,
sound/soc/codecs/ab8500-codec.c
161
struct device *dev = context;
sound/soc/codecs/ab8500-codec.c
173
static int ab8500_codec_write_reg(void *context, unsigned int reg,
sound/soc/codecs/ab8500-codec.c
176
struct device *dev = context;
sound/soc/codecs/adau1701.c
182
static int adau1701_reg_write(void *context, unsigned int reg,
sound/soc/codecs/adau1701.c
185
struct i2c_client *client = context;
sound/soc/codecs/adau1701.c
212
static int adau1701_reg_read(void *context, unsigned int reg,
sound/soc/codecs/adau1701.c
219
struct i2c_client *client = context;
sound/soc/codecs/cs35l56-sdw.c
114
static int cs35l56_sdw_read(void *context, const void *reg_buf,
sound/soc/codecs/cs35l56-sdw.c
118
struct sdw_slave *peripheral = context;
sound/soc/codecs/cs35l56-sdw.c
177
static int cs35l56_sdw_gather_write(void *context,
sound/soc/codecs/cs35l56-sdw.c
181
struct sdw_slave *peripheral = context;
sound/soc/codecs/cs35l56-sdw.c
217
static int cs35l56_sdw_write(void *context, const void *val_buf, size_t val_size)
sound/soc/codecs/cs35l56-sdw.c
222
return cs35l56_sdw_gather_write(context, &src_buf[0], 4, &src_buf[4], val_size - 4);
sound/soc/codecs/cs35l56-shared-test.c
117
static int cs35l56_shared_test_reg_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/cs35l56-shared-test.c
119
struct cs35l56_shared_test_priv *priv = context;
sound/soc/codecs/cs35l56-shared-test.c
97
static int cs35l56_shared_test_reg_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/cs35l56-shared-test.c
99
struct cs35l56_shared_test_priv *priv = context;
sound/soc/codecs/cs42l42-sdw.c
221
static int cs42l42_sdw_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/cs42l42-sdw.c
223
struct sdw_slave *peripheral = context;
sound/soc/codecs/cs42l42-sdw.c
271
static int cs42l42_sdw_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/cs42l42-sdw.c
273
struct sdw_slave *peripheral = context;
sound/soc/codecs/cx2072x.c
508
static int cx2072x_reg_write(void *context, unsigned int reg,
sound/soc/codecs/cx2072x.c
524
return cx2072x_reg_raw_write(context, reg, &raw_value, size);
sound/soc/codecs/cx2072x.c
527
static int cx2072x_reg_read(void *context, unsigned int reg,
sound/soc/codecs/cx2072x.c
530
struct i2c_client *client = context;
sound/soc/codecs/jz4725b.c
548
static int jz4725b_codec_reg_read(void *context, unsigned int reg,
sound/soc/codecs/jz4725b.c
551
struct jz_icdc *icdc = context;
sound/soc/codecs/jz4725b.c
573
static int jz4725b_codec_reg_write(void *context, unsigned int reg,
sound/soc/codecs/jz4725b.c
576
struct jz_icdc *icdc = context;
sound/soc/codecs/jz4760.c
749
static int jz4760_codec_reg_read(void *context, unsigned int reg,
sound/soc/codecs/jz4760.c
752
struct jz_codec *codec = context;
sound/soc/codecs/jz4760.c
774
static int jz4760_codec_reg_write(void *context, unsigned int reg,
sound/soc/codecs/jz4760.c
777
struct jz_codec *codec = context;
sound/soc/codecs/jz4770.c
794
static int jz4770_codec_reg_read(void *context, unsigned int reg,
sound/soc/codecs/jz4770.c
797
struct jz_codec *codec = context;
sound/soc/codecs/jz4770.c
819
static int jz4770_codec_reg_write(void *context, unsigned int reg,
sound/soc/codecs/jz4770.c
822
struct jz_codec *codec = context;
sound/soc/codecs/mt6660.c
45
static int mt6660_reg_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/mt6660.c
47
struct mt6660_chip *chip = context;
sound/soc/codecs/mt6660.c
58
static int mt6660_reg_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/mt6660.c
60
struct mt6660_chip *chip = context;
sound/soc/codecs/peb2466.c
198
static int peb2466_reg_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/peb2466.c
200
struct peb2466 *peb2466 = context;
sound/soc/codecs/peb2466.c
220
static int peb2466_reg_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/peb2466.c
222
struct peb2466 *peb2466 = context;
sound/soc/codecs/rl6347a.c
16
int rl6347a_hw_write(void *context, unsigned int reg, unsigned int value)
sound/soc/codecs/rl6347a.c
18
struct i2c_client *client = context;
sound/soc/codecs/rl6347a.c
59
int rl6347a_hw_read(void *context, unsigned int reg, unsigned int *value)
sound/soc/codecs/rl6347a.c
61
struct i2c_client *client = context;
sound/soc/codecs/rl6347a.h
28
int rl6347a_hw_write(void *context, unsigned int reg, unsigned int value);
sound/soc/codecs/rl6347a.h
29
int rl6347a_hw_read(void *context, unsigned int reg, unsigned int *value);
sound/soc/codecs/rt5514.c
1115
static int rt5514_i2c_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/rt5514.c
1117
struct i2c_client *client = context;
sound/soc/codecs/rt5514.c
1125
static int rt5514_i2c_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/rt5514.c
1127
struct i2c_client *client = context;
sound/soc/codecs/rt5575.c
215
static int rt5575_i2c_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/rt5575.c
217
struct i2c_client *client = context;
sound/soc/codecs/rt5575.c
223
static int rt5575_i2c_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/rt5575.c
225
struct i2c_client *client = context;
sound/soc/codecs/rt5677.c
4976
static int rt5677_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/rt5677.c
4978
struct i2c_client *client = context;
sound/soc/codecs/rt5677.c
4998
static int rt5677_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/rt5677.c
5000
struct i2c_client *client = context;
sound/soc/codecs/rt5682-sdw.c
39
static int rt5682_sdw_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/rt5682-sdw.c
41
struct device *dev = context;
sound/soc/codecs/rt5682-sdw.c
58
static int rt5682_sdw_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/rt5682-sdw.c
60
struct device *dev = context;
sound/soc/codecs/rt700-sdw.c
208
static int rt700_sdw_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/rt700-sdw.c
210
struct device *dev = context;
sound/soc/codecs/rt700-sdw.c
85
static int rt700_sdw_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/rt700-sdw.c
87
struct device *dev = context;
sound/soc/codecs/rt711-sdw.c
214
static int rt711_sdw_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/rt711-sdw.c
216
struct device *dev = context;
sound/soc/codecs/rt711-sdw.c
91
static int rt711_sdw_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/rt711-sdw.c
93
struct device *dev = context;
sound/soc/codecs/rt715-sdw.c
148
static int rt715_sdw_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/rt715-sdw.c
150
struct device *dev = context;
sound/soc/codecs/rt715-sdw.c
271
static int rt715_sdw_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/rt715-sdw.c
273
struct device *dev = context;
sound/soc/codecs/rt9120.c
387
static int rt9120_reg_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/codecs/rt9120.c
389
struct rt9120_data *data = context;
sound/soc/codecs/rt9120.c
418
static int rt9120_reg_write(void *context, unsigned int reg, unsigned int val)
sound/soc/codecs/rt9120.c
420
struct rt9120_data *data = context;
sound/soc/codecs/rtq9124.c
377
static int rtq9124_regmap_read(void *context, const void *reg_buf, size_t reg_size, void *val_buf,
sound/soc/codecs/rtq9124.c
380
struct i2c_client *i2c = context;
sound/soc/codecs/rtq9124.c
397
static int rtq9124_regmap_write(void *context, const void *data, size_t count)
sound/soc/codecs/rtq9124.c
399
struct i2c_client *i2c = context;
sound/soc/codecs/rtq9128.c
112
static int rtq9128_i2c_write(void *context, const void *data, size_t count)
sound/soc/codecs/rtq9128.c
114
struct device *dev = context;
sound/soc/codecs/rtq9128.c
128
static int rtq9128_i2c_read(void *context, const void *reg_buf, size_t reg_size, void *val_buf,
sound/soc/codecs/rtq9128.c
131
struct device *dev = context;
sound/soc/codecs/sti-sas.c
77
static int sti_sas_read_reg(void *context, unsigned int reg,
sound/soc/codecs/sti-sas.c
80
struct sti_sas_data *drvdata = context;
sound/soc/codecs/sti-sas.c
91
static int sti_sas_write_reg(void *context, unsigned int reg,
sound/soc/codecs/sti-sas.c
94
struct sti_sas_data *drvdata = context;
sound/soc/codecs/tas2781-comlib-i2c.c
338
void (*cont)(const struct firmware *fw, void *context))
sound/soc/codecs/tas2781-comlib.c
194
void tasdevice_dsp_remove(void *context)
sound/soc/codecs/tas2781-comlib.c
196
struct tasdevice_priv *tas_dev = (struct tasdevice_priv *) context;
sound/soc/codecs/tas2781-fmwlib.c
2189
int tas2781_load_calibration(void *context, char *file_name,
sound/soc/codecs/tas2781-fmwlib.c
2192
struct tasdevice_priv *tas_priv = (struct tasdevice_priv *)context;
sound/soc/codecs/tas2781-fmwlib.c
2262
void *context)
sound/soc/codecs/tas2781-fmwlib.c
2264
struct tasdevice_priv *tas_priv = (struct tasdevice_priv *) context;
sound/soc/codecs/tas2781-fmwlib.c
2347
int tasdevice_dsp_parser(void *context)
sound/soc/codecs/tas2781-fmwlib.c
2349
struct tasdevice_priv *tas_priv = (struct tasdevice_priv *)context;
sound/soc/codecs/tas2781-fmwlib.c
2404
void tasdevice_calbin_remove(void *context)
sound/soc/codecs/tas2781-fmwlib.c
2406
struct tasdevice_priv *tas_priv = (struct tasdevice_priv *) context;
sound/soc/codecs/tas2781-fmwlib.c
2423
void tasdevice_config_info_remove(void *context)
sound/soc/codecs/tas2781-fmwlib.c
2425
struct tasdevice_priv *tas_priv = (struct tasdevice_priv *) context;
sound/soc/codecs/tas2781-fmwlib.c
2600
int tasdevice_select_tuningprm_cfg(void *context, int prm_no,
sound/soc/codecs/tas2781-fmwlib.c
2603
struct tasdevice_priv *tas_priv = (struct tasdevice_priv *) context;
sound/soc/codecs/tas2781-fmwlib.c
2704
int tasdevice_prmg_load(void *context, int prm_no)
sound/soc/codecs/tas2781-fmwlib.c
2706
struct tasdevice_priv *tas_priv = (struct tasdevice_priv *) context;
sound/soc/codecs/tas2781-fmwlib.c
2749
void tasdevice_tuning_switch(void *context, int state)
sound/soc/codecs/tas2781-fmwlib.c
2751
struct tasdevice_priv *tas_priv = (struct tasdevice_priv *) context;
sound/soc/codecs/tas2781-fmwlib.c
301
int tasdevice_rca_parser(void *context, const struct firmware *fmw)
sound/soc/codecs/tas2781-fmwlib.c
303
struct tasdevice_priv *tas_priv = context;
sound/soc/codecs/tas2781-fmwlib.c
876
static int tasdevice_process_block(void *context, unsigned char *data,
sound/soc/codecs/tas2781-fmwlib.c
879
struct tasdevice_priv *tas_priv = (struct tasdevice_priv *)context;
sound/soc/codecs/tas2781-i2c.c
1623
void *context)
sound/soc/codecs/tas2781-i2c.c
1625
struct tasdevice_priv *tas_priv = context;
sound/soc/codecs/tas2781-i2c.c
1942
static void tasdevice_deinit(void *context)
sound/soc/codecs/tas2781-i2c.c
1944
struct tasdevice_priv *tas_priv = (struct tasdevice_priv *) context;
sound/soc/codecs/tas2783-sdw.c
728
static void tas2783_fw_ready(const struct firmware *fmw, void *context)
sound/soc/codecs/tas2783-sdw.c
731
(struct tas2783_prv *)context;
sound/soc/codecs/tas5086.c
164
static int tas5086_reg_write(void *context, unsigned int reg,
sound/soc/codecs/tas5086.c
167
struct i2c_client *client = context;
sound/soc/codecs/tas5086.c
192
static int tas5086_reg_read(void *context, unsigned int reg,
sound/soc/codecs/tas5086.c
195
struct i2c_client *client = context;
sound/soc/codecs/tas571x.c
102
static int tas571x_reg_read(void *context, unsigned int reg,
sound/soc/codecs/tas571x.c
105
struct i2c_client *client = context;
sound/soc/codecs/tas571x.c
76
static int tas571x_reg_write(void *context, unsigned int reg,
sound/soc/codecs/tas571x.c
79
struct i2c_client *client = context;
sound/soc/codecs/wm0010.c
426
xfer->m.context = xfer;
sound/soc/codecs/wm8958-dsp2.c
861
static void wm8958_enh_eq_loaded(const struct firmware *fw, void *context)
sound/soc/codecs/wm8958-dsp2.c
863
struct snd_soc_component *component = context;
sound/soc/codecs/wm8958-dsp2.c
873
static void wm8958_mbc_vss_loaded(const struct firmware *fw, void *context)
sound/soc/codecs/wm8958-dsp2.c
875
struct snd_soc_component *component = context;
sound/soc/codecs/wm8958-dsp2.c
885
static void wm8958_mbc_loaded(const struct firmware *fw, void *context)
sound/soc/codecs/wm8958-dsp2.c
887
struct snd_soc_component *component = context;
sound/soc/codecs/zl38060.c
479
static int zl38_bus_read(void *context,
sound/soc/codecs/zl38060.c
483
struct spi_device *spi = context;
sound/soc/codecs/zl38060.c
508
static int zl38_bus_write(void *context, const void *data, size_t count)
sound/soc/codecs/zl38060.c
510
struct spi_device *spi = context;
sound/soc/fsl/fsl_mqs.c
80
static int fsl_mqs_sm_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/fsl/fsl_mqs.c
82
struct fsl_mqs *mqs_priv = context;
sound/soc/fsl/fsl_mqs.c
92
static int fsl_mqs_sm_write(void *context, unsigned int reg, unsigned int val)
sound/soc/fsl/fsl_mqs.c
94
struct fsl_mqs *mqs_priv = context;
sound/soc/fsl/fsl_qmc_audio.c
114
static void qmc_audio_pcm_write_complete(void *context);
sound/soc/fsl/fsl_qmc_audio.c
137
static void qmc_audio_pcm_write_complete(void *context)
sound/soc/fsl/fsl_qmc_audio.c
139
struct qmc_dai_prtd *prtd = context;
sound/soc/fsl/fsl_qmc_audio.c
154
static void qmc_audio_pcm_read_complete(void *context, size_t length, unsigned int flags);
sound/soc/fsl/fsl_qmc_audio.c
177
static void qmc_audio_pcm_read_complete(void *context, size_t length, unsigned int flags)
sound/soc/fsl/fsl_qmc_audio.c
179
struct qmc_dai_prtd *prtd = context;
sound/soc/fsl/fsl_xcvr.c
304
static int fsl_xcvr_phy_reg_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/fsl/fsl_xcvr.c
306
struct fsl_xcvr *xcvr = context;
sound/soc/fsl/fsl_xcvr.c
311
static int fsl_xcvr_phy_reg_write(void *context, unsigned int reg, unsigned int val)
sound/soc/fsl/fsl_xcvr.c
313
struct fsl_xcvr *xcvr = context;
sound/soc/fsl/fsl_xcvr.c
318
static int fsl_xcvr_pll_reg_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/fsl/fsl_xcvr.c
320
struct fsl_xcvr *xcvr = context;
sound/soc/fsl/fsl_xcvr.c
325
static int fsl_xcvr_pll_reg_write(void *context, unsigned int reg, unsigned int val)
sound/soc/fsl/fsl_xcvr.c
327
struct fsl_xcvr *xcvr = context;
sound/soc/intel/atom/sst/sst.c
115
static irqreturn_t intel_sst_irq_thread_mrfld(int irq, void *context)
sound/soc/intel/atom/sst/sst.c
117
struct intel_sst_drv *drv = (struct intel_sst_drv *) context;
sound/soc/intel/atom/sst/sst.c
46
static irqreturn_t intel_sst_interrupt_mrfld(int irq, void *context)
sound/soc/intel/atom/sst/sst.c
53
struct intel_sst_drv *drv = (struct intel_sst_drv *) context;
sound/soc/intel/atom/sst/sst.h
484
void sst_firmware_load_cb(const struct firmware *fw, void *context);
sound/soc/intel/atom/sst/sst_loader.c
310
void sst_firmware_load_cb(const struct firmware *fw, void *context)
sound/soc/intel/atom/sst/sst_loader.c
312
struct intel_sst_drv *ctx = context;
sound/soc/intel/catpt/messages.c
123
struct catpt_dx_context *context)
sound/soc/intel/catpt/messages.c
131
reply.size = sizeof(*context);
sound/soc/intel/catpt/messages.c
132
reply.data = context;
sound/soc/intel/catpt/messages.h
272
struct catpt_dx_context *context);
sound/soc/intel/catpt/messages.h
46
u32 context:19; /* stream or module specific */
sound/soc/soc-acpi.c
56
void *context, void **ret)
sound/soc/soc-acpi.c
60
struct snd_soc_acpi_package_context *pkg_ctx = context;
sound/soc/soc-ops-test.c
333
static int mock_regmap_read(void *context, const void *reg_buf,
sound/soc/soc-ops-test.c
337
struct soc_ops_test_priv *priv = context;
sound/soc/soc-ops-test.c
344
static int mock_regmap_gather_write(void *context,
sound/soc/soc-ops-test.c
348
struct soc_ops_test_priv *priv = context;
sound/soc/soc-ops-test.c
355
static int mock_regmap_write(void *context, const void *val_buf,
sound/soc/soc-ops-test.c
358
struct soc_ops_test_priv *priv = context;
sound/soc/sof/amd/acp-ipc.c
155
irqreturn_t acp_sof_ipc_irq_thread(int irq, void *context)
sound/soc/sof/amd/acp-ipc.c
157
struct snd_sof_dev *sdev = context;
sound/soc/sof/amd/acp.c
455
static irqreturn_t acp_irq_thread(int irq, void *context)
sound/soc/sof/amd/acp.c
457
struct snd_sof_dev *sdev = context;
sound/soc/sof/amd/acp.h
301
irqreturn_t acp_sof_ipc_irq_thread(int irq, void *context);
sound/soc/sof/intel/atom.c
103
irqreturn_t atom_irq_handler(int irq, void *context)
sound/soc/sof/intel/atom.c
105
struct snd_sof_dev *sdev = context;
sound/soc/sof/intel/atom.c
136
irqreturn_t atom_irq_thread(int irq, void *context)
sound/soc/sof/intel/atom.c
138
struct snd_sof_dev *sdev = context;
sound/soc/sof/intel/atom.h
57
irqreturn_t atom_irq_handler(int irq, void *context);
sound/soc/sof/intel/atom.h
58
irqreturn_t atom_irq_thread(int irq, void *context);
sound/soc/sof/intel/bdw.c
289
static irqreturn_t bdw_irq_handler(int irq, void *context)
sound/soc/sof/intel/bdw.c
291
struct snd_sof_dev *sdev = context;
sound/soc/sof/intel/bdw.c
303
static irqreturn_t bdw_irq_thread(int irq, void *context)
sound/soc/sof/intel/bdw.c
305
struct snd_sof_dev *sdev = context;
sound/soc/sof/intel/cnl.c
112
irqreturn_t cnl_ipc_irq_thread(int irq, void *context)
sound/soc/sof/intel/cnl.c
114
struct snd_sof_dev *sdev = context;
sound/soc/sof/intel/cnl.c
36
irqreturn_t cnl_ipc4_irq_thread(int irq, void *context)
sound/soc/sof/intel/cnl.c
39
struct snd_sof_dev *sdev = context;
sound/soc/sof/intel/hda-ipc.c
167
irqreturn_t hda_dsp_ipc4_irq_thread(int irq, void *context)
sound/soc/sof/intel/hda-ipc.c
170
struct snd_sof_dev *sdev = context;
sound/soc/sof/intel/hda-ipc.c
248
irqreturn_t hda_dsp_ipc_irq_thread(int irq, void *context)
sound/soc/sof/intel/hda-ipc.c
250
struct snd_sof_dev *sdev = context;
sound/soc/sof/intel/hda-ipc.h
51
irqreturn_t cnl_ipc_irq_thread(int irq, void *context);
sound/soc/sof/intel/hda-stream.c
865
irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
sound/soc/sof/intel/hda-stream.c
867
struct snd_sof_dev *sdev = context;
sound/soc/sof/intel/hda.c
310
static irqreturn_t hda_dsp_sdw_thread(int irq, void *context)
sound/soc/sof/intel/hda.c
312
return sdw_intel_thread(irq, context);
sound/soc/sof/intel/hda.c
402
static inline irqreturn_t hda_dsp_sdw_thread(int irq, void *context)
sound/soc/sof/intel/hda.c
678
static irqreturn_t hda_dsp_interrupt_handler(int irq, void *context)
sound/soc/sof/intel/hda.c
680
struct snd_sof_dev *sdev = context;
sound/soc/sof/intel/hda.c
701
static irqreturn_t hda_dsp_interrupt_thread(int irq, void *context)
sound/soc/sof/intel/hda.c
703
struct snd_sof_dev *sdev = context;
sound/soc/sof/intel/hda.h
679
irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context);
sound/soc/sof/intel/hda.h
721
irqreturn_t hda_dsp_ipc_irq_thread(int irq, void *context);
sound/soc/sof/intel/hda.h
997
irqreturn_t cnl_ipc4_irq_thread(int irq, void *context);
sound/soc/sof/intel/hda.h
999
irqreturn_t hda_dsp_ipc4_irq_thread(int irq, void *context);
sound/soc/sof/intel/mtl.c
558
static irqreturn_t mtl_ipc_irq_thread(int irq, void *context)
sound/soc/sof/intel/mtl.c
561
struct snd_sof_dev *sdev = context;
sound/soc/sof/sof-priv.h
216
irqreturn_t (*irq_handler)(int irq, void *context); /* optional */
sound/soc/sof/sof-priv.h
217
irqreturn_t (*irq_thread)(int irq, void *context); /* optional */
sound/soc/sunxi/sun8i-adda-pr-regmap.c
29
static int adda_reg_read(void *context, unsigned int reg, unsigned int *val)
sound/soc/sunxi/sun8i-adda-pr-regmap.c
31
void __iomem *base = (void __iomem *)context;
sound/soc/sunxi/sun8i-adda-pr-regmap.c
52
static int adda_reg_write(void *context, unsigned int reg, unsigned int val)
sound/soc/sunxi/sun8i-adda-pr-regmap.c
54
void __iomem *base = (void __iomem *)context;
sound/soc/ti/davinci-mcasp.c
132
struct davinci_mcasp_context context;
sound/soc/ti/davinci-mcasp.c
2294
mcasp->context.xrsr_regs = devm_kcalloc(&pdev->dev,
sound/soc/ti/davinci-mcasp.c
2297
if (!mcasp->context.xrsr_regs)
sound/soc/ti/davinci-mcasp.c
2774
struct davinci_mcasp_context *context = &mcasp->context;
sound/soc/ti/davinci-mcasp.c
2779
context->config_regs[i] = mcasp_get_reg(mcasp, context_regs[i]);
sound/soc/ti/davinci-mcasp.c
2783
context->afifo_regs[0] = mcasp_get_reg(mcasp, reg);
sound/soc/ti/davinci-mcasp.c
2787
context->afifo_regs[1] = mcasp_get_reg(mcasp, reg);
sound/soc/ti/davinci-mcasp.c
2791
context->xrsr_regs[i] = mcasp_get_reg(mcasp,
sound/soc/ti/davinci-mcasp.c
2800
struct davinci_mcasp_context *context = &mcasp->context;
sound/soc/ti/davinci-mcasp.c
2805
mcasp_set_reg(mcasp, context_regs[i], context->config_regs[i]);
sound/soc/ti/davinci-mcasp.c
2809
mcasp_set_reg(mcasp, reg, context->afifo_regs[0]);
sound/soc/ti/davinci-mcasp.c
2813
mcasp_set_reg(mcasp, reg, context->afifo_regs[1]);
sound/soc/ti/davinci-mcasp.c
2818
context->xrsr_regs[i]);
sound/usb/6fire/comm.c
171
urb->context = rt;
sound/usb/6fire/comm.c
22
u8 *buffer, void *context, void(*handler)(struct urb *urb))
sound/usb/6fire/comm.c
28
urb->context = context;
sound/usb/6fire/comm.c
35
struct comm_runtime *rt = urb->context;
sound/usb/6fire/comm.h
28
void *context, void(*handler)(struct urb *urb));
sound/usb/6fire/midi.c
24
struct midi_runtime *rt = urb->context;
sound/usb/6fire/pcm.c
288
struct pcm_urb *in_urb = usb_urb->context;
sound/usb/6fire/pcm.c
379
struct pcm_urb *urb = usb_urb->context;
sound/usb/6fire/pcm.c
551
urb->instance.context = urb;
sound/usb/bcd2000/bcd2000.c
207
struct bcd2000 *bcd2k = urb->context;
sound/usb/bcd2000/bcd2000.c
225
struct bcd2000 *bcd2k = urb->context;
sound/usb/caiaq/audio.c
587
struct snd_usb_caiaq_cb_info *info = urb->context;
sound/usb/caiaq/audio.c
644
struct snd_usb_caiaq_cb_info *oinfo = out->context;
sound/usb/caiaq/audio.c
662
struct snd_usb_caiaq_cb_info *info = urb->context;
sound/usb/caiaq/audio.c
717
urbs[i]->context = &cdev->data_cb_info[i];
sound/usb/caiaq/device.c
135
struct snd_usb_caiaqdev *cdev = urb->context;
sound/usb/caiaq/input.c
492
struct snd_usb_caiaqdev *cdev = urb->context;
sound/usb/caiaq/midi.c
152
struct snd_usb_caiaqdev *cdev = urb->context;
sound/usb/endpoint.c
1260
u->urb->context = u;
sound/usb/endpoint.c
1304
u->urb->context = u;
sound/usb/endpoint.c
1610
err = prepare_outbound_urb(ep, urb->context, true);
sound/usb/endpoint.c
1612
err = prepare_inbound_urb(ep, urb->context);
sound/usb/endpoint.c
1773
in_ctx = urb->context;
sound/usb/endpoint.c
542
struct snd_urb_ctx *ctx = urb->context;
sound/usb/fcp.c
927
struct usb_mixer_interface *mixer = urb->context;
sound/usb/hiface/pcm.c
304
struct pcm_urb *out_urb = usb_urb->context;
sound/usb/line6/capture.c
148
struct snd_line6_pcm *line6pcm = (struct snd_line6_pcm *)urb->context;
sound/usb/line6/capture.c
51
urb_in->context = line6pcm;
sound/usb/line6/driver.c
142
struct message *msg = (struct message *)urb->context;
sound/usb/line6/driver.c
287
struct usb_line6 *line6 = (struct usb_line6 *)urb->context;
sound/usb/line6/midi.c
77
struct usb_line6 *line6 = (struct usb_line6 *)urb->context;
sound/usb/line6/playback.c
201
urb_out->context = line6pcm;
sound/usb/line6/playback.c
306
struct snd_line6_pcm *line6pcm = (struct snd_line6_pcm *)urb->context;
sound/usb/midi.c
241
struct snd_usb_midi_in_endpoint *ep = urb->context;
sound/usb/midi.c
265
struct out_urb_context *context = urb->context;
sound/usb/midi.c
266
struct snd_usb_midi_out_endpoint *ep = context->ep;
sound/usb/midi.c
270
urb_index = context - ep->urbs;
sound/usb/midi2.c
161
struct snd_usb_midi2_urb *ctx = urb->context;
sound/usb/midi2.c
188
struct snd_usb_midi2_urb *ctx = urb->context;
sound/usb/misc/ua101.c
1077
urb->urb.context = ua;
sound/usb/misc/ua101.c
173
struct ua101 *ua = urb->urb.context;
sound/usb/misc/ua101.c
198
struct ua101 *ua = urb->context;
sound/usb/misc/ua101.c
345
struct ua101 *ua = urb->context;
sound/usb/misc/ua101.c
416
struct ua101 *ua = urb->context;
sound/usb/mixer.c
3505
struct usb_mixer_interface *mixer = urb->context;
sound/usb/mixer_quirks.c
201
struct usb_mixer_interface *mixer = urb->context;
sound/usb/mixer_scarlett2.c
8122
struct usb_mixer_interface *mixer = urb->context;
sound/usb/pcm.c
1390
struct snd_urb_ctx *ctx = urb->context;
sound/usb/pcm.c
1526
struct snd_urb_ctx *ctx = urb->context;
sound/usb/pcm.c
1651
struct snd_urb_ctx *ctx = urb->context;
sound/usb/usx2y/us144mkii.c
139
urb->context = tascam;
sound/usb/usx2y/us144mkii.c
165
f_urb->context = tascam;
sound/usb/usx2y/us144mkii_capture.c
264
struct tascam_card *tascam = urb->context;
sound/usb/usx2y/us144mkii_midi.c
167
struct tascam_card *tascam = urb->context;
sound/usb/usx2y/us144mkii_midi.c
41
struct tascam_card *tascam = urb->context;
sound/usb/usx2y/us144mkii_playback.c
160
struct tascam_card *tascam = urb->context;
sound/usb/usx2y/us144mkii_playback.c
259
struct tascam_card *tascam = urb->context;
sound/usb/usx2y/usb_stream.c
515
struct usb_stream_kernel *sk = urb->context;
sound/usb/usx2y/usb_stream.c
523
struct usb_stream_kernel *sk = urb->context;
sound/usb/usx2y/usb_stream.c
612
struct usb_stream_kernel *sk = urb->context;
sound/usb/usx2y/usb_stream.c
63
urb->context = sk;
sound/usb/usx2y/usb_stream.c
659
struct usb_stream_kernel *sk = urb->context;
sound/usb/usx2y/usbusx2y.c
168
struct usx2ydev *usx2y = urb->context;
sound/usb/usx2y/usbusx2y.c
180
struct usx2ydev *usx2y = urb->context;
sound/usb/usx2y/usbusx2yaudio.c
277
struct snd_usx2y_substream *subs = urb->context;
sound/usb/usx2y/usbusx2yaudio.c
339
struct snd_usx2y_substream *subs = urb->context;
sound/usb/usx2y/usbusx2yaudio.c
439
(*purb)->context = subs;
sound/usb/usx2y/usbusx2yaudio.c
642
struct usx2ydev *usx2y = urb->context;
sound/usb/usx2y/usx2yhwdeppcm.c
232
struct snd_usx2y_substream *subs = urb->context;
sound/usb/usx2y/usx2yhwdeppcm.c
292
struct snd_usx2y_substream *subs = urb->context;
sound/usb/usx2y/usx2yhwdeppcm.c
350
(*purb)->context = subs;
tools/include/uapi/drm/drm.h
297
int context;
tools/include/uapi/drm/drm.h
402
int context; /**< Context handle */
tools/include/uapi/drm/i915_drm.h
1568
#define i915_execbuffer2_set_context_id(eb2, context) \
tools/include/uapi/drm/i915_drm.h
1569
(eb2).rsvd1 = context & I915_EXEC_CONTEXT_ID_MASK
tools/objtool/signal.c
27
static void signal_handler(int sig_num, siginfo_t *info, void *context)
tools/perf/builtin-top.c
1635
annotate_opts.context = 4;
tools/perf/builtin-trace.c
1905
void *context __maybe_unused)
tools/perf/scripts/perl/Perf-Trace-Util/Context.c
42
struct scripting_context * context = INT2PTR(struct scripting_context *,SvIV(ST(0)));
tools/perf/scripts/perl/Perf-Trace-Util/Context.c
46
RETVAL = common_pc(context);
tools/perf/scripts/perl/Perf-Trace-Util/Context.c
65
struct scripting_context * context = INT2PTR(struct scripting_context *,SvIV(ST(0)));
tools/perf/scripts/perl/Perf-Trace-Util/Context.c
69
RETVAL = common_flags(context);
tools/perf/scripts/perl/Perf-Trace-Util/Context.c
88
struct scripting_context * context = INT2PTR(struct scripting_context *,SvIV(ST(0)));
tools/perf/scripts/perl/Perf-Trace-Util/Context.c
92
RETVAL = common_lock_depth(context);
tools/perf/scripts/python/Perf-Trace-Util/Context.c
39
PyObject *context;
tools/perf/scripts/python/Perf-Trace-Util/Context.c
41
if (!PyArg_UnpackTuple(args, name, 1, cnt, &context, arg2))
tools/perf/scripts/python/Perf-Trace-Util/Context.c
44
return _PyCapsule_GetPointer(context, NULL);
tools/perf/trace/beauty/include/uapi/linux/usbdevice_fs.h
64
void __user *context;
tools/perf/util/annotate.c
1236
bool context = opts->context;
tools/perf/util/annotate.c
1276
if (context && queue == NULL) {
tools/perf/util/annotate.c
1286
if (context) {
tools/perf/util/annotate.c
1302
if (!context)
tools/perf/util/annotate.c
1304
if (queue_len == context)
tools/perf/util/annotate.h
66
int context;
tools/perf/util/cs-etm-decoder/cs-etm-decoder.c
53
cs_etm_decoder__mem_access(const void *context,
tools/perf/util/cs-etm-decoder/cs-etm-decoder.c
556
if (elem->context.ctxt_id_valid)
tools/perf/util/cs-etm-decoder/cs-etm-decoder.c
557
tid = elem->context.context_id;
tools/perf/util/cs-etm-decoder/cs-etm-decoder.c
560
if (elem->context.vmid_valid)
tools/perf/util/cs-etm-decoder/cs-etm-decoder.c
561
tid = elem->context.vmid;
tools/perf/util/cs-etm-decoder/cs-etm-decoder.c
569
elem->context.exception_level))
tools/perf/util/cs-etm-decoder/cs-etm-decoder.c
585
const void *context,
tools/perf/util/cs-etm-decoder/cs-etm-decoder.c
592
struct cs_etm_decoder *decoder = (struct cs_etm_decoder *) context;
tools/perf/util/cs-etm-decoder/cs-etm-decoder.c
60
struct cs_etm_decoder *decoder = (struct cs_etm_decoder *) context;
tools/perf/util/evsel.c
710
#define MOD_PRINT(context, mod) do { \
tools/perf/util/evsel.c
711
if (!attr->exclude_##context) { \
tools/perf/util/scripting-engines/trace-event-python.c
944
PyObject *handler, *context, *t, *obj = NULL, *callchain;
tools/perf/util/scripting-engines/trace-event-python.c
992
context = _PyCapsule_New(scripting_context, NULL, NULL);
tools/perf/util/scripting-engines/trace-event-python.c
995
PyTuple_SetItem(t, n++, context);
tools/perf/util/thread-stack.c
526
u64 context = callchain_context(ip, kernel_start);
tools/perf/util/thread-stack.c
535
chain->ips[0] = context;
tools/perf/util/thread-stack.c
543
last_context = context;
tools/perf/util/thread-stack.c
547
context = callchain_context(ip, kernel_start);
tools/perf/util/thread-stack.c
548
if (context != last_context) {
tools/perf/util/thread-stack.c
551
chain->ips[i++] = context;
tools/perf/util/thread-stack.c
552
last_context = context;
tools/perf/util/thread-stack.c
570
u64 last_context, context, ip;
tools/perf/util/thread-stack.c
588
context = callchain_context(ip, kernel_start);
tools/perf/util/thread-stack.c
589
if (context == PERF_CONTEXT_USER ||
tools/perf/util/thread-stack.c
590
(context == sample_context && ip == sample_ip))
tools/perf/util/thread-stack.c
598
context = callchain_context(ip, kernel_start);
tools/perf/util/thread-stack.c
599
if (context != last_context) {
tools/perf/util/thread-stack.c
602
chain->ips[nr++] = context;
tools/perf/util/thread-stack.c
603
last_context = context;
tools/perf/util/trace-event-parse.c
17
static int get_common_field(struct scripting_context *context,
tools/perf/util/trace-event-parse.c
20
struct tep_handle *pevent = context->pevent;
tools/perf/util/trace-event-parse.c
37
return tep_read_number(pevent, context->event_data + *offset, *size);
tools/perf/util/trace-event-parse.c
40
int common_lock_depth(struct scripting_context *context)
tools/perf/util/trace-event-parse.c
46
ret = get_common_field(context, &size, &offset,
tools/perf/util/trace-event-parse.c
54
int common_flags(struct scripting_context *context)
tools/perf/util/trace-event-parse.c
60
ret = get_common_field(context, &size, &offset,
tools/perf/util/trace-event-parse.c
68
int common_pc(struct scripting_context *context)
tools/perf/util/trace-event-parse.c
74
ret = get_common_field(context, &size, &offset,
tools/perf/util/trace-event.h
140
int common_pc(struct scripting_context *context);
tools/perf/util/trace-event.h
141
int common_flags(struct scripting_context *context);
tools/perf/util/trace-event.h
142
int common_lock_depth(struct scripting_context *context);
tools/power/acpi/os_specific/service_layers/osunixxf.c
1270
acpi_osd_exec_callback function, void *context)
tools/power/acpi/os_specific/service_layers/osunixxf.c
1276
pthread_create(&thread, NULL, (PTHREAD_CALLBACK) function, context);
tools/power/acpi/os_specific/service_layers/osunixxf.c
1291
acpi_osd_exec_callback function, void *context)
tools/power/acpi/os_specific/service_layers/osunixxf.c
1294
function(context);
tools/power/acpi/os_specific/service_layers/osunixxf.c
898
void *context)
tools/testing/nvdimm/test/nfit.c
170
u32 context;
tools/testing/nvdimm/test/nfit.c
277
fw->context++;
tools/testing/nvdimm/test/nfit.c
280
nd_cmd->context = fw->context;
tools/testing/nvdimm/test/nfit.c
282
dev_dbg(dev, "%s: context issued: %#x\n", __func__, nd_cmd->context);
tools/testing/nvdimm/test/nfit.c
313
if (nd_cmd->context != fw->context) {
tools/testing/nvdimm/test/nfit.c
315
__func__, nd_cmd->context, fw->context);
tools/testing/nvdimm/test/nfit.c
355
__func__, nd_cmd->context, nd_cmd->ctrl_flags);
tools/testing/nvdimm/test/nfit.c
359
if (nd_cmd->context != fw->context) {
tools/testing/nvdimm/test/nfit.c
361
__func__, nd_cmd->context,
tools/testing/nvdimm/test/nfit.c
362
fw->context);
tools/testing/nvdimm/test/nfit.c
402
if (nd_cmd->context != fw->context) {
tools/testing/nvdimm/test/nfit.c
404
__func__, nd_cmd->context, fw->context);
tools/testing/nvdimm/test/nfit.c
409
dev_dbg(dev, "%s context: %#x\n", __func__, nd_cmd->context);
tools/testing/nvdimm/test/nfit_test.h
175
__u32 context;
tools/testing/nvdimm/test/nfit_test.h
180
__u32 context;
tools/testing/nvdimm/test/nfit_test.h
191
__u32 context;
tools/testing/nvdimm/test/nfit_test.h
196
__u32 context;
tools/testing/selftests/arm64/abi/hwcap.c
1192
static void handle_##SIG(int sig, siginfo_t *info, void *context) \
tools/testing/selftests/arm64/abi/hwcap.c
1194
ucontext_t *uc = context; \
tools/testing/selftests/arm64/abi/hwcap.c
600
static void ignore_signal(int sig, siginfo_t *info, void *context)
tools/testing/selftests/arm64/abi/hwcap.c
602
ucontext_t *uc = context;
tools/testing/selftests/arm64/fp/fp-ptrace.c
110
static void handle_alarm(int sig, siginfo_t *info, void *context)
tools/testing/selftests/arm64/fp/fp-stress.c
272
static void handle_child_signal(int sig, siginfo_t *info, void *context)
tools/testing/selftests/arm64/fp/fp-stress.c
291
static void handle_exit_signal(int sig, siginfo_t *info, void *context)
tools/testing/selftests/arm64/fp/kernel-test.c
36
static void handle_exit_signal(int sig, siginfo_t *info, void *context)
tools/testing/selftests/arm64/fp/kernel-test.c
43
static void handle_kick_signal(int sig, siginfo_t *info, void *context)
tools/testing/selftests/arm64/gcs/gcs-stress.c
288
static void handle_child_signal(int sig, siginfo_t *info, void *context)
tools/testing/selftests/arm64/gcs/gcs-stress.c
307
static void handle_exit_signal(int sig, siginfo_t *info, void *context)
tools/testing/selftests/arm64/signal/testcases/fpmr_siginfo.c
22
} context;
tools/testing/selftests/arm64/signal/testcases/fpmr_siginfo.c
41
struct _aarch64_ctx *head = GET_BUF_RESV_HEAD(context);
tools/testing/selftests/arm64/signal/testcases/fpmr_siginfo.c
52
if (!get_current_context(td, &context.uc, sizeof(context)))
tools/testing/selftests/arm64/signal/testcases/gcs_frame.c
16
} context;
tools/testing/selftests/arm64/signal/testcases/gcs_frame.c
21
struct _aarch64_ctx *head = GET_BUF_RESV_HEAD(context);
tools/testing/selftests/arm64/signal/testcases/gcs_frame.c
38
if (!get_current_context(td, &context.uc, sizeof(context))) {
tools/testing/selftests/arm64/signal/testcases/gcs_frame.c
53
head = get_header(head, GCS_MAGIC, GET_BUF_RESV_SIZE(context),
tools/testing/selftests/arm64/signal/testcases/poe_siginfo.c
22
} context;
tools/testing/selftests/arm64/signal/testcases/poe_siginfo.c
41
struct _aarch64_ctx *head = GET_BUF_RESV_HEAD(context);
tools/testing/selftests/arm64/signal/testcases/poe_siginfo.c
52
if (!get_current_context(td, &context.uc, sizeof(context)))
tools/testing/selftests/arm64/signal/testcases/ssve_regs.c
21
} context;
tools/testing/selftests/arm64/signal/testcases/ssve_regs.c
46
struct _aarch64_ctx *head = GET_BUF_RESV_HEAD(context);
tools/testing/selftests/arm64/signal/testcases/ssve_regs.c
63
if (!get_current_context(td, &context.uc, sizeof(context)))
tools/testing/selftests/arm64/signal/testcases/ssve_regs.c
66
head = get_header(head, SVE_MAGIC, GET_BUF_RESV_SIZE(context),
tools/testing/selftests/arm64/signal/testcases/ssve_za_regs.c
21
} context;
tools/testing/selftests/arm64/signal/testcases/ssve_za_regs.c
51
struct _aarch64_ctx *head = GET_BUF_RESV_HEAD(context);
tools/testing/selftests/arm64/signal/testcases/ssve_za_regs.c
70
if (!get_current_context(td, &context.uc, sizeof(context)))
tools/testing/selftests/arm64/signal/testcases/ssve_za_regs.c
73
regs = get_header(head, SVE_MAGIC, GET_BUF_RESV_SIZE(context),
tools/testing/selftests/arm64/signal/testcases/ssve_za_regs.c
94
regs = get_header(head, ZA_MAGIC, GET_BUF_RESV_SIZE(context),
tools/testing/selftests/arm64/signal/testcases/sve_regs.c
21
} context;
tools/testing/selftests/arm64/signal/testcases/sve_regs.c
46
struct _aarch64_ctx *head = GET_BUF_RESV_HEAD(context);
tools/testing/selftests/arm64/signal/testcases/sve_regs.c
61
if (!get_current_context(td, &context.uc, sizeof(context)))
tools/testing/selftests/arm64/signal/testcases/sve_regs.c
64
head = get_header(head, SVE_MAGIC, GET_BUF_RESV_SIZE(context),
tools/testing/selftests/arm64/signal/testcases/tpidr2_siginfo.c
22
} context;
tools/testing/selftests/arm64/signal/testcases/tpidr2_siginfo.c
41
struct _aarch64_ctx *head = GET_BUF_RESV_HEAD(context);
tools/testing/selftests/arm64/signal/testcases/tpidr2_siginfo.c
52
if (!get_current_context(td, &context.uc, sizeof(context)))
tools/testing/selftests/arm64/signal/testcases/za_no_regs.c
21
} context;
tools/testing/selftests/arm64/signal/testcases/za_no_regs.c
40
struct _aarch64_ctx *head = GET_BUF_RESV_HEAD(context);
tools/testing/selftests/arm64/signal/testcases/za_no_regs.c
54
if (!get_current_context(td, &context.uc, sizeof(context)))
tools/testing/selftests/arm64/signal/testcases/za_no_regs.c
57
head = get_header(head, ZA_MAGIC, GET_BUF_RESV_SIZE(context), &offset);
tools/testing/selftests/arm64/signal/testcases/za_regs.c
21
} context;
tools/testing/selftests/arm64/signal/testcases/za_regs.c
48
struct _aarch64_ctx *head = GET_BUF_RESV_HEAD(context);
tools/testing/selftests/arm64/signal/testcases/za_regs.c
63
if (!get_current_context(td, &context.uc, sizeof(context)))
tools/testing/selftests/arm64/signal/testcases/za_regs.c
66
head = get_header(head, ZA_MAGIC, GET_BUF_RESV_SIZE(context), &offset);
tools/testing/selftests/arm64/signal/testcases/zt_no_regs.c
19
} context;
tools/testing/selftests/arm64/signal/testcases/zt_no_regs.c
24
struct _aarch64_ctx *head = GET_BUF_RESV_HEAD(context);
tools/testing/selftests/arm64/signal/testcases/zt_no_regs.c
30
if (!get_current_context(td, &context.uc, sizeof(context)))
tools/testing/selftests/arm64/signal/testcases/zt_no_regs.c
33
head = get_header(head, ZT_MAGIC, GET_BUF_RESV_SIZE(context), &offset);
tools/testing/selftests/arm64/signal/testcases/zt_regs.c
19
} context;
tools/testing/selftests/arm64/signal/testcases/zt_regs.c
30
struct _aarch64_ctx *head = GET_BUF_RESV_HEAD(context);
tools/testing/selftests/arm64/signal/testcases/zt_regs.c
39
if (!get_current_context(td, &context.uc, sizeof(context)))
tools/testing/selftests/arm64/signal/testcases/zt_regs.c
42
head = get_header(head, ZT_MAGIC, GET_BUF_RESV_SIZE(context), &offset);
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
116
null_context_read(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
118
__u64 id = *((__u64 *)context);
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
138
try_discard_dynptr(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
158
try_submit_dynptr(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
178
invalid_drain_callback_return(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
196
try_reinit_dynptr_mem(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
203
try_reinit_dynptr_ringbuf(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
30
bad_access1(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
53
bad_access2(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
76
write_forbidden(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
96
null_context_write(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_fail.c
98
*((__u64 *)context) = 0;
tools/testing/selftests/bpf/progs/user_ringbuf_success.c
105
static int publish_next_kern_msg(__u32 index, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_success.c
193
do_nothing_cb(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_success.c
38
record_sample(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/user_ringbuf_success.c
89
read_protocol_msg(struct bpf_dynptr *dynptr, void *context)
tools/testing/selftests/bpf/progs/xdp_synproxy_kern.c
264
static int tscookie_tcpopt_parse_batch(__u32 index, void *context)
tools/testing/selftests/bpf/progs/xdp_synproxy_kern.c
269
if (tscookie_tcpopt_parse(context))
tools/testing/selftests/mqueue/mq_perf_tests.c
155
void sig_action_SIGUSR1(int signum, siginfo_t *info, void *context);
tools/testing/selftests/mqueue/mq_perf_tests.c
156
void sig_action(int signum, siginfo_t *info, void *context);
tools/testing/selftests/mqueue/mq_perf_tests.c
213
void sig_action_SIGUSR1(int signum, siginfo_t *info, void *context)
tools/testing/selftests/mqueue/mq_perf_tests.c
226
void sig_action(int signum, siginfo_t *info, void *context)
tools/testing/selftests/powerpc/dexcr/dexcr.c
16
static void generic_signal_handler(int signum, siginfo_t *info, void *context)
tools/testing/selftests/powerpc/dexcr/hashchk_test.c
39
static void hashchk_handler(int signum, siginfo_t *info, void *context)
tools/testing/selftests/powerpc/math/fpu_signal.c
39
void signal_fpu_sig(int sig, siginfo_t *info, void *context)
tools/testing/selftests/powerpc/math/fpu_signal.c
42
ucontext_t *uc = context;
tools/testing/selftests/powerpc/math/vmx_signal.c
43
void signal_vmx_sig(int sig, siginfo_t *info, void *context)
tools/testing/selftests/powerpc/math/vmx_signal.c
46
ucontext_t *uc = context;
tools/testing/selftests/riscv/hwprobe/cbo.c
31
static void fault_handler(int sig, siginfo_t *info, void *context)
tools/testing/selftests/riscv/hwprobe/cbo.c
33
unsigned long *regs = (unsigned long *)&((ucontext_t *)context)->uc_mcontext;
tools/testing/selftests/riscv/sigreturn/sigreturn.c
15
ucontext_t *context = vcontext;
tools/testing/selftests/riscv/sigreturn/sigreturn.c
17
context->uc_mcontext.__gregs[REG_PC] = context->uc_mcontext.__gregs[REG_PC] + 4;
tools/testing/selftests/riscv/sigreturn/sigreturn.c
22
ucontext_t *context = vcontext;
tools/testing/selftests/riscv/sigreturn/sigreturn.c
29
ext = (void *)(&context->uc_mcontext.__fpregs);
tools/testing/selftests/riscv/sigreturn/sigreturn.c
39
context->uc_mcontext.__gregs[REG_PC] = context->uc_mcontext.__gregs[REG_PC] + 4;
tools/testing/selftests/sched_ext/runner.c
102
test->cleanup(context);
tools/testing/selftests/sched_ext/runner.c
91
void *context = NULL;
tools/testing/selftests/sched_ext/runner.c
94
status = test->setup(&context);
tools/testing/selftests/sched_ext/runner.c
99
status = test->run(context);
tools/testing/selftests/sched_ext/test_example.c
15
static int context = 10;
tools/testing/selftests/sched_ext/test_example.c
20
*ctx = &context;
tools/testing/selftests/sched_ext/test_example.c
31
SCX_EQ(*arg, context);
tools/tracing/rtla/src/common.c
131
retval = osnoise_set_cpus(tool->context, params->cpus ? params->cpus : "all");
tools/tracing/rtla/src/common.c
165
retval = osnoise_set_workload(tool->context, params->kernel_workload);
tools/tracing/rtla/src/common.c
437
retval = osnoise_set_stop_us(tool->context, params->stop_us);
tools/tracing/rtla/src/common.c
443
retval = osnoise_set_stop_total_us(tool->context, params->stop_total_us);
tools/tracing/rtla/src/common.h
125
struct osnoise_context *context;
tools/tracing/rtla/src/common.h
146
int osnoise_set_cpus(struct osnoise_context *context, char *cpus);
tools/tracing/rtla/src/common.h
147
void osnoise_restore_cpus(struct osnoise_context *context);
tools/tracing/rtla/src/common.h
149
int osnoise_set_workload(struct osnoise_context *context, bool onoff);
tools/tracing/rtla/src/common.h
155
int osnoise_set_stop_us(struct osnoise_context *context, long long stop_us);
tools/tracing/rtla/src/common.h
156
int osnoise_set_stop_total_us(struct osnoise_context *context,
tools/tracing/rtla/src/osnoise.c
100
debug_msg("restoring cpus to %s", context->orig_cpus);
tools/tracing/rtla/src/osnoise.c
1005
if (top->context)
tools/tracing/rtla/src/osnoise.c
1006
osnoise_put_context(top->context);
tools/tracing/rtla/src/osnoise.c
102
retval = tracefs_instance_file_write(NULL, "osnoise/cpus", context->orig_cpus);
tools/tracing/rtla/src/osnoise.c
1026
top->context = osnoise_context_alloc();
tools/tracing/rtla/src/osnoise.c
1027
if (!top->context)
tools/tracing/rtla/src/osnoise.c
107
free(context->curr_cpus);
tools/tracing/rtla/src/osnoise.c
108
context->curr_cpus = NULL;
tools/tracing/rtla/src/osnoise.c
1117
retval = osnoise_set_runtime_period(tool->context,
tools/tracing/rtla/src/osnoise.c
1121
retval = osnoise_set_runtime_period(tool->context,
tools/tracing/rtla/src/osnoise.c
1131
retval = osnoise_set_tracing_thresh(tool->context, params->threshold);
tools/tracing/rtla/src/osnoise.c
114
void osnoise_put_cpus(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
116
osnoise_restore_cpus(context);
tools/tracing/rtla/src/osnoise.c
118
if (!context->orig_cpus)
tools/tracing/rtla/src/osnoise.c
121
free(context->orig_cpus);
tools/tracing/rtla/src/osnoise.c
122
context->orig_cpus = NULL;
tools/tracing/rtla/src/osnoise.c
172
unsigned long long osnoise_get_runtime(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
176
if (context->runtime_us != OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
177
return context->runtime_us;
tools/tracing/rtla/src/osnoise.c
179
if (context->orig_runtime_us != OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
180
return context->orig_runtime_us;
tools/tracing/rtla/src/osnoise.c
186
context->orig_runtime_us = runtime_us;
tools/tracing/rtla/src/osnoise.c
198
unsigned long long osnoise_get_period(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
202
if (context->period_us != OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
203
return context->period_us;
tools/tracing/rtla/src/osnoise.c
205
if (context->orig_period_us != OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
206
return context->orig_period_us;
tools/tracing/rtla/src/osnoise.c
212
context->orig_period_us = period_us;
tools/tracing/rtla/src/osnoise.c
219
static int __osnoise_write_runtime(struct osnoise_context *context,
tools/tracing/rtla/src/osnoise.c
224
if (context->orig_runtime_us == OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
231
context->runtime_us = runtime;
tools/tracing/rtla/src/osnoise.c
235
static int __osnoise_write_period(struct osnoise_context *context,
tools/tracing/rtla/src/osnoise.c
240
if (context->orig_period_us == OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
247
context->period_us = period;
tools/tracing/rtla/src/osnoise.c
258
int osnoise_set_runtime_period(struct osnoise_context *context,
tools/tracing/rtla/src/osnoise.c
269
curr_runtime_us = osnoise_get_runtime(context);
tools/tracing/rtla/src/osnoise.c
270
curr_period_us = osnoise_get_period(context);
tools/tracing/rtla/src/osnoise.c
279
return __osnoise_write_runtime(context, runtime);
tools/tracing/rtla/src/osnoise.c
28
char *osnoise_get_cpus(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
283
return __osnoise_write_period(context, period);
tools/tracing/rtla/src/osnoise.c
287
retval = __osnoise_write_period(context, period);
tools/tracing/rtla/src/osnoise.c
290
retval = __osnoise_write_runtime(context, runtime);
tools/tracing/rtla/src/osnoise.c
294
retval = __osnoise_write_runtime(context, runtime);
tools/tracing/rtla/src/osnoise.c
297
retval = __osnoise_write_period(context, period);
tools/tracing/rtla/src/osnoise.c
30
if (context->curr_cpus)
tools/tracing/rtla/src/osnoise.c
308
void osnoise_restore_runtime_period(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
31
return context->curr_cpus;
tools/tracing/rtla/src/osnoise.c
310
unsigned long long orig_runtime = context->orig_runtime_us;
tools/tracing/rtla/src/osnoise.c
311
unsigned long long orig_period = context->orig_period_us;
tools/tracing/rtla/src/osnoise.c
312
unsigned long long curr_runtime = context->runtime_us;
tools/tracing/rtla/src/osnoise.c
313
unsigned long long curr_period = context->period_us;
tools/tracing/rtla/src/osnoise.c
322
retval = osnoise_set_runtime_period(context, orig_runtime, orig_period);
tools/tracing/rtla/src/osnoise.c
327
context->runtime_us = OSNOISE_TIME_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
328
context->period_us = OSNOISE_TIME_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
33
if (context->orig_cpus)
tools/tracing/rtla/src/osnoise.c
334
void osnoise_put_runtime_period(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
336
osnoise_restore_runtime_period(context);
tools/tracing/rtla/src/osnoise.c
338
if (context->orig_runtime_us != OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
339
context->orig_runtime_us = OSNOISE_TIME_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
34
return context->orig_cpus;
tools/tracing/rtla/src/osnoise.c
341
if (context->orig_period_us != OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
342
context->orig_period_us = OSNOISE_TIME_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
349
osnoise_get_timerlat_period_us(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
353
if (context->timerlat_period_us != OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
354
return context->timerlat_period_us;
tools/tracing/rtla/src/osnoise.c
356
if (context->orig_timerlat_period_us != OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
357
return context->orig_timerlat_period_us;
tools/tracing/rtla/src/osnoise.c
36
context->orig_cpus = tracefs_instance_file_read(NULL, "osnoise/cpus", NULL);
tools/tracing/rtla/src/osnoise.c
363
context->orig_timerlat_period_us = timerlat_period_us;
tools/tracing/rtla/src/osnoise.c
373
int osnoise_set_timerlat_period_us(struct osnoise_context *context, long long timerlat_period_us)
tools/tracing/rtla/src/osnoise.c
375
long long curr_timerlat_period_us = osnoise_get_timerlat_period_us(context);
tools/tracing/rtla/src/osnoise.c
385
context->timerlat_period_us = timerlat_period_us;
tools/tracing/rtla/src/osnoise.c
393
void osnoise_restore_timerlat_period_us(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
397
if (context->orig_timerlat_period_us == OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
400
if (context->orig_timerlat_period_us == context->timerlat_period_us)
tools/tracing/rtla/src/osnoise.c
403
retval = osnoise_write_ll_config("osnoise/timerlat_period_us", context->orig_timerlat_period_us);
tools/tracing/rtla/src/osnoise.c
408
context->timerlat_period_us = OSNOISE_TIME_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
414
void osnoise_put_timerlat_period_us(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
416
osnoise_restore_timerlat_period_us(context);
tools/tracing/rtla/src/osnoise.c
418
if (context->orig_timerlat_period_us == OSNOISE_TIME_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
42
return context->orig_cpus;
tools/tracing/rtla/src/osnoise.c
421
context->orig_timerlat_period_us = OSNOISE_TIME_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
428
osnoise_get_stop_us(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
432
if (context->stop_us != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
433
return context->stop_us;
tools/tracing/rtla/src/osnoise.c
435
if (context->orig_stop_us != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
436
return context->orig_stop_us;
tools/tracing/rtla/src/osnoise.c
442
context->orig_stop_us = stop_us;
tools/tracing/rtla/src/osnoise.c
452
int osnoise_set_stop_us(struct osnoise_context *context, long long stop_us)
tools/tracing/rtla/src/osnoise.c
454
long long curr_stop_us = osnoise_get_stop_us(context);
tools/tracing/rtla/src/osnoise.c
464
context->stop_us = stop_us;
tools/tracing/rtla/src/osnoise.c
472
void osnoise_restore_stop_us(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
476
if (context->orig_stop_us == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
479
if (context->orig_stop_us == context->stop_us)
tools/tracing/rtla/src/osnoise.c
482
retval = osnoise_write_ll_config("osnoise/stop_tracing_us", context->orig_stop_us);
tools/tracing/rtla/src/osnoise.c
487
context->stop_us = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
493
void osnoise_put_stop_us(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
495
osnoise_restore_stop_us(context);
tools/tracing/rtla/src/osnoise.c
497
if (context->orig_stop_us == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
500
context->orig_stop_us = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
507
osnoise_get_stop_total_us(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
511
if (context->stop_total_us != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
512
return context->stop_total_us;
tools/tracing/rtla/src/osnoise.c
514
if (context->orig_stop_total_us != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
515
return context->orig_stop_total_us;
tools/tracing/rtla/src/osnoise.c
52
int osnoise_set_cpus(struct osnoise_context *context, char *cpus)
tools/tracing/rtla/src/osnoise.c
521
context->orig_stop_total_us = stop_total_us;
tools/tracing/rtla/src/osnoise.c
531
int osnoise_set_stop_total_us(struct osnoise_context *context, long long stop_total_us)
tools/tracing/rtla/src/osnoise.c
533
long long curr_stop_total_us = osnoise_get_stop_total_us(context);
tools/tracing/rtla/src/osnoise.c
54
char *orig_cpus = osnoise_get_cpus(context);
tools/tracing/rtla/src/osnoise.c
543
context->stop_total_us = stop_total_us;
tools/tracing/rtla/src/osnoise.c
551
void osnoise_restore_stop_total_us(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
555
if (context->orig_stop_total_us == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
558
if (context->orig_stop_total_us == context->stop_total_us)
tools/tracing/rtla/src/osnoise.c
562
context->orig_stop_total_us);
tools/tracing/rtla/src/osnoise.c
567
context->stop_total_us = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
573
void osnoise_put_stop_total_us(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
575
osnoise_restore_stop_total_us(context);
tools/tracing/rtla/src/osnoise.c
577
if (context->orig_stop_total_us == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
580
context->orig_stop_total_us = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
587
osnoise_get_print_stack(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
591
if (context->print_stack != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
592
return context->print_stack;
tools/tracing/rtla/src/osnoise.c
594
if (context->orig_print_stack != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
595
return context->orig_print_stack;
tools/tracing/rtla/src/osnoise.c
601
context->orig_print_stack = print_stack;
tools/tracing/rtla/src/osnoise.c
61
context->curr_cpus = strdup(cpus);
tools/tracing/rtla/src/osnoise.c
611
int osnoise_set_print_stack(struct osnoise_context *context, long long print_stack)
tools/tracing/rtla/src/osnoise.c
613
long long curr_print_stack = osnoise_get_print_stack(context);
tools/tracing/rtla/src/osnoise.c
62
if (!context->curr_cpus)
tools/tracing/rtla/src/osnoise.c
623
context->print_stack = print_stack;
tools/tracing/rtla/src/osnoise.c
631
void osnoise_restore_print_stack(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
635
if (context->orig_print_stack == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
638
if (context->orig_print_stack == context->print_stack)
tools/tracing/rtla/src/osnoise.c
641
retval = osnoise_write_ll_config("osnoise/print_stack", context->orig_print_stack);
tools/tracing/rtla/src/osnoise.c
646
context->print_stack = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
652
void osnoise_put_print_stack(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
654
osnoise_restore_print_stack(context);
tools/tracing/rtla/src/osnoise.c
656
if (context->orig_print_stack == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
659
context->orig_print_stack = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
666
osnoise_get_tracing_thresh(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
67
debug_msg("setting cpus to %s from %s", cpus, context->orig_cpus);
tools/tracing/rtla/src/osnoise.c
670
if (context->tracing_thresh != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
671
return context->tracing_thresh;
tools/tracing/rtla/src/osnoise.c
673
if (context->orig_tracing_thresh != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
674
return context->orig_tracing_thresh;
tools/tracing/rtla/src/osnoise.c
680
context->orig_tracing_thresh = tracing_thresh;
tools/tracing/rtla/src/osnoise.c
690
int osnoise_set_tracing_thresh(struct osnoise_context *context, long long tracing_thresh)
tools/tracing/rtla/src/osnoise.c
692
long long curr_tracing_thresh = osnoise_get_tracing_thresh(context);
tools/tracing/rtla/src/osnoise.c
702
context->tracing_thresh = tracing_thresh;
tools/tracing/rtla/src/osnoise.c
71
free(context->curr_cpus);
tools/tracing/rtla/src/osnoise.c
710
void osnoise_restore_tracing_thresh(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
714
if (context->orig_tracing_thresh == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
717
if (context->orig_tracing_thresh == context->tracing_thresh)
tools/tracing/rtla/src/osnoise.c
72
context->curr_cpus = NULL;
tools/tracing/rtla/src/osnoise.c
720
retval = osnoise_write_ll_config("tracing_thresh", context->orig_tracing_thresh);
tools/tracing/rtla/src/osnoise.c
725
context->tracing_thresh = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
731
void osnoise_put_tracing_thresh(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
733
osnoise_restore_tracing_thresh(context);
tools/tracing/rtla/src/osnoise.c
735
if (context->orig_tracing_thresh == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
738
context->orig_tracing_thresh = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
787
static int osnoise_get_irq_disable(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
789
if (context->opt_irq_disable != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
790
return context->opt_irq_disable;
tools/tracing/rtla/src/osnoise.c
792
if (context->orig_opt_irq_disable != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
793
return context->orig_opt_irq_disable;
tools/tracing/rtla/src/osnoise.c
795
context->orig_opt_irq_disable = osnoise_options_get_option("OSNOISE_IRQ_DISABLE");
tools/tracing/rtla/src/osnoise.c
797
return context->orig_opt_irq_disable;
tools/tracing/rtla/src/osnoise.c
800
int osnoise_set_irq_disable(struct osnoise_context *context, bool onoff)
tools/tracing/rtla/src/osnoise.c
802
int opt_irq_disable = osnoise_get_irq_disable(context);
tools/tracing/rtla/src/osnoise.c
815
context->opt_irq_disable = onoff;
tools/tracing/rtla/src/osnoise.c
820
static void osnoise_restore_irq_disable(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
824
if (context->orig_opt_irq_disable == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
827
if (context->orig_opt_irq_disable == context->opt_irq_disable)
tools/tracing/rtla/src/osnoise.c
830
retval = osnoise_options_set_option("OSNOISE_IRQ_DISABLE", context->orig_opt_irq_disable);
tools/tracing/rtla/src/osnoise.c
835
context->orig_opt_irq_disable = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
838
static void osnoise_put_irq_disable(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
840
osnoise_restore_irq_disable(context);
tools/tracing/rtla/src/osnoise.c
842
if (context->orig_opt_irq_disable == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
845
context->orig_opt_irq_disable = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
848
static int osnoise_get_workload(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
850
if (context->opt_workload != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
851
return context->opt_workload;
tools/tracing/rtla/src/osnoise.c
853
if (context->orig_opt_workload != OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
854
return context->orig_opt_workload;
tools/tracing/rtla/src/osnoise.c
856
context->orig_opt_workload = osnoise_options_get_option("OSNOISE_WORKLOAD");
tools/tracing/rtla/src/osnoise.c
858
return context->orig_opt_workload;
tools/tracing/rtla/src/osnoise.c
86
void osnoise_restore_cpus(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
861
int osnoise_set_workload(struct osnoise_context *context, bool onoff)
tools/tracing/rtla/src/osnoise.c
863
int opt_workload = osnoise_get_workload(context);
tools/tracing/rtla/src/osnoise.c
876
context->opt_workload = onoff;
tools/tracing/rtla/src/osnoise.c
881
static void osnoise_restore_workload(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
885
if (context->orig_opt_workload == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
888
if (context->orig_opt_workload == context->opt_workload)
tools/tracing/rtla/src/osnoise.c
891
retval = osnoise_options_set_option("OSNOISE_WORKLOAD", context->orig_opt_workload);
tools/tracing/rtla/src/osnoise.c
896
context->orig_opt_workload = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
899
static void osnoise_put_workload(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
90
if (!context->orig_cpus)
tools/tracing/rtla/src/osnoise.c
901
osnoise_restore_workload(context);
tools/tracing/rtla/src/osnoise.c
903
if (context->orig_opt_workload == OSNOISE_OPTION_INIT_VAL)
tools/tracing/rtla/src/osnoise.c
906
context->orig_opt_workload = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
917
int osnoise_get_context(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
921
if (context->flags & FLAG_CONTEXT_DELETED) {
tools/tracing/rtla/src/osnoise.c
924
context->ref++;
tools/tracing/rtla/src/osnoise.c
93
if (!context->curr_cpus)
tools/tracing/rtla/src/osnoise.c
939
struct osnoise_context *context;
tools/tracing/rtla/src/osnoise.c
941
context = calloc(1, sizeof(*context));
tools/tracing/rtla/src/osnoise.c
942
if (!context)
tools/tracing/rtla/src/osnoise.c
945
context->orig_stop_us = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
946
context->stop_us = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
948
context->orig_stop_total_us = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
949
context->stop_total_us = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
951
context->orig_print_stack = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
952
context->print_stack = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
954
context->orig_tracing_thresh = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
955
context->tracing_thresh = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
957
context->orig_opt_irq_disable = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
958
context->opt_irq_disable = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
960
context->orig_opt_workload = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
961
context->opt_workload = OSNOISE_OPTION_INIT_VAL;
tools/tracing/rtla/src/osnoise.c
963
osnoise_get_context(context);
tools/tracing/rtla/src/osnoise.c
965
return context;
tools/tracing/rtla/src/osnoise.c
97
if (!strcmp(context->orig_cpus, context->curr_cpus))
tools/tracing/rtla/src/osnoise.c
974
void osnoise_put_context(struct osnoise_context *context)
tools/tracing/rtla/src/osnoise.c
976
if (--context->ref < 1)
tools/tracing/rtla/src/osnoise.c
977
context->flags |= FLAG_CONTEXT_DELETED;
tools/tracing/rtla/src/osnoise.c
979
if (!(context->flags & FLAG_CONTEXT_DELETED))
tools/tracing/rtla/src/osnoise.c
982
osnoise_put_cpus(context);
tools/tracing/rtla/src/osnoise.c
983
osnoise_put_runtime_period(context);
tools/tracing/rtla/src/osnoise.c
984
osnoise_put_stop_us(context);
tools/tracing/rtla/src/osnoise.c
985
osnoise_put_stop_total_us(context);
tools/tracing/rtla/src/osnoise.c
986
osnoise_put_timerlat_period_us(context);
tools/tracing/rtla/src/osnoise.c
987
osnoise_put_print_stack(context);
tools/tracing/rtla/src/osnoise.c
988
osnoise_put_tracing_thresh(context);
tools/tracing/rtla/src/osnoise.c
989
osnoise_put_irq_disable(context);
tools/tracing/rtla/src/osnoise.c
990
osnoise_put_workload(context);
tools/tracing/rtla/src/osnoise.c
992
free(context);
tools/tracing/rtla/src/osnoise.h
29
int osnoise_get_context(struct osnoise_context *context);
tools/tracing/rtla/src/osnoise.h
30
void osnoise_put_context(struct osnoise_context *context);
tools/tracing/rtla/src/osnoise.h
32
int osnoise_set_runtime_period(struct osnoise_context *context,
tools/tracing/rtla/src/osnoise.h
35
void osnoise_restore_runtime_period(struct osnoise_context *context);
tools/tracing/rtla/src/osnoise.h
37
void osnoise_restore_stop_us(struct osnoise_context *context);
tools/tracing/rtla/src/osnoise.h
38
void osnoise_restore_stop_total_us(struct osnoise_context *context);
tools/tracing/rtla/src/osnoise.h
40
int osnoise_set_timerlat_period_us(struct osnoise_context *context,
tools/tracing/rtla/src/osnoise.h
42
void osnoise_restore_timerlat_period_us(struct osnoise_context *context);
tools/tracing/rtla/src/osnoise.h
44
int osnoise_set_tracing_thresh(struct osnoise_context *context,
tools/tracing/rtla/src/osnoise.h
46
void osnoise_restore_tracing_thresh(struct osnoise_context *context);
tools/tracing/rtla/src/osnoise.h
48
void osnoise_restore_print_stack(struct osnoise_context *context);
tools/tracing/rtla/src/osnoise.h
49
int osnoise_set_print_stack(struct osnoise_context *context,
tools/tracing/rtla/src/osnoise.h
52
int osnoise_set_irq_disable(struct osnoise_context *context, bool onoff);
tools/tracing/rtla/src/osnoise_top.c
476
retval = osnoise_set_irq_disable(tool->context, 1);
tools/tracing/rtla/src/osnoise_top.c
81
struct tep_event *event, void *context)
tools/tracing/rtla/src/osnoise_top.c
83
struct trace_instance *trace = context;
tools/tracing/rtla/src/timerlat.bpf.c
14
int context;
tools/tracing/rtla/src/timerlat.bpf.c
153
if (tp_args->context == 0) {
tools/tracing/rtla/src/timerlat.bpf.c
159
} else if (tp_args->context == 1) {
tools/tracing/rtla/src/timerlat.c
61
retval = osnoise_set_timerlat_period_us(tool->context,
tools/tracing/rtla/src/timerlat.c
71
retval = osnoise_set_print_stack(tool->context, params->print_stack);
tools/tracing/rtla/src/timerlat_aa.c
240
struct tep_event *event, void *context)
tools/tracing/rtla/src/timerlat_aa.c
263
struct tep_event *event, void *context)
tools/tracing/rtla/src/timerlat_aa.c
306
struct tep_event *event, void *context)
tools/tracing/rtla/src/timerlat_aa.c
402
struct tep_event *event, void *context)
tools/tracing/rtla/src/timerlat_aa.c
435
struct tep_event *event, void *context)
tools/tracing/rtla/src/timerlat_aa.c
480
struct tep_event *event, void *context)
tools/tracing/rtla/src/timerlat_aa.c
512
struct tep_event *event, void *context)
tools/tracing/rtla/src/timerlat_aa.c
543
struct tep_event *event, void *context)
tools/tracing/rtla/src/timerlat_hist.c
138
unsigned long long context,
tools/tracing/rtla/src/timerlat_hist.c
152
if (!context) {
tools/tracing/rtla/src/timerlat_hist.c
158
} else if (context == 1) {
tools/tracing/rtla/src/timerlat_hist.c
186
unsigned long long context, latency;
tools/tracing/rtla/src/timerlat_hist.c
192
tep_get_field_val(s, event, "context", record, &context, 1);
tools/tracing/rtla/src/timerlat_hist.c
195
timerlat_hist_update(tool, cpu, context, latency);
tools/tracing/rtla/src/timerlat_top.c
167
struct tep_event *event, void *context)
tools/tracing/rtla/src/timerlat_top.c
169
struct trace_instance *trace = context;
tools/tracing/rtla/src/trace.c
119
int cpu, void *context)
tools/tracing/rtla/src/trace.c
121
struct trace_instance *trace = context;
tools/tracing/rtla/src/trace.c
129
event->handler(s, record, event, context);
tools/tracing/rtla/src/trace.c
143
int cpu, void *context)
tools/tracing/rtla/src/trace.c
145
struct trace_instance *trace = context;
tools/tracing/rtla/src/trace.h
38
int cpu, void *context);
tools/verification/rv/include/trace.h
16
int cpu, void *context);
tools/verification/rv/src/in_kernel.c
417
struct tep_event *trace_event, void *context)
tools/verification/rv/src/in_kernel.c
478
struct tep_event *trace_event, void *context)
tools/verification/rv/src/trace.c
46
int cpu, void *context)
tools/verification/rv/src/trace.c
48
struct trace_instance *trace = context;
tools/verification/rv/src/trace.c
57
event->handler(s, record, event, context);