arch/arm64/kvm/hyp/include/nvhe/gfp.h
26
void *hyp_alloc_pages(struct hyp_pool *pool, u8 order);
arch/arm64/kvm/hyp/include/nvhe/gfp.h
28
void hyp_get_page(struct hyp_pool *pool, void *addr);
arch/arm64/kvm/hyp/include/nvhe/gfp.h
29
void hyp_put_page(struct hyp_pool *pool, void *addr);
arch/arm64/kvm/hyp/include/nvhe/gfp.h
32
int hyp_pool_init(struct hyp_pool *pool, u64 pfn, unsigned int nr_pages,
arch/arm64/kvm/hyp/include/nvhe/memory.h
97
#define hyp_page_to_pool(page) (((struct hyp_page *)page)->pool)
arch/arm64/kvm/hyp/include/nvhe/pkvm.h
43
struct hyp_pool pool;
arch/arm64/kvm/hyp/nvhe/mem_protect.c
1242
hyp_put_page(&selftest_vm.pool, hyp_page_to_virt(&p[i]));
arch/arm64/kvm/hyp/nvhe/mem_protect.c
171
void *addr = hyp_alloc_pages(&current_vm->pool, get_order(size));
arch/arm64/kvm/hyp/nvhe/mem_protect.c
185
hyp_put_page(&current_vm->pool, addr + (i * PAGE_SIZE));
arch/arm64/kvm/hyp/nvhe/mem_protect.c
193
addr = hyp_alloc_pages(&current_vm->pool, 0);
arch/arm64/kvm/hyp/nvhe/mem_protect.c
211
hyp_get_page(&current_vm->pool, addr);
arch/arm64/kvm/hyp/nvhe/mem_protect.c
216
hyp_put_page(&current_vm->pool, addr);
arch/arm64/kvm/hyp/nvhe/mem_protect.c
264
ret = hyp_pool_init(&vm->pool, hyp_virt_to_pfn(pgd), nr_pages, 0);
arch/arm64/kvm/hyp/nvhe/mem_protect.c
305
addr = hyp_alloc_pages(&vm->pool, 0);
arch/arm64/kvm/hyp/nvhe/mem_protect.c
312
addr = hyp_alloc_pages(&vm->pool, 0);
arch/arm64/kvm/hyp/nvhe/mem_protect.c
84
static void *host_s2_zalloc_page(void *pool)
arch/arm64/kvm/hyp/nvhe/mem_protect.c
86
return hyp_alloc_pages(pool, 0);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
103
if (phys < pool->range_start || phys >= pool->range_end)
arch/arm64/kvm/hyp/nvhe/page_alloc.c
113
for (; (order + 1) <= pool->max_order; order++) {
arch/arm64/kvm/hyp/nvhe/page_alloc.c
114
buddy = __find_buddy_avail(pool, p, order);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
127
page_add_to_list(p, &pool->free_area[order]);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
130
static struct hyp_page *__hyp_extract_page(struct hyp_pool *pool,
arch/arm64/kvm/hyp/nvhe/page_alloc.c
145
buddy = __find_buddy_nocheck(pool, p, p->order);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
147
page_add_to_list(buddy, &pool->free_area[buddy->order]);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
153
static void __hyp_put_page(struct hyp_pool *pool, struct hyp_page *p)
arch/arm64/kvm/hyp/nvhe/page_alloc.c
156
__hyp_attach_page(pool, p);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
166
void hyp_put_page(struct hyp_pool *pool, void *addr)
arch/arm64/kvm/hyp/nvhe/page_alloc.c
170
hyp_spin_lock(&pool->lock);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
171
__hyp_put_page(pool, p);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
172
hyp_spin_unlock(&pool->lock);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
175
void hyp_get_page(struct hyp_pool *pool, void *addr)
arch/arm64/kvm/hyp/nvhe/page_alloc.c
179
hyp_spin_lock(&pool->lock);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
181
hyp_spin_unlock(&pool->lock);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
198
void *hyp_alloc_pages(struct hyp_pool *pool, u8 order)
arch/arm64/kvm/hyp/nvhe/page_alloc.c
203
hyp_spin_lock(&pool->lock);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
206
while (i <= pool->max_order && list_empty(&pool->free_area[i]))
arch/arm64/kvm/hyp/nvhe/page_alloc.c
208
if (i > pool->max_order) {
arch/arm64/kvm/hyp/nvhe/page_alloc.c
209
hyp_spin_unlock(&pool->lock);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
214
p = node_to_page(pool->free_area[i].next);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
215
p = __hyp_extract_page(pool, p, order);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
218
hyp_spin_unlock(&pool->lock);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
223
int hyp_pool_init(struct hyp_pool *pool, u64 pfn, unsigned int nr_pages,
arch/arm64/kvm/hyp/nvhe/page_alloc.c
230
hyp_spin_lock_init(&pool->lock);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
231
pool->max_order = min(MAX_PAGE_ORDER,
arch/arm64/kvm/hyp/nvhe/page_alloc.c
233
for (i = 0; i <= pool->max_order; i++)
arch/arm64/kvm/hyp/nvhe/page_alloc.c
234
INIT_LIST_HEAD(&pool->free_area[i]);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
235
pool->range_start = phys;
arch/arm64/kvm/hyp/nvhe/page_alloc.c
236
pool->range_end = phys + (nr_pages << PAGE_SHIFT);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
245
__hyp_put_page(pool, &p[i]);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
33
static struct hyp_page *__find_buddy_nocheck(struct hyp_pool *pool,
arch/arm64/kvm/hyp/nvhe/page_alloc.c
45
if (addr < pool->range_start || addr >= pool->range_end)
arch/arm64/kvm/hyp/nvhe/page_alloc.c
52
static struct hyp_page *__find_buddy_avail(struct hyp_pool *pool,
arch/arm64/kvm/hyp/nvhe/page_alloc.c
56
struct hyp_page *buddy = __find_buddy_nocheck(pool, p, order);
arch/arm64/kvm/hyp/nvhe/page_alloc.c
93
static void __hyp_attach_page(struct hyp_pool *pool,
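The arm64 nVHE entries above all exercise one small buddy allocator: hyp_pool_init() carves a donated page range into per-order free lists, hyp_alloc_pages() hands out refcounted pages, and hyp_get_page()/hyp_put_page() adjust the refcount, with a page rejoining the free lists (merging with its buddy where possible) when the count drops to zero. A minimal sketch of that call pattern, assuming EL2 hypervisor context and the nvhe headers listed here:

#include <nvhe/gfp.h>		/* hyp_pool_init, hyp_alloc_pages, ... */
#include <nvhe/memory.h>	/* hyp_virt_to_pfn */

static int example_pool_roundtrip(struct hyp_pool *pool, void *base,
				  unsigned int nr_pages)
{
	void *page;
	int ret;

	/* Carve nr_pages pages starting at base into a private pool. */
	ret = hyp_pool_init(pool, hyp_virt_to_pfn(base), nr_pages, 0);
	if (ret)
		return ret;

	page = hyp_alloc_pages(pool, 0);	/* order-0, refcount starts at 1 */
	if (!page)
		return -ENOMEM;

	hyp_get_page(pool, page);		/* refcount 2 */
	hyp_put_page(pool, page);		/* refcount 1 */
	hyp_put_page(pool, page);		/* 0: back on the free lists */
	return 0;
}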
arch/mips/cavium-octeon/executive/cvmx-helper.c
857
g_buffer.s.pool = CVMX_FPA_WQE_POOL;
arch/mips/cavium-octeon/executive/cvmx-helper.c
869
pkt_buffer.s.pool = CVMX_FPA_PACKET_POOL;
arch/mips/cavium-octeon/executive/cvmx-pko.c
195
config.s.pool = CVMX_FPA_OUTPUT_BUFFER_POOL;
arch/mips/include/asm/octeon/cvmx-fpa-defs.h
76
uint64_t pool:5;
arch/mips/include/asm/octeon/cvmx-fpa-defs.h
80
uint64_t pool:5;
arch/mips/include/asm/octeon/cvmx-fpa.h
106
static inline const char *cvmx_fpa_get_name(uint64_t pool)
arch/mips/include/asm/octeon/cvmx-fpa.h
108
return cvmx_fpa_pool_info[pool].name;
arch/mips/include/asm/octeon/cvmx-fpa.h
117
static inline void *cvmx_fpa_get_base(uint64_t pool)
arch/mips/include/asm/octeon/cvmx-fpa.h
119
return cvmx_fpa_pool_info[pool].base;
arch/mips/include/asm/octeon/cvmx-fpa.h
131
static inline int cvmx_fpa_is_member(uint64_t pool, void *ptr)
arch/mips/include/asm/octeon/cvmx-fpa.h
133
return ((ptr >= cvmx_fpa_pool_info[pool].base) &&
arch/mips/include/asm/octeon/cvmx-fpa.h
135
((char *)(cvmx_fpa_pool_info[pool].base)) +
arch/mips/include/asm/octeon/cvmx-fpa.h
136
cvmx_fpa_pool_info[pool].size *
arch/mips/include/asm/octeon/cvmx-fpa.h
137
cvmx_fpa_pool_info[pool].starting_element_count));
arch/mips/include/asm/octeon/cvmx-fpa.h
185
static inline void *cvmx_fpa_alloc(uint64_t pool)
arch/mips/include/asm/octeon/cvmx-fpa.h
188
cvmx_read_csr(CVMX_ADDR_DID(CVMX_FULL_DID(CVMX_OCT_DID_FPA, pool)));
arch/mips/include/asm/octeon/cvmx-fpa.h
202
static inline void cvmx_fpa_async_alloc(uint64_t scr_addr, uint64_t pool)
arch/mips/include/asm/octeon/cvmx-fpa.h
212
data.s.did = CVMX_FULL_DID(CVMX_OCT_DID_FPA, pool);
arch/mips/include/asm/octeon/cvmx-fpa.h
226
static inline void cvmx_fpa_free_nosync(void *ptr, uint64_t pool,
arch/mips/include/asm/octeon/cvmx-fpa.h
232
CVMX_ADDR_DIDSPACE(CVMX_FULL_DID(CVMX_OCT_DID_FPA, pool));
arch/mips/include/asm/octeon/cvmx-fpa.h
248
static inline void cvmx_fpa_free(void *ptr, uint64_t pool,
arch/mips/include/asm/octeon/cvmx-fpa.h
254
CVMX_ADDR_DIDSPACE(CVMX_FULL_DID(CVMX_OCT_DID_FPA, pool));
arch/mips/include/asm/octeon/cvmx-fpa.h
277
extern uint64_t cvmx_fpa_shutdown_pool(uint64_t pool);
arch/mips/include/asm/octeon/cvmx-fpa.h
286
uint64_t cvmx_fpa_get_block_size(uint64_t pool);
arch/mips/include/asm/octeon/cvmx-helper-util.h
168
buffer_ptr.s.pool, 0);
arch/mips/include/asm/octeon/cvmx-packet.h
54
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-packet.h
62
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
138
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
144
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
1459
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
1465
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
194
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
200
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
209
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
215
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
307
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
313
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
360
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
366
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
386
uint64_t pool:3;
arch/mips/include/asm/octeon/cvmx-pko-defs.h
392
uint64_t pool:3;
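The Octeon FPA references above follow a hardware free-pool discipline: buffers are popped from and pushed back to a numbered on-chip pool through I/O accesses, and the 3- and 5-bit pool bitfields in the WQE/PKO descriptors record which pool a buffer belongs to. A minimal sketch of the software side, using the cvmx-fpa.h helpers listed here (the pool constant is one of the listing's own):

#include <asm/octeon/cvmx-fpa.h>

static void example_fpa_roundtrip(void)
{
	/* Pop one buffer from the packet pool; NULL when the pool is empty. */
	void *buf = cvmx_fpa_alloc(CVMX_FPA_PACKET_POOL);

	if (!buf)
		return;

	/* ... fill and transmit the buffer ... */

	/* Push it back; the trailing cache-line count is left at 0. */
	cvmx_fpa_free(buf, CVMX_FPA_PACKET_POOL, 0);
}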
arch/powerpc/kernel/iommu.c
229
struct iommu_pool *pool;
arch/powerpc/kernel/iommu.c
252
pool = &(tbl->large_pool);
arch/powerpc/kernel/iommu.c
254
pool = &(tbl->pools[pool_nr]);
arch/powerpc/kernel/iommu.c
256
spin_lock_irqsave(&(pool->lock), flags);
arch/powerpc/kernel/iommu.c
260
(*handle >= pool->start) && (*handle < pool->end))
arch/powerpc/kernel/iommu.c
263
start = pool->hint;
arch/powerpc/kernel/iommu.c
265
limit = pool->end;
arch/powerpc/kernel/iommu.c
272
start = pool->start;
arch/powerpc/kernel/iommu.c
281
spin_unlock(&(pool->lock));
arch/powerpc/kernel/iommu.c
282
pool = &(tbl->pools[0]);
arch/powerpc/kernel/iommu.c
283
spin_lock(&(pool->lock));
arch/powerpc/kernel/iommu.c
284
start = pool->start;
arch/powerpc/kernel/iommu.c
296
pool->hint = pool->start;
arch/powerpc/kernel/iommu.c
302
spin_unlock(&(pool->lock));
arch/powerpc/kernel/iommu.c
304
pool = &tbl->pools[pool_nr];
arch/powerpc/kernel/iommu.c
305
spin_lock(&(pool->lock));
arch/powerpc/kernel/iommu.c
306
pool->hint = pool->start;
arch/powerpc/kernel/iommu.c
312
spin_unlock(&pool->lock);
arch/powerpc/kernel/iommu.c
313
pool = &tbl->large_pool;
arch/powerpc/kernel/iommu.c
314
spin_lock(&pool->lock);
arch/powerpc/kernel/iommu.c
315
pool->hint = pool->start;
arch/powerpc/kernel/iommu.c
321
spin_unlock_irqrestore(&(pool->lock), flags);
arch/powerpc/kernel/iommu.c
331
pool->hint = end;
arch/powerpc/kernel/iommu.c
334
pool->hint = (end + tbl->it_blocksize - 1) &
arch/powerpc/kernel/iommu.c
342
spin_unlock_irqrestore(&(pool->lock), flags);
arch/powerpc/kernel/iommu.c
442
struct iommu_pool *pool;
arch/powerpc/kernel/iommu.c
447
pool = get_pool(tbl, free_entry);
arch/powerpc/kernel/iommu.c
454
spin_lock_irqsave(&(pool->lock), flags);
arch/powerpc/kernel/iommu.c
456
spin_unlock_irqrestore(&(pool->lock), flags);
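The powerpc (and, below, sparc) IOMMU entries share one design: the mapping table is split into several pools, each with its own spinlock, start/end bounds, and a hint recording where the last search stopped, so concurrent mappers rarely contend on one lock. The lines around iommu.c:281-315 also show the fallback of hopping through the other pools, resetting hints as it goes. A schematic of the core per-pool step, with a hypothetical example_search() standing in for the real bitmap scan:

/* Hypothetical stand-in for the bitmap search the real code performs;
 * returns a free entry index at or after pool->hint, or -1UL. */
static unsigned long example_search(struct iommu_pool *pool,
				    unsigned long npages);

static unsigned long example_range_alloc(struct iommu_table *tbl,
					 unsigned long npages,
					 unsigned int pool_nr)
{
	struct iommu_pool *pool;
	unsigned long flags, n;

	/* Large requests use the dedicated large pool; small ones hash
	 * across tbl->pools[] to spread lock contention. */
	if (npages >= tbl->it_blocksize)
		pool = &tbl->large_pool;
	else
		pool = &tbl->pools[pool_nr];

	spin_lock_irqsave(&pool->lock, flags);
	n = example_search(pool, npages);	/* scan from pool->hint */
	if (n == -1UL) {
		pool->hint = pool->start;	/* wrap and retry once */
		n = example_search(pool, npages);
	}
	if (n != -1UL)
		pool->hint = n + npages;	/* resume point for next caller */
	spin_unlock_irqrestore(&pool->lock, flags);
	return n;
}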
arch/powerpc/platforms/pseries/rtas-work-area.c
139
struct gen_pool *pool;
arch/powerpc/platforms/pseries/rtas-work-area.c
147
pool = gen_pool_create(order, nid);
arch/powerpc/platforms/pseries/rtas-work-area.c
148
if (!pool)
arch/powerpc/platforms/pseries/rtas-work-area.c
155
gen_pool_set_algo(pool, gen_pool_first_fit_order_align, NULL);
arch/powerpc/platforms/pseries/rtas-work-area.c
157
err = gen_pool_add(pool, (unsigned long)rwa_state.arena,
arch/powerpc/platforms/pseries/rtas-work-area.c
167
rwa_state.gen_pool = pool;
arch/powerpc/platforms/pseries/rtas-work-area.c
179
gen_pool_destroy(pool);
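The rtas-work-area lines above are a textbook genalloc setup: create a pool with a minimum allocation order, pick a placement algorithm, then seed it with one preallocated arena. A self-contained sketch of that lifecycle (the arena and sizes are illustrative):

#include <linux/genalloc.h>
#include <linux/log2.h>

static int example_genpool(void *arena, size_t size)
{
	unsigned long addr;
	struct gen_pool *pool;

	/* Granule of 64 bytes (order ilog2(64)); no NUMA preference. */
	pool = gen_pool_create(ilog2(64), -1);
	if (!pool)
		return -ENOMEM;

	/* First-fit with order-sized alignment, as the RTAS code selects. */
	gen_pool_set_algo(pool, gen_pool_first_fit_order_align, NULL);

	if (gen_pool_add(pool, (unsigned long)arena, size, -1)) {
		gen_pool_destroy(pool);
		return -ENOMEM;
	}

	addr = gen_pool_alloc(pool, 128);
	if (addr)
		gen_pool_free(pool, addr, 128);

	/* Everything must be freed back before the pool is destroyed. */
	gen_pool_destroy(pool);
	return 0;
}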
arch/sparc/kernel/iommu-common.c
106
struct iommu_pool *pool;
arch/sparc/kernel/iommu-common.c
126
pool = &(iommu->large_pool);
arch/sparc/kernel/iommu-common.c
131
pool = &(iommu->pools[pool_nr]);
arch/sparc/kernel/iommu-common.c
133
spin_lock_irqsave(&pool->lock, flags);
arch/sparc/kernel/iommu-common.c
137
(*handle >= pool->start) && (*handle < pool->end))
arch/sparc/kernel/iommu-common.c
140
start = pool->hint;
arch/sparc/kernel/iommu-common.c
142
limit = pool->end;
arch/sparc/kernel/iommu-common.c
151
start = pool->start;
arch/sparc/kernel/iommu-common.c
160
spin_unlock(&(pool->lock));
arch/sparc/kernel/iommu-common.c
161
pool = &(iommu->pools[0]);
arch/sparc/kernel/iommu-common.c
162
spin_lock(&(pool->lock));
arch/sparc/kernel/iommu-common.c
163
start = pool->start;
arch/sparc/kernel/iommu-common.c
186
pool->hint = pool->start;
arch/sparc/kernel/iommu-common.c
191
spin_unlock(&(pool->lock));
arch/sparc/kernel/iommu-common.c
193
pool = &(iommu->pools[pool_nr]);
arch/sparc/kernel/iommu-common.c
194
spin_lock(&(pool->lock));
arch/sparc/kernel/iommu-common.c
195
pool->hint = pool->start;
arch/sparc/kernel/iommu-common.c
206
(n < pool->hint || need_flush(iommu))) {
arch/sparc/kernel/iommu-common.c
212
pool->hint = end;
arch/sparc/kernel/iommu-common.c
218
spin_unlock_irqrestore(&(pool->lock), flags);
arch/sparc/kernel/iommu-common.c
249
struct iommu_pool *pool;
arch/sparc/kernel/iommu-common.c
255
pool = get_pool(iommu, entry);
arch/sparc/kernel/iommu-common.c
257
spin_lock_irqsave(&(pool->lock), flags);
arch/sparc/kernel/iommu-common.c
259
spin_unlock_irqrestore(&(pool->lock), flags);
arch/sparc/kernel/pci_sun4v.c
721
struct iommu_pool *pool;
arch/sparc/kernel/pci_sun4v.c
727
pool = &(iommu->pools[pool_nr]);
arch/sparc/kernel/pci_sun4v.c
728
for (i = pool->start; i <= pool->end; i++) {
block/bio.c
162
void bvec_free(mempool_t *pool, struct bio_vec *bv, unsigned short nr_vecs)
block/bio.c
167
mempool_free(bv, pool);
block/bio.c
182
struct bio_vec *bvec_alloc(mempool_t *pool, unsigned short *nr_vecs,
block/bio.c
1881
int biovec_init_pool(mempool_t *pool, int pool_entries)
block/bio.c
1885
return mempool_init_slab_pool(pool, pool_entries, bp->slab);
block/bio.c
210
return mempool_alloc(pool, gfp_mask);
block/blk.h
112
struct bio_vec *bvec_alloc(mempool_t *pool, unsigned short *nr_vecs,
block/blk.h
114
void bvec_free(mempool_t *pool, struct bio_vec *bv, unsigned short nr_vecs);
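The block-layer lines pair a slab cache with a mempool so bio_vec arrays can still be allocated under memory pressure: mempool_init_slab_pool() keeps a minimum number of elements in reserve, and mempool_alloc() falls back to that reserve when the slab allocator fails. A generic sketch of the same pairing (the element type and reserve count are illustrative):

#include <linux/mempool.h>
#include <linux/slab.h>

struct example_elem {
	int payload;
};

static struct kmem_cache *example_cache;
static mempool_t example_pool;

static int example_pool_setup(void)
{
	example_cache = KMEM_CACHE(example_elem, 0);
	if (!example_cache)
		return -ENOMEM;

	/* Reserve two elements so forward progress is possible even when
	 * kmem_cache_alloc() would fail. */
	return mempool_init_slab_pool(&example_pool, 2, example_cache);
}

static void example_pool_use(void)
{
	struct example_elem *e = mempool_alloc(&example_pool, GFP_NOIO);

	if (e)
		mempool_free(e, &example_pool);
}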
drivers/accel/habanalabs/common/mmu/mmu.c
752
static void mmu_dma_mem_free_from_chunk(struct gen_pool *pool,
drivers/accel/habanalabs/common/mmu/mmu.c
783
struct gen_pool **pool = &hr_priv->mmu_pgt_pool;
drivers/accel/habanalabs/common/mmu/mmu.c
787
if (ZERO_OR_NULL_PTR(*pool))
drivers/accel/habanalabs/common/mmu/mmu.c
797
gen_pool_free(*pool, (uintptr_t) hop0_pgt->virt_addr, hop_table_size);
drivers/accel/habanalabs/common/mmu/mmu.c
801
gen_pool_for_each_chunk(*pool, mmu_dma_mem_free_from_chunk, hdev);
drivers/accel/habanalabs/common/mmu/mmu.c
802
gen_pool_destroy(*pool);
drivers/accel/habanalabs/common/mmu/mmu.c
808
*pool = NULL;
drivers/atm/idt77252.c
584
struct sb_pool *pool = &card->sbpool[queue];
drivers/atm/idt77252.c
587
index = pool->index;
drivers/atm/idt77252.c
588
while (pool->skb[index]) {
drivers/atm/idt77252.c
590
if (index == pool->index)
drivers/atm/idt77252.c
594
pool->skb[index] = skb;
drivers/atm/idt77252.c
597
pool->index = (index + 1) & FBQ_MASK;
drivers/atm/idt77252.h
791
u32 pool; /* sb_pool handle */
drivers/atm/idt77252.h
799
(((struct idt77252_skb_prv *)(ATM_SKB(skb)+1))->pool)
drivers/block/rbd.c
1450
req->r_base_oloc.pool = rbd_dev->layout.pool_id;
drivers/block/rbd.c
5226
static DEVICE_ATTR(pool, 0444, rbd_pool_show, NULL);
drivers/block/rbd.c
5365
rbd_dev->header_oloc.pool = spec->pool_id;
drivers/char/random.c
1009
unsigned long pool[4];
drivers/char/random.c
1020
.pool = { SIPHASH_CONST_0, SIPHASH_CONST_1, SIPHASH_CONST_2, SIPHASH_CONST_3 },
drivers/char/random.c
1023
.pool = { HSIPHASH_CONST_0, HSIPHASH_CONST_1, HSIPHASH_CONST_2, HSIPHASH_CONST_3 },
drivers/char/random.c
1077
unsigned long pool[2];
drivers/char/random.c
1091
memcpy(pool, fast_pool->pool, sizeof(pool));
drivers/char/random.c
1097
mix_pool_bytes(pool, sizeof(pool));
drivers/char/random.c
1098
credit_init_bits(clamp_t(unsigned int, (count & U16_MAX) / 64, 1, sizeof(pool) * 8));
drivers/char/random.c
1100
memzero_explicit(pool, sizeof(pool));
drivers/char/random.c
1111
fast_mix(fast_pool->pool, entropy,
drivers/char/random.c
1153
fast_mix(this_cpu_ptr(&irq_randomness)->pool, entropy, num);
drivers/crypto/cavium/nitrox/nitrox_lib.c
234
ctx->pool = ndev->ctx_pool;
drivers/crypto/cavium/nitrox/nitrox_lib.c
238
chdr->pool = ndev->ctx_pool;
drivers/crypto/cavium/nitrox/nitrox_lib.c
257
dma_pool_free(ctxp->pool, ctxp->vaddr, ctxp->dma);
drivers/crypto/cavium/nitrox/nitrox_req.h
197
struct dma_pool *pool;
drivers/crypto/cavium/nitrox/nitrox_req.h
447
struct dma_pool *pool;
drivers/crypto/hisilicon/sgl.c
115
pool->sgl_num_per_block = sgl_num_per_block;
drivers/crypto/hisilicon/sgl.c
116
pool->block_num = remain_sgl ? block_num + 1 : block_num;
drivers/crypto/hisilicon/sgl.c
117
pool->count = count;
drivers/crypto/hisilicon/sgl.c
118
pool->sgl_size = sgl_size;
drivers/crypto/hisilicon/sgl.c
119
pool->sge_nr = sge_nr;
drivers/crypto/hisilicon/sgl.c
121
return pool;
drivers/crypto/hisilicon/sgl.c
128
kfree_sensitive(pool);
drivers/crypto/hisilicon/sgl.c
140
void hisi_acc_free_sgl_pool(struct device *dev, struct hisi_acc_sgl_pool *pool)
drivers/crypto/hisilicon/sgl.c
145
if (!dev || !pool)
drivers/crypto/hisilicon/sgl.c
148
block = pool->mem_block;
drivers/crypto/hisilicon/sgl.c
150
for (i = 0; i < pool->block_num; i++)
drivers/crypto/hisilicon/sgl.c
154
kfree(pool);
drivers/crypto/hisilicon/sgl.c
158
static struct hisi_acc_hw_sgl *acc_get_sgl(struct hisi_acc_sgl_pool *pool,
drivers/crypto/hisilicon/sgl.c
164
block = pool->mem_block;
drivers/crypto/hisilicon/sgl.c
165
block_index = index / pool->sgl_num_per_block;
drivers/crypto/hisilicon/sgl.c
166
offset = index % pool->sgl_num_per_block;
drivers/crypto/hisilicon/sgl.c
168
*hw_sgl_dma = block[block_index].sgl_dma + pool->sgl_size * offset;
drivers/crypto/hisilicon/sgl.c
169
return (void *)block[block_index].sgl + pool->sgl_size * offset;
drivers/crypto/hisilicon/sgl.c
220
struct hisi_acc_sgl_pool *pool, u32 index,
drivers/crypto/hisilicon/sgl.c
230
if (!dev || !sgl || !pool || !hw_sgl_dma || index >= pool->count)
drivers/crypto/hisilicon/sgl.c
241
if (sg_n_mapped > pool->sge_nr) {
drivers/crypto/hisilicon/sgl.c
247
curr_hw_sgl = acc_get_sgl(pool, index, &curr_sgl_dma);
drivers/crypto/hisilicon/sgl.c
248
curr_hw_sgl->entry_length_in_sgl = cpu_to_le16(pool->sge_nr);
drivers/crypto/hisilicon/sgl.c
257
update_hw_sgl_sum_sge(curr_hw_sgl, pool->sge_nr);
drivers/crypto/hisilicon/sgl.c
61
struct hisi_acc_sgl_pool *pool;
drivers/crypto/hisilicon/sgl.c
86
pool = kzalloc_obj(*pool);
drivers/crypto/hisilicon/sgl.c
87
if (!pool)
drivers/crypto/hisilicon/sgl.c
89
block = pool->mem_block;
drivers/crypto/hisilicon/zip/zip_crypto.c
253
struct hisi_acc_sgl_pool *pool = qp_ctx->sgl_pool;
drivers/crypto/hisilicon/zip/zip_crypto.c
264
req->hw_src = hisi_acc_sg_buf_map_to_hw_sgl(dev, a_req->src, pool,
drivers/crypto/hisilicon/zip/zip_crypto.c
273
req->hw_dst = hisi_acc_sg_buf_map_to_hw_sgl(dev, a_req->dst, pool,
drivers/crypto/marvell/cesa/cesa.c
374
engine->pool = of_gen_pool_get(cesa->dev->of_node,
drivers/crypto/marvell/cesa/cesa.c
376
if (engine->pool) {
drivers/crypto/marvell/cesa/cesa.c
377
engine->sram_pool = gen_pool_dma_alloc(engine->pool,
drivers/crypto/marvell/cesa/cesa.c
383
engine->pool = NULL;
drivers/crypto/marvell/cesa/cesa.c
405
if (engine->pool)
drivers/crypto/marvell/cesa/cesa.c
406
gen_pool_free(engine->pool, (unsigned long)engine->sram_pool,
drivers/crypto/marvell/cesa/cesa.h
465
struct gen_pool *pool;
drivers/crypto/marvell/cesa/cipher.c
109
if (engine->pool)
drivers/crypto/marvell/cesa/cipher.c
114
} else if (engine->pool)
drivers/crypto/marvell/cesa/cipher.c
226
} else if (engine->pool)
drivers/crypto/marvell/cesa/cipher.c
95
if (engine->pool)
drivers/crypto/marvell/cesa/hash.c
174
if (engine->pool)
drivers/crypto/marvell/cesa/hash.c
189
if (engine->pool)
drivers/crypto/marvell/cesa/hash.c
234
if (engine->pool)
drivers/crypto/marvell/cesa/hash.c
248
if (engine->pool)
drivers/crypto/marvell/cesa/hash.c
269
if (engine->pool)
drivers/crypto/marvell/cesa/tdma.c
390
if (engine->pool)
drivers/crypto/marvell/cesa/tdma.c
397
if (engine->pool)
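The marvell/cesa lines (and the tegra bpmp ones further down) show the device-tree route to a shared SRAM: of_gen_pool_get() resolves a gen_pool exported by an mmio-sram node, and gen_pool_dma_alloc() carves out a region while also reporting its bus address. A short sketch, with a hypothetical property name standing in for the driver's own phandle:

#include <linux/genalloc.h>
#include <linux/of.h>

static void *example_sram_get(struct device_node *np, size_t len,
			      dma_addr_t *dma)
{
	/* "sram" is a placeholder property; cesa and bpmp each use their
	 * own phandle name and index. */
	struct gen_pool *pool = of_gen_pool_get(np, "sram", 0);

	if (!pool)
		return NULL;

	/* Returns the CPU pointer; *dma receives the DMA address. */
	return gen_pool_dma_alloc(pool, len, dma);
}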
drivers/dma/amba-pl08x.c
1268
txd->llis_va = dma_pool_alloc(pl08x->pool, GFP_NOWAIT, &txd->llis_bus);
drivers/dma/amba-pl08x.c
1498
dma_pool_free(pl08x->pool, txd->llis_va, txd->llis_bus);
drivers/dma/amba-pl08x.c
2831
pl08x->pool = dma_pool_create(DRIVER_NAME, &pl08x->adev->dev,
drivers/dma/amba-pl08x.c
2833
if (!pl08x->pool) {
drivers/dma/amba-pl08x.c
287
struct dma_pool *pool;
drivers/dma/amba-pl08x.c
2969
dma_pool_destroy(pl08x->pool);
drivers/dma/dmaengine.c
1347
mempool_t *pool;
drivers/dma/dmaengine.c
1404
mempool_free(unmap, __get_unmap_pool(cnt)->pool);
drivers/dma/dmaengine.c
1421
mempool_destroy(p->pool);
drivers/dma/dmaengine.c
1422
p->pool = NULL;
drivers/dma/dmaengine.c
1443
p->pool = mempool_create_slab_pool(1, p->cache);
drivers/dma/dmaengine.c
1444
if (!p->pool)
drivers/dma/dmaengine.c
1460
unmap = mempool_alloc(__get_unmap_pool(nr)->pool, flags);
drivers/dma/idma64.c
222
dma_pool_free(idma64c->pool, hw->lli, hw->llp);
drivers/dma/idma64.c
318
hw->lli = dma_pool_alloc(idma64c->pool, GFP_NOWAIT, &hw->llp);
drivers/dma/idma64.c
510
idma64c->pool = dma_pool_create(dev_name(chan2dev(chan)),
drivers/dma/idma64.c
513
if (!idma64c->pool) {
drivers/dma/idma64.c
526
dma_pool_destroy(idma64c->pool);
drivers/dma/idma64.c
527
idma64c->pool = NULL;
drivers/dma/idma64.h
137
void *pool;
drivers/dma/k3dma.c
107
struct dma_pool *pool;
drivers/dma/k3dma.c
478
ds->desc_hw = dma_pool_zalloc(d->pool, GFP_NOWAIT, &ds->desc_hw_lli);
drivers/dma/k3dma.c
710
dma_pool_free(d->pool, ds->desc_hw, ds->desc_hw_lli);
drivers/dma/k3dma.c
879
d->pool = dmam_pool_create(DRIVER_NAME, &op->dev,
drivers/dma/k3dma.c
881
if (!d->pool)
drivers/dma/loongson2-apb-dma.c
126
void *pool;
drivers/dma/loongson2-apb-dma.c
177
dma_pool_free(lchan->pool, desc->sg[i].hw,
drivers/dma/loongson2-apb-dma.c
285
lchan->pool = dma_pool_create(dev_name(chan2dev(chan)),
drivers/dma/loongson2-apb-dma.c
288
if (!lchan->pool) {
drivers/dma/loongson2-apb-dma.c
305
dma_pool_destroy(lchan->pool);
drivers/dma/loongson2-apb-dma.c
306
lchan->pool = NULL;
drivers/dma/loongson2-apb-dma.c
350
ldma_sg->hw = dma_pool_alloc(lchan->pool, GFP_NOWAIT, &ldma_sg->llp);
drivers/dma/loongson2-apb-dma.c
416
ldma_sg->hw = dma_pool_alloc(lchan->pool, GFP_NOWAIT, &ldma_sg->llp);
drivers/dma/mmp_tdma.c
126
struct gen_pool *pool;
drivers/dma/mmp_tdma.c
361
gpool = tdmac->pool;
drivers/dma/mmp_tdma.c
413
gpool = tdmac->pool;
drivers/dma/mmp_tdma.c
562
int type, struct gen_pool *pool)
drivers/dma/mmp_tdma.c
583
tdmac->pool = pool;
drivers/dma/mmp_tdma.c
641
struct gen_pool *pool = NULL;
drivers/dma/mmp_tdma.c
663
pool = of_gen_pool_get(pdev->dev.of_node, "asram", 0);
drivers/dma/mmp_tdma.c
664
if (!pool) {
drivers/dma/mmp_tdma.c
680
ret = mmp_tdma_chan_init(tdev, i, irq, type, pool);
drivers/dma/pch_dma.c
118
struct dma_pool *pool;
drivers/dma/pch_dma.c
427
desc = dma_pool_zalloc(pd->pool, flags, &addr);
drivers/dma/pch_dma.c
539
dma_pool_free(pd->pool, desc, desc->txd.phys);
drivers/dma/pch_dma.c
855
pd->pool = dma_pool_create("pch_dma_desc_pool", &pdev->dev,
drivers/dma/pch_dma.c
857
if (!pd->pool) {
drivers/dma/pch_dma.c
904
dma_pool_destroy(pd->pool);
drivers/dma/pch_dma.c
936
dma_pool_destroy(pd->pool);
drivers/dma/pl330.c
2543
static int add_desc(struct list_head *pool, spinlock_t *lock,
drivers/dma/pl330.c
2558
list_add_tail(&desc[i].node, pool);
drivers/dma/pl330.c
2566
static struct dma_pl330_desc *pluck_desc(struct list_head *pool,
drivers/dma/pl330.c
2574
if (!list_empty(pool)) {
drivers/dma/pl330.c
2575
desc = list_entry(pool->next,
drivers/dma/pl330.c
2602
LIST_HEAD(pool);
drivers/dma/pl330.c
2604
if (!add_desc(&pool, &lock, GFP_ATOMIC, 1))
drivers/dma/pl330.c
2607
desc = pluck_desc(&pool, &lock);
drivers/dma/pl330.c
2608
WARN_ON(!desc || !list_empty(&pool));
drivers/dma/ste_dma40.c
3358
struct d40_lcla_pool *pool = &base->lcla_pool;
drivers/dma/ste_dma40.c
3422
pool->dma_addr = dma_map_single(base->dev, pool->base,
drivers/dma/ste_dma40.c
3425
if (dma_mapping_error(base->dev, pool->dma_addr)) {
drivers/dma/ste_dma40.c
3426
pool->dma_addr = 0;
drivers/dma/ste_dma40.c
853
struct d40_lcla_pool *pool = &chan->base->lcla_pool;
drivers/dma/ste_dma40.c
913
struct d40_log_lli *lcla = pool->base + lcla_offset;
drivers/dma/ste_dma40.c
948
pool->dma_addr, lcla_offset,
drivers/dma/sun6i-dma.c
1350
sdc->pool = dmam_pool_create(dev_name(&pdev->dev), &pdev->dev,
drivers/dma/sun6i-dma.c
1352
if (!sdc->pool) {
drivers/dma/sun6i-dma.c
207
struct dma_pool *pool;
drivers/dma/sun6i-dma.c
426
dma_pool_free(sdev->pool, v_lli, p_lli);
drivers/dma/sun6i-dma.c
685
v_lli = dma_pool_alloc(sdev->pool, GFP_DMA32 | GFP_NOWAIT, &p_lli);
drivers/dma/sun6i-dma.c
744
v_lli = dma_pool_alloc(sdev->pool, GFP_DMA32 | GFP_NOWAIT, &p_lli);
drivers/dma/sun6i-dma.c
793
dma_pool_free(sdev->pool, v_lli, p_lli);
drivers/dma/sun6i-dma.c
827
v_lli = dma_pool_alloc(sdev->pool, GFP_DMA32 | GFP_NOWAIT, &p_lli);
drivers/dma/sun6i-dma.c
874
dma_pool_free(sdev->pool, v_lli, p_lli);
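The pl08x, idma64, k3dma, loongson2, pch_dma, and sun6i entries all repeat one pattern: a dma_pool of fixed-size, DMA-coherent hardware descriptors, allocated with GFP_NOWAIT because prep callbacks can run in atomic context, and freed by their (cpu pointer, dma handle) pair. A condensed sketch of that lifecycle (the descriptor layout and names are illustrative):

#include <linux/dmapool.h>

struct example_hw_desc {
	u32 ctrl;
	u32 next;
};

static int example_desc_roundtrip(struct device *dev)
{
	struct example_hw_desc *d;
	struct dma_pool *pool;
	dma_addr_t phys;

	pool = dma_pool_create("example-desc", dev,
			       sizeof(struct example_hw_desc),
			       __alignof__(struct example_hw_desc), 0);
	if (!pool)
		return -ENOMEM;

	d = dma_pool_zalloc(pool, GFP_NOWAIT, &phys);	/* phys goes to HW */
	if (d)
		dma_pool_free(pool, d, phys);	/* same (cpu, dma) pair */

	dma_pool_destroy(pool);
	return 0;
}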
drivers/edac/igen6_edac.c
838
struct gen_pool *pool;
drivers/edac/igen6_edac.c
840
pool = gen_pool_create(ilog2(sizeof(struct ecclog_node)), -1);
drivers/edac/igen6_edac.c
841
if (!pool)
drivers/edac/igen6_edac.c
844
if (gen_pool_add(pool, (unsigned long)ecclog_buf, ECCLOG_POOL_SIZE, -1)) {
drivers/edac/igen6_edac.c
845
gen_pool_destroy(pool);
drivers/edac/igen6_edac.c
849
return pool;
drivers/firmware/qcom/qcom_tzmem.c
194
static int qcom_tzmem_pool_add_memory(struct qcom_tzmem_pool *pool,
drivers/firmware/qcom/qcom_tzmem.c
218
ret = gen_pool_add_virt(pool->genpool, (unsigned long)area->vaddr,
drivers/firmware/qcom/qcom_tzmem.c
226
scoped_guard(spinlock_irqsave, &pool->lock)
drivers/firmware/qcom/qcom_tzmem.c
227
list_add_tail(&area->list, &pool->areas);
drivers/firmware/qcom/qcom_tzmem.c
265
struct qcom_tzmem_pool *pool __free(kfree) = kzalloc_obj(*pool);
drivers/firmware/qcom/qcom_tzmem.c
266
if (!pool)
drivers/firmware/qcom/qcom_tzmem.c
269
pool->genpool = gen_pool_create(PAGE_SHIFT, -1);
drivers/firmware/qcom/qcom_tzmem.c
270
if (!pool->genpool)
drivers/firmware/qcom/qcom_tzmem.c
273
gen_pool_set_algo(pool->genpool, gen_pool_best_fit, NULL);
drivers/firmware/qcom/qcom_tzmem.c
275
pool->policy = config->policy;
drivers/firmware/qcom/qcom_tzmem.c
276
pool->increment = config->increment;
drivers/firmware/qcom/qcom_tzmem.c
277
pool->max_size = config->max_size;
drivers/firmware/qcom/qcom_tzmem.c
278
INIT_LIST_HEAD(&pool->areas);
drivers/firmware/qcom/qcom_tzmem.c
279
spin_lock_init(&pool->lock);
drivers/firmware/qcom/qcom_tzmem.c
282
ret = qcom_tzmem_pool_add_memory(pool, config->initial_size,
drivers/firmware/qcom/qcom_tzmem.c
285
gen_pool_destroy(pool->genpool);
drivers/firmware/qcom/qcom_tzmem.c
290
return_ptr(pool);
drivers/firmware/qcom/qcom_tzmem.c
301
void qcom_tzmem_pool_free(struct qcom_tzmem_pool *pool)
drivers/firmware/qcom/qcom_tzmem.c
311
if (!pool)
drivers/firmware/qcom/qcom_tzmem.c
319
if (chunk->owner == pool)
drivers/firmware/qcom/qcom_tzmem.c
326
list_for_each_entry_safe(area, next, &pool->areas, list) {
drivers/firmware/qcom/qcom_tzmem.c
334
gen_pool_destroy(pool->genpool);
drivers/firmware/qcom/qcom_tzmem.c
335
kfree(pool);
drivers/firmware/qcom/qcom_tzmem.c
341
struct qcom_tzmem_pool *pool = data;
drivers/firmware/qcom/qcom_tzmem.c
343
qcom_tzmem_pool_free(pool);
drivers/firmware/qcom/qcom_tzmem.c
359
struct qcom_tzmem_pool *pool;
drivers/firmware/qcom/qcom_tzmem.c
362
pool = qcom_tzmem_pool_new(config);
drivers/firmware/qcom/qcom_tzmem.c
363
if (IS_ERR(pool))
drivers/firmware/qcom/qcom_tzmem.c
364
return pool;
drivers/firmware/qcom/qcom_tzmem.c
366
ret = devm_add_action_or_reset(dev, devm_qcom_tzmem_pool_free, pool);
drivers/firmware/qcom/qcom_tzmem.c
370
return pool;
drivers/firmware/qcom/qcom_tzmem.c
374
static bool qcom_tzmem_try_grow_pool(struct qcom_tzmem_pool *pool,
drivers/firmware/qcom/qcom_tzmem.c
377
size_t current_size = gen_pool_size(pool->genpool);
drivers/firmware/qcom/qcom_tzmem.c
379
if (pool->max_size && (current_size + requested) > pool->max_size)
drivers/firmware/qcom/qcom_tzmem.c
382
switch (pool->policy) {
drivers/firmware/qcom/qcom_tzmem.c
386
requested = current_size * pool->increment;
drivers/firmware/qcom/qcom_tzmem.c
392
return !qcom_tzmem_pool_add_memory(pool, requested, gfp);
drivers/firmware/qcom/qcom_tzmem.c
407
void *qcom_tzmem_alloc(struct qcom_tzmem_pool *pool, size_t size, gfp_t gfp)
drivers/firmware/qcom/qcom_tzmem.c
423
vaddr = gen_pool_alloc(pool->genpool, size);
drivers/firmware/qcom/qcom_tzmem.c
425
if (qcom_tzmem_try_grow_pool(pool, size, gfp))
drivers/firmware/qcom/qcom_tzmem.c
432
chunk->owner = pool;
drivers/firmware/qcom/qcom_tzmem.c
437
gen_pool_free(pool->genpool, vaddr, size);
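The qcom_tzmem lines implement a grow-on-demand pool: allocation is tried against the backing genpool first, and only on failure is new memory added, subject to the pool's policy (a hard max_size, or a multiplicative increment of the current size). A schematic of the retry step, reusing the names from the listing (a sketch, not the driver verbatim):

static unsigned long example_tzmem_alloc(struct qcom_tzmem_pool *pool,
					 size_t size, gfp_t gfp)
{
	unsigned long vaddr;

	vaddr = gen_pool_alloc(pool->genpool, size);
	if (!vaddr && qcom_tzmem_try_grow_pool(pool, size, gfp))
		vaddr = gen_pool_alloc(pool->genpool, size);	/* one retry */

	return vaddr;
}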
drivers/firmware/tegra/bpmp-tegra186.c
126
if (priv->rx.pool) {
drivers/firmware/tegra/bpmp-tegra186.c
186
if (priv->tx.pool) {
drivers/firmware/tegra/bpmp-tegra186.c
187
gen_pool_free(priv->tx.pool, (unsigned long)priv->tx.sram, 4096);
drivers/firmware/tegra/bpmp-tegra186.c
188
gen_pool_free(priv->rx.pool, (unsigned long)priv->rx.sram, 4096);
drivers/firmware/tegra/bpmp-tegra186.c
22
struct gen_pool *pool;
drivers/firmware/tegra/bpmp-tegra186.c
236
priv->tx.pool = of_gen_pool_get(bpmp->dev->of_node, "shmem", 0);
drivers/firmware/tegra/bpmp-tegra186.c
237
if (!priv->tx.pool) {
drivers/firmware/tegra/bpmp-tegra186.c
242
priv->tx.sram = (void __iomem *)gen_pool_dma_alloc(priv->tx.pool, 4096,
drivers/firmware/tegra/bpmp-tegra186.c
249
priv->rx.pool = of_gen_pool_get(bpmp->dev->of_node, "shmem", 1);
drivers/firmware/tegra/bpmp-tegra186.c
250
if (!priv->rx.pool) {
drivers/firmware/tegra/bpmp-tegra186.c
256
priv->rx.sram = (void __iomem *)gen_pool_dma_alloc(priv->rx.pool, 4096,
drivers/firmware/tegra/bpmp-tegra186.c
267
gen_pool_free(priv->tx.pool, (unsigned long)priv->tx.sram, 4096);
drivers/gpu/drm/amd/amdgpu/amdgpu_ring.h
569
enum amdgpu_ib_pool_type pool,
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
1200
struct ttm_pool *pool;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
1216
pool = &adev->mman.ttm_pools[gtt->pool_id];
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
1218
pool = &adev->mman.bdev.pool;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
1219
ret = ttm_pool_alloc(pool, ttm, ctx);
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
1240
struct ttm_pool *pool;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
1261
pool = &adev->mman.ttm_pools[gtt->pool_id];
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
1263
pool = &adev->mman.bdev.pool;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
1265
return ttm_pool_free(pool, ttm);
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
2370
enum amdgpu_ib_pool_type pool = AMDGPU_IB_POOL_DELAYED;
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
2374
num_dw * 4, pool, job, k_job_id);
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
2634
return ttm_pool_debugfs(&adev->mman.bdev.pool, m);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c
45
enum amdgpu_ib_pool_type pool = p->immediate ? AMDGPU_IB_POOL_IMMEDIATE
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_sdma.c
59
ndw * 4, pool, &p->job, k_job_id);
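The amdgpu TTM lines select between per-NUMA pools and the device-wide default before calling the paired ttm_pool_alloc()/ttm_pool_free(). A schematic of the selection, following the fields in the listing (the pool_id validity test is an assumption):

static int example_ttm_populate(struct amdgpu_device *adev,
				struct amdgpu_ttm_tt *gtt,
				struct ttm_tt *ttm,
				struct ttm_operation_ctx *ctx)
{
	struct ttm_pool *pool;

	if (adev->mman.ttm_pools && gtt->pool_id >= 0)	/* assumed check */
		pool = &adev->mman.ttm_pools[gtt->pool_id];
	else
		pool = &adev->mman.bdev.pool;

	/* The matching unpopulate path must free through the same pool. */
	return ttm_pool_alloc(pool, ttm, ctx);
}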
drivers/gpu/drm/amd/display/dc/core/dc.c
2502
const struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/core/dc.c
2506
if (pool && res_ctx) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2509
for (pipe_idx = 0; pipe_idx < pool->pipe_count; pipe_idx++) {
drivers/gpu/drm/amd/display/dc/core/dc.c
2520
if (acquire && pool->funcs->acquire_post_bldn_3dlut)
drivers/gpu/drm/amd/display/dc/core/dc.c
2521
ret = pool->funcs->acquire_post_bldn_3dlut(res_ctx, pool, mpcc_id, lut, shaper);
drivers/gpu/drm/amd/display/dc/core/dc.c
2522
else if (!acquire && pool->funcs->release_post_bldn_3dlut)
drivers/gpu/drm/amd/display/dc/core/dc.c
2523
ret = pool->funcs->release_post_bldn_3dlut(res_ctx, pool, lut, shaper);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1758
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1793
int preferred_pipe_idx = (pool->pipe_count - 1) - primary_pipe->pipe_idx;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1805
for (i = pool->pipe_count - 1; i >= 0; i--) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1865
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1871
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1888
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1894
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1911
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1917
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1935
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1941
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1958
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
1964
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2556
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2562
for (i = 0; i < pool->pipe_count && result; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2568
if (pool->funcs->build_pipe_pix_clk_params)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2569
pool->funcs->build_pipe_pix_clk_params(otg_master);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2579
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2585
for (i = 0; i < pool->pipe_count && result; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2595
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2600
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2613
split_pipe->stream_res.tg = pool->timing_generators[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2614
split_pipe->plane_res.hubp = pool->hubps[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2615
split_pipe->plane_res.ipp = pool->ipps[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2616
split_pipe->plane_res.dpp = pool->dpps[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2617
split_pipe->stream_res.opp = pool->opps[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2618
split_pipe->plane_res.mpcc_inst = pool->dpps[i]->inst;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2630
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2636
for (i = 0; i < pool->stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2637
if (pool->stream_enc[i] == stream_enc)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2644
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2650
for (i = 0; i < pool->hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2651
if (pool->hpo_dp_stream_enc[i] == hpo_dp_stream_enc)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2671
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2680
i < pool->hpo_dp_link_enc_count) ? i : -1;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2708
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2719
enc_index = find_free_hpo_dp_link_enc(res_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2725
pipe_ctx->link_res.hpo_dp_link_enc = pool->hpo_dp_link_enc[enc_index];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2765
const struct dc_link *link, const struct resource_pool *pool, struct dc_stream_state *stream)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2769
int enc_count = pool->dig_link_enc_count;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2773
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2843
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2859
if (pipe_ctx && pipe_ctx->link_res.dio_link_enc == pool->link_encoders[old_encoder])
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2860
pipe_ctx->link_res.dio_link_enc = pool->link_encoders[new_encoder];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2866
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2879
enc_index = find_free_dio_link_enc(res_ctx, stream->link, pool, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2889
int new_enc_index = find_free_dio_link_enc(res_ctx, dc->links[link_index], pool, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2892
swap_dio_link_enc_to_muxable_ctx(context, pool, new_enc_index, enc_index);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2903
pipe_ctx->link_res.dio_link_enc = pool->link_encoders[enc_index];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2923
static int get_num_of_free_pipes(const struct resource_pool *pool, const struct dc_state *context)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2928
for (i = 0; i < pool->pipe_count; i++)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2935
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2944
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2958
pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2964
&context->res_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2977
pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2982
pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2985
if (pool->funcs->remove_stream_from_ctx)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
2986
pool->funcs->remove_stream_from_ctx(
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3070
struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3077
if (!pool->funcs->acquire_free_pipe_as_secondary_dpp_pipe) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3084
if (get_num_of_free_pipes(pool, new_ctx) < opp_head_count)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3089
sec_pipe = pool->funcs->acquire_free_pipe_as_secondary_dpp_pipe(
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3092
pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3116
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3128
cur_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3132
plane_state, new_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3135
pool, plane_state);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3143
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3148
for (i = pool->pipe_count - 1; i >= 0; i--) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3211
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3218
if (!pool->funcs->acquire_free_pipe_as_secondary_opp_head) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3222
new_opp_head = pool->funcs->acquire_free_pipe_as_secondary_opp_head(
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3223
cur_ctx, new_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3237
new_bottom_dpp_pipe = pool->funcs->acquire_free_pipe_as_secondary_dpp_pipe(
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3238
cur_ctx, new_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3290
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3295
if (!pool->funcs->release_pipe) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3306
pool->funcs->release_pipe(context, tail_pipe->bottom_pipe, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3310
pool->funcs->release_pipe(context, last_opp_head, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3350
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3357
if (!pool->funcs->acquire_free_pipe_as_secondary_dpp_pipe) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3361
new_dpp_pipe = pool->funcs->acquire_free_pipe_as_secondary_dpp_pipe(
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3362
cur_ctx, new_ctx, pool, opp_head);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3412
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3417
if (!pool->funcs->release_pipe) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3429
pool->funcs->release_pipe(context, last_dpp_pipe, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3437
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3458
otg_master, new_ctx, cur_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3462
otg_master, new_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3465
otg_master, new_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3472
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3493
dpp_pipes[0], new_ctx, cur_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3497
dpp_pipes[0], new_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3500
dpp_pipes[0]->plane_state, new_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3596
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3601
for (i = 0; i < pool->audio_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3602
if (pool->audios[i] == audio)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3609
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3614
for (i = 0; i < pool->hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3616
pool->hpo_dp_stream_enc[i]) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3618
return pool->hpo_dp_stream_enc[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3627
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3636
available_audio_count = pool->audio_count;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3643
return pool->audios[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3649
return pool->audios[id];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3654
return pool->audios[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3731
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3748
for (i = 0; i < pool->stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3749
if (pool->stream_enc[i]->id == inst) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3750
tg_inst = pool->stream_enc[i]->funcs->dig_source_otg(
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3751
pool->stream_enc[i]);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3757
if (i == pool->stream_enc_count)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3760
if (tg_inst >= pool->timing_generator_count)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3766
pipe_ctx->stream_res.tg = pool->timing_generators[tg_inst];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3785
pipe_ctx->stream_res.tg = pool->timing_generators[tg_inst];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3786
pipe_ctx->plane_res.mi = pool->mis[id_src[i]];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3787
pipe_ctx->plane_res.hubp = pool->hubps[id_src[i]];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3788
pipe_ctx->plane_res.ipp = pool->ipps[id_src[i]];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3789
pipe_ctx->plane_res.xfm = pool->transforms[id_src[i]];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3790
pipe_ctx->plane_res.dpp = pool->dpps[id_src[i]];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3791
pipe_ctx->stream_res.opp = pool->opps[id_src[i]];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3793
if (pool->dpps[id_src[i]]) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3794
pipe_ctx->plane_res.mpcc_inst = pool->dpps[id_src[i]]->inst;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3796
if (pool->mpc->funcs->read_mpcc_state) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3799
pool->mpc->funcs->read_mpcc_state(pool->mpc, pipe_ctx->plane_res.mpcc_inst, &s);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3802
pool->mpc->mpcc_array[pipe_ctx->plane_res.mpcc_inst].dpp_id =
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3806
pool->mpc->mpcc_array[pipe_ctx->plane_res.mpcc_inst].mpcc_bot =
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3807
&pool->mpc->mpcc_array[s.bot_mpcc_id];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3815
if (id_src[i] >= pool->timing_generator_count) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3816
id_src[i] = pool->timing_generator_count - 1;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3818
pipe_ctx->stream_res.tg = pool->timing_generators[id_src[i]];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3819
pipe_ctx->stream_res.opp = pool->opps[id_src[i]];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3878
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3908
&cur_ctx->res_ctx, &new_ctx->res_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3915
&cur_ctx->res_ctx, &new_ctx->res_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3923
&cur_ctx->res_ctx, &new_ctx->res_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3925
pipe_idx = resource_find_any_free_pipe(&new_ctx->res_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3930
pipe_ctx->stream_res.tg = pool->timing_generators[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3931
pipe_ctx->plane_res.mi = pool->mis[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3932
pipe_ctx->plane_res.hubp = pool->hubps[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3933
pipe_ctx->plane_res.ipp = pool->ipps[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3934
pipe_ctx->plane_res.xfm = pool->transforms[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3935
pipe_ctx->plane_res.dpp = pool->dpps[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3936
pipe_ctx->stream_res.opp = pool->opps[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3937
if (pool->dpps[pipe_idx])
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3938
pipe_ctx->plane_res.mpcc_inst = pool->dpps[pipe_idx]->inst;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3940
if (pipe_idx >= pool->timing_generator_count && pool->timing_generator_count != 0) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3941
int tg_inst = pool->timing_generator_count - 1;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3943
pipe_ctx->stream_res.tg = pool->timing_generators[tg_inst];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3944
pipe_ctx->stream_res.opp = pool->opps[tg_inst];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3949
pipe_idx = acquire_first_split_pipe(&new_ctx->res_ctx, pool, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3960
const struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3975
pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3987
context, pool, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
3996
&context->res_ctx, pool, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4002
&context->res_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4022
&context->res_ctx, pool, stream);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4028
&context->res_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4031
if (!add_hpo_dp_link_enc_to_ctx(&context->res_ctx, pool, pipe_ctx, stream))
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4037
if (!add_dio_link_enc_to_ctx(dc, context, pool, pipe_ctx, stream))
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4047
&context->res_ctx, pool, pipe_ctx->stream_res.stream_enc->id, dc_ctx->dce_version);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4055
update_audio_usage(&context->res_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4061
if (pool->abm)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4062
pipe_ctx->stream_res.abm = pool->abm;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4064
pipe_ctx->stream_res.abm = pool->multiple_abms[pipe_ctx->stream_res.tg->inst];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
437
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
441
const struct resource_caps *caps = pool->res_cap;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
449
pool->audio_count = 0;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
458
update_num_audio(&straps, &num_audio, &pool->audio_support);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
470
pool->audios[i] = aud;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
471
pool->audio_count++;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
475
pool->stream_enc_count = 0;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
478
pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
479
if (pool->stream_enc[i] == NULL)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
481
pool->stream_enc_count++;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
485
pool->stream_enc[caps->num_stream_encoder + i] =
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4868
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
487
if (pool->stream_enc[caps->num_stream_encoder + i] == NULL)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4872
for (i = 0; i < pool->clk_src_count; ++i) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4874
return pool->clock_sources[i];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
489
pool->stream_enc_count++;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
493
pool->hpo_dp_stream_enc_count = 0;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4934
const struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4943
pipe_ctx->clock_source = pool->dp_clock_source;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4956
pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
496
pool->hpo_dp_stream_enc[i] = create_funcs->create_hpo_dp_stream_encoder(i+ENGINE_ID_HPO_DP_0, ctx);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
4963
&context->res_ctx, pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
497
if (pool->hpo_dp_stream_enc[i] == NULL)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
499
pool->hpo_dp_stream_enc_count++;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
504
pool->hpo_dp_link_enc_count = 0;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
507
pool->hpo_dp_link_enc[i] = create_funcs->create_hpo_dp_link_encoder(i, ctx);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
508
if (pool->hpo_dp_link_enc[i] == NULL)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
510
pool->hpo_dp_link_enc_count++;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
515
pool->mpc_lut[i] = dc_create_3dlut_func();
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
516
if (pool->mpc_lut[i] == NULL)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
518
pool->mpc_shaper[i] = dc_create_transfer_func();
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
519
if (pool->mpc_shaper[i] == NULL)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
524
if (pool->audio_count < pool->stream_enc_count) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
528
pool->stream_enc[pool->stream_enc_count] =
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5287
const struct resource_pool *const pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5299
enc_index = find_free_dio_link_enc(res_ctx, link, pool, NULL);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5302
link_enc = pool->link_encoders[enc_index];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5309
const struct resource_pool *const pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
531
if (pool->stream_enc[pool->stream_enc_count] == NULL) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5318
enc_index = find_free_hpo_dp_link_enc(res_ctx, pool);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5321
hpo_dp_link_enc = pool->hpo_dp_link_enc[enc_index];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
535
pool->stream_enc_count++;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
543
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
549
for (i = 0; i < pool->clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
550
if (pool->clock_sources[i] == clock_source)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5537
const struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5555
sec_pipe->plane_res.mi = pool->mis[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5556
sec_pipe->plane_res.hubp = pool->hubps[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5557
sec_pipe->plane_res.ipp = pool->ipps[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5558
sec_pipe->plane_res.xfm = pool->transforms[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5559
sec_pipe->plane_res.dpp = pool->dpps[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5560
sec_pipe->plane_res.mpcc_inst = pool->dpps[pipe_idx]->inst;
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
5564
sec_pipe->stream_res.opp = pool->opps[pipe_idx];
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
558
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
561
int i = find_matching_clock_source(pool, clock_source);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
566
if (pool->dp_clock_source == clock_source)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
572
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
575
int i = find_matching_clock_source(pool, clock_source);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
580
if (pool->dp_clock_source == clock_source)
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
586
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
589
int i = find_matching_clock_source(pool, clock_source);
drivers/gpu/drm/amd/display/dc/core/dc_resource.c
594
if (pool->dp_clock_source == clock_source)
drivers/gpu/drm/amd/display/dc/core/dc_state.c
468
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/core/dc_state.c
490
dc->current_state, pool, otg_master_pipe, plane_state);
drivers/gpu/drm/amd/display/dc/core/dc_state.c
499
dc->current_state, pool,
drivers/gpu/drm/amd/display/dc/core/dc_state.c
512
dc->current_state, pool,
drivers/gpu/drm/amd/display/dc/core/dc_state.c
538
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/core/dc_state.c
555
state, pool, plane_state);
drivers/gpu/drm/amd/display/dc/dce/dce_i2c.c
29
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dce/dce_i2c.c
55
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dce/dce_i2c.c
72
dce_i2c_hw = acquire_i2c_hw_engine(pool, ddc);
drivers/gpu/drm/amd/display/dc/dce/dce_i2c.c
75
return dce_i2c_submit_command_hw(pool, ddc, cmd, dce_i2c_hw);
drivers/gpu/drm/amd/display/dc/dce/dce_i2c.c
79
return dce_i2c_submit_command_sw(pool, ddc, cmd, &dce_i2c_sw);
drivers/gpu/drm/amd/display/dc/dce/dce_i2c.h
34
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dce/dce_i2c.h
40
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c
443
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c
456
if (line < pool->res_cap->num_ddc)
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c
457
dce_i2c_hw = pool->hw_i2cs[line];
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c
463
if (pool->i2c_hw_buffer_in_use || !is_engine_available(dce_i2c_hw))
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c
490
pool->i2c_hw_buffer_in_use = true;
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c
629
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.c
655
pool->i2c_hw_buffer_in_use = false;
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.h
346
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_hw.h
352
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c
467
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c
493
release_engine_dce_sw(pool, dce_i2c_sw);
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.c
67
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dce/dce_i2c_sw.h
47
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
113
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
134
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
135
struct hubp *hubp = pool->hubps[i];
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
191
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
204
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
205
struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state);
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
213
pool->hubps[i]->inst, rq_regs->drq_expansion_mode, rq_regs->prq_expansion_mode, rq_regs->mrq_expansion_mode,
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
233
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
249
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
250
struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state);
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
261
pool->hubps[i]->inst, dlg_regs->refcyc_h_blank_end, dlg_regs->dlg_vblank_end, dlg_regs->min_dst_y_next_start,
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
290
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
303
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
304
struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state);
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
312
pool->hubps[i]->inst, ttu_regs->qos_level_low_wm, ttu_regs->qos_level_high_wm, ttu_regs->min_ttu_vblank,
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
330
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
342
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
343
struct dpp *dpp = pool->dpps[i];
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
385
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
395
for (i = 0; i < pool->mpcc_count; i++) {
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
398
pool->mpc->funcs->read_mpcc_state(pool->mpc, i, &s);
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
416
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
427
for (i = 0; i < pool->timing_generator_count; i++) {
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
428
struct timing_generator *tg = pool->timing_generators[i];
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
493
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
496
for (i = 0; i < pool->timing_generator_count; i++) {
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
497
struct timing_generator *tg = pool->timing_generators[i];
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
510
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
513
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c
514
struct hubp *hubp = pool->hubps[i];
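The dcn10_hw_sequencer_debug.c entries all share one shape: each dumper walks a count stored on the pool (pipe_count, mpcc_count, timing_generator_count) and reads back per-instance state. A small sketch of the HUBP variant, assuming simplified stand-in types in place of TO_DCN10_HUBP() and the real register groups:

#include <stdio.h>

struct hubp_state_sketch { unsigned int blank_en, ttu_disable; };
struct hubp_sketch { int inst; struct hubp_state_sketch state; };

struct dbg_pool_sketch {
	int pipe_count;
	struct hubp_sketch *hubps[6];
};

static void dump_hubp_state(const struct dbg_pool_sketch *pool)
{
	int i;

	for (i = 0; i < pool->pipe_count; i++) {
		const struct hubp_sketch *hubp = pool->hubps[i];

		/* the real dumpers print the rq/dlg/ttu register groups */
		printf("hubp %d: blank_en=%u ttu_disable=%u\n",
		       hubp->inst, hubp->state.blank_en,
		       hubp->state.ttu_disable);
	}
}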
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1199
for (i = 0, input_idx = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1261
hsplit_pipe = resource_find_free_secondary_pipe_legacy(&context->res_ctx, pool, pipe);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
1263
split_stream_across_pipes(&context->res_ctx, pool, pipe, hsplit_pipe);
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
524
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
536
secondary_pipe->plane_res.mi = pool->mis[secondary_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
537
secondary_pipe->plane_res.hubp = pool->hubps[secondary_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
538
secondary_pipe->plane_res.ipp = pool->ipps[secondary_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
539
secondary_pipe->plane_res.xfm = pool->transforms[secondary_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
540
secondary_pipe->plane_res.dpp = pool->dpps[secondary_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
541
secondary_pipe->plane_res.mpcc_inst = pool->dpps[secondary_pipe->pipe_idx]->inst;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
759
const struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/dml/calcs/dcn_calcs.c
893
for (i = 0, input_idx = 0; i < pool->pipe_count; i++) {
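split_stream_across_pipes() in dcn_calcs.c (lines 524-541 above, echoed almost verbatim by dcn32_fpu.c lines 1887-1892) populates a secondary pipe purely by indexing the pool's parallel resource arrays with the pipe's own index. A stand-in sketch of that wiring:

struct dpp_sketch { int inst; };

struct calc_pool_sketch {
	struct mem_input *mis[6];
	struct hubp *hubps[6];
	struct ipp *ipps[6];
	struct transform *transforms[6];
	struct dpp_sketch *dpps[6];
};

struct pipe_ctx_sketch {
	int pipe_idx;
	struct {
		struct mem_input *mi;
		struct hubp *hubp;
		struct ipp *ipp;
		struct transform *xfm;
		struct dpp_sketch *dpp;
		int mpcc_inst;
	} plane_res;
};

static void wire_secondary_pipe(const struct calc_pool_sketch *pool,
				struct pipe_ctx_sketch *sec)
{
	int idx = sec->pipe_idx;

	sec->plane_res.mi = pool->mis[idx];
	sec->plane_res.hubp = pool->hubps[idx];
	sec->plane_res.ipp = pool->ipps[idx];
	sec->plane_res.xfm = pool->transforms[idx];
	sec->plane_res.dpp = pool->dpps[idx];
	sec->plane_res.mpcc_inst = pool->dpps[idx]->inst; /* MPCC follows DPP */
}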
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2404
struct dcn21_resource_pool *pool = TO_DCN21_RES_POOL(dc->res_pool);
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2411
dcn2_1_ip.max_num_otg = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/dml/dcn20/dcn20_fpu.c
2412
dcn2_1_ip.max_num_dpp = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
326
struct dcn301_resource_pool *pool = TO_DCN301_RES_POOL(dc->res_pool);
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
336
dcn3_01_ip.max_num_otg = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
337
dcn3_01_ip.max_num_dpp = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
34
#define TO_DCN301_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/dml/dcn301/dcn301_fpu.c
35
container_of(pool, struct dcn301_resource_pool, base)
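TO_DCN301_RES_POOL above is the standard container_of() downcast: the chip pool embeds the generic struct resource_pool as .base, so a base pointer can be turned back into the wrapper by subtracting the member offset. A self-contained sketch with stand-in names (the kernel's container_of additionally type-checks the member):

#include <stddef.h>

#define container_of_sketch(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct base_pool_sketch { int pipe_count; };

struct dcn301_pool_sketch {
	struct base_pool_sketch base;	/* embedded generic pool */
	int chip_private;
};

#define TO_DCN301_SKETCH(pool) \
	container_of_sketch(pool, struct dcn301_pool_sketch, base)

/* Usage mirrors dcn301_fpu.c:326:
 *	struct dcn301_pool_sketch *p = TO_DCN301_SKETCH(base_ptr);
 */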
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1859
const struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1887
sec_pipe->plane_res.mi = pool->mis[pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1888
sec_pipe->plane_res.hubp = pool->hubps[pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1889
sec_pipe->plane_res.ipp = pool->ipps[pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1890
sec_pipe->plane_res.xfm = pool->transforms[pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1891
sec_pipe->plane_res.dpp = pool->dpps[pipe_idx];
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1892
sec_pipe->plane_res.mpcc_inst = pool->dpps[pipe_idx]->inst;
drivers/gpu/drm/amd/display/dc/dml/dcn32/dcn32_fpu.c
1913
sec_pipe->stream_res.opp = pool->opps[pipe_idx];
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_dc_resource_mgmt.c
681
static bool is_pipe_used(const struct dc_plane_pipe_pool *pool, unsigned int pipe_idx)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_dc_resource_mgmt.c
685
for (i = 0; i < pool->num_pipes_assigned_to_plane_for_odm_combine; i++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_dc_resource_mgmt.c
686
for (j = 0; j < pool->num_pipes_assigned_to_plane_for_mpcc_combine; j++) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_dc_resource_mgmt.c
687
if (pool->pipes_assigned_to_plane[i][j] == pipe_idx && pool->pipe_used[i][j])
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_dc_resource_mgmt.c
701
const struct dc_plane_state *plane, const struct dc_plane_pipe_pool *pool, unsigned int stream_id, int plane_index)
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_dc_resource_mgmt.c
711
!is_pipe_used(pool, state->res_ctx.pipe_ctx[i].pipe_idx)) {
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.h
141
void (*release_dsc)(struct resource_context *res_ctx, const struct resource_pool *pool, struct display_stream_compressor **dsc);
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.h
81
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/dml2_0/dml2_wrapper.h
87
const struct resource_pool *pool,
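is_pipe_used() in dml2_dc_resource_mgmt.c (lines 681-687 above) treats the plane's pipe pool as a two-dimensional assignment table, ODM slices by MPCC slices, and reports a pipe index as used when any active slot maps to it. A stand-in sketch with abbreviated field names:

#include <stdbool.h>

#define ODM_MAX	4
#define MPC_MAX	4

struct plane_pipe_pool_sketch {
	int num_odm;	/* num_pipes_assigned_to_plane_for_odm_combine */
	int num_mpc;	/* num_pipes_assigned_to_plane_for_mpcc_combine */
	unsigned int assigned[ODM_MAX][MPC_MAX];
	bool used[ODM_MAX][MPC_MAX];
};

static bool is_pipe_used_sketch(const struct plane_pipe_pool_sketch *pool,
				unsigned int pipe_idx)
{
	int i, j;

	for (i = 0; i < pool->num_odm; i++)
		for (j = 0; j < pool->num_mpc; j++)
			if (pool->assigned[i][j] == pipe_idx &&
			    pool->used[i][j])
				return true;
	return false;
}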
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2079
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.c
2082
int underlay_idx = pool->underlay_pipe_index;
drivers/gpu/drm/amd/display/dc/hwss/dce110/dce110_hwseq.h
68
const struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
298
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
303
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
304
struct hubp *hubp = pool->hubps[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
338
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
339
struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
345
pool->hubps[i]->inst,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
375
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
376
struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
381
pool->hubps[i]->inst, rq_regs->drq_expansion_mode, rq_regs->prq_expansion_mode, rq_regs->mrq_expansion_mode,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
401
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
402
struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
409
pool->hubps[i]->inst, dlg_regs->refcyc_h_blank_end, dlg_regs->dlg_vblank_end, dlg_regs->min_dst_y_next_start,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
4110
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
4113
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
4114
struct hubp *hubp = pool->hubps[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
433
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
434
struct dcn_hubp_state *s = &(TO_DCN10_HUBP(pool->hubps[i])->state);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
439
pool->hubps[i]->inst, ttu_regs->qos_level_low_wm, ttu_regs->qos_level_high_wm, ttu_regs->min_ttu_vblank,
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
454
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
463
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
464
struct dpp *dpp = pool->dpps[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
539
for (i = 0; i < pool->mpcc_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
542
pool->mpc->funcs->read_mpcc_state(pool->mpc, i, &s);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
562
for (i = 0; i < pool->mpcc_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
565
pool->mpc->funcs->read_mpcc_state(pool->mpc, i, &s);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
597
for (i = 0; i < pool->mpcc_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
600
pool->mpc->funcs->read_mpcc_state(pool->mpc, i, &s);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
615
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
631
for (i = 0; i < pool->timing_generator_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
632
struct timing_generator *tg = pool->timing_generators[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
645
if (pool->opps[i]->funcs->dpg_is_blanked)
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
646
s.blank_enabled = pool->opps[i]->funcs->dpg_is_blanked(pool->opps[i]);
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
686
for (i = 0; i < pool->res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
687
struct display_stream_compressor *dsc = pool->dscs[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
702
for (i = 0; i < pool->stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
703
struct stream_encoder *enc = pool->stream_enc[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
753
if (pool->hpo_dp_stream_enc_count > 0) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
755
for (i = 0; i < pool->hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
757
struct hpo_dp_stream_encoder *hpo_dp_stream_enc = pool->hpo_dp_stream_enc[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
783
if (pool->hpo_dp_link_enc_count) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
786
for (i = 0; i < pool->hpo_dp_link_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn10/dcn10_hwseq.c
787
struct hpo_dp_link_encoder *hpo_dp_link_enc = pool->hpo_dp_link_enc[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
169
for (i = 0; i < pool->mpcc_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
172
pool->mpc->funcs->read_mpcc_state(pool->mpc, i, &s);
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
79
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
89
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn20/dcn20_hwseq.c
90
struct dpp *dpp = pool->dpps[i];
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
182
for (i = 0; i < pool->mpcc_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
185
pool->mpc->funcs->read_mpcc_state(pool->mpc, i, &s);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
186
mpc3_get_gamut_remap(pool->mpc, i, &s.gamut_remap);
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
79
struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
90
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/hwss/dcn30/dcn30_hwseq.c
91
struct dpp *dpp = pool->dpps[i];
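The dce110 through dcn30 hwseq entries repeat one indirection: the pool holds a single MPC object, and per-MPCC state is read through its ops table (pool->mpc->funcs->read_mpcc_state). A stand-in sketch of that call shape:

struct mpcc_state_sketch { int opp_id, dpp_id; };

struct mpc_sketch;

struct mpc_funcs_sketch {
	void (*read_mpcc_state)(struct mpc_sketch *mpc, int mpcc_inst,
				struct mpcc_state_sketch *s);
};

struct mpc_sketch {
	const struct mpc_funcs_sketch *funcs;
};

struct hwseq_pool_sketch {
	int mpcc_count;
	struct mpc_sketch *mpc;
};

static void dump_mpcc_states(struct hwseq_pool_sketch *pool)
{
	int i;

	for (i = 0; i < pool->mpcc_count; i++) {
		struct mpcc_state_sketch s = { 0 };

		pool->mpc->funcs->read_mpcc_state(pool->mpc, i, &s);
		/* dcn30 additionally reads gamut remap state here */
	}
}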
drivers/gpu/drm/amd/display/dc/inc/core_types.h
143
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
149
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
154
const struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/inc/core_types.h
175
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
194
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
201
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/core_types.h
75
void (*destroy)(struct resource_pool **pool);
drivers/gpu/drm/amd/display/dc/inc/resource.h
100
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
128
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
133
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
138
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
155
const struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/inc/resource.h
162
const struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/inc/resource.h
294
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
303
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
314
struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
325
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
346
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
370
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
517
const struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/inc/resource.h
529
const struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/inc/resource.h
540
const struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/inc/resource.h
551
const struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/inc/resource.h
559
const struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/inc/resource.h
568
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
576
const struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/inc/resource.h
597
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/inc/resource.h
661
const struct resource_pool *const pool,
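One detail worth noting in the header entries above: the destroy hook in core_types.h (line 75) takes struct resource_pool **, not a single pointer. That lets the callee free the wrapper and clear the caller's handle in one step, which is exactly what the dce1xx destroy functions later in this index do (*pool = NULL). A stand-in sketch:

#include <stdlib.h>

struct any_pool_sketch { int pipe_count; };

static void destroy_pool_sketch(struct any_pool_sketch **pool)
{
	/* the per-chip destruct of (*pool)'s members would run first */
	free(*pool);
	*pool = NULL;	/* caller's pointer cannot dangle afterwards */
}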
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1013
return pool->stream_enc[j];
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1032
struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1040
pool->base.res_cap = &res_cap;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1041
pool->base.funcs = &dce100_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1042
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1047
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1050
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1052
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1054
pool->base.clock_sources[2] =
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1056
pool->base.clk_src_count = 3;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1059
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1062
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1064
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1066
pool->base.clk_src_count = 2;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1069
if (pool->base.dp_clock_source == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1075
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1076
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1083
pool->base.dmcu = dce_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1087
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1093
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1097
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1106
pool->base.irqs = dal_irq_service_dce110_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1107
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1114
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1115
pool->base.pipe_count = res_cap.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1116
pool->base.timing_generator_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1128
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1129
pool->base.timing_generators[i] =
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1134
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1140
pool->base.mis[i] = dce100_mem_input_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1141
if (pool->base.mis[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1148
pool->base.ipps[i] = dce100_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1149
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1156
pool->base.transforms[i] = dce100_transform_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1157
if (pool->base.transforms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1164
pool->base.opps[i] = dce100_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1165
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1173
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1174
pool->base.engines[i] = dce100_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1175
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1181
pool->base.hw_i2cs[i] = dce100_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1182
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1188
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1191
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1196
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1205
dce100_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1214
struct dce110_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1217
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1220
if (dce100_resource_construct(num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1221
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
1223
kfree(pool);
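The dce100 create path above (lines 1214-1223) follows the create/construct split used by every pool in this index: allocate the wrapper, run the fallible construct step, return the embedded base on success, and free the wrapper on failure. A stand-in sketch using plain calloc/free where the kernel uses kzalloc/kfree:

#include <stdbool.h>
#include <stdlib.h>

struct base_pool_sk { int pipe_count; };
struct chip_pool_sk { struct base_pool_sk base; };

/* Stands in for dce100_resource_construct(); may fail partway through. */
static bool chip_pool_construct(struct chip_pool_sk *pool)
{
	pool->base.pipe_count = 6;
	return true;
}

static struct base_pool_sk *chip_pool_create(void)
{
	struct chip_pool_sk *pool = calloc(1, sizeof(*pool));

	if (!pool)
		return NULL;
	if (chip_pool_construct(pool))
		return &pool->base;	/* callers only ever see the base */
	free(pool);			/* construct failed: undo allocation */
	return NULL;
}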
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
781
static void dce100_resource_destruct(struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
785
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
786
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
787
dce110_opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
789
if (pool->base.transforms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
790
dce100_transform_destroy(&pool->base.transforms[i]);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
792
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
793
dce_ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
795
if (pool->base.mis[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
796
kfree(TO_DCE_MEM_INPUT(pool->base.mis[i]));
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
797
pool->base.mis[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
800
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
801
kfree(DCE110TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
802
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
806
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
807
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
808
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
809
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
810
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
811
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
813
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
814
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
815
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
819
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
820
if (pool->base.stream_enc[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
821
kfree(DCE110STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
824
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
825
if (pool->base.clock_sources[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
826
dce100_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
829
if (pool->base.dp_clock_source != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
830
dce100_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
832
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
833
if (pool->base.audios[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
834
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
837
if (pool->base.abm != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
838
dce_abm_destroy(&pool->base.abm);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
840
if (pool->base.dmcu != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
841
dce_dmcu_destroy(&pool->base.dmcu);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
843
if (pool->base.irqs != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
844
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
953
static void dce100_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
955
struct dce110_resource_pool *dce110_pool = TO_DCE110_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
959
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
973
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
987
for (i = 0; i < pool->stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
989
pool->stream_enc[i]) {
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
994
if (pool->stream_enc[i]->id == preferred_engine)
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.c
995
return pool->stream_enc[i];
drivers/gpu/drm/amd/display/dc/resource/dce100/dce100_resource.h
60
const struct resource_pool *pool,
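The encoder lookup in dce100_resource.c (lines 973-1013 above) scans the pool's stream encoders for one whose id matches the preferred engine, otherwise falling back to the candidate remembered in j. The fallback selection is only partly visible in the index, so this sketch fills it in with an assumed "first candidate seen" rule; the real code also filters on availability:

#include <stddef.h>

struct stream_enc_sk { int id; };

struct enc_pool_sk {
	int stream_enc_count;
	struct stream_enc_sk *stream_enc[6];
};

static struct stream_enc_sk *
find_stream_enc(const struct enc_pool_sk *pool, int preferred_engine)
{
	int i, j = -1;

	for (i = 0; i < pool->stream_enc_count; i++) {
		if (j < 0)
			j = i;	/* assumed fallback: first candidate seen */
		if (pool->stream_enc[i]->id == preferred_engine)
			return pool->stream_enc[i];
	}
	return j >= 0 ? pool->stream_enc[j] : NULL;
}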
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1130
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1137
unsigned int underlay_idx = pool->underlay_pipe_index;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1143
pipe_ctx->stream_res.tg = pool->timing_generators[underlay_idx];
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1144
pipe_ctx->plane_res.mi = pool->mis[underlay_idx];
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1146
pipe_ctx->plane_res.xfm = pool->transforms[underlay_idx];
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1147
pipe_ctx->stream_res.opp = pool->opps[underlay_idx];
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1197
static void dce110_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1199
struct dce110_resource_pool *dce110_pool = TO_DCE110_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1203
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1208
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1215
for (i = 0; i < pool->stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1217
pool->stream_enc[i]) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1222
if (pool->stream_enc[i]->id ==
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1224
return pool->stream_enc[i];
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1233
return pool->stream_enc[j];
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1251
static bool underlay_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1272
pool->opps[pool->pipe_count] = &dce110_oppv->base;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1273
pool->timing_generators[pool->pipe_count] = &dce110_tgv->base;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1274
pool->mis[pool->pipe_count] = &dce110_miv->base;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1275
pool->transforms[pool->pipe_count] = &dce110_xfmv->base;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1276
pool->pipe_count++;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1354
struct dce110_resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1363
pool->base.res_cap = dce110_resource_cap(&ctx->asic_id);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1364
pool->base.funcs = &dce110_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1370
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1371
pool->base.underlay_pipe_index = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1372
pool->base.timing_generator_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1390
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1393
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1396
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1400
pool->base.clk_src_count = 2;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1405
if (pool->base.dp_clock_source == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1411
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1412
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1419
pool->base.dmcu = dce_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1423
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1429
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1433
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1442
pool->base.irqs = dal_irq_service_dce110_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1443
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1447
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1448
pool->base.timing_generators[i] = dce110_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1450
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1456
pool->base.mis[i] = dce110_mem_input_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1457
if (pool->base.mis[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1464
pool->base.ipps[i] = dce110_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1465
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1472
pool->base.transforms[i] = dce110_transform_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1473
if (pool->base.transforms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1480
pool->base.opps[i] = dce110_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1481
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1489
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1490
pool->base.engines[i] = dce110_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1491
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1497
pool->base.hw_i2cs[i] = dce110_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1498
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1504
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1510
if (!underlay_create(ctx, &pool->base))
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1513
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1520
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1522
for (i = 0; i < pool->base.underlay_pipe_index; ++i)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1525
dc->caps.planes[pool->base.underlay_pipe_index] = underlay_plane_cap;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1534
dce110_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1543
struct dce110_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1546
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1549
if (dce110_resource_construct(num_virtual_links, dc, pool, asic_id))
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1550
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
1552
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
816
static void dce110_resource_destruct(struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
820
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
821
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
822
dce110_opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
824
if (pool->base.transforms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
825
dce110_transform_destroy(&pool->base.transforms[i]);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
827
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
828
dce_ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
830
if (pool->base.mis[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
831
kfree(TO_DCE_MEM_INPUT(pool->base.mis[i]));
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
832
pool->base.mis[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
835
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
836
kfree(DCE110TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
837
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
841
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
842
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
843
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
844
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
845
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
846
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
848
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
849
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
850
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
854
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
855
if (pool->base.stream_enc[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
856
kfree(DCE110STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
859
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
860
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
861
dce110_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
865
if (pool->base.dp_clock_source != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
866
dce110_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
868
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
869
if (pool->base.audios[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
870
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
874
if (pool->base.abm != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
875
dce_abm_destroy(&pool->base.abm);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
877
if (pool->base.dmcu != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
878
dce_dmcu_destroy(&pool->base.dmcu);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
880
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.c
881
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.h
34
#define TO_DCE110_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.h
35
container_of(pool, struct dce110_resource_pool, base)
drivers/gpu/drm/amd/display/dc/resource/dce110/dce110_resource.h
50
const struct resource_pool *pool,
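The dce110 block above is the one pool in this index with an underlay: underlay_create() (lines 1251-1276) appends the underlay OPP/TG/MI/XFM one slot past the regular pipes and bumps pipe_count, while construct records that slot as underlay_pipe_index (line 1371). A stand-in sketch of the arrangement:

struct underlay_pool_sk {
	int pipe_count;
	int underlay_pipe_index;
	void *opps[8], *tgs[8], *mis[8], *xfms[8];
};

static void attach_underlay(struct underlay_pool_sk *pool, void *opp,
			    void *tg, void *mi, void *xfm)
{
	int u = pool->pipe_count;	/* next free slot */

	pool->underlay_pipe_index = u;
	pool->opps[u] = opp;
	pool->tgs[u] = tg;
	pool->mis[u] = mi;
	pool->xfms[u] = xfm;
	pool->pipe_count++;		/* underlay now counts as a pipe */
}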
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1046
static void dce112_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1048
struct dce110_resource_pool *dce110_pool = TO_DCE110_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1052
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1230
struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1237
pool->base.res_cap = dce112_resource_cap(&ctx->asic_id);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1238
pool->base.funcs = &dce112_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1243
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1244
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1245
pool->base.timing_generator_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1260
pool->base.clock_sources[DCE112_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1265
pool->base.clock_sources[DCE112_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1270
pool->base.clock_sources[DCE112_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1275
pool->base.clock_sources[DCE112_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1280
pool->base.clock_sources[DCE112_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1285
pool->base.clock_sources[DCE112_CLK_SRC_PLL5] =
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1290
pool->base.clk_src_count = DCE112_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1292
pool->base.dp_clock_source = dce112_clock_source_create(
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1297
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1298
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1305
pool->base.dmcu = dce_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1309
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1315
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1319
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1328
pool->base.irqs = dal_irq_service_dce110_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1329
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1333
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1334
pool->base.timing_generators[i] =
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1339
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1345
pool->base.mis[i] = dce112_mem_input_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1346
if (pool->base.mis[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1353
pool->base.ipps[i] = dce112_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1354
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1361
pool->base.transforms[i] = dce112_transform_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1362
if (pool->base.transforms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1369
pool->base.opps[i] = dce112_opp_create(
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1372
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1380
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1381
pool->base.engines[i] = dce112_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1382
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1388
pool->base.hw_i2cs[i] = dce112_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1389
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1395
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1398
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1402
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1417
dce112_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1425
struct dce110_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1428
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1431
if (dce112_resource_construct(num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1432
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
1434
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
781
static void dce112_resource_destruct(struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
785
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
786
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
787
dce110_opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
789
if (pool->base.transforms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
790
dce112_transform_destroy(&pool->base.transforms[i]);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
792
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
793
dce_ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
795
if (pool->base.mis[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
796
kfree(TO_DCE_MEM_INPUT(pool->base.mis[i]));
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
797
pool->base.mis[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
800
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
801
kfree(DCE110TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
802
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
806
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
807
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
808
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
809
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
810
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
811
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
813
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
814
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
815
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
819
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
820
if (pool->base.stream_enc[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
821
kfree(DCE110STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
824
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
825
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
826
dce112_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
830
if (pool->base.dp_clock_source != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
831
dce112_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
833
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
834
if (pool->base.audios[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
835
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
839
if (pool->base.abm != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
840
dce_abm_destroy(&pool->base.abm);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
842
if (pool->base.dmcu != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
843
dce_dmcu_destroy(&pool->base.dmcu);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
845
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
846
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
852
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
857
return pool->clock_sources[DCE112_CLK_SRC_PLL0];
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
859
return pool->clock_sources[DCE112_CLK_SRC_PLL1];
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
861
return pool->clock_sources[DCE112_CLK_SRC_PLL2];
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
863
return pool->clock_sources[DCE112_CLK_SRC_PLL3];
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
865
return pool->clock_sources[DCE112_CLK_SRC_PLL4];
drivers/gpu/drm/amd/display/dc/resource/dce112/dce112_resource.c
867
return pool->clock_sources[DCE112_CLK_SRC_PLL5];
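The lines closing the dce112 block (852-867) map a per-chip PLL id straight onto pool->clock_sources[], using the DCE112_CLK_SRC_* enum as the array index. A stand-in sketch of that table lookup:

#include <stddef.h>

enum clk_src_sk {
	CLK_SRC_PLL0, CLK_SRC_PLL1, CLK_SRC_PLL2,
	CLK_SRC_PLL3, CLK_SRC_PLL4, CLK_SRC_PLL5,
	CLK_SRC_TOTAL			/* mirrors DCE112_CLK_SRC_TOTAL */
};

struct clk_pool_sk {
	int clk_src_count;		/* set to CLK_SRC_TOTAL at construct */
	struct clock_source *clock_sources[CLK_SRC_TOTAL];
};

static struct clock_source *pll_lookup(const struct clk_pool_sk *pool,
				       enum clk_src_sk id)
{
	return id < CLK_SRC_TOTAL ? pool->clock_sources[id] : NULL;
}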
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1065
struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1078
pool->base.res_cap = &res_cap;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1079
pool->base.funcs = &dce120_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1082
pool->base.pipe_count = res_cap.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1083
pool->base.timing_generator_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1084
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1101
pool->base.clock_sources[DCE120_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1105
pool->base.clock_sources[DCE120_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1109
pool->base.clock_sources[DCE120_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1113
pool->base.clock_sources[DCE120_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1117
pool->base.clock_sources[DCE120_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1121
pool->base.clock_sources[DCE120_CLK_SRC_PLL5] =
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1125
pool->base.clk_src_count = DCE120_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1127
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1132
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1133
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1140
pool->base.dmcu = dce_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1144
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1150
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1154
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1162
pool->base.irqs = dal_irq_service_dce120_create(&irq_init_data);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1163
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1172
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1180
pool->base.timing_generators[j] =
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1185
if (pool->base.timing_generators[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1191
pool->base.mis[j] = dce120_mem_input_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1193
if (pool->base.mis[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1200
pool->base.ipps[j] = dce120_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1201
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1208
pool->base.transforms[j] = dce120_transform_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1209
if (pool->base.transforms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1216
pool->base.opps[j] = dce120_opp_create(
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1219
if (pool->base.opps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1229
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1230
pool->base.engines[i] = dce120_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1231
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1237
pool->base.hw_i2cs[i] = dce120_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1238
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1244
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1248
pool->base.pipe_count = j;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1249
pool->base.timing_generator_count = j;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1256
if (!resource_construct(num_virtual_links, dc, &pool->base, res_funcs))
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1263
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1278
pool->base.oem_device = dc->link_srv->create_ddc_service(&ddc_init_data);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1288
dce120_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1297
struct dce110_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1300
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1303
if (dce120_resource_construct(num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1304
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
1306
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
600
static void dce120_resource_destruct(struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
604
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
605
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
606
dce110_opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
608
if (pool->base.transforms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
609
dce120_transform_destroy(&pool->base.transforms[i]);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
611
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
612
dce_ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
614
if (pool->base.mis[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
615
kfree(TO_DCE_MEM_INPUT(pool->base.mis[i]));
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
616
pool->base.mis[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
619
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
620
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
623
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
624
kfree(DCE110TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
625
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
629
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
630
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
631
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
632
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
633
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
634
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
636
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
637
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
638
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
642
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
643
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
644
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
647
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
648
if (pool->base.stream_enc[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
649
kfree(DCE110STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
652
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
653
if (pool->base.clock_sources[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
655
&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
658
if (pool->base.dp_clock_source != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
659
dce120_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
661
if (pool->base.abm != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
662
dce_abm_destroy(&pool->base.abm);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
664
if (pool->base.dmcu != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
665
dce_dmcu_destroy(&pool->base.dmcu);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
667
if (pool->base.oem_device != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
668
struct dc *dc = pool->base.oem_device->ctx->dc;
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
670
dc->link_srv->destroy_ddc_service(&pool->base.oem_device);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
905
static void dce120_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
907
struct dce110_resource_pool *dce110_pool = TO_DCE110_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dce120/dce120_resource.c
911
*pool = NULL;
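The matching teardown, visible in dce120_destroy_resource_pool above, always runs in the same order: downcast the caller's base pointer back to the wrapper, destruct the sub-objects, free the wrapper, and null the caller's pointer so it cannot dangle. A compilable sketch of that sequence; the downcast macro here mirrors the container_of idiom the dcn10 header shows later in this listing, and everything else is illustrative:

        #include <stddef.h>
        #include <stdlib.h>

        struct resource_pool { int pipe_count; };
        struct dce110_resource_pool { struct resource_pool base; };

        /* recover the wrapper from a pointer to its embedded base member */
        #define TO_DCE110_RES_POOL(p) \
                ((struct dce110_resource_pool *)((char *)(p) - \
                        offsetof(struct dce110_resource_pool, base)))

        static void resource_destruct(struct dce110_resource_pool *pool)
        {
                (void)pool;                      /* release opps, transforms, ipps, ... here */
        }

        static void destroy_resource_pool(struct resource_pool **pool)
        {
                struct dce110_resource_pool *dce110_pool = TO_DCE110_RES_POOL(*pool);

                resource_destruct(dce110_pool);  /* 1: release sub-objects    */
                free(dce110_pool);               /* 2: free the wrapper       */
                *pool = NULL;                    /* 3: clear caller's pointer */
        }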
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1005
pool->base.irqs = dal_irq_service_dce60_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1006
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1010
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1011
pool->base.timing_generators[i] = dce60_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1013
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1019
pool->base.mis[i] = dce60_mem_input_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1020
if (pool->base.mis[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1026
pool->base.ipps[i] = dce60_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1027
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1033
pool->base.transforms[i] = dce60_transform_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1034
if (pool->base.transforms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1040
pool->base.opps[i] = dce60_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1041
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1048
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1049
pool->base.engines[i] = dce60_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1050
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1056
pool->base.hw_i2cs[i] = dce60_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1057
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1063
pool->base.sw_i2cs[i] = dce60_i2c_sw_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1064
if (pool->base.sw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1072
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1079
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1089
dce60_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1097
struct dce110_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1100
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1103
if (dce60_construct(num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1104
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1106
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1114
struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1122
pool->base.res_cap = &res_cap_61;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1123
pool->base.funcs = &dce60_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1129
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1130
pool->base.pipe_count = res_cap_61.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1131
pool->base.timing_generator_count = res_cap_61.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1144
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1147
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1149
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1151
pool->base.clock_sources[2] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1153
pool->base.clk_src_count = 3;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1156
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1159
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1161
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1163
pool->base.clk_src_count = 2;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1166
if (pool->base.dp_clock_source == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1172
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1173
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1180
pool->base.dmcu = dce_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1184
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1190
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1194
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1203
pool->base.irqs = dal_irq_service_dce60_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1204
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1208
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1209
pool->base.timing_generators[i] = dce60_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1211
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1217
pool->base.mis[i] = dce60_mem_input_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1218
if (pool->base.mis[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1224
pool->base.ipps[i] = dce60_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1225
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1231
pool->base.transforms[i] = dce60_transform_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1232
if (pool->base.transforms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1238
pool->base.opps[i] = dce60_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1239
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1246
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1247
pool->base.engines[i] = dce60_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1248
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1254
pool->base.hw_i2cs[i] = dce60_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1255
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1261
pool->base.sw_i2cs[i] = dce60_i2c_sw_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1262
if (pool->base.sw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1270
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1277
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1287
dce60_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1295
struct dce110_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1298
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1301
if (dce61_construct(num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1302
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1304
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1312
struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1320
pool->base.res_cap = &res_cap_64;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1321
pool->base.funcs = &dce60_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1327
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1328
pool->base.pipe_count = res_cap_64.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1329
pool->base.timing_generator_count = res_cap_64.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1342
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1346
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1348
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1350
pool->base.clk_src_count = 2;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1353
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1356
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1358
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1360
pool->base.clk_src_count = 2;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1363
if (pool->base.dp_clock_source == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1369
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1370
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1377
pool->base.dmcu = dce_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1381
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1387
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1391
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1400
pool->base.irqs = dal_irq_service_dce60_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1401
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1405
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1406
pool->base.timing_generators[i] = dce60_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1408
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1414
pool->base.mis[i] = dce60_mem_input_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1415
if (pool->base.mis[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1421
pool->base.ipps[i] = dce60_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1422
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1428
pool->base.transforms[i] = dce60_transform_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1429
if (pool->base.transforms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1435
pool->base.opps[i] = dce60_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1436
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1443
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1444
pool->base.engines[i] = dce60_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1445
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1451
pool->base.hw_i2cs[i] = dce60_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1452
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1458
pool->base.sw_i2cs[i] = dce60_i2c_sw_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1459
if (pool->base.sw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1467
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1474
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1484
dce60_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1492
struct dce110_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1495
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1498
if (dce64_construct(num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1499
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
1501
kfree(pool);
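dce60, dce61 and dce64 share one construct skeleton and differ mainly in which static capability table base.res_cap points at; pipe_count and timing_generator_count are then derived from that table rather than hard-coded. A sketch of the idiom — the counts below are placeholders, not the real DCE values:

        struct resource_caps {
                int num_timing_generator;
                int num_ddc;
        };

        /* one table per silicon variant; values are placeholders */
        static const struct resource_caps res_cap_61 = { .num_timing_generator = 4, .num_ddc = 4 };
        static const struct resource_caps res_cap_64 = { .num_timing_generator = 2, .num_ddc = 2 };

        struct pool_base {
                const struct resource_caps *res_cap;
                int pipe_count;
                int timing_generator_count;
        };

        static void size_from_caps(struct pool_base *base, const struct resource_caps *caps)
        {
                base->res_cap = caps;
                base->pipe_count = caps->num_timing_generator;
                base->timing_generator_count = caps->num_timing_generator;
        }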
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
824
static void dce60_resource_destruct(struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
828
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
829
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
830
dce110_opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
832
if (pool->base.transforms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
833
dce60_transform_destroy(&pool->base.transforms[i]);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
835
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
836
dce_ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
838
if (pool->base.mis[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
839
kfree(TO_DCE_MEM_INPUT(pool->base.mis[i]));
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
840
pool->base.mis[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
843
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
844
kfree(DCE110TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
845
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
849
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
850
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
851
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
852
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
853
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
854
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
856
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
857
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
858
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
862
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
863
if (pool->base.stream_enc[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
864
kfree(DCE110STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
867
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
868
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
869
dce60_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
873
if (pool->base.abm != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
874
dce_abm_destroy(&pool->base.abm);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
876
if (pool->base.dmcu != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
877
dce_dmcu_destroy(&pool->base.dmcu);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
879
if (pool->base.dp_clock_source != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
880
dce60_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
882
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
883
if (pool->base.audios[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
884
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
888
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
889
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
893
static void dce60_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
895
struct dce110_resource_pool *dce110_pool = TO_DCE110_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
899
*pool = NULL;
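Note how every construct records clk_src_count to match exactly how many clock_sources slots it filled (3 versus 2 in the dce60 branches above; the condition selecting the branch is not part of these hits), and dce60_resource_destruct then walks only that populated prefix. A small model of the counted-registration idiom, under those assumptions:

        #include <stdlib.h>

        #define MAX_CLK_SRC 4

        struct clock_source { int id; };

        struct pool_base {
                struct clock_source *clock_sources[MAX_CLK_SRC];
                int clk_src_count;
        };

        static void setup_clock_sources(struct pool_base *base, int have_third_pll)
        {
                base->clock_sources[0] = calloc(1, sizeof(struct clock_source));
                base->clock_sources[1] = calloc(1, sizeof(struct clock_source));
                base->clk_src_count = 2;

                if (have_third_pll) {
                        base->clock_sources[2] = calloc(1, sizeof(struct clock_source));
                        base->clk_src_count = 3;
                }
        }

        static void teardown_clock_sources(struct pool_base *base)
        {
                /* walk only the slots the construct path actually filled */
                for (int i = 0; i < base->clk_src_count; i++) {
                        if (base->clock_sources[i] != NULL) {
                                free(base->clock_sources[i]);
                                base->clock_sources[i] = NULL;
                        }
                }
        }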
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
916
struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
924
pool->base.res_cap = &res_cap;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
925
pool->base.funcs = &dce60_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
931
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
932
pool->base.pipe_count = res_cap.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
933
pool->base.timing_generator_count = res_cap.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
947
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
951
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
953
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
955
pool->base.clk_src_count = 2;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
958
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
961
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
963
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
965
pool->base.clk_src_count = 2;
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
968
if (pool->base.dp_clock_source == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
974
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
975
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
982
pool->base.dmcu = dce_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
986
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
992
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce60/dce60_resource.c
996
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1003
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1007
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1016
pool->base.irqs = dal_irq_service_dce80_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1017
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1021
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1022
pool->base.timing_generators[i] = dce80_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1024
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1030
pool->base.mis[i] = dce80_mem_input_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1031
if (pool->base.mis[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1037
pool->base.ipps[i] = dce80_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1038
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1044
pool->base.transforms[i] = dce80_transform_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1045
if (pool->base.transforms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1051
pool->base.opps[i] = dce80_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1052
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1059
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1060
pool->base.engines[i] = dce80_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1061
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1067
pool->base.hw_i2cs[i] = dce80_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1068
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1074
pool->base.sw_i2cs[i] = dce80_i2c_sw_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1075
if (pool->base.sw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1083
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1090
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1100
dce80_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1108
struct dce110_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1111
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1114
if (dce80_construct(num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1115
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1117
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1125
struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1133
pool->base.res_cap = &res_cap_81;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1134
pool->base.funcs = &dce80_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1140
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1141
pool->base.pipe_count = res_cap_81.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1142
pool->base.timing_generator_count = res_cap_81.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1157
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1160
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1162
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1164
pool->base.clock_sources[2] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1166
pool->base.clk_src_count = 3;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1169
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1172
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1174
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1176
pool->base.clk_src_count = 2;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1179
if (pool->base.dp_clock_source == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1185
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1186
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1193
pool->base.dmcu = dce_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1197
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1203
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1207
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1216
pool->base.irqs = dal_irq_service_dce80_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1217
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1221
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1222
pool->base.timing_generators[i] = dce80_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1224
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1230
pool->base.mis[i] = dce80_mem_input_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1231
if (pool->base.mis[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1237
pool->base.ipps[i] = dce80_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1238
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1244
pool->base.transforms[i] = dce80_transform_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1245
if (pool->base.transforms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1251
pool->base.opps[i] = dce80_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1252
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1259
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1260
pool->base.engines[i] = dce80_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1261
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1267
pool->base.hw_i2cs[i] = dce80_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1268
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1274
pool->base.sw_i2cs[i] = dce80_i2c_sw_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1275
if (pool->base.sw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1283
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1290
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1300
dce80_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1308
struct dce110_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1311
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1314
if (dce81_construct(num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1315
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1317
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1325
struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1333
pool->base.res_cap = &res_cap_83;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1334
pool->base.funcs = &dce80_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1340
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1341
pool->base.pipe_count = res_cap_83.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1342
pool->base.timing_generator_count = res_cap_83.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1359
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1362
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1364
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1366
pool->base.clk_src_count = 2;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1369
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1372
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1374
pool->base.clk_src_count = 1;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1377
if (pool->base.dp_clock_source == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1383
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1384
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1391
pool->base.dmcu = dce_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1395
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1401
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1405
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1414
pool->base.irqs = dal_irq_service_dce80_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1415
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1419
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1420
pool->base.timing_generators[i] = dce80_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1422
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1428
pool->base.mis[i] = dce80_mem_input_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1429
if (pool->base.mis[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1435
pool->base.ipps[i] = dce80_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1436
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1442
pool->base.transforms[i] = dce80_transform_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1443
if (pool->base.transforms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1449
pool->base.opps[i] = dce80_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1450
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1457
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1458
pool->base.engines[i] = dce80_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1459
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1465
pool->base.hw_i2cs[i] = dce80_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1466
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1472
pool->base.sw_i2cs[i] = dce80_i2c_sw_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1473
if (pool->base.sw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1481
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1488
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1498
dce80_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1506
struct dce110_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1509
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1512
if (dce83_construct(num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1513
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
1515
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
830
static void dce80_resource_destruct(struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
834
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
835
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
836
dce110_opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
838
if (pool->base.transforms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
839
dce80_transform_destroy(&pool->base.transforms[i]);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
841
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
842
dce_ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
844
if (pool->base.mis[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
845
kfree(TO_DCE_MEM_INPUT(pool->base.mis[i]));
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
846
pool->base.mis[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
849
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
850
kfree(DCE110TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
851
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
855
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
856
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
857
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
858
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
859
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
860
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
862
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
863
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
864
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
868
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
869
if (pool->base.stream_enc[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
870
kfree(DCE110STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
873
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
874
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
875
dce80_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
879
if (pool->base.abm != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
880
dce_abm_destroy(&pool->base.abm);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
882
if (pool->base.dmcu != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
883
dce_dmcu_destroy(&pool->base.dmcu);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
885
if (pool->base.dp_clock_source != NULL)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
886
dce80_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
888
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
889
if (pool->base.audios[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
890
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
894
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
895
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
899
static void dce80_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
901
struct dce110_resource_pool *dce110_pool = TO_DCE110_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
905
*pool = NULL;
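dce80_resource_destruct, like its dce60 and dce120 twins, checks every member for NULL before destroying it and nulls the slot afterwards. That makes a single destruct routine safe to call from any construct error path, however far construction got, and harmless to run twice. A sketch of that defensive shape, with illustrative names:

        #include <stdlib.h>

        struct pool_base {
                void *mis[8];
                int pipe_count;
        };

        /* safe on a partially constructed pool: empty slots are skipped,
         * and nulling each freed slot makes a second call a no-op */
        static void destruct_mem_inputs(struct pool_base *base)
        {
                for (int i = 0; i < base->pipe_count; i++) {
                        if (base->mis[i] != NULL) {
                                free(base->mis[i]);
                                base->mis[i] = NULL;
                        }
                }
        }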
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
922
struct dce110_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
930
pool->base.res_cap = &res_cap;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
931
pool->base.funcs = &dce80_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
937
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
938
pool->base.pipe_count = res_cap.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
939
pool->base.timing_generator_count = res_cap.num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
957
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
960
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
962
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
964
pool->base.clock_sources[2] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
966
pool->base.clk_src_count = 3;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
969
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
972
pool->base.clock_sources[0] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
974
pool->base.clock_sources[1] =
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
976
pool->base.clk_src_count = 2;
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
979
if (pool->base.dp_clock_source == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
985
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
986
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
993
pool->base.dmcu = dce_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dce80/dce80_resource.c
997
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1000
dcn10_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1001
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1004
if (pool->base.abm != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1005
dce_abm_destroy(&pool->base.abm);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1007
if (pool->base.dmcu != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1008
dce_dmcu_destroy(&pool->base.dmcu);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1010
kfree(pool->base.pp_smu);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1116
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1121
struct pipe_ctx *idle_pipe = resource_find_free_secondary_pipe_legacy(res_ctx, pool, head_pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1136
idle_pipe->plane_res.hubp = pool->hubps[idle_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1137
idle_pipe->plane_res.ipp = pool->ipps[idle_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1138
idle_pipe->plane_res.dpp = pool->dpps[idle_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1139
idle_pipe->plane_res.mpcc_inst = pool->dpps[idle_pipe->pipe_idx]->inst;
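The dcn10 acquire path above asks a helper for a free secondary pipe and then wires the idle pipe's plane resources straight out of the pool arrays, indexed by the pipe's own index, so pipe N always drives hubp/ipp/dpp N. A hedged sketch of that wiring; the struct layout is a simplified stand-in for the driver's types:

        struct hw_block { int inst; };

        struct pool {
                struct hw_block *hubps[8];
                struct hw_block *ipps[8];
                struct hw_block *dpps[8];
        };

        struct pipe_ctx {
                int pipe_idx;
                struct {
                        struct hw_block *hubp;
                        struct hw_block *ipp;
                        struct hw_block *dpp;
                        int mpcc_inst;
                } plane_res;
        };

        static void wire_idle_pipe(struct pipe_ctx *idle_pipe, const struct pool *pool)
        {
                /* fixed 1:1 mapping: the pipe index selects the matching HW blocks */
                idle_pipe->plane_res.hubp = pool->hubps[idle_pipe->pipe_idx];
                idle_pipe->plane_res.ipp  = pool->ipps[idle_pipe->pipe_idx];
                idle_pipe->plane_res.dpp  = pool->dpps[idle_pipe->pipe_idx];
                idle_pipe->plane_res.mpcc_inst = pool->dpps[idle_pipe->pipe_idx]->inst;
        }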
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1154
static void dcn10_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1156
struct dcn10_resource_pool *dcn10_pool = TO_DCN10_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1160
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1263
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1270
for (i = 0; i < pool->stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1272
pool->stream_enc[i]) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1277
if (pool->stream_enc[i]->id != ENGINE_ID_VIRTUAL)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1280
if (link->ep_type == DISPLAY_ENDPOINT_PHY && pool->stream_enc[i]->id ==
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1282
return pool->stream_enc[i];
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1284
if (link->ep_type == DISPLAY_ENDPOINT_USB4_DPIA && pool->stream_enc[i]->id ==
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1286
return pool->stream_enc[i];
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1295
return pool->stream_enc[j];
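The encoder-lookup hits above show a linear scan over pool->stream_enc: each candidate is matched against the link's endpoint type and expected engine id (PHY links one way, USB4 DPIA links another), with a non-matching index apparently kept aside as a fallback (the final return of stream_enc[j]). A simplified model of that selection loop; the enum values and the fallback handling are illustrative, not the driver's exact logic:

        #include <stddef.h>

        enum ep_type { ENDPOINT_PHY, ENDPOINT_USB4_DPIA };

        struct stream_encoder { int id; };

        struct pool {
                struct stream_encoder *stream_enc[8];
                int stream_enc_count;
        };

        static struct stream_encoder *find_stream_enc(const struct pool *pool,
                                                      enum ep_type type, int wanted_id)
        {
                for (int i = 0; i < pool->stream_enc_count; i++) {
                        struct stream_encoder *enc = pool->stream_enc[i];

                        if (!enc)
                                continue;
                        /* match the encoder whose engine id the link expects */
                        if ((type == ENDPOINT_PHY || type == ENDPOINT_USB4_DPIA) &&
                            enc->id == wanted_id)
                                return enc;
                }
                return NULL;    /* real code falls back to a virtual encoder here */
        }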
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1349
struct dcn10_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1362
pool->base.res_cap = &rv2_res_cap;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1364
pool->base.res_cap = &res_cap;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1365
pool->base.funcs = &dcn10_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1375
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1378
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1381
pool->base.pipe_count = 3;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1442
pool->base.clock_sources[DCN10_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1446
pool->base.clock_sources[DCN10_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1450
pool->base.clock_sources[DCN10_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1456
pool->base.clock_sources[DCN10_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1462
pool->base.clk_src_count = DCN10_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1465
pool->base.clk_src_count = DCN101_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1467
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1473
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1474
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1481
pool->base.dmcu = dcn10_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1485
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1491
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1495
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1527
pool->base.pp_smu = dcn10_pp_smu_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1533
if (pool->base.pp_smu != NULL
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1534
&& pool->base.pp_smu->rv_funcs.set_pme_wa_enable != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1576
dc->res_pool = &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1588
pool->base.irqs = dal_irq_service_dcn10_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1589
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1596
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1603
pool->base.hubps[j] = dcn10_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1604
if (pool->base.hubps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1611
pool->base.ipps[j] = dcn10_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1612
if (pool->base.ipps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1619
pool->base.dpps[j] = dcn10_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1620
if (pool->base.dpps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1627
pool->base.opps[j] = dcn10_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1628
if (pool->base.opps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1635
pool->base.timing_generators[j] = dcn10_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1637
if (pool->base.timing_generators[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1646
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1647
pool->base.engines[i] = dcn10_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1648
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1654
pool->base.hw_i2cs[i] = dcn10_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1655
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1661
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1665
pool->base.pipe_count = j;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1666
pool->base.timing_generator_count = j;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1667
pool->base.mpcc_count = j;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1672
dc->dml.ip.max_num_dpp = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1673
dc->dcn_ip->max_num_dpp = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1675
pool->base.mpc = dcn10_mpc_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1676
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1682
pool->base.hubbub = dcn10_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1683
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1690
pool->base.dio = dcn10_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1691
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1697
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1702
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1713
dcn10_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1722
struct dcn10_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1725
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1728
if (dcn10_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1729
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
1731
kfree(pool);
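dcn10_resource_construct iterates the physical pipe instances with i but fills the pool arrays at a separate index j, so disabled pipes leave no holes; afterwards pipe_count, timing_generator_count and mpcc_count are all rewritten to j, the number actually created. A compact model of that harvesting loop — the skip predicate here is a stand-in for whatever fuse/harvesting check the real construct performs between i and j:

        #include <stdlib.h>

        #define MAX_PIPES 6

        struct pool_base {
                void *hubps[MAX_PIPES];
                int pipe_count;
                int timing_generator_count;
        };

        static int pipe_is_disabled(int i)
        {
                return i == 2;                  /* stand-in predicate */
        }

        static void harvest_pipes(struct pool_base *base, int num_phys_pipes)
        {
                int i, j = 0;

                for (i = 0; i < num_phys_pipes; i++) {
                        if (pipe_is_disabled(i))
                                continue;       /* skip: j does not advance */
                        base->hubps[j] = calloc(1, 16);
                        j++;                    /* arrays stay densely packed */
                }
                base->pipe_count = j;           /* count what was really built */
                base->timing_generator_count = j;
        }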
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
929
static void dcn10_resource_destruct(struct dcn10_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
933
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
934
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
935
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
936
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
940
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
941
kfree(TO_DCN10_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
942
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
945
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
946
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
948
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
949
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
950
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
953
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
954
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
955
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
957
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
958
dcn10_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
960
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
961
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
963
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
964
kfree(TO_DCN10_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
965
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
968
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
969
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
972
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
973
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
974
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
978
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
979
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
980
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
981
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
982
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
983
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
984
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
987
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
988
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
989
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
992
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
993
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
994
dcn10_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
995
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.c
999
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.h
32
#define TO_DCN10_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.h
33
container_of(pool, struct dcn10_resource_pool, base)
drivers/gpu/drm/amd/display/dc/resource/dcn10/dcn10_resource.h
51
const struct resource_pool *pool,
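Taken together, the dcn10 hits above (lines 1697-1731 of dcn10_resource.c) trace the DC resource-pool lifecycle: the creator allocates the wrapper, hands it to the construct routine, and on failure runs the destructor before freeing. A minimal user-space sketch of that shape, with hypothetical demo_* names standing in for the driver's functions:

#include <stdlib.h>
#include <stdbool.h>

struct resource_pool { int pipe_count; };                 /* stand-in base */
struct demo_resource_pool { struct resource_pool base; }; /* stand-in wrapper */

static bool demo_resource_construct(struct demo_resource_pool *pool)
{
    pool->base.pipe_count = 4;  /* pretend every sub-object was created */
    return true;                /* false would signal a partially built pool */
}

static void demo_resource_destruct(struct demo_resource_pool *pool)
{
    (void)pool;                 /* would free sub-objects, clearing each pointer */
}

static struct resource_pool *demo_create_resource_pool(void)
{
    struct demo_resource_pool *pool = calloc(1, sizeof(*pool));

    if (!pool)
        return NULL;

    if (demo_resource_construct(pool))
        return &pool->base;     /* success: hand back the embedded base */

    demo_resource_destruct(pool);   /* failure: unwind, then free */
    free(pool);
    return NULL;
}

int main(void)
{
    struct resource_pool *base = demo_create_resource_pool();

    free(base);  /* safe here only because base is the wrapper's first member */
    return 0;
}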
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1111
static void dcn20_resource_destruct(struct dcn20_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1115
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1116
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1117
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1118
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1122
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1123
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1124
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1127
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1128
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1129
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1131
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1132
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1133
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1136
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1137
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1138
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1141
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1142
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1143
dcn20_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1145
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1146
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1148
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1149
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1150
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1153
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1154
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1158
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1159
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1160
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1161
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1162
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1163
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1165
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1166
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1167
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1171
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1172
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1173
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1176
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1177
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1178
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1179
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1183
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1184
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1185
kfree(TO_DCN20_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1186
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1188
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1189
kfree(TO_DCN20_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1190
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1194
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1195
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1196
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1199
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1200
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1201
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1202
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1206
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1207
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1208
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1212
if (pool->base.abm != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1213
dce_abm_destroy(&pool->base.abm);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1215
if (pool->base.dmcu != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1216
dce_dmcu_destroy(&pool->base.dmcu);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1218
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1219
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1221
if (pool->base.pp_smu != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1222
dcn20_pp_smu_destroy(&pool->base.pp_smu);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1224
if (pool->base.oem_device != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1225
struct dc *dc = pool->base.oem_device->ctx->dc;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1227
dc->link_srv->destroy_ddc_service(&pool->base.oem_device);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1328
struct resource_pool *pool = pipe_ctx->stream->ctx->dc->res_pool;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1330
if (pool->funcs->build_pipe_pix_clk_params) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1331
pool->funcs->build_pipe_pix_clk_params(pipe_ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1366
const struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1373
if (pool->res_cap->num_dsc == pool->res_cap->num_opp) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1374
*dsc = pool->dscs[pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1387
for (i = 0; i < pool->res_cap->num_dsc; i++)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1389
*dsc = pool->dscs[i];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1396
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1401
for (i = 0; i < pool->res_cap->num_dsc; i++)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1402
if (pool->dscs[i] == *dsc) {
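The 1366-1402 hits sketch how dcn20 hands out DSC engines: with one DSC per OPP the engine is indexed directly by pipe, otherwise the code scans for a free one, and release walks the array to find the matching pointer. A hedged sketch of that allocate-by-scan idea; the is_acquired flags are an illustrative stand-in for the driver's bookkeeping, not its actual state:

#include <stdbool.h>
#include <stddef.h>

#define NUM_DSC 4

struct demo_dsc { int inst; };

static struct demo_dsc dscs[NUM_DSC] = { {0}, {1}, {2}, {3} };
static bool is_acquired[NUM_DSC];   /* illustrative usage tracking */

static struct demo_dsc *demo_acquire_dsc(void)
{
    for (size_t i = 0; i < NUM_DSC; i++) {
        if (!is_acquired[i]) {
            is_acquired[i] = true;
            return &dscs[i];        /* first free engine wins */
        }
    }
    return NULL;                    /* all engines in use */
}

static void demo_release_dsc(struct demo_dsc **dsc)
{
    for (size_t i = 0; i < NUM_DSC; i++) {
        if (&dscs[i] == *dsc) {     /* find the matching engine... */
            is_acquired[i] = false; /* ...and mark it free again */
            *dsc = NULL;
            return;
        }
    }
}

int main(void)
{
    struct demo_dsc *dsc = demo_acquire_dsc();

    demo_release_dsc(&dsc);
    return dsc == NULL ? 0 : 1;
}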
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1520
const struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1525
next_odm_pipe->plane_res.mi = pool->mis[next_odm_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1526
next_odm_pipe->plane_res.hubp = pool->hubps[next_odm_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1527
next_odm_pipe->plane_res.ipp = pool->ipps[next_odm_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1528
next_odm_pipe->plane_res.xfm = pool->transforms[next_odm_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1529
next_odm_pipe->plane_res.dpp = pool->dpps[next_odm_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1530
next_odm_pipe->plane_res.mpcc_inst = pool->dpps[next_odm_pipe->pipe_idx]->inst;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1555
next_odm_pipe->stream_res.opp = pool->opps[next_odm_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1570
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1581
secondary_pipe->plane_res.mi = pool->mis[secondary_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1582
secondary_pipe->plane_res.hubp = pool->hubps[secondary_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1583
secondary_pipe->plane_res.ipp = pool->ipps[secondary_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1584
secondary_pipe->plane_res.xfm = pool->transforms[secondary_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1585
secondary_pipe->plane_res.dpp = pool->dpps[secondary_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1586
secondary_pipe->plane_res.mpcc_inst = pool->dpps[secondary_pipe->pipe_idx]->inst;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
1716
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2183
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2188
struct pipe_ctx *sec_dpp_pipe = resource_find_free_secondary_pipe_legacy(res_ctx, pool, otg_master);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2199
sec_dpp_pipe->plane_res.hubp = pool->hubps[sec_dpp_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2200
sec_dpp_pipe->plane_res.ipp = pool->ipps[sec_dpp_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2201
sec_dpp_pipe->plane_res.dpp = pool->dpps[sec_dpp_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2202
sec_dpp_pipe->plane_res.mpcc_inst = pool->dpps[sec_dpp_pipe->pipe_idx]->inst;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2218
static void dcn20_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2220
struct dcn20_resource_pool *dcn20_pool = TO_DCN20_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2224
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2247
const struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2250
dcn20_release_dsc(&context->res_ctx, pool, &pipe->stream_res.dsc);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2272
bool dcn20_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2275
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2289
pool->dwbc[i] = &dwbc20->base;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2294
bool dcn20_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2297
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2315
pool->mcif_wb[i] = &mcif_wb20->base;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2371
struct dcn20_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2380
if (pool->base.pp_smu) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2388
if (pool->base.pp_smu->nv_funcs.get_uclk_dpm_states) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2389
status = (pool->base.pp_smu->nv_funcs.get_uclk_dpm_states)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2390
(&pool->base.pp_smu->nv_funcs.pp_smu, uclk_states, &num_states);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2395
if (pool->base.pp_smu->nv_funcs.get_maximum_sustainable_clocks) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2396
status = (*pool->base.pp_smu->nv_funcs.get_maximum_sustainable_clocks)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2397
(&pool->base.pp_smu->nv_funcs.pp_smu, &max_clocks);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2416
loaded_ip->max_num_otg = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2417
loaded_ip->max_num_dpp = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2427
struct dcn20_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2441
pool->base.funcs = &dcn20_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2444
pool->base.res_cap = &res_cap_nv14;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2445
pool->base.pipe_count = 5;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2446
pool->base.mpcc_count = 5;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2448
pool->base.res_cap = &res_cap_nv10;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2449
pool->base.pipe_count = 6;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2450
pool->base.mpcc_count = 6;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2455
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2524
pool->base.clock_sources[DCN20_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2528
pool->base.clock_sources[DCN20_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2532
pool->base.clock_sources[DCN20_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2536
pool->base.clock_sources[DCN20_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2540
pool->base.clock_sources[DCN20_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2544
pool->base.clock_sources[DCN20_CLK_SRC_PLL5] =
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2548
pool->base.clk_src_count = DCN20_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2550
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2555
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2556
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2563
pool->base.dccg = dccg2_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2564
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2570
pool->base.dmcu = dcn20_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2574
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2580
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2584
if (pool->base.abm == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2590
pool->base.pp_smu = dcn20_pp_smu_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2593
if (!init_soc_bounding_box(dc, pool)) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2640
if (pool->base.pp_smu && pool->base.pp_smu->nv_funcs.set_wm_ranges)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2641
pool->base.pp_smu->nv_funcs.set_wm_ranges(&pool->base.pp_smu->nv_funcs.pp_smu, &ranges);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2645
pool->base.irqs = dal_irq_service_dcn20_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2646
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2650
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2651
pool->base.hubps[i] = dcn20_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2652
if (pool->base.hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2659
pool->base.ipps[i] = dcn20_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2660
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2667
pool->base.dpps[i] = dcn20_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2668
if (pool->base.dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2675
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2676
pool->base.engines[i] = dcn20_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2677
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2683
pool->base.hw_i2cs[i] = dcn20_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2684
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2690
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2693
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2694
pool->base.opps[i] = dcn20_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2695
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2703
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2704
pool->base.timing_generators[i] = dcn20_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2706
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2713
pool->base.timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2715
pool->base.mpc = dcn20_mpc_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2716
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2722
pool->base.hubbub = dcn20_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2723
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2730
pool->base.dio = dcn20_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2731
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2737
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2738
pool->base.dscs[i] = dcn20_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2739
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2746
if (!dcn20_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2751
if (!dcn20_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2757
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2776
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2791
pool->base.oem_device = dc->link_srv->create_ddc_service(&ddc_init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2793
pool->base.oem_device = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2800
dcn20_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2809
struct dcn20_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2812
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2815
if (dcn20_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2816
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.c
2819
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
114
bool dcn20_dwbc_create(struct dc_context *ctx, struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
115
bool dcn20_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
133
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
138
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
152
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
32
#define TO_DCN20_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
33
container_of(pool, struct dcn20_resource_pool, base)
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
64
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn20/dcn20_resource.h
68
const struct resource_pool *pool);
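Both headers wrap the same idiom: TO_DCN10_RES_POOL() and TO_DCN20_RES_POOL() use container_of() to step from the embedded struct resource_pool base back to the chip-specific wrapper. A self-contained illustration; the user-space container_of below mirrors the kernel macro, and the demo_* types are stand-ins:

#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct resource_pool { int pipe_count; };

struct demo_chip_pool {
    long chip_private;          /* wrapper-only state */
    struct resource_pool base;  /* embedded generic pool */
};

#define TO_DEMO_CHIP_POOL(pool) \
    container_of(pool, struct demo_chip_pool, base)

int main(void)
{
    struct demo_chip_pool wrapper = { .chip_private = 42 };
    struct resource_pool *base = &wrapper.base;

    /* Recover the wrapper from the base pointer, the way the
     * TO_DCNxx_RES_POOL() macros above do. Prints 42. */
    printf("%ld\n", TO_DEMO_CHIP_POOL(base)->chip_private);
    return 0;
}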
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1000
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1001
dcn201_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1002
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1006
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1007
dcn201_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1008
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1011
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1012
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1036
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1041
struct pipe_ctx *idle_pipe = resource_find_free_secondary_pipe_legacy(res_ctx, pool, head_pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1055
idle_pipe->plane_res.hubp = pool->hubps[idle_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1056
idle_pipe->plane_res.ipp = pool->ipps[idle_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1057
idle_pipe->plane_res.dpp = pool->dpps[idle_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1058
idle_pipe->plane_res.mpcc_inst = pool->dpps[idle_pipe->pipe_idx]->inst;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1082
static void dcn201_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1084
struct dcn201_resource_pool *dcn201_pool = TO_DCN201_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1088
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1123
struct dcn201_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1130
pool->base.res_cap = &res_cap_dnc201;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1131
pool->base.funcs = &dcn201_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1136
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1138
pool->base.pipe_count = 4;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1139
pool->base.mpcc_count = 5;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1199
pool->base.clock_sources[DCN20_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1203
pool->base.clock_sources[DCN20_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1208
pool->base.clk_src_count = DCN20_CLK_SRC_TOTAL_DCN201;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1211
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1216
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1217
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1223
pool->base.dccg = dccg201_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1224
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1229
dcn201_ip.max_num_otg = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1230
dcn201_ip.max_num_dpp = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1235
pool->base.irqs = dal_irq_service_dcn201_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1236
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1241
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1242
pool->base.hubps[i] = dcn201_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1243
if (pool->base.hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1249
pool->base.ipps[i] = dcn201_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1250
if (pool->base.ipps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1256
pool->base.dpps[i] = dcn201_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1257
if (pool->base.dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1264
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1265
pool->base.opps[i] = dcn201_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1266
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1273
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1274
pool->base.engines[i] = dcn201_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1275
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1280
pool->base.hw_i2cs[i] = dcn201_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1281
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1286
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1289
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1290
pool->base.timing_generators[i] = dcn201_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1292
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1298
pool->base.timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1300
pool->base.mpc = dcn201_mpc_create(ctx, pool->base.mpcc_count);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1301
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1306
pool->base.hubbub = dcn201_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1307
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1313
pool->base.dio = dcn201_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1314
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1320
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1326
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1339
dcn201_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1348
struct dcn201_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1351
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1354
if (dcn201_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1355
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
1357
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
939
static void dcn201_resource_destruct(struct dcn201_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
943
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
944
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
945
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
946
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
951
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
952
kfree(TO_DCN201_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
953
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
956
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
957
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
958
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
961
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
962
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
963
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
966
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
967
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
968
dcn201_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
970
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
971
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
973
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
974
kfree(TO_DCN10_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
975
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
978
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
979
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
983
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
984
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
985
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
988
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
989
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
990
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
991
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
994
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
995
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
996
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.c
999
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.h
36
#define TO_DCN201_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn201/dcn201_resource.h
37
container_of(pool, struct dcn201_resource_pool, base)
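dcn201_destroy_resource_pool() (lines 1082-1088 above) follows the family-wide destroy shape: take a struct resource_pool **, downcast to the wrapper, destruct, free, and NULL the caller's pointer so a stale reference cannot be reused. A minimal sketch with hypothetical demo_* names; the cast stands in for the TO_DCN201_RES_POOL() container_of macro:

#include <stdlib.h>

struct resource_pool { int pipe_count; };
struct demo_pool { struct resource_pool base; };

static void demo_destruct(struct demo_pool *pool)
{
    (void)pool;  /* sub-object teardown would happen here */
}

static void demo_destroy_resource_pool(struct resource_pool **pool)
{
    /* the driver downcasts via container_of; a cast works here
     * because base is the wrapper's first member */
    struct demo_pool *demo = (struct demo_pool *)*pool;

    demo_destruct(demo);
    free(demo);
    *pool = NULL;   /* the caller's pointer is cleared, not just freed */
}

int main(void)
{
    struct demo_pool *p = calloc(1, sizeof(*p));
    struct resource_pool *base = p ? &p->base : NULL;

    if (base)
        demo_destroy_resource_pool(&base);
    return base == NULL ? 0 : 1;
}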
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1413
struct dcn21_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1423
pool->base.res_cap = &res_cap_rn;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1425
pool->base.funcs = &dcn21_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1430
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1433
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1500
pool->base.clock_sources[DCN20_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1504
pool->base.clock_sources[DCN20_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1508
pool->base.clock_sources[DCN20_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1512
pool->base.clock_sources[DCN20_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1516
pool->base.clock_sources[DCN20_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1521
pool->base.clk_src_count = DCN20_CLK_SRC_TOTAL_DCN21;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1524
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1529
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1530
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1537
pool->base.dccg = dccg21_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1538
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1545
pool->base.dmcu = dcn21_dmcu_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1549
if (pool->base.dmcu == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1559
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1561
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1569
pool->base.abm = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1574
pool->base.abm = dce_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1579
pool->base.pp_smu = dcn21_pp_smu_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1592
pool->base.irqs = dal_irq_service_dcn21_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1593
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1598
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1605
pool->base.hubps[j] = dcn21_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1606
if (pool->base.hubps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1613
pool->base.ipps[j] = dcn21_ipp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1614
if (pool->base.ipps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1621
pool->base.dpps[j] = dcn21_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1622
if (pool->base.dpps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1629
pool->base.opps[j] = dcn21_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1630
if (pool->base.opps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1637
pool->base.timing_generators[j] = dcn21_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1639
if (pool->base.timing_generators[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1647
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1648
pool->base.engines[i] = dcn21_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1649
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1655
pool->base.hw_i2cs[i] = dcn21_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1656
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1662
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1665
pool->base.timing_generator_count = j;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1666
pool->base.pipe_count = j;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1667
pool->base.mpcc_count = j;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1669
pool->base.mpc = dcn21_mpc_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1670
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1676
pool->base.hubbub = dcn21_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1677
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1684
pool->base.dio = dcn21_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1685
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1691
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1692
pool->base.dscs[i] = dcn21_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1693
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1700
if (!dcn20_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1705
if (!dcn20_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1711
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1717
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1730
dcn21_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1739
struct dcn21_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1742
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1745
if (dcn21_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1746
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
1749
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
676
static void dcn21_resource_destruct(struct dcn21_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
680
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
681
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
682
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
683
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
687
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
688
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
689
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
692
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
693
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
694
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
696
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
697
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
698
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
701
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
702
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
703
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
706
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
707
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
708
dcn20_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
710
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
711
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
713
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
714
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
715
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
718
if (pool->base.irqs != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
719
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
722
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
723
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
724
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
725
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
726
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
727
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
729
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
730
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
731
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
735
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
736
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
737
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
740
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
741
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
742
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
743
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
747
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
748
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
749
kfree(TO_DCN20_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
750
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
752
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
753
kfree(TO_DCN20_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
754
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
758
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
759
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
760
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
763
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
764
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
765
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
766
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
770
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
771
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
772
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
775
if (pool->base.abm != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
776
if (pool->base.abm->ctx->dc->config.disable_dmcu)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
777
dmub_abm_destroy(&pool->base.abm);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
779
dce_abm_destroy(&pool->base.abm);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
782
if (pool->base.dmcu != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
783
dce_dmcu_destroy(&pool->base.dmcu);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
785
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
786
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
788
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
789
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
791
if (pool->base.pp_smu != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
792
dcn21_pp_smu_destroy(&pool->base.pp_smu);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
972
static void dcn21_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
974
struct dcn21_resource_pool *dcn21_pool = TO_DCN21_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.c
978
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.h
31
#define TO_DCN21_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn21/dcn21_resource.h
32
container_of(pool, struct dcn21_resource_pool, base)
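Unlike dcn10/dcn20, the dcn21 constructor (lines 1598-1667 above) walks hardware instance i but stores into software slot j, then sets timing_generator_count, pipe_count, and mpcc_count from j, so the recorded counts reflect only the pipes that survived the walk. A sketch of that compaction, where is_harvested() is a hypothetical stand-in for the driver's fuse check:

#include <stdbool.h>

#define MAX_PIPES 4

struct demo_pool {
    int hubp_inst[MAX_PIPES];   /* software slot -> hw instance */
    int pipe_count;
};

static bool is_harvested(int i)
{
    return i == 2;              /* pretend hw pipe 2 is fused off */
}

static void demo_fill_pipes(struct demo_pool *pool)
{
    int i, j = 0;

    for (i = 0; i < MAX_PIPES; i++) {
        if (is_harvested(i))
            continue;           /* skip dead hardware, keep slots dense */
        pool->hubp_inst[j] = i;
        j++;
    }
    pool->pipe_count = j;       /* counts reflect surviving pipes only */
}

int main(void)
{
    struct demo_pool pool = { {0}, 0 };

    demo_fill_pipes(&pool);
    return pool.pipe_count == 3 ? 0 : 1;
}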
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1094
static void dcn30_resource_destruct(struct dcn30_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1098
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1099
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1100
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1101
kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1102
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1104
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1105
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1106
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1108
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1109
pool->base.stream_enc[i] = NULL;
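New in the dcn30 destructor (lines 1098-1109 above): each stream encoder owns VPG and AFMT sub-objects, so teardown frees the children and NULLs their pointers before freeing the encoder itself. A compact sketch of that inside-out ordering with illustrative demo_* types:

#include <stdlib.h>

struct demo_vpg  { int inst; };
struct demo_afmt { int inst; };

struct demo_stream_enc {
    struct demo_vpg  *vpg;
    struct demo_afmt *afmt;
};

static void demo_free_stream_enc(struct demo_stream_enc **enc)
{
    if (*enc == NULL)
        return;

    /* children first, clearing each pointer as we go... */
    free((*enc)->vpg);
    (*enc)->vpg = NULL;
    free((*enc)->afmt);
    (*enc)->afmt = NULL;

    /* ...then the encoder that owned them */
    free(*enc);
    *enc = NULL;
}

int main(void)
{
    struct demo_stream_enc *enc = calloc(1, sizeof(*enc));

    if (enc) {
        enc->vpg  = calloc(1, sizeof(*enc->vpg));
        enc->afmt = calloc(1, sizeof(*enc->afmt));
    }
    demo_free_stream_enc(&enc);
    return enc == NULL ? 0 : 1;
}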
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1113
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1114
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1115
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1118
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1119
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1120
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1122
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1123
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1124
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1127
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1128
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1129
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1132
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1133
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1134
dcn30_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1136
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1137
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1139
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1140
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1141
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1144
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1145
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1149
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1150
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1151
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1152
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1153
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1154
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1156
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1157
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1158
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1162
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1163
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1164
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1167
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1168
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1169
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1170
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1174
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1175
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1176
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1177
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1179
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1180
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1181
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1185
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1186
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1187
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1190
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1191
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1192
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1193
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1197
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1198
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1199
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1200
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1202
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1203
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1204
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1208
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1209
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1210
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1213
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1214
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1215
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1218
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1219
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1221
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1222
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1224
if (pool->base.oem_device != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1225
struct dc *dc = pool->base.oem_device->ctx->dc;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1227
dc->link_srv->destroy_ddc_service(&pool->base.oem_device);
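Read together, the dcn30_resource_destruct() excerpts above (lines 1099-1227) all apply one guarded-teardown idiom: test each pool member for NULL, free it through the macro that recovers the full derived object from its embedded base pointer, then clear the slot so a repeated destruct pass is harmless. A minimal self-contained sketch of that idiom follows; the *_ish types and TG_FROM_BASE macro are illustrative stand-ins, not the real DC types.

    #include <stddef.h>
    #include <stdlib.h>

    /* Stand-ins for a base object embedded inside a derived allocation. */
    struct tg_base { int inst; };
    struct dcn10_tg_ish { struct tg_base base; int regs; };

    #define TG_FROM_BASE(ptr) \
        ((struct dcn10_tg_ish *)((char *)(ptr) - offsetof(struct dcn10_tg_ish, base)))

    static void destruct_tgs(struct tg_base **tgs, int count)
    {
        for (int i = 0; i < count; i++) {
            if (tgs[i] != NULL) {
                /* Free the enclosing allocation, not just the base member. */
                free(TG_FROM_BASE(tgs[i]));
                tgs[i] = NULL; /* clearing the slot makes teardown idempotent */
            }
        }
    }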
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1250
static bool dcn30_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1253
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1269
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1274
static bool dcn30_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1277
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1293
pool->mcif_wb[i] = &mcif_wb30->base;
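The dcn30_dwbc_create()/dcn30_mmhubbub_create() excerpts show the matching construction side: allocate one derived writeback object per num_dwb, initialize it, and park its base pointer in the pool array. A hedged sketch of that loop shape, with hypothetical *_ish types standing in for the DC structures:

    #include <stdbool.h>
    #include <stdlib.h>

    #define MAX_DWB 4

    struct dwbc_base { int inst; };
    struct dwbc30_ish { struct dwbc_base base; };

    struct dwb_pool_ish { struct dwbc_base *dwbc[MAX_DWB]; int num_dwb; };

    static bool dwbc_create_ish(struct dwb_pool_ish *pool)
    {
        for (int i = 0; i < pool->num_dwb; i++) {
            struct dwbc30_ish *dwbc30 = calloc(1, sizeof(*dwbc30));

            if (!dwbc30)
                return false; /* caller unwinds through the destruct path */
            dwbc30->base.inst = i;
            pool->dwbc[i] = &dwbc30->base; /* pool stores the base pointer */
        }
        return true;
    }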
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1319
static void dcn30_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1321
struct dcn30_resource_pool *dcn30_pool = TO_DCN30_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1325
*pool = NULL;
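dcn30_destroy_resource_pool() takes struct resource_pool ** rather than a plain pointer so it can both free the derived wrapper (recovered via TO_DCN30_RES_POOL at line 1321) and NULL the caller's handle at line 1325. A small sketch of that double-pointer destroy, again with stand-in types:

    #include <stddef.h>
    #include <stdlib.h>

    struct rpool_base { int pipes; };
    struct rpool_wrap { struct rpool_base base; };

    #define WRAP_FROM_BASE(p) \
        ((struct rpool_wrap *)((char *)(p) - offsetof(struct rpool_wrap, base)))

    static void destroy_pool(struct rpool_base **pool)
    {
        struct rpool_wrap *wrap = WRAP_FROM_BASE(*pool);

        /* a real destruct(wrap) would release every sub-object first */
        free(wrap);
        *pool = NULL; /* the caller's handle cannot dangle afterwards */
    }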
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1463
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1476
for (i = 0; i < pool->res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1478
*lut = pool->mpc_lut[i];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1479
*shaper = pool->mpc_shaper[i];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1480
state = &pool->mpc_lut[i]->state;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1499
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1506
for (i = 0; i < pool->res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1507
if (pool->mpc_lut[i] == *lut && pool->mpc_shaper[i] == *shaper) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1509
pool->mpc_lut[i]->state.raw = 0;
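Lines 1463-1509 are the acquire/release pair for the post-blend 3D LUT and shaper resources: acquire scans the pool's num_mpc_3dlut slots for a free pair, hands out both pointers, and records the LUT's state word; release matches the pair back to its slot (line 1507) and clears state.raw (line 1509). A self-contained sketch under the assumption that a nonzero state word simply means "acquired" (the real state type carries more detail):

    #include <stdbool.h>
    #include <stdint.h>

    #define NUM_3DLUT 3

    struct lut_ish { union { uint32_t raw; } state; };
    struct shaper_ish { int id; };

    static struct lut_ish *lut_pool[NUM_3DLUT];
    static struct shaper_ish *shaper_pool[NUM_3DLUT];

    static bool acquire_3dlut(struct lut_ish **lut, struct shaper_ish **shaper)
    {
        for (int i = 0; i < NUM_3DLUT; i++) {
            if (lut_pool[i]->state.raw == 0) {  /* free pair found */
                lut_pool[i]->state.raw = 1;     /* mark as acquired */
                *lut = lut_pool[i];
                *shaper = shaper_pool[i];
                return true;
            }
        }
        return false; /* every LUT/shaper pair is busy */
    }

    static bool release_3dlut(struct lut_ish **lut, struct shaper_ish **shaper)
    {
        for (int i = 0; i < NUM_3DLUT; i++) {
            if (lut_pool[i] == *lut && shaper_pool[i] == *shaper) {
                lut_pool[i]->state.raw = 0; /* as at line 1509 */
                *lut = NULL;
                *shaper = NULL;
                return true;
            }
        }
        return false;
    }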
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1530
struct dcn30_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1542
loaded_ip->max_num_otg = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1543
loaded_ip->max_num_dpp = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1561
const struct resource_pool *pool = dc->res_pool;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1566
sec_pipe->plane_res.mi = pool->mis[pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1567
sec_pipe->plane_res.hubp = pool->hubps[pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1568
sec_pipe->plane_res.ipp = pool->ipps[pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1569
sec_pipe->plane_res.xfm = pool->transforms[pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1570
sec_pipe->plane_res.dpp = pool->dpps[pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1571
sec_pipe->plane_res.mpcc_inst = pool->dpps[pipe_idx]->inst;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
1591
sec_pipe->stream_res.opp = pool->opps[pipe_idx];
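Lines 1561-1591 wire up a secondary pipe (for MPC/ODM combine) by pointing its plane_res and stream_res members at the pool-owned hardware blocks for that pipe index; note mpcc_inst is taken from the DPP's instance id (line 1571). A compact sketch with hypothetical *_ish types:

    struct hubp_ish { int inst; };
    struct dpp_ish { int inst; };
    struct opp_ish { int inst; };

    struct plane_res_ish { struct hubp_ish *hubp; struct dpp_ish *dpp; int mpcc_inst; };
    struct stream_res_ish { struct opp_ish *opp; };
    struct pipe_ish { struct plane_res_ish plane_res; struct stream_res_ish stream_res; };

    struct hw_pool_ish {
        struct hubp_ish *hubps[6];
        struct dpp_ish *dpps[6];
        struct opp_ish *opps[6];
    };

    /* Point the secondary pipe at the pool-owned blocks for its index. */
    static void wire_sec_pipe(struct pipe_ish *sec, const struct hw_pool_ish *pool, int idx)
    {
        sec->plane_res.hubp = pool->hubps[idx];
        sec->plane_res.dpp = pool->dpps[idx];
        sec->plane_res.mpcc_inst = pool->dpps[idx]->inst;
        sec->stream_res.opp = pool->opps[idx];
    }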
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2304
struct dcn30_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2324
pool->base.res_cap = &res_cap_dcn3;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2326
pool->base.funcs = &dcn30_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2331
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2332
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2333
pool->base.mpcc_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2378
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //3
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2424
pool->base.clock_sources[DCN30_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2428
pool->base.clock_sources[DCN30_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2432
pool->base.clock_sources[DCN30_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2436
pool->base.clock_sources[DCN30_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2440
pool->base.clock_sources[DCN30_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2444
pool->base.clock_sources[DCN30_CLK_SRC_PLL5] =
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2449
pool->base.clk_src_count = DCN30_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2452
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2457
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2458
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2466
pool->base.dccg = dccg30_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2467
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2474
init_soc_bounding_box(dc, pool);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2489
pool->base.irqs = dal_irq_service_dcn30_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2490
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2494
pool->base.hubbub = dcn30_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2495
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2502
pool->base.dio = dcn30_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2503
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2510
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2511
pool->base.hubps[i] = dcn30_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2512
if (pool->base.hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2519
pool->base.dpps[i] = dcn30_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2520
if (pool->base.dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2528
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2529
pool->base.opps[i] = dcn30_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2530
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2538
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2539
pool->base.timing_generators[i] = dcn30_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2541
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2547
pool->base.timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2549
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2551
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2558
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2559
pool->base.multiple_abms[i] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2563
if (pool->base.multiple_abms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2570
pool->base.mpc = dcn30_mpc_create(ctx, pool->base.mpcc_count, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2571
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2577
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2578
pool->base.dscs[i] = dcn30_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2579
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2587
if (!dcn30_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2593
if (!dcn30_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2600
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2601
pool->base.engines[i] = dcn30_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2602
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2608
pool->base.hw_i2cs[i] = dcn30_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2609
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2615
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2619
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2626
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2641
pool->base.oem_device = dc->link_srv->create_ddc_service(&ddc_init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2643
pool->base.oem_device = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2653
dcn30_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2662
struct dcn30_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2665
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2668
if (dcn30_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2669
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.c
2672
kfree(pool);
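Lines 2653-2672 show the standard create pattern for the wrapped pools: allocate the derived pool zeroed, run the construct, and return &pool->base on success; on failure the construct has already invoked dcn30_resource_destruct() (line 2653), so the creator only frees the wrapper. A sketch of that control flow, with stand-in types:

    #include <stdbool.h>
    #include <stdlib.h>

    struct rp_base { int pipe_count; };
    struct rp_wrap { struct rp_base base; };

    static bool rp_construct(struct rp_wrap *pool)
    {
        pool->base.pipe_count = 6;
        /* on any sub-allocation failure the real code calls the
         * destruct itself (line 2653) and returns false */
        return true;
    }

    static struct rp_base *rp_create(void)
    {
        struct rp_wrap *pool = calloc(1, sizeof(*pool));

        if (!pool)
            return NULL;
        if (rp_construct(pool))
            return &pool->base; /* caller only ever sees the base type */

        free(pool); /* construct already tore down any partial state */
        return NULL;
    }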
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
31
#define TO_DCN30_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
32
container_of(pool, struct dcn30_resource_pool, base)
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
85
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn30/dcn30_resource.h
92
const struct resource_pool *pool,
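TO_DCN30_RES_POOL (lines 31-32 of the header) is the usual container_of() downcast: given a pointer to the embedded base member, it recovers the enclosing dcn30_resource_pool. A self-contained illustration of what the simplified form expands to (the kernel's container_of() adds compile-time type checking on top of this):

    #include <stddef.h>
    #include <stdio.h>

    /* Simplified container_of(); the kernel version also type-checks. */
    #define container_of_ish(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct base { int id; };
    struct derived { int priv; struct base base; };

    int main(void)
    {
        struct derived d = { .priv = 42, .base = { .id = 7 } };
        struct base *b = &d.base;
        struct derived *back = container_of_ish(b, struct derived, base);

        printf("%d\n", back->priv); /* prints 42: the wrapper is recovered */
        return 0;
    }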
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1065
static void dcn301_destruct(struct dcn301_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1069
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1070
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1071
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1072
kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1073
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1075
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1076
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1077
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1079
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1080
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1084
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1085
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1086
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1089
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1090
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1091
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1093
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1094
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1095
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1098
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1099
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1100
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1103
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1104
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1105
dcn301_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1107
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1108
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1110
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1111
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1112
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1115
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1116
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1120
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1121
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1122
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1123
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1124
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1125
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1127
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1128
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1129
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1133
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1134
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1135
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1138
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1139
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1140
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1141
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1145
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1146
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1147
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1148
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1150
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1151
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1152
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1156
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1157
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1158
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1161
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1162
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1163
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1164
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1168
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1169
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1170
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1171
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1173
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1174
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1175
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1179
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1180
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1181
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1184
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1185
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1186
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1189
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1190
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1210
static bool dcn301_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1213
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1229
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1234
static bool dcn301_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1237
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1253
pool->mcif_wb[i] = &mcif_wb30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1274
static void dcn301_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1276
struct dcn301_resource_pool *dcn301_pool = TO_DCN301_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1280
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1323
struct dcn301_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1335
loaded_ip->max_num_otg = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1336
loaded_ip->max_num_dpp = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1443
struct dcn301_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1457
pool->base.res_cap = &res_cap_dcn301;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1459
pool->base.funcs = &dcn301_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1464
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1465
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1466
pool->base.mpcc_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1507
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //2
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1548
pool->base.clock_sources[DCN301_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1552
pool->base.clock_sources[DCN301_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1556
pool->base.clock_sources[DCN301_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1560
pool->base.clock_sources[DCN301_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1565
pool->base.clk_src_count = DCN301_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1568
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1573
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1574
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1582
pool->base.dccg = dccg301_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1583
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1589
init_soc_bounding_box(dc, pool);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1591
if (!dc->debug.disable_pplib_wm_range && pool->base.pp_smu->nv_funcs.set_wm_ranges)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1592
set_wm_ranges(pool->base.pp_smu, &dcn3_01_soc);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1607
pool->base.irqs = dal_irq_service_dcn30_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1608
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1612
pool->base.hubbub = dcn301_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1613
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1620
pool->base.dio = dcn301_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1621
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1629
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1639
pool->base.hubps[j] = dcn301_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1640
if (pool->base.hubps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1647
pool->base.dpps[j] = dcn301_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1648
if (pool->base.dpps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1655
pool->base.opps[j] = dcn301_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1656
if (pool->base.opps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1663
pool->base.timing_generators[j] = dcn301_timing_generator_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1664
if (pool->base.timing_generators[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1671
pool->base.timing_generator_count = j;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1672
pool->base.pipe_count = j;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1673
pool->base.mpcc_count = j;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1677
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1678
pool->base.multiple_abms[i] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1682
if (pool->base.multiple_abms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1690
pool->base.mpc = dcn301_mpc_create(ctx, pool->base.mpcc_count, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1691
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1697
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1698
pool->base.dscs[i] = dcn301_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1699
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1707
if (!dcn301_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1713
if (!dcn301_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1720
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1721
pool->base.engines[i] = dcn301_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1722
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1728
pool->base.hw_i2cs[i] = dcn301_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1729
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1735
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1739
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1746
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1759
dcn301_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1768
struct dcn301_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1771
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1774
if (dcn301_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1775
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.c
1778
kfree(pool);
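The dcn301 construct differs from dcn30 in lines 1629-1673: it iterates i over the nominal pipe range but advances a second index j only for pipes it actually instantiates, then rewrites pipe_count, mpcc_count, and timing_generator_count to j, so harvested pipes disappear from the pool while surviving blocks keep their physical instance ids. A sketch of that packing loop; pipe_is_harvested() is a hypothetical stand-in for whatever fuse check the real code performs:

    #include <stdbool.h>

    #define NOMINAL_PIPES 4

    struct hubp_hw { int inst; };

    /* Hypothetical fuse check; the real construct reads hardware state. */
    static bool pipe_is_harvested(int i) { return i == 2; }

    static int build_pipes(struct hubp_hw *out[NOMINAL_PIPES],
                           struct hubp_hw hw[NOMINAL_PIPES])
    {
        int j = 0;

        for (int i = 0; i < NOMINAL_PIPES; i++) {
            if (pipe_is_harvested(i))
                continue;       /* skip fused-off hardware */
            hw[i].inst = i;     /* keep the physical instance id */
            out[j++] = &hw[i];  /* pack survivors densely */
        }
        return j; /* becomes pipe_count, mpcc_count, timing_generator_count */
    }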
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.h
93
#define TO_DCN301_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn301/dcn301_resource.h
94
container_of(pool, struct dcn301_resource_pool, base)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1019
static void dcn302_resource_destruct(struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1023
for (i = 0; i < pool->stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1024
if (pool->stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1025
if (pool->stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1026
kfree(DCN30_VPG_FROM_VPG(pool->stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1027
pool->stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1029
if (pool->stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1030
kfree(DCN30_AFMT_FROM_AFMT(pool->stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1031
pool->stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1033
kfree(DCN10STRENC_FROM_STRENC(pool->stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1034
pool->stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1038
for (i = 0; i < pool->res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1039
if (pool->dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1040
dcn20_dsc_destroy(&pool->dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1043
if (pool->mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1044
kfree(TO_DCN20_MPC(pool->mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1045
pool->mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1048
if (pool->hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1049
kfree(pool->hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1050
pool->hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1053
if (pool->dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1054
kfree(TO_DCN10_DIO(pool->dio));
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1055
pool->dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1058
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1059
if (pool->dpps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1060
kfree(TO_DCN20_DPP(pool->dpps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1061
pool->dpps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1064
if (pool->hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1065
kfree(TO_DCN20_HUBP(pool->hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1066
pool->hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1069
if (pool->irqs != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1070
dal_irq_service_destroy(&pool->irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1073
for (i = 0; i < pool->res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1074
if (pool->engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1075
dce110_engine_destroy(&pool->engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1076
if (pool->hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1077
kfree(pool->hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1078
pool->hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1080
if (pool->sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1081
kfree(pool->sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1082
pool->sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1086
for (i = 0; i < pool->res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1087
if (pool->opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1088
pool->opps[i]->funcs->opp_destroy(&pool->opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1091
for (i = 0; i < pool->res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1092
if (pool->timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1093
kfree(DCN10TG_FROM_TG(pool->timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1094
pool->timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1098
for (i = 0; i < pool->res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1099
if (pool->dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1100
kfree(TO_DCN30_DWBC(pool->dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1101
pool->dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1103
if (pool->mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1104
kfree(TO_DCN30_MMHUBBUB(pool->mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1105
pool->mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1109
for (i = 0; i < pool->audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1110
if (pool->audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1111
dce_aud_destroy(&pool->audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1114
for (i = 0; i < pool->clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1115
if (pool->clock_sources[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1116
dcn20_clock_source_destroy(&pool->clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1119
if (pool->dp_clock_source != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1120
dcn20_clock_source_destroy(&pool->dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1122
for (i = 0; i < pool->res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1123
if (pool->mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1124
dc_3dlut_func_release(pool->mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1125
pool->mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1127
if (pool->mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1128
dc_transfer_func_release(pool->mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1129
pool->mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1133
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1134
if (pool->multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1135
dce_abm_destroy(&pool->multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1138
if (pool->psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1139
dmub_psr_destroy(&pool->psr);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1141
if (pool->dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1142
dcn_dccg_destroy(&pool->dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1144
if (pool->oem_device != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1145
struct dc *dc = pool->oem_device->ctx->dc;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1147
dc->link_srv->destroy_ddc_service(&pool->oem_device);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1151
static void dcn302_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1153
dcn302_resource_destruct(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1154
kfree(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1155
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1237
struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1246
pool->res_cap = &res_cap_dcn302;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1248
pool->funcs = &dcn302_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1253
pool->underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1254
pool->pipe_count = pool->res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1255
pool->mpcc_count = pool->res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1301
dc->caps.color.mpc.num_3dluts = pool->res_cap->num_mpc_3dlut; //3
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1343
pool->clock_sources[DCN302_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1347
pool->clock_sources[DCN302_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1351
pool->clock_sources[DCN302_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1355
pool->clock_sources[DCN302_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1359
pool->clock_sources[DCN302_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1364
pool->clk_src_count = DCN302_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1367
pool->dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1372
for (i = 0; i < pool->clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1373
if (pool->clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1381
pool->dccg = dccg30_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1382
if (pool->dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1389
init_soc_bounding_box(dc, pool);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1396
pool->irqs = dal_irq_service_dcn302_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1397
if (!pool->irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1401
pool->hubbub = dcn302_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1402
if (pool->hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1409
pool->dio = dcn302_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1410
if (pool->dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1417
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1418
pool->hubps[i] = dcn302_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1419
if (pool->hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1425
pool->dpps[i] = dcn302_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1426
if (pool->dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1433
for (i = 0; i < pool->res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1434
pool->opps[i] = dcn302_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1435
if (pool->opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1442
for (i = 0; i < pool->res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1443
pool->timing_generators[i] = dcn302_timing_generator_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1444
if (pool->timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1450
pool->timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1453
pool->psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1454
if (pool->psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1461
for (i = 0; i < pool->res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1462
pool->multiple_abms[i] = dmub_abm_create(ctx, &abm_regs[i], &abm_shift, &abm_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1463
if (pool->multiple_abms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1471
pool->mpc = dcn302_mpc_create(ctx, pool->mpcc_count, pool->res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1472
if (pool->mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1478
for (i = 0; i < pool->res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1479
pool->dscs[i] = dcn302_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1480
if (pool->dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1488
if (!dcn302_dwbc_create(ctx, pool)) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1494
if (!dcn302_mmhubbub_create(ctx, pool)) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1501
for (i = 0; i < pool->res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1502
pool->engines[i] = dcn302_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1503
if (pool->engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1508
pool->hw_i2cs[i] = dcn302_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1509
if (pool->hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1514
pool->sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1518
if (!resource_construct(num_virtual_links, dc, pool,
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1525
dc->caps.max_planes = pool->pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1540
pool->oem_device = dc->link_srv->create_ddc_service(&ddc_init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1542
pool->oem_device = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1549
dcn302_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1556
struct resource_pool *pool = kzalloc_obj(struct resource_pool);
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1558
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1561
if (dcn302_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1562
return pool;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
1565
kfree(pool);
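Line 1556 allocates the dcn302 pool with kzalloc_obj(struct resource_pool), taken at face value here as a typed zeroing allocator; unlike dcn30/dcn301 there is no derived wrapper, so the base type is returned as-is (line 1562) and every access in this file is pool->member rather than pool->base.member. A userspace sketch of that flat create path; zalloc_obj_ish() is a stand-in, and on trees without such a helper the long-standing kernel spelling is kzalloc(sizeof(*pool), GFP_KERNEL):

    #include <stdlib.h>

    /* Stand-in for a typed zeroing allocator. */
    #define zalloc_obj_ish(type) ((type *)calloc(1, sizeof(type)))

    struct flat_pool_ish { int pipe_count; };

    static struct flat_pool_ish *dcn302_create_ish(void)
    {
        struct flat_pool_ish *pool = zalloc_obj_ish(struct flat_pool_ish);

        if (!pool)
            return NULL;
        /* construct(pool) would run here; on failure: free(pool), return NULL */
        return pool; /* no wrapper: the base type is the whole allocation */
    }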
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
739
static bool dcn302_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
742
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
754
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
774
static bool dcn302_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
777
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
789
pool->mcif_wb[i] = &mcif_wb30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
985
static bool init_soc_bounding_box(struct dc *dc, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
997
loaded_ip->max_num_otg = pool->pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn302/dcn302_resource.c
998
loaded_ip->max_num_dpp = pool->pipe_count;
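init_soc_bounding_box() (lines 985-998) clamps the DML bounding-box caps to what the pool actually instantiated; note the listing shows dcn30 deriving max_num_otg from num_timing_generator (line 1542) while dcn302 uses pipe_count for both limits (lines 997-998). A trivial sketch of that clamp:

    struct ip_caps_ish { int max_num_otg; int max_num_dpp; };

    /* Clamp the modeling caps to the blocks the pool really owns. */
    static void clamp_ip_caps(struct ip_caps_ish *ip, int otg_count, int pipe_count)
    {
        ip->max_num_otg = otg_count;
        ip->max_num_dpp = pipe_count;
    }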
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1002
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1003
if (pool->dpps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1004
kfree(TO_DCN20_DPP(pool->dpps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1005
pool->dpps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1008
if (pool->hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1009
kfree(TO_DCN20_HUBP(pool->hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1010
pool->hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1013
if (pool->irqs != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1014
dal_irq_service_destroy(&pool->irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1017
for (i = 0; i < pool->res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1018
if (pool->engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1019
dce110_engine_destroy(&pool->engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1020
if (pool->hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1021
kfree(pool->hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1022
pool->hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1024
if (pool->sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1025
kfree(pool->sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1026
pool->sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1030
for (i = 0; i < pool->res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1031
if (pool->opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1032
pool->opps[i]->funcs->opp_destroy(&pool->opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1035
for (i = 0; i < pool->res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1036
if (pool->timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1037
kfree(DCN10TG_FROM_TG(pool->timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1038
pool->timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1042
for (i = 0; i < pool->res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1043
if (pool->dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1044
kfree(TO_DCN30_DWBC(pool->dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1045
pool->dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1047
if (pool->mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1048
kfree(TO_DCN30_MMHUBBUB(pool->mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1049
pool->mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1053
for (i = 0; i < pool->audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1054
if (pool->audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1055
dce_aud_destroy(&pool->audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1058
for (i = 0; i < pool->clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1059
if (pool->clock_sources[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1060
dcn20_clock_source_destroy(&pool->clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1063
if (pool->dp_clock_source != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1064
dcn20_clock_source_destroy(&pool->dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1066
for (i = 0; i < pool->res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1067
if (pool->mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1068
dc_3dlut_func_release(pool->mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1069
pool->mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1071
if (pool->mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1072
dc_transfer_func_release(pool->mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1073
pool->mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1077
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1078
if (pool->multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1079
dce_abm_destroy(&pool->multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1082
if (pool->psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1083
dmub_psr_destroy(&pool->psr);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1085
if (pool->dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1086
dcn_dccg_destroy(&pool->dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1088
if (pool->oem_device != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1089
struct dc *dc = pool->oem_device->ctx->dc;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1091
dc->link_srv->destroy_ddc_service(&pool->oem_device);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1095
static void dcn303_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1097
dcn303_resource_destruct(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1098
kfree(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1099
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1178
struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1187
pool->res_cap = &res_cap_dcn303;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1189
pool->funcs = &dcn303_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1194
pool->underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1195
pool->pipe_count = pool->res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1196
pool->mpcc_count = pool->res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1245
dc->caps.color.mpc.num_3dluts = pool->res_cap->num_mpc_3dlut; //3
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1287
pool->clock_sources[DCN303_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1291
pool->clock_sources[DCN303_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1296
pool->clk_src_count = DCN303_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1299
pool->dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1304
for (i = 0; i < pool->clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1305
if (pool->clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1313
pool->dccg = dccg30_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1314
if (pool->dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1321
init_soc_bounding_box(dc, pool);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1328
pool->irqs = dal_irq_service_dcn303_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1329
if (!pool->irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1333
pool->hubbub = dcn303_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1334
if (pool->hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1341
pool->dio = dcn303_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1342
if (pool->dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1349
for (i = 0; i < pool->pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1350
pool->hubps[i] = dcn303_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1351
if (pool->hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1357
pool->dpps[i] = dcn303_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1358
if (pool->dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1365
for (i = 0; i < pool->res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1366
pool->opps[i] = dcn303_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1367
if (pool->opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1374
for (i = 0; i < pool->res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1375
pool->timing_generators[i] = dcn303_timing_generator_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1376
if (pool->timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1382
pool->timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1385
pool->psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1386
if (pool->psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1393
for (i = 0; i < pool->res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1394
pool->multiple_abms[i] = dmub_abm_create(ctx, &abm_regs[i], &abm_shift, &abm_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1395
if (pool->multiple_abms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1403
pool->mpc = dcn303_mpc_create(ctx, pool->mpcc_count, pool->res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1404
if (pool->mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1410
for (i = 0; i < pool->res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1411
pool->dscs[i] = dcn303_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1412
if (pool->dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1420
if (!dcn303_dwbc_create(ctx, pool)) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1426
if (!dcn303_mmhubbub_create(ctx, pool)) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1433
for (i = 0; i < pool->res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1434
pool->engines[i] = dcn303_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1435
if (pool->engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1440
pool->hw_i2cs[i] = dcn303_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1441
if (pool->hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1446
pool->sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1450
if (!resource_construct(num_virtual_links, dc, pool,
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1457
dc->caps.max_planes = pool->pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1472
pool->oem_device = dc->link_srv->create_ddc_service(&ddc_init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1474
pool->oem_device = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1481
dcn303_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1488
struct resource_pool *pool = kzalloc(sizeof(struct resource_pool), GFP_KERNEL);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1490
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1493
if (dcn303_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1494
return pool;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
1497
kfree(pool);
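
The dcn303 creator records above (source lines 1488-1497) show the allocate/construct/return-or-free lifecycle shared by these resource pools. Below is a minimal, self-contained C sketch of that control flow; fake_pool, fake_pool_construct, and fake_pool_create are illustrative stand-ins, not the driver's types or functions, and the real constructors also take dc and init_data arguments omitted here.

#include <stdbool.h>
#include <stdlib.h>

/* Illustrative stand-in for the driver's resource_pool. */
struct fake_pool {
	int pipe_count;
};

/* Stand-in for dcn303_resource_construct(): true on success. In the
 * driver, the constructor's own error path runs the destruct helper
 * before returning failure. */
static bool fake_pool_construct(struct fake_pool *pool)
{
	pool->pipe_count = 2;
	return true;
}

/* Mirror of the creator pattern seen above: zero-allocate the pool,
 * run the constructor, and on failure free the pool so the caller
 * only ever sees a fully built pool or NULL. */
static struct fake_pool *fake_pool_create(void)
{
	struct fake_pool *pool = calloc(1, sizeof(*pool));

	if (!pool)
		return NULL;

	if (fake_pool_construct(pool))
		return pool;

	free(pool);
	return NULL;
}

int main(void)
{
	struct fake_pool *pool = fake_pool_create();

	free(pool);
	return 0;
}
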
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
700
static bool dcn303_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
703
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
715
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
735
static bool dcn303_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
738
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
750
pool->mcif_wb[i] = &mcif_wb30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
930
static bool init_soc_bounding_box(struct dc *dc, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
942
loaded_ip->max_num_otg = pool->pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
943
loaded_ip->max_num_dpp = pool->pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
963
static void dcn303_resource_destruct(struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
967
for (i = 0; i < pool->stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
968
if (pool->stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
969
if (pool->stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
970
kfree(DCN30_VPG_FROM_VPG(pool->stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
971
pool->stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
973
if (pool->stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
974
kfree(DCN30_AFMT_FROM_AFMT(pool->stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
975
pool->stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
977
kfree(DCN10STRENC_FROM_STRENC(pool->stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
978
pool->stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
982
for (i = 0; i < pool->res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
983
if (pool->dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
984
dcn20_dsc_destroy(&pool->dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
987
if (pool->mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
988
kfree(TO_DCN20_MPC(pool->mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
989
pool->mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
992
if (pool->hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
993
kfree(pool->hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
994
pool->hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
997
if (pool->dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
998
kfree(TO_DCN10_DIO(pool->dio));
drivers/gpu/drm/amd/display/dc/resource/dcn303/dcn303_resource.c
999
pool->dio = NULL;
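
Across the dcn303 records above, teardown follows one shape: the destruct helper walks each sub-object array, frees whatever is non-NULL (often through a container-of style macro such as TO_DCN10_DIO), and clears the slot; the destroy entry point then runs destruct, frees the pool itself, and NULLs the caller's pointer through its double-pointer parameter. A compact, compilable sketch of that shape with made-up types (sub_obj, fake_pool, and both functions are invented for illustration):

#include <stdlib.h>

/* Made-up sub-object and pool types, standing in for the driver's. */
struct sub_obj {
	int id;
};

struct fake_pool {
	struct sub_obj *objs[4];
	int obj_count;
};

/* Free every owned sub-object and NULL its slot, so a repeated
 * destruct (or a destruct after partial construction) is harmless. */
static void fake_pool_destruct(struct fake_pool *pool)
{
	int i;

	for (i = 0; i < pool->obj_count; i++) {
		if (pool->objs[i] != NULL) {
			free(pool->objs[i]);
			pool->objs[i] = NULL;
		}
	}
}

/* Destroy entry point: tear down the members, free the pool, and
 * clear the caller's pointer via the double pointer. */
static void fake_pool_destroy(struct fake_pool **pool)
{
	fake_pool_destruct(*pool);
	free(*pool);
	*pool = NULL;
}

int main(void)
{
	struct fake_pool *pool = calloc(1, sizeof(*pool));

	if (!pool)
		return 1;
	pool->objs[0] = calloc(1, sizeof(struct sub_obj));
	pool->obj_count = 1;
	fake_pool_destroy(&pool);	/* pool is NULL afterwards */
	return 0;
}
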
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1373
static void dcn31_resource_destruct(struct dcn31_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1377
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1378
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1379
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1380
kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1381
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1383
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1384
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1385
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1387
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1388
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1392
for (i = 0; i < pool->base.hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1393
if (pool->base.hpo_dp_stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1394
if (pool->base.hpo_dp_stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1395
kfree(DCN30_VPG_FROM_VPG(pool->base.hpo_dp_stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1396
pool->base.hpo_dp_stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1398
if (pool->base.hpo_dp_stream_enc[i]->apg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1399
kfree(DCN31_APG_FROM_APG(pool->base.hpo_dp_stream_enc[i]->apg));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1400
pool->base.hpo_dp_stream_enc[i]->apg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1402
kfree(DCN3_1_HPO_DP_STREAM_ENC_FROM_HPO_STREAM_ENC(pool->base.hpo_dp_stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1403
pool->base.hpo_dp_stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1407
for (i = 0; i < pool->base.hpo_dp_link_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1408
if (pool->base.hpo_dp_link_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1409
kfree(DCN3_1_HPO_DP_LINK_ENC_FROM_HPO_LINK_ENC(pool->base.hpo_dp_link_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1410
pool->base.hpo_dp_link_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1414
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1415
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1416
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1419
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1420
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1421
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1423
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1424
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1425
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1427
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1428
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1429
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1431
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1432
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1433
dcn31_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1435
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1436
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1438
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1439
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1440
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1443
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1444
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1448
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1449
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1450
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1451
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1452
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1453
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1455
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1456
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1457
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1461
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1462
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1463
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1466
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1467
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1468
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1469
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1473
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1474
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1475
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1476
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1478
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1479
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1480
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1484
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1485
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1486
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1489
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1490
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1491
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1492
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1496
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1497
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1498
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1499
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1501
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1502
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1503
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1507
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1508
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1509
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1512
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1513
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1514
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1517
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1518
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1520
if (pool->base.replay != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1521
dmub_replay_destroy(&pool->base.replay);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1523
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1524
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1546
static bool dcn31_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1549
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1565
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1570
static bool dcn31_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1573
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1589
pool->mcif_wb[i] = &mcif_wb30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1609
static void dcn31_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1611
struct dcn31_resource_pool *dcn31_pool = TO_DCN31_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1615
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1916
struct dcn31_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1924
pool->base.res_cap = &res_cap_dcn31;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1926
pool->base.funcs = &dcn31_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1931
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1932
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1933
pool->base.mpcc_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
1981
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //2
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2026
pool->base.clock_sources[DCN31_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2030
pool->base.clock_sources[DCN31_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2036
pool->base.clock_sources[DCN31_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2040
pool->base.clock_sources[DCN31_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2045
pool->base.clock_sources[DCN31_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2049
pool->base.clock_sources[DCN31_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2055
pool->base.clock_sources[DCN31_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2060
pool->base.clk_src_count = DCN30_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2063
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2068
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2069
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2077
pool->base.dccg = dccg31_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2078
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2086
pool->base.irqs = dal_irq_service_dcn31_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2087
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2091
pool->base.hubbub = dcn31_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2092
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2099
pool->base.dio = dcn31_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2100
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2107
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2108
pool->base.hubps[i] = dcn31_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2109
if (pool->base.hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2116
pool->base.dpps[i] = dcn31_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2117
if (pool->base.dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2125
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2126
pool->base.opps[i] = dcn31_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2127
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2135
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2136
pool->base.timing_generators[i] = dcn31_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2138
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2144
pool->base.timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2147
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2148
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2155
pool->base.replay = dmub_replay_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2156
if (pool->base.replay == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2163
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2164
pool->base.multiple_abms[i] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2168
if (pool->base.multiple_abms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2176
pool->base.mpc = dcn31_mpc_create(ctx, pool->base.mpcc_count, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2177
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2183
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2184
pool->base.dscs[i] = dcn31_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2185
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2193
if (!dcn31_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2199
if (!dcn31_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2206
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2207
pool->base.engines[i] = dcn31_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2208
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2214
pool->base.hw_i2cs[i] = dcn31_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2215
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2221
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2228
pool->base.usb4_dpia_count = 4;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2232
pool->base.usb4_dpia_count = 4;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2235
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2242
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2256
dcn31_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2265
struct dcn31_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2268
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2271
if (dcn31_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2272
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.c
2275
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.h
31
#define TO_DCN31_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn31/dcn31_resource.h
32
container_of(pool, struct dcn31_resource_pool, base)
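
TO_DCN31_RES_POOL above is a container_of() wrapper: given a pointer to the embedded struct resource_pool base member, it recovers the enclosing dcn31_resource_pool, which is how dcn31_destroy_resource_pool gets back the object it must kfree. A small userspace sketch of the same idiom using offsetof follows; base_pool, derived_pool, and to_derived are invented for illustration:

#include <stddef.h>
#include <stdio.h>

struct base_pool {
	int pipe_count;
};

/* The wrapper embeds the base; generic code only ever sees &d.base. */
struct derived_pool {
	struct base_pool base;
	int extra_state;
};

/* Same arithmetic as the kernel's container_of(): step back from the
 * member pointer by the member's offset within the wrapper. */
#define to_derived(ptr) \
	((struct derived_pool *)((char *)(ptr) - offsetof(struct derived_pool, base)))

int main(void)
{
	struct derived_pool d = { .base = { .pipe_count = 4 }, .extra_state = 7 };
	struct base_pool *b = &d.base;
	struct derived_pool *back = to_derived(b);

	printf("%d %d\n", back->base.pipe_count, back->extra_state); /* prints: 4 7 */
	return 0;
}
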
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1432
static void dcn314_resource_destruct(struct dcn314_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1436
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1437
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1438
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1439
kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1440
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1442
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1443
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1444
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1446
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1447
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1451
for (i = 0; i < pool->base.hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1452
if (pool->base.hpo_dp_stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1453
if (pool->base.hpo_dp_stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1454
kfree(DCN30_VPG_FROM_VPG(pool->base.hpo_dp_stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1455
pool->base.hpo_dp_stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1457
if (pool->base.hpo_dp_stream_enc[i]->apg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1458
kfree(DCN31_APG_FROM_APG(pool->base.hpo_dp_stream_enc[i]->apg));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1459
pool->base.hpo_dp_stream_enc[i]->apg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1461
kfree(DCN3_1_HPO_DP_STREAM_ENC_FROM_HPO_STREAM_ENC(pool->base.hpo_dp_stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1462
pool->base.hpo_dp_stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1466
for (i = 0; i < pool->base.hpo_dp_link_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1467
if (pool->base.hpo_dp_link_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1468
kfree(DCN3_1_HPO_DP_LINK_ENC_FROM_HPO_LINK_ENC(pool->base.hpo_dp_link_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1469
pool->base.hpo_dp_link_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1473
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1474
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1475
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1478
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1479
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1480
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1482
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1483
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1484
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1486
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1487
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1488
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1490
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1491
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1492
dcn31_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1494
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1495
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1497
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1498
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1499
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1502
if (pool->base.irqs != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1503
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1506
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1507
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1508
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1509
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1510
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1511
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1513
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1514
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1515
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1519
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1520
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1521
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1524
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1525
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1526
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1527
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1531
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1532
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1533
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1534
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1536
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1537
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1538
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1542
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1543
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1544
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1547
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1548
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1549
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1550
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1554
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1555
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1556
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1557
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1559
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1560
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1561
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1565
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1566
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1567
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1570
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1571
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1572
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1575
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1576
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1578
if (pool->base.replay != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1579
dmub_replay_destroy(&pool->base.replay);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1581
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1582
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1604
static bool dcn31_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1607
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1623
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1628
static bool dcn31_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1631
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1647
pool->mcif_wb[i] = &mcif_wb30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1667
static void dcn314_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1669
struct dcn314_resource_pool *dcn314_pool = TO_DCN314_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1673
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1847
struct dcn314_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1855
pool->base.res_cap = &res_cap_dcn314;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1856
pool->base.funcs = &dcn314_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1861
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1862
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1863
pool->base.mpcc_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1912
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //2
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1965
pool->base.clock_sources[DCN31_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1969
pool->base.clock_sources[DCN31_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1973
pool->base.clock_sources[DCN31_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1977
pool->base.clock_sources[DCN31_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1981
pool->base.clock_sources[DCN31_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1986
pool->base.clk_src_count = DCN30_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1989
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1994
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
1995
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2002
pool->base.dccg = dccg314_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2003
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2010
pool->base.irqs = dal_irq_service_dcn314_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2011
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2015
pool->base.hubbub = dcn31_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2016
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2023
pool->base.dio = dcn314_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2024
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2031
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2032
pool->base.hubps[i] = dcn31_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2033
if (pool->base.hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2040
pool->base.dpps[i] = dcn31_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2041
if (pool->base.dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2049
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2050
pool->base.opps[i] = dcn31_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2051
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2059
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2060
pool->base.timing_generators[i] = dcn31_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2062
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2068
pool->base.timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2071
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2072
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2079
pool->base.replay = dmub_replay_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2080
if (pool->base.replay == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2087
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2088
pool->base.multiple_abms[i] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2092
if (pool->base.multiple_abms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2100
pool->base.mpc = dcn31_mpc_create(ctx, pool->base.mpcc_count, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2101
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2107
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2108
pool->base.dscs[i] = dcn314_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2109
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2117
if (!dcn31_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2123
if (!dcn31_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2130
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2131
pool->base.engines[i] = dcn31_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2132
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2138
pool->base.hw_i2cs[i] = dcn31_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2139
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2145
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2149
pool->base.usb4_dpia_count = 4;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2152
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2159
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2174
dcn314_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2183
struct dcn314_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2186
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2189
if (dcn314_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2190
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.c
2193
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.h
35
#define TO_DCN314_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn314/dcn314_resource.h
36
container_of(pool, struct dcn314_resource_pool, base)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1374
static void dcn315_resource_destruct(struct dcn315_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1378
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1379
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1380
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1381
kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1382
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1384
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1385
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1386
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1388
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1389
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1393
for (i = 0; i < pool->base.hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1394
if (pool->base.hpo_dp_stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1395
if (pool->base.hpo_dp_stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1396
kfree(DCN30_VPG_FROM_VPG(pool->base.hpo_dp_stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1397
pool->base.hpo_dp_stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1399
if (pool->base.hpo_dp_stream_enc[i]->apg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1400
kfree(DCN31_APG_FROM_APG(pool->base.hpo_dp_stream_enc[i]->apg));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1401
pool->base.hpo_dp_stream_enc[i]->apg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1403
kfree(DCN3_1_HPO_DP_STREAM_ENC_FROM_HPO_STREAM_ENC(pool->base.hpo_dp_stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1404
pool->base.hpo_dp_stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1408
for (i = 0; i < pool->base.hpo_dp_link_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1409
if (pool->base.hpo_dp_link_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1410
kfree(DCN3_1_HPO_DP_LINK_ENC_FROM_HPO_LINK_ENC(pool->base.hpo_dp_link_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1411
pool->base.hpo_dp_link_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1415
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1416
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1417
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1420
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1421
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1422
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1424
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1425
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1426
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1428
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1429
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1430
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1432
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1433
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1434
dcn31_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1436
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1437
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1439
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1440
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1441
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1444
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1445
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1449
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1450
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1451
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1452
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1453
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1454
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1456
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1457
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1458
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1462
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1463
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1464
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1467
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1468
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1469
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1470
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1474
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1475
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1476
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1477
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1479
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1480
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1481
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1485
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1486
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1487
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1490
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1491
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1492
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1493
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1497
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1498
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1499
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1500
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1502
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1503
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1504
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1508
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1509
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1510
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1513
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1514
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1515
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1518
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1519
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1521
if (pool->base.replay != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1522
dmub_replay_destroy(&pool->base.replay);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1524
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1525
dcn_dccg_destroy(&pool->base.dccg);
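Every dcn315_resource_destruct match above follows one teardown idiom: test the pool slot, kfree the wrapping container struct recovered through its TO_*/FROM_* macro, then clear the slot so the destruct path stays idempotent. A minimal sketch of that idiom, using hypothetical names (my_wrapper, MY_WRAPPER_FROM_BASE, struct base_obj) in place of DCN-specific pairs such as TO_DCN20_MPC/struct dcn20_mpc:

	#include <linux/slab.h>
	#include <linux/container_of.h>

	struct base_obj { int inst; };

	/* Illustrative stand-in for wrappers like dcn20_mpc or dcn20_hubp. */
	struct my_wrapper {
		struct base_obj base;	/* embedded base object */
		/* ... register maps, shift/mask tables ... */
	};

	#define MY_WRAPPER_FROM_BASE(b) \
		container_of(b, struct my_wrapper, base)

	static void destruct_slot(struct base_obj **slot)
	{
		if (*slot != NULL) {
			kfree(MY_WRAPPER_FROM_BASE(*slot));	/* free the container */
			*slot = NULL;	/* repeated destruct calls stay harmless */
		}
	}

The same shape repeats in the dcn316, dcn32 and dcn321 destruct functions later in this listing; only the wrapper macros differ.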
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1547
static bool dcn31_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1550
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1566
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1571
static bool dcn31_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1574
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1590
pool->mcif_wb[i] = &mcif_wb30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1610
static void dcn315_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1612
struct dcn315_resource_pool *dcn31_pool = TO_DCN315_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1616
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1887
struct dcn315_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1895
pool->base.res_cap = &res_cap_dcn31;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1897
pool->base.funcs = &dcn315_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1902
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1903
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1904
pool->base.mpcc_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1950
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //2
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1988
pool->base.clock_sources[DCN31_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1992
pool->base.clock_sources[DCN31_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
1996
pool->base.clock_sources[DCN31_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2000
pool->base.clock_sources[DCN31_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2004
pool->base.clock_sources[DCN31_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2009
pool->base.clk_src_count = DCN30_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2012
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2017
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2018
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2026
pool->base.dccg = dccg31_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2027
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2035
pool->base.irqs = dal_irq_service_dcn315_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2036
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2040
pool->base.hubbub = dcn31_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2041
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2048
pool->base.dio = dcn315_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2049
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2056
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2057
pool->base.hubps[i] = dcn31_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2058
if (pool->base.hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2065
pool->base.dpps[i] = dcn31_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2066
if (pool->base.dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2074
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2075
pool->base.opps[i] = dcn31_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2076
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2084
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2085
pool->base.timing_generators[i] = dcn31_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2087
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2093
pool->base.timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2096
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2097
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2104
pool->base.replay = dmub_replay_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2105
if (pool->base.replay == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2112
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2113
pool->base.multiple_abms[i] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2117
if (pool->base.multiple_abms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2125
pool->base.mpc = dcn31_mpc_create(ctx, pool->base.mpcc_count, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2126
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2132
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2133
pool->base.dscs[i] = dcn31_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2134
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2142
if (!dcn31_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2148
if (!dcn31_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2155
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2156
pool->base.engines[i] = dcn31_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2157
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2163
pool->base.hw_i2cs[i] = dcn31_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2164
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2170
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2174
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2181
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2196
dcn315_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2205
struct dcn315_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2208
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2211
if (dcn315_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2212
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.c
2215
kfree(pool);
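Lines 2205-2215 above are the standard create bracket around the constructor: kzalloc the chip-specific pool, return the embedded base on success, free on failure (the constructor's error path has already run dcn315_resource_destruct, as the match at line 2196 shows). A sketch of the bracket with hypothetical my_* names, assuming the constructor returns true on success as these matches imply:

	struct resource_pool *my_pool_create(const struct dc_init_data *init_data,
					     struct dc *dc)
	{
		struct my_resource_pool *pool =
			kzalloc(sizeof(struct my_resource_pool), GFP_KERNEL);

		if (!pool)
			return NULL;

		if (my_resource_construct(init_data->num_virtual_links, dc, pool))
			return &pool->base;	/* hand out the embedded base */

		kfree(pool);	/* construct already tore down its partial state */
		return NULL;
	}

The matching destroy (lines 1610-1616) downcasts with the TO_*_RES_POOL macro, runs the destruct, frees the container, and NULLs the caller's pointer.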
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.h
31
#define TO_DCN315_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn315/dcn315_resource.h
32
container_of(pool, struct dcn315_resource_pool, base)
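TO_DCN315_RES_POOL, like the TO_DCN316_/TO_DCN32_ variants further down, is a plain container_of downcast: given the embedded base member that generic code passes around, it recovers the outer chip-specific pool. A self-contained userspace illustration with hypothetical struct names (the kernel macro adds type checking that this simplified restatement omits):

	#include <stddef.h>
	#include <stdio.h>

	#define my_container_of(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))

	struct base { int pipe_count; };

	struct derived_pool {
		int chip_id;
		struct base base;	/* embedded, as in struct dcn315_resource_pool */
	};

	#define TO_DERIVED_POOL(p) my_container_of(p, struct derived_pool, base)

	int main(void)
	{
		struct derived_pool pool = { .chip_id = 315, .base = { .pipe_count = 4 } };
		struct base *b = &pool.base;	/* what resource_pool consumers hold */

		printf("%d\n", TO_DERIVED_POOL(b)->chip_id);	/* prints 315 */
		return 0;
	}

Because base is embedded rather than pointed to, the downcast is pure pointer arithmetic and costs nothing at runtime.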
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1369
static void dcn316_resource_destruct(struct dcn316_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1373
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1374
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1375
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1376
kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1377
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1379
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1380
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1381
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1383
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1384
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1388
for (i = 0; i < pool->base.hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1389
if (pool->base.hpo_dp_stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1390
if (pool->base.hpo_dp_stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1391
kfree(DCN30_VPG_FROM_VPG(pool->base.hpo_dp_stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1392
pool->base.hpo_dp_stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1394
if (pool->base.hpo_dp_stream_enc[i]->apg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1395
kfree(DCN31_APG_FROM_APG(pool->base.hpo_dp_stream_enc[i]->apg));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1396
pool->base.hpo_dp_stream_enc[i]->apg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1398
kfree(DCN3_1_HPO_DP_STREAM_ENC_FROM_HPO_STREAM_ENC(pool->base.hpo_dp_stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1399
pool->base.hpo_dp_stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1403
for (i = 0; i < pool->base.hpo_dp_link_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1404
if (pool->base.hpo_dp_link_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1405
kfree(DCN3_1_HPO_DP_LINK_ENC_FROM_HPO_LINK_ENC(pool->base.hpo_dp_link_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1406
pool->base.hpo_dp_link_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1410
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1411
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1412
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1415
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1416
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1417
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1419
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1420
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1421
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1423
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1424
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1425
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1427
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1428
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1429
dcn31_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1431
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1432
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1434
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1435
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1436
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1439
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1440
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1444
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1445
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1446
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1447
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1448
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1449
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1451
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1452
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1453
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1457
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1458
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1459
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1462
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1463
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1464
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1465
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1469
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1470
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1471
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1472
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1474
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1475
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1476
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1480
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1481
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1482
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1485
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1486
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1487
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1488
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1492
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1493
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1494
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1495
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1497
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1498
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1499
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1503
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1504
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1505
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1508
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1509
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1510
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1513
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1514
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1516
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1517
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1539
static bool dcn31_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1542
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1558
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1563
static bool dcn31_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1566
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1582
pool->mcif_wb[i] = &mcif_wb30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1602
static void dcn316_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1604
struct dcn316_resource_pool *dcn31_pool = TO_DCN316_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1608
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1762
struct dcn316_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1770
pool->base.res_cap = &res_cap_dcn31;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1772
pool->base.funcs = &dcn316_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1777
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1778
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1779
pool->base.mpcc_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1825
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //2
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1863
pool->base.clock_sources[DCN31_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1867
pool->base.clock_sources[DCN31_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1871
pool->base.clock_sources[DCN31_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1875
pool->base.clock_sources[DCN31_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1879
pool->base.clock_sources[DCN31_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1884
pool->base.clk_src_count = DCN30_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1887
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1892
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1893
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1901
pool->base.dccg = dccg31_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1902
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1910
pool->base.irqs = dal_irq_service_dcn31_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1911
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1915
pool->base.hubbub = dcn31_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1916
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1923
pool->base.dio = dcn316_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1924
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1931
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1932
pool->base.hubps[i] = dcn31_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1933
if (pool->base.hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1940
pool->base.dpps[i] = dcn31_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1941
if (pool->base.dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1949
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1950
pool->base.opps[i] = dcn31_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1951
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1959
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1960
pool->base.timing_generators[i] = dcn31_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1962
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1968
pool->base.timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1971
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1972
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1979
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1980
pool->base.multiple_abms[i] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1984
if (pool->base.multiple_abms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1992
pool->base.mpc = dcn31_mpc_create(ctx, pool->base.mpcc_count, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1993
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
1999
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2000
pool->base.dscs[i] = dcn31_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2001
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2009
if (!dcn31_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2015
if (!dcn31_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2022
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2023
pool->base.engines[i] = dcn31_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2024
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2030
pool->base.hw_i2cs[i] = dcn31_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2031
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2037
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2041
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2048
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2063
dcn316_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2072
struct dcn316_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2075
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2078
if (dcn316_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2079
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.c
2082
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.h
31
#define TO_DCN316_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn316/dcn316_resource.h
32
container_of(pool, struct dcn316_resource_pool, base)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1381
static void dcn32_resource_destruct(struct dcn32_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1385
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1386
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1387
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1388
kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1389
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1391
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1392
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1393
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1395
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1396
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1400
for (i = 0; i < pool->base.hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1401
if (pool->base.hpo_dp_stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1402
if (pool->base.hpo_dp_stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1403
kfree(DCN30_VPG_FROM_VPG(pool->base.hpo_dp_stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1404
pool->base.hpo_dp_stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1406
if (pool->base.hpo_dp_stream_enc[i]->apg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1407
kfree(DCN31_APG_FROM_APG(pool->base.hpo_dp_stream_enc[i]->apg));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1408
pool->base.hpo_dp_stream_enc[i]->apg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1410
kfree(DCN3_1_HPO_DP_STREAM_ENC_FROM_HPO_STREAM_ENC(pool->base.hpo_dp_stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1411
pool->base.hpo_dp_stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1415
for (i = 0; i < pool->base.hpo_dp_link_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1416
if (pool->base.hpo_dp_link_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1417
kfree(DCN3_1_HPO_DP_LINK_ENC_FROM_HPO_LINK_ENC(pool->base.hpo_dp_link_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1418
pool->base.hpo_dp_link_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1422
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1423
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1424
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1427
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1428
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1429
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1431
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1432
kfree(TO_DCN20_HUBBUB(pool->base.hubbub));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1433
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1435
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1436
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1437
dcn32_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1439
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1440
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1442
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1443
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1444
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1447
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1448
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1452
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1453
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1454
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1455
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1456
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1457
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1459
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1460
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1461
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1465
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1466
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1467
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1470
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1471
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1472
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1473
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1477
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1478
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1479
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1480
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1482
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1483
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1484
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1488
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1489
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1490
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1493
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1494
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1495
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1496
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1500
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1501
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1502
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1503
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1505
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1506
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1507
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1511
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1512
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1513
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1516
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1517
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1518
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1521
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1522
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1524
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1525
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1527
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1528
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1529
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1532
if (pool->base.oem_device != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1533
struct dc *dc = pool->base.oem_device->ctx->dc;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1535
dc->link_srv->destroy_ddc_service(&pool->base.oem_device);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1540
static bool dcn32_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1543
uint32_t dwb_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1563
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1568
static bool dcn32_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1571
uint32_t dwb_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1591
pool->mcif_wb[i] = &mcif_wb30->base;
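dcn31_dwbc_create/dcn31_mmhubbub_create and the dcn32 versions above share one loop shape: allocate a DCN30 wrapper per writeback pipe, construct it, and publish the embedded base into the pool array. A sketch of the loop; construct_one_dwbc is a hypothetical stand-in for dcn30_dwbc_construct and its per-instance register arguments:

	static bool dwbc_create_sketch(struct dc_context *ctx,
				       struct resource_pool *pool)
	{
		uint32_t i, dwb_count = pool->res_cap->num_dwb;

		for (i = 0; i < dwb_count; i++) {
			struct dcn30_dwbc *dwbc30 =
				kzalloc(sizeof(struct dcn30_dwbc), GFP_KERNEL);

			if (!dwbc30)
				return false;	/* caller unwinds via the destruct path */

			construct_one_dwbc(dwbc30, ctx, i);	/* hypothetical helper */

			pool->dwbc[i] = &dwbc30->base;	/* publish the embedded base */
		}
		return true;
	}

On failure the enclosing constructor's cleanup runs the full resource destruct, which is why the destruct loops earlier in this listing tolerate NULL entries.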
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1621
static void dcn32_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1623
struct dcn32_resource_pool *dcn32_pool = TO_DCN32_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1627
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1632
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1644
*lut = pool->mpc_lut[mpcc_id];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1645
*shaper = pool->mpc_shaper[mpcc_id];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1654
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1661
for (i = 0; i < pool->res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1662
if (pool->mpc_lut[i] == *lut && pool->mpc_shaper[i] == *shaper) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
1664
pool->mpc_lut[i]->state.raw = 0;
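The dcn32 matches at lines 1644-1664 pair a fixed-index acquire, where the LUT and shaper for a given MPCC come straight out of the pool slots, with a scan-to-release that finds the pair and resets the LUT state. A sketch of the pairing, assuming one lut/shaper per MPCC slot as these lines suggest; ref-counting and any status tracking beyond state.raw are elided:

	static void acquire_3dlut_sketch(const struct resource_pool *pool, int mpcc_id,
					 struct dc_3dlut **lut,
					 struct dc_transfer_func **shaper)
	{
		*lut = pool->mpc_lut[mpcc_id];		/* slot is fixed per MPCC */
		*shaper = pool->mpc_shaper[mpcc_id];
	}

	static bool release_3dlut_sketch(const struct resource_pool *pool,
					 struct dc_3dlut **lut,
					 struct dc_transfer_func **shaper)
	{
		int i;

		for (i = 0; i < pool->res_cap->num_mpc_3dlut; i++) {
			if (pool->mpc_lut[i] == *lut && pool->mpc_shaper[i] == *shaper) {
				pool->mpc_lut[i]->state.raw = 0;	/* mark slot clean */
				*lut = NULL;
				*shaper = NULL;
				return true;
			}
		}
		return false;	/* pair did not come from this pool */
	}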
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2168
struct dcn32_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2202
pool->base.res_cap = &res_cap_dcn32;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2204
num_pipes = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2207
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2223
pool->base.funcs = &dcn32_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2228
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2229
pool->base.timing_generator_count = num_pipes;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2230
pool->base.pipe_count = num_pipes;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2231
pool->base.mpcc_count = num_pipes;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2304
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //4, configurable to be before or after BLND in MPCC
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2349
pool->base.clock_sources[DCN32_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2353
pool->base.clock_sources[DCN32_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2357
pool->base.clock_sources[DCN32_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2361
pool->base.clock_sources[DCN32_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2365
pool->base.clock_sources[DCN32_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2370
pool->base.clk_src_count = DCN32_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2373
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2378
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2379
if (pool->base.clock_sources[i] == NULL) {
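The clock-source block repeats per ASIC: fill the PLL0..PLL4 slots, record clk_src_count, then scan every slot for a NULL before continuing. A sketch of what the matches at lines 2378-2379 imply sits between them, assuming the usual dcn31-family bail-out and a surrounding create_fail label that runs the resource destruct (as the match at line 2606 suggests):

	for (i = 0; i < pool->base.clk_src_count; i++) {
		if (pool->base.clock_sources[i] == NULL) {
			dm_error("DC: failed to create clock sources!\n");
			BREAK_TO_DEBUGGER();
			goto create_fail;	/* destructs the partial pool, returns false */
		}
	}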
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2387
pool->base.dccg = dccg32_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2388
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2399
pool->base.irqs = dal_irq_service_dcn32_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2400
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2404
pool->base.hubbub = dcn32_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2405
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2412
pool->base.dio = dcn32_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2413
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2420
for (i = 0, j = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2429
pool->base.hubps[j] = dcn32_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2430
if (pool->base.hubps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2438
pool->base.dpps[j] = dcn32_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2439
if (pool->base.dpps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2447
pool->base.opps[j] = dcn32_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2448
if (pool->base.opps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2456
pool->base.timing_generators[j] = dcn32_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2458
if (pool->base.timing_generators[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2465
pool->base.multiple_abms[j] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2469
if (pool->base.multiple_abms[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2480
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2481
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2488
pool->base.mpc = dcn32_mpc_create(ctx, pool->base.res_cap->num_timing_generator, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2489
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2496
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2497
pool->base.dscs[i] = dcn32_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2498
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2506
if (!dcn32_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2513
if (!dcn32_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2520
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2521
pool->base.engines[i] = dcn32_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2522
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2528
pool->base.hw_i2cs[i] = dcn32_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2529
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2535
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2539
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2546
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2561
pool->base.oem_device = dc->link_srv->create_ddc_service(&ddc_init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2563
pool->base.oem_device = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2566
dc->dml2_options.dcn_pipe_count = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2573
dc->dml2_options.svp_pstate.callbacks.calculate_mall_ways_from_bytes = pool->base.funcs->calculate_mall_ways_from_bytes;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2606
dcn32_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2615
struct dcn32_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2618
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2621
if (dcn32_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2622
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2625
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2674
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2690
cur_res_ctx, new_res_ctx, pool);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2700
cur_res_ctx, new_res_ctx, pool);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2703
free_pipe_idx = resource_find_any_free_pipe(new_res_ctx, pool);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2710
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2745
preferred_pipe_idx = (pool->pipe_count - 1) - primary_pipe->pipe_idx;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2758
for (i = pool->pipe_count - 1; i >= 0; i--) {
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2772
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2798
idle_pipe = find_idle_secondary_pipe_check_mpo(res_ctx, pool, head_pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2807
idle_pipe->plane_res.hubp = pool->hubps[idle_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2808
idle_pipe->plane_res.ipp = pool->ipps[idle_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2809
idle_pipe->plane_res.dpp = pool->dpps[idle_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2810
idle_pipe->plane_res.mpcc_inst = pool->dpps[idle_pipe->pipe_idx]->inst;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2818
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2834
cur_res_ctx, new_res_ctx, pool);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2837
free_pipe_idx = resource_find_any_free_pipe(new_res_ctx, pool);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2845
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2854
new_ctx, pool, opp_head_pipe->stream, opp_head_pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2858
pool, opp_head_pipe);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2866
free_pipe->plane_res.hubp = pool->hubps[free_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2867
free_pipe->plane_res.ipp = pool->ipps[free_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2868
free_pipe->plane_res.dpp = pool->dpps[free_pipe->pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2870
pool->dpps[free_pipe->pipe_idx]->inst;
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2882
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2887
pool, otg_master);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2896
free_pipe->stream_res.opp = pool->opps[free_pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2897
free_pipe->plane_res.mi = pool->mis[free_pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2898
free_pipe->plane_res.hubp = pool->hubps[free_pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2899
free_pipe->plane_res.ipp = pool->ipps[free_pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2900
free_pipe->plane_res.xfm = pool->transforms[free_pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2901
free_pipe->plane_res.dpp = pool->dpps[free_pipe_idx];
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.c
2902
free_pipe->plane_res.mpcc_inst = pool->dpps[free_pipe_idx]->inst;
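Every pipe-acquisition match above ends the same way: once a free or idle pipe index is found, the pipe's plane resources are wired directly from the pool's per-pipe arrays at that index (lines 2807-2810, 2866-2870, 2896-2902). A condensed sketch; wire_plane_res is a hypothetical helper, not a function in the driver:

	static void wire_plane_res(struct pipe_ctx *pipe,
				   const struct resource_pool *pool, int idx)
	{
		pipe->plane_res.hubp = pool->hubps[idx];
		pipe->plane_res.ipp = pool->ipps[idx];
		pipe->plane_res.dpp = pool->dpps[idx];
		pipe->plane_res.mpcc_inst = pool->dpps[idx]->inst;	/* MPCC follows DPP */
	}

The variant at lines 2896-2902 additionally pulls opp, mi and xfm from the same index, which is what keeps a pipe's front-end blocks aligned by instance.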
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
143
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
149
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
155
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
160
const struct resource_pool *pool);
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
47
#define TO_DCN32_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
48
container_of(pool, struct dcn32_resource_pool, base)
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
84
const struct resource_pool *pool,
drivers/gpu/drm/amd/display/dc/resource/dcn32/dcn32_resource.h
91
const struct resource_pool *pool,
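
The dcn32 hits above repeat one idiom: a freshly acquired idle or free pipe borrows the hardware blocks that share its index from the pool arrays (hubps, ipps, dpps), and the MPCC instance is read off the DPP. Below is a minimal self-contained sketch of that assignment, assuming simplified stand-in structs rather than the real DC types:

	/* Stand-in types; the real DC structs carry much more state. */
	struct hubp { int inst; };
	struct ipp  { int inst; };
	struct dpp  { int inst; };

	struct resource_pool {
		struct hubp *hubps[6];
		struct ipp *ipps[6];
		struct dpp *dpps[6];
	};

	struct pipe_ctx {
		int pipe_idx;
		struct {
			struct hubp *hubp;
			struct ipp *ipp;
			struct dpp *dpp;
			int mpcc_inst;
		} plane_res;
	};

	/* Attach the hardware blocks that share the pipe's index. */
	static void assign_plane_resources(struct pipe_ctx *pipe,
					   const struct resource_pool *pool)
	{
		int i = pipe->pipe_idx;

		pipe->plane_res.hubp = pool->hubps[i];
		pipe->plane_res.ipp = pool->ipps[i];
		pipe->plane_res.dpp = pool->dpps[i];
		/* The MPCC instance is derived from the DPP, as in the hits. */
		pipe->plane_res.mpcc_inst = pool->dpps[i]->inst;
	}
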
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1362
static void dcn321_resource_destruct(struct dcn321_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1366
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1367
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1368
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1369
kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1370
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1372
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1373
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1374
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1376
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1377
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1381
for (i = 0; i < pool->base.hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1382
if (pool->base.hpo_dp_stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1383
if (pool->base.hpo_dp_stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1384
kfree(DCN30_VPG_FROM_VPG(pool->base.hpo_dp_stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1385
pool->base.hpo_dp_stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1387
if (pool->base.hpo_dp_stream_enc[i]->apg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1388
kfree(DCN31_APG_FROM_APG(pool->base.hpo_dp_stream_enc[i]->apg));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1389
pool->base.hpo_dp_stream_enc[i]->apg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1391
kfree(DCN3_1_HPO_DP_STREAM_ENC_FROM_HPO_STREAM_ENC(pool->base.hpo_dp_stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1392
pool->base.hpo_dp_stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1396
for (i = 0; i < pool->base.hpo_dp_link_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1397
if (pool->base.hpo_dp_link_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1398
kfree(DCN3_1_HPO_DP_LINK_ENC_FROM_HPO_LINK_ENC(pool->base.hpo_dp_link_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1399
pool->base.hpo_dp_link_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1403
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1404
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1405
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1408
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1409
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1410
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1412
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1413
kfree(TO_DCN20_HUBBUB(pool->base.hubbub));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1414
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1416
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1417
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1418
dcn321_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1420
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1421
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1423
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1424
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1425
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1428
if (pool->base.irqs != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1429
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1432
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1433
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1434
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1435
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1436
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1437
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1439
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1440
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1441
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1445
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1446
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1447
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1450
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1451
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1452
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1453
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1457
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1458
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1459
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1460
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1462
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1463
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1464
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1468
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1469
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1470
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1473
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1474
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1475
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1476
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1480
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1481
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1482
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1483
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1485
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1486
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1487
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1491
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1492
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1493
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1496
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1497
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1498
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1501
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1502
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1504
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1505
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1507
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1508
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1509
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1512
if (pool->base.oem_device != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1513
struct dc *dc = pool->base.oem_device->ctx->dc;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1515
dc->link_srv->destroy_ddc_service(&pool->base.oem_device);
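
Every branch of dcn321_resource_destruct() above follows the same check/free/NULL idiom: test the base pointer, free the enclosing allocation recovered via a container macro, then clear the pointer so a repeated destruct pass is harmless. A hedged sketch of that idiom, using a stand-in wrapper type and a local container_of rather than the kernel's:

	#include <stdlib.h>
	#include <stddef.h>

	#define container_of(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))

	struct mpc { int inst; };

	/* The wrapper that was actually allocated; 'base' is what callers hold. */
	struct dcn20_mpc { struct mpc base; };

	#define TO_DCN20_MPC(mpc_ptr) container_of(mpc_ptr, struct dcn20_mpc, base)

	static void destruct_mpc(struct mpc **mpc)
	{
		if (*mpc != NULL) {
			/* Free the enclosing allocation, not the embedded base. */
			free(TO_DCN20_MPC(*mpc));
			*mpc = NULL;	/* make a second destruct pass harmless */
		}
	}
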
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1520
static bool dcn321_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1523
uint32_t dwb_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1543
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1548
static bool dcn321_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1551
uint32_t dwb_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1571
pool->mcif_wb[i] = &mcif_wb30->base;
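
The dwbc/mmhubbub create helpers hit above allocate one wrapper per writeback unit and publish only the embedded base into the pool array. A rough sketch of that loop, with hypothetical simplified types and calloc standing in for the kernel allocator:

	#include <stdlib.h>
	#include <stdbool.h>
	#include <stdint.h>

	struct dwbc { int inst; };
	struct dcn30_dwbc { struct dwbc base; };	/* wrapper, as in dwbc30 */

	#define MAX_DWB 2

	struct resource_pool {
		uint32_t num_dwb;
		struct dwbc *dwbc[MAX_DWB];
	};

	/* One wrapper per DWB; the pool array sees only the base. */
	static bool dwbc_create(struct resource_pool *pool)
	{
		uint32_t i;

		for (i = 0; i < pool->num_dwb; i++) {
			struct dcn30_dwbc *dwbc30 = calloc(1, sizeof(*dwbc30));

			if (!dwbc30)
				return false;	/* caller unwinds via destruct */
			dwbc30->base.inst = i;
			pool->dwbc[i] = &dwbc30->base;
		}
		return true;
	}
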
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1601
static void dcn321_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1603
struct dcn321_resource_pool *dcn321_pool = TO_DCN321_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1607
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1671
struct dcn321_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1706
pool->base.res_cap = &res_cap_dcn321;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1708
num_pipes = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1711
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1727
pool->base.funcs = &dcn321_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1732
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1733
pool->base.timing_generator_count = num_pipes;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1734
pool->base.pipe_count = num_pipes;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1735
pool->base.mpcc_count = num_pipes;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1803
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //4, configurable to be before or after BLND in MPCC
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1848
pool->base.clock_sources[DCN321_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1852
pool->base.clock_sources[DCN321_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1856
pool->base.clock_sources[DCN321_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1860
pool->base.clock_sources[DCN321_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1864
pool->base.clock_sources[DCN321_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1869
pool->base.clk_src_count = DCN321_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1872
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1877
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1878
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1886
pool->base.dccg = dccg32_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1887
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1898
pool->base.irqs = dal_irq_service_dcn32_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1899
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1903
pool->base.hubbub = dcn321_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1904
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1911
pool->base.dio = dcn321_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1912
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1919
for (i = 0, j = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1927
pool->base.hubps[j] = dcn321_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1928
if (pool->base.hubps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1935
pool->base.dpps[j] = dcn321_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1936
if (pool->base.dpps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1943
pool->base.opps[j] = dcn321_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1944
if (pool->base.opps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1951
pool->base.timing_generators[j] = dcn321_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1953
if (pool->base.timing_generators[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1959
pool->base.multiple_abms[j] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1963
if (pool->base.multiple_abms[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1974
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1975
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1982
pool->base.mpc = dcn321_mpc_create(ctx, pool->base.res_cap->num_timing_generator, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1983
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1990
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1991
pool->base.dscs[i] = dcn321_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
1992
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2000
if (!dcn321_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2007
if (!dcn321_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2014
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2015
pool->base.engines[i] = dcn321_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2016
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2022
pool->base.hw_i2cs[i] = dcn321_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2023
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2029
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2033
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2040
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2055
pool->base.oem_device = dc->link_srv->create_ddc_service(&ddc_init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2057
pool->base.oem_device = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2060
dc->dml2_options.dcn_pipe_count = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2067
dc->dml2_options.svp_pstate.callbacks.calculate_mall_ways_from_bytes = pool->base.funcs->calculate_mall_ways_from_bytes;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2096
dcn321_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2105
struct dcn321_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2108
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2111
if (dcn321_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2112
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.c
2115
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.h
31
#define TO_DCN321_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn321/dcn321_resource.h
32
container_of(pool, struct dcn321_resource_pool, base)
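
The TO_DCN321_RES_POOL macro above is the downcast half of the destroy pattern seen at dcn321_destroy_resource_pool(): recover the derived pool from the base pointer, run the destruct, free the derived allocation, and NULL the caller's handle through the double pointer. A minimal sketch under those assumptions, with the member teardown elided:

	#include <stdlib.h>
	#include <stddef.h>

	#define container_of(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))

	struct resource_pool { int pipe_count; };
	struct dcn321_resource_pool { struct resource_pool base; };

	#define TO_DCN321_RES_POOL(pool) \
		container_of(pool, struct dcn321_resource_pool, base)

	static void resource_destruct(struct dcn321_resource_pool *pool)
	{
		(void)pool;	/* per-member teardown elided in this sketch */
	}

	static void destroy_resource_pool(struct resource_pool **pool)
	{
		struct dcn321_resource_pool *dcn321_pool = TO_DCN321_RES_POOL(*pool);

		resource_destruct(dcn321_pool);
		free(dcn321_pool);
		*pool = NULL;	/* the caller's handle cannot dangle */
	}
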
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1445
static void dcn35_resource_destruct(struct dcn35_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1449
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1450
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1451
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1452
kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1453
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1455
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1456
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1457
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1459
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1460
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1464
for (i = 0; i < pool->base.hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1465
if (pool->base.hpo_dp_stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1466
if (pool->base.hpo_dp_stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1467
kfree(DCN30_VPG_FROM_VPG(pool->base.hpo_dp_stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1468
pool->base.hpo_dp_stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1470
if (pool->base.hpo_dp_stream_enc[i]->apg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1471
kfree(DCN31_APG_FROM_APG(pool->base.hpo_dp_stream_enc[i]->apg));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1472
pool->base.hpo_dp_stream_enc[i]->apg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1474
kfree(DCN3_1_HPO_DP_STREAM_ENC_FROM_HPO_STREAM_ENC(pool->base.hpo_dp_stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1475
pool->base.hpo_dp_stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1479
for (i = 0; i < pool->base.hpo_dp_link_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1480
if (pool->base.hpo_dp_link_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1481
kfree(DCN3_1_HPO_DP_LINK_ENC_FROM_HPO_LINK_ENC(pool->base.hpo_dp_link_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1482
pool->base.hpo_dp_link_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1486
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1487
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1488
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1491
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1492
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1493
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1495
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1496
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1497
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1499
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1500
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1501
dcn35_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1503
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1504
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1506
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1507
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1508
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1511
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1512
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1516
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1517
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1518
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1519
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1520
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1521
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1523
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1524
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1525
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1529
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1530
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1531
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1534
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1535
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1536
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1537
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1541
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1542
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1543
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1544
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1546
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1547
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1548
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1552
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1553
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1554
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1557
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1558
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1559
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1560
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1564
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1565
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1566
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1567
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1569
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1570
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1571
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1575
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1576
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1577
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1580
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1581
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1582
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1585
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1586
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1588
if (pool->base.replay != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1589
dmub_replay_destroy(&pool->base.replay);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1591
if (pool->base.pg_cntl != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1592
dcn_pg_cntl_destroy(&pool->base.pg_cntl);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1594
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1595
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1597
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1598
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1599
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1635
static bool dcn35_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1638
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1658
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1673
static bool dcn35_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1676
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1698
pool->mcif_wb[i] = &mcif_wb30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1727
static void dcn35_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1729
struct dcn35_resource_pool *dcn35_pool = TO_DCN35_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1733
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1846
struct dcn35_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1877
pool->base.res_cap = &res_cap_dcn35;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1879
pool->base.funcs = &dcn35_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1884
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1885
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1886
pool->base.mpcc_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1939
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //2
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
1999
pool->base.clock_sources[DCN35_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2003
pool->base.clock_sources[DCN35_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2007
pool->base.clock_sources[DCN35_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2011
pool->base.clock_sources[DCN35_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2015
pool->base.clock_sources[DCN35_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2020
pool->base.clk_src_count = DCN35_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2023
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2028
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2029
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2039
pool->base.dccg = dccg35_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2040
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2050
pool->base.pg_cntl = pg_cntl35_create(ctx, &pg_cntl_regs, &pg_cntl_shift, &pg_cntl_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2051
if (pool->base.pg_cntl == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2059
pool->base.irqs = dal_irq_service_dcn35_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2060
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2064
pool->base.hubbub = dcn35_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2065
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2072
pool->base.dio = dcn35_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2073
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2080
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2081
pool->base.hubps[i] = dcn35_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2082
if (pool->base.hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2089
pool->base.dpps[i] = dcn35_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2090
if (pool->base.dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2098
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2099
pool->base.opps[i] = dcn35_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2100
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2108
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2109
pool->base.timing_generators[i] = dcn35_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2111
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2117
pool->base.timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2120
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2121
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2128
pool->base.replay = dmub_replay_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2129
if (pool->base.replay == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2136
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2137
pool->base.multiple_abms[i] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2141
if (pool->base.multiple_abms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2149
pool->base.mpc = dcn35_mpc_create(ctx, pool->base.mpcc_count, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2150
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2156
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2157
pool->base.dscs[i] = dcn35_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2158
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2166
if (!dcn35_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2172
if (!dcn35_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2179
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2180
pool->base.engines[i] = dcn31_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2181
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2187
pool->base.hw_i2cs[i] = dcn31_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2188
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2194
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2198
pool->base.usb4_dpia_count = 4;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2200
pool->base.usb4_dpia_count = 0;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2203
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2210
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2219
dc->dcn_ip->max_num_dpp = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2221
dc->dml2_options.dcn_pipe_count = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2242
dcn35_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2251
struct dcn35_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2254
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2257
if (dcn35_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2258
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.c
2261
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.h
33
#define TO_DCN35_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn35/dcn35_resource.h
34
container_of(pool, struct dcn35_resource_pool, base)
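
Across the dcn35 construct hits, every create call is NULL-checked and a failure branches to a single cleanup label that reuses the destruct routine, which is why each destruct branch NULL-checks before freeing: it must be safe on a partially built pool. A compact sketch of that unwind shape, assuming two stand-in sub-blocks and calloc in place of the kernel creators:

	#include <stdbool.h>
	#include <stdlib.h>

	struct dccg { int inst; };
	struct hubbub { int inst; };

	struct resource_pool {
		struct dccg *dccg;
		struct hubbub *hubbub;
	};

	static void pool_destruct(struct resource_pool *pool)
	{
		/* Safe on partially built pools: free(NULL) is a no-op. */
		free(pool->hubbub);
		pool->hubbub = NULL;
		free(pool->dccg);
		pool->dccg = NULL;
	}

	/* Any failed create unwinds everything built so far through the
	 * same destruct used on normal teardown. */
	static bool pool_construct(struct resource_pool *pool)
	{
		pool->dccg = calloc(1, sizeof(*pool->dccg));
		if (pool->dccg == NULL)
			goto create_fail;

		pool->hubbub = calloc(1, sizeof(*pool->hubbub));
		if (pool->hubbub == NULL)
			goto create_fail;

		return true;

	create_fail:
		pool_destruct(pool);
		return false;
	}
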
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1425
static void dcn351_resource_destruct(struct dcn351_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1429
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1430
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1431
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1432
kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1433
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1435
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1436
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1437
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1439
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1440
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1444
for (i = 0; i < pool->base.hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1445
if (pool->base.hpo_dp_stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1446
if (pool->base.hpo_dp_stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1447
kfree(DCN30_VPG_FROM_VPG(pool->base.hpo_dp_stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1448
pool->base.hpo_dp_stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1450
if (pool->base.hpo_dp_stream_enc[i]->apg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1451
kfree(DCN31_APG_FROM_APG(pool->base.hpo_dp_stream_enc[i]->apg));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1452
pool->base.hpo_dp_stream_enc[i]->apg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1454
kfree(DCN3_1_HPO_DP_STREAM_ENC_FROM_HPO_STREAM_ENC(pool->base.hpo_dp_stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1455
pool->base.hpo_dp_stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1459
for (i = 0; i < pool->base.hpo_dp_link_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1460
if (pool->base.hpo_dp_link_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1461
kfree(DCN3_1_HPO_DP_LINK_ENC_FROM_HPO_LINK_ENC(pool->base.hpo_dp_link_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1462
pool->base.hpo_dp_link_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1466
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1467
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1468
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1471
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1472
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1473
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1475
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1476
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1477
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1479
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1480
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1481
dcn35_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1483
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1484
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1486
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1487
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1488
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1491
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1492
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1496
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1497
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1498
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1499
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1500
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1501
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1503
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1504
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1505
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1509
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1510
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1511
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1514
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1515
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1516
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1517
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1521
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1522
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1523
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1524
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1526
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1527
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1528
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1532
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1533
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1534
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1537
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1538
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1539
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1540
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1544
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1545
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1546
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1547
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1549
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1550
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1551
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1555
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1556
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1557
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1560
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1561
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1562
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1565
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1566
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1568
if (pool->base.replay != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1569
dmub_replay_destroy(&pool->base.replay);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1571
if (pool->base.pg_cntl != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1572
dcn_pg_cntl_destroy(&pool->base.pg_cntl);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1574
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1575
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1577
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1578
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1579
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1615
static bool dcn35_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1618
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1638
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1653
static bool dcn35_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1656
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1678
pool->mcif_wb[i] = &mcif_wb30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1707
static void dcn351_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1709
struct dcn351_resource_pool *dcn351_pool = TO_DCN351_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1713
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1819
struct dcn351_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1850
pool->base.res_cap = &res_cap_dcn351;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1852
pool->base.funcs = &dcn351_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1857
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1858
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1859
pool->base.mpcc_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1912
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //2
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1971
pool->base.clock_sources[DCN351_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1975
pool->base.clock_sources[DCN351_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1979
pool->base.clock_sources[DCN351_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1983
pool->base.clock_sources[DCN351_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1987
pool->base.clock_sources[DCN351_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1992
pool->base.clk_src_count = DCN351_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
1995
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2000
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2001
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2011
pool->base.dccg = dccg35_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2012
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2022
pool->base.pg_cntl = pg_cntl35_create(ctx, &pg_cntl_regs, &pg_cntl_shift, &pg_cntl_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2023
if (pool->base.pg_cntl == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2031
pool->base.irqs = dal_irq_service_dcn351_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2032
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2036
pool->base.hubbub = dcn35_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2037
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2044
pool->base.dio = dcn351_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2045
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2052
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2053
pool->base.hubps[i] = dcn35_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2054
if (pool->base.hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2061
pool->base.dpps[i] = dcn35_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2062
if (pool->base.dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2070
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2071
pool->base.opps[i] = dcn35_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2072
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2080
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2081
pool->base.timing_generators[i] = dcn35_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2083
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2089
pool->base.timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2092
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2093
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2100
pool->base.replay = dmub_replay_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2101
if (pool->base.replay == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2108
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2109
pool->base.multiple_abms[i] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2113
if (pool->base.multiple_abms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2121
pool->base.mpc = dcn35_mpc_create(ctx, pool->base.mpcc_count, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2122
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2128
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2129
pool->base.dscs[i] = dcn35_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2130
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2138
if (!dcn35_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2144
if (!dcn35_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2151
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2152
pool->base.engines[i] = dcn31_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2153
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2159
pool->base.hw_i2cs[i] = dcn31_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2160
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2166
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2170
pool->base.usb4_dpia_count = 4;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2172
pool->base.usb4_dpia_count = 0;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2175
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2182
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2192
dc->dcn_ip->max_num_dpp = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2194
dc->dml2_options.dcn_pipe_count = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2215
dcn351_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2224
struct dcn351_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2227
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2230
if (dcn351_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2231
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.c
2234
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.h
12
#define TO_DCN351_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn351/dcn351_resource.h
13
container_of(pool, struct dcn351_resource_pool, base)
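The TO_DCN351_RES_POOL() macro above (and its DCN36/DCN401 siblings below) is the standard container_of() downcast: given a pointer to the embedded base member, recover the enclosing wrapper struct. A minimal userspace sketch, with a simplified container_of and hypothetical struct fields, just to show the pointer arithmetic involved:

	#include <stddef.h>
	#include <stdio.h>

	/* Simplified container_of(): subtract the member's offset from the
	 * member pointer to get back to the start of the enclosing struct. */
	#define container_of(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))

	struct resource_pool { int pipe_count; };                 /* stand-in base */
	struct dcn_resource_pool { struct resource_pool base; };  /* hypothetical wrapper */

	#define TO_DCN_RES_POOL(pool) \
		container_of(pool, struct dcn_resource_pool, base)

	int main(void)
	{
		struct dcn_resource_pool wrapper = { .base = { .pipe_count = 4 } };
		struct resource_pool *base = &wrapper.base;  /* what generic code sees */

		/* Recover the wrapper from the base pointer. */
		struct dcn_resource_pool *back = TO_DCN_RES_POOL(base);
		printf("%d\n", back->base.pipe_count);       /* prints 4 */
		return 0;
	}

The kernel's real container_of() adds a static type check on top of this arithmetic, but the offset subtraction is the whole trick.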
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1432
static void dcn36_resource_destruct(struct dcn36_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1436
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1437
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1438
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1439
kfree(DCN30_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1440
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1442
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1443
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1444
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1446
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1447
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1451
for (i = 0; i < pool->base.hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1452
if (pool->base.hpo_dp_stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1453
if (pool->base.hpo_dp_stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1454
kfree(DCN30_VPG_FROM_VPG(pool->base.hpo_dp_stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1455
pool->base.hpo_dp_stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1457
if (pool->base.hpo_dp_stream_enc[i]->apg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1458
kfree(DCN31_APG_FROM_APG(pool->base.hpo_dp_stream_enc[i]->apg));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1459
pool->base.hpo_dp_stream_enc[i]->apg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1461
kfree(DCN3_1_HPO_DP_STREAM_ENC_FROM_HPO_STREAM_ENC(pool->base.hpo_dp_stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1462
pool->base.hpo_dp_stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1466
for (i = 0; i < pool->base.hpo_dp_link_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1467
if (pool->base.hpo_dp_link_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1468
kfree(DCN3_1_HPO_DP_LINK_ENC_FROM_HPO_LINK_ENC(pool->base.hpo_dp_link_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1469
pool->base.hpo_dp_link_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1473
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1474
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1475
dcn20_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1478
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1479
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1480
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1482
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1483
kfree(pool->base.hubbub);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1484
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1486
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1487
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1488
dcn35_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1490
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1491
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1493
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1494
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1495
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1498
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1499
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1503
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1504
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1505
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1506
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1507
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1508
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1510
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1511
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1512
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1516
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1517
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1518
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1521
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1522
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1523
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1524
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1528
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1529
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1530
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1531
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1533
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1534
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1535
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1539
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1540
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1541
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1544
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1545
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1546
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1547
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1551
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1552
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1553
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1554
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1556
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1557
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1558
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1562
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1563
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1564
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1567
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1568
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1569
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1572
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1573
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1575
if (pool->base.replay != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1576
dmub_replay_destroy(&pool->base.replay);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1578
if (pool->base.pg_cntl != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1579
dcn_pg_cntl_destroy(&pool->base.pg_cntl);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1581
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1582
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1584
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1585
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1586
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1622
static bool dcn35_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1625
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1645
pool->dwbc[i] = &dwbc30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1660
static bool dcn35_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1663
uint32_t pipe_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1685
pool->mcif_wb[i] = &mcif_wb30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1714
static void dcn36_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1716
struct dcn36_resource_pool *dcn36_pool = TO_DCN36_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1720
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1825
struct dcn36_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1856
pool->base.res_cap = &res_cap_dcn36;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1858
pool->base.funcs = &dcn36_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1863
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1864
pool->base.pipe_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1865
pool->base.mpcc_count = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1918
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //2
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1978
pool->base.clock_sources[DCN36_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1982
pool->base.clock_sources[DCN36_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1986
pool->base.clock_sources[DCN36_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1990
pool->base.clock_sources[DCN36_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1994
pool->base.clock_sources[DCN36_CLK_SRC_PLL4] =
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
1999
pool->base.clk_src_count = DCN36_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2002
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2007
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2008
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2018
pool->base.dccg = dccg35_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2019
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2029
pool->base.pg_cntl = pg_cntl35_create(ctx, &pg_cntl_regs, &pg_cntl_shift, &pg_cntl_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2030
if (pool->base.pg_cntl == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2038
pool->base.irqs = dal_irq_service_dcn36_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2039
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2043
pool->base.hubbub = dcn35_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2044
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2051
pool->base.dio = dcn36_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2052
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2059
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2060
pool->base.hubps[i] = dcn35_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2061
if (pool->base.hubps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2068
pool->base.dpps[i] = dcn35_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2069
if (pool->base.dpps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2077
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2078
pool->base.opps[i] = dcn35_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2079
if (pool->base.opps[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2087
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2088
pool->base.timing_generators[i] = dcn35_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2090
if (pool->base.timing_generators[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2096
pool->base.timing_generator_count = i;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2099
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2100
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2107
pool->base.replay = dmub_replay_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2108
if (pool->base.replay == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2115
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2116
pool->base.multiple_abms[i] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2120
if (pool->base.multiple_abms[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2128
pool->base.mpc = dcn35_mpc_create(ctx, pool->base.mpcc_count, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2129
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2135
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2136
pool->base.dscs[i] = dcn35_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2137
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2145
if (!dcn35_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2151
if (!dcn35_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2158
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2159
pool->base.engines[i] = dcn31_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2160
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2166
pool->base.hw_i2cs[i] = dcn31_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2167
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2173
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2177
pool->base.usb4_dpia_count = 4;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2179
pool->base.usb4_dpia_count = 0;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2182
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2189
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2198
dc->dcn_ip->max_num_dpp = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2200
dc->dml2_options.dcn_pipe_count = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2221
dcn36_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2230
struct dcn36_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2233
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2236
if (dcn36_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2237
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.c
2240
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.h
12
#define TO_DCN36_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn36/dcn36_resource.h
13
container_of(pool, struct dcn36_resource_pool, base)
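dcn36_destroy_resource_pool() above takes a struct resource_pool ** and sets *pool = NULL after teardown; the double pointer is what lets the destructor clear the caller's handle as well as free the object. A minimal sketch of that shape, assuming nothing about the real destruct path:

	#include <stdio.h>
	#include <stdlib.h>

	struct resource_pool { int dummy; };

	/* Taking the address of the caller's pointer lets the destructor both
	 * free the object and NULL the handle, so no dangling pointer survives
	 * the call. */
	static void destroy_resource_pool(struct resource_pool **pool)
	{
		free(*pool);
		*pool = NULL;
	}

	int main(void)
	{
		struct resource_pool *pool = calloc(1, sizeof(*pool));

		destroy_resource_pool(&pool);
		printf("%s\n", pool ? "dangling" : "NULL");  /* prints NULL */
		return 0;
	}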
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1387
static void dcn401_resource_destruct(struct dcn401_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1391
for (i = 0; i < pool->base.stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1392
if (pool->base.stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1393
if (pool->base.stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1394
kfree(DCN31_VPG_FROM_VPG(pool->base.stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1395
pool->base.stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1397
if (pool->base.stream_enc[i]->afmt != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1398
kfree(DCN30_AFMT_FROM_AFMT(pool->base.stream_enc[i]->afmt));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1399
pool->base.stream_enc[i]->afmt = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1401
kfree(DCN10STRENC_FROM_STRENC(pool->base.stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1402
pool->base.stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1406
for (i = 0; i < pool->base.hpo_dp_stream_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1407
if (pool->base.hpo_dp_stream_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1408
if (pool->base.hpo_dp_stream_enc[i]->vpg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1409
kfree(DCN31_VPG_FROM_VPG(pool->base.hpo_dp_stream_enc[i]->vpg));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1410
pool->base.hpo_dp_stream_enc[i]->vpg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1412
if (pool->base.hpo_dp_stream_enc[i]->apg != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1413
kfree(DCN31_APG_FROM_APG(pool->base.hpo_dp_stream_enc[i]->apg));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1414
pool->base.hpo_dp_stream_enc[i]->apg = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1416
kfree(DCN3_1_HPO_DP_STREAM_ENC_FROM_HPO_STREAM_ENC(pool->base.hpo_dp_stream_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1417
pool->base.hpo_dp_stream_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1421
for (i = 0; i < pool->base.hpo_dp_link_enc_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1422
if (pool->base.hpo_dp_link_enc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1423
kfree(DCN3_1_HPO_DP_LINK_ENC_FROM_HPO_LINK_ENC(pool->base.hpo_dp_link_enc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1424
pool->base.hpo_dp_link_enc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1428
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1429
if (pool->base.dscs[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1430
dcn401_dsc_destroy(&pool->base.dscs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1433
if (pool->base.mpc != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1434
kfree(TO_DCN20_MPC(pool->base.mpc));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1435
pool->base.mpc = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1437
if (pool->base.hubbub != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1438
kfree(TO_DCN20_HUBBUB(pool->base.hubbub));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1439
pool->base.hubbub = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1441
for (i = 0; i < pool->base.pipe_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1442
if (pool->base.dpps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1443
dcn401_dpp_destroy(&pool->base.dpps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1445
if (pool->base.ipps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1446
pool->base.ipps[i]->funcs->ipp_destroy(&pool->base.ipps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1448
if (pool->base.hubps[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1449
kfree(TO_DCN20_HUBP(pool->base.hubps[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1450
pool->base.hubps[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1453
if (pool->base.irqs != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1454
dal_irq_service_destroy(&pool->base.irqs);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1458
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1459
if (pool->base.engines[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1460
dce110_engine_destroy(&pool->base.engines[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1461
if (pool->base.hw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1462
kfree(pool->base.hw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1463
pool->base.hw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1465
if (pool->base.sw_i2cs[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1466
kfree(pool->base.sw_i2cs[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1467
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1471
for (i = 0; i < pool->base.res_cap->num_opp; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1472
if (pool->base.opps[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1473
pool->base.opps[i]->funcs->opp_destroy(&pool->base.opps[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1476
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1477
if (pool->base.timing_generators[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1478
kfree(DCN10TG_FROM_TG(pool->base.timing_generators[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1479
pool->base.timing_generators[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1483
for (i = 0; i < pool->base.res_cap->num_dwb; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1484
if (pool->base.dwbc[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1485
kfree(TO_DCN30_DWBC(pool->base.dwbc[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1486
pool->base.dwbc[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1488
if (pool->base.mcif_wb[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1489
kfree(TO_DCN30_MMHUBBUB(pool->base.mcif_wb[i]));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1490
pool->base.mcif_wb[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1494
for (i = 0; i < pool->base.audio_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1495
if (pool->base.audios[i])
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1496
dce_aud_destroy(&pool->base.audios[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1499
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1500
if (pool->base.clock_sources[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1501
dcn20_clock_source_destroy(&pool->base.clock_sources[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1502
pool->base.clock_sources[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1506
for (i = 0; i < pool->base.res_cap->num_mpc_3dlut; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1507
if (pool->base.mpc_lut[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1508
dc_3dlut_func_release(pool->base.mpc_lut[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1509
pool->base.mpc_lut[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1511
if (pool->base.mpc_shaper[i] != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1512
dc_transfer_func_release(pool->base.mpc_shaper[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1513
pool->base.mpc_shaper[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1517
if (pool->base.dp_clock_source != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1518
dcn20_clock_source_destroy(&pool->base.dp_clock_source);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1519
pool->base.dp_clock_source = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1522
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1523
if (pool->base.multiple_abms[i] != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1524
dce_abm_destroy(&pool->base.multiple_abms[i]);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1527
if (pool->base.psr != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1528
dmub_psr_destroy(&pool->base.psr);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1530
if (pool->base.dccg != NULL)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1531
dcn_dccg_destroy(&pool->base.dccg);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1533
if (pool->base.dio != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1534
kfree(TO_DCN10_DIO(pool->base.dio));
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1535
pool->base.dio = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1538
if (pool->base.oem_device != NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1539
struct dc *dc = pool->base.oem_device->ctx->dc;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1541
dc->link_srv->destroy_ddc_service(&pool->base.oem_device);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1546
static bool dcn401_dwbc_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1549
uint32_t dwb_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1570
pool->dwbc[i] = &dwbc401->base;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1576
static bool dcn401_mmhubbub_create(struct dc_context *ctx, struct resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1579
uint32_t dwb_count = pool->res_cap->num_dwb;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1599
pool->mcif_wb[i] = &mcif_wb30->base;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1632
static void dcn401_destroy_resource_pool(struct resource_pool **pool)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1634
struct dcn401_resource_pool *dcn401_pool = TO_DCN401_RES_POOL(*pool);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1638
*pool = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1857
struct dcn401_resource_pool *pool)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1890
pool->base.res_cap = &res_cap_dcn4_01;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1893
num_pipes = pool->base.res_cap->num_timing_generator;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1896
for (i = 0; i < pool->base.res_cap->num_timing_generator; i++)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1906
pool->base.funcs = &dcn401_res_pool_funcs;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1911
pool->base.underlay_pipe_index = NO_UNDERLAY_PIPE;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1912
pool->base.timing_generator_count = num_pipes;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1913
pool->base.pipe_count = num_pipes;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1914
pool->base.mpcc_count = num_pipes;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
1984
dc->caps.color.mpc.num_3dluts = pool->base.res_cap->num_mpc_3dlut; //4, configurable to be before or after BLND in MPCC
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2049
pool->base.clock_sources[DCN401_CLK_SRC_PLL0] =
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2053
pool->base.clock_sources[DCN401_CLK_SRC_PLL1] =
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2057
pool->base.clock_sources[DCN401_CLK_SRC_PLL2] =
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2061
pool->base.clock_sources[DCN401_CLK_SRC_PLL3] =
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2070
pool->base.clk_src_count = DCN401_CLK_SRC_TOTAL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2073
pool->base.dp_clock_source =
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2078
for (i = 0; i < pool->base.clk_src_count; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2079
if (pool->base.clock_sources[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2087
pool->base.dccg = dccg401_create(ctx, &dccg_regs, &dccg_shift, &dccg_mask);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2088
if (pool->base.dccg == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2096
pool->base.irqs = dal_irq_service_dcn401_create(&init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2097
if (!pool->base.irqs)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2101
pool->base.hubbub = dcn401_hubbub_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2102
if (pool->base.hubbub == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2109
pool->base.dio = dcn401_dio_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2110
if (pool->base.dio == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2117
for (i = 0, j = 0; i < pool->base.res_cap->num_timing_generator; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2125
pool->base.hubps[j] = dcn401_hubp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2126
if (pool->base.hubps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2133
pool->base.dpps[j] = dcn401_dpp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2134
if (pool->base.dpps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2141
pool->base.opps[j] = dcn401_opp_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2142
if (pool->base.opps[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2149
pool->base.timing_generators[j] = dcn401_timing_generator_create(
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2151
if (pool->base.timing_generators[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2157
pool->base.multiple_abms[j] = dmub_abm_create(ctx,
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2161
if (pool->base.multiple_abms[j] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2172
pool->base.psr = dmub_psr_create(ctx);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2173
if (pool->base.psr == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2180
pool->base.mpc = dcn401_mpc_create(ctx, pool->base.res_cap->num_timing_generator, pool->base.res_cap->num_mpc_3dlut);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2181
if (pool->base.mpc == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2188
for (i = 0; i < pool->base.res_cap->num_dsc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2189
pool->base.dscs[i] = dcn401_dsc_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2190
if (pool->base.dscs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2198
if (!dcn401_dwbc_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2205
if (!dcn401_mmhubbub_create(ctx, &pool->base)) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2212
for (i = 0; i < pool->base.res_cap->num_ddc; i++) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2213
pool->base.engines[i] = dcn401_aux_engine_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2214
if (pool->base.engines[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2220
pool->base.hw_i2cs[i] = dcn401_i2c_hw_create(ctx, i);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2221
if (pool->base.hw_i2cs[i] == NULL) {
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2227
pool->base.sw_i2cs[i] = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2231
if (!resource_construct(num_virtual_links, dc, &pool->base,
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2238
dc->caps.max_planes = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2253
pool->base.oem_device = dc->link_srv->create_ddc_service(&ddc_init_data);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2255
pool->base.oem_device = NULL;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2262
dc->dml2_options.dcn_pipe_count = pool->base.pipe_count;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2271
dc->dml2_options.svp_pstate.callbacks.calculate_mall_ways_from_bytes = pool->base.funcs->calculate_mall_ways_from_bytes;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2303
dcn401_resource_destruct(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2312
struct dcn401_resource_pool *pool =
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2315
if (!pool)
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2318
if (dcn401_resource_construct(init_data->num_virtual_links, dc, pool))
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2319
return &pool->base;
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.c
2322
kfree(pool);
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.h
12
#define TO_DCN401_RES_POOL(pool)\
drivers/gpu/drm/amd/display/dc/resource/dcn401/dcn401_resource.h
13
container_of(pool, struct dcn401_resource_pool, base)
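All three DCN create paths above follow the same shape: allocate the wrapper, run the construct helper, return the embedded base on success, free the wrapper on failure. A compilable sketch of that control flow, with hypothetical names and a trivial stand-in construct step:

	#include <stdbool.h>
	#include <stdlib.h>

	struct resource_pool { int pipe_count; };
	struct dcn_resource_pool { struct resource_pool base; };  /* hypothetical */

	static bool dcn_resource_construct(struct dcn_resource_pool *pool)
	{
		pool->base.pipe_count = 4;  /* stand-in for the real sub-object setup */
		return true;
	}

	static struct resource_pool *dcn_create_resource_pool(void)
	{
		struct dcn_resource_pool *pool = calloc(1, sizeof(*pool));

		if (!pool)
			return NULL;
		if (dcn_resource_construct(pool))
			return &pool->base;  /* hand out the base, keep the wrapper private */
		free(pool);                  /* construct failed: undo the allocation */
		return NULL;
	}

	int main(void)
	{
		struct resource_pool *base = dcn_create_resource_pool();

		/* base is the wrapper's first member, so the addresses coincide. */
		free((struct dcn_resource_pool *)base);
		return 0;
	}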
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2268
struct intel_gt_buffer_pool_node *pool = eb->batch_pool;
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2301
if (!pool) {
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2302
pool = intel_gt_get_buffer_pool(eb->gt, len,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2304
if (IS_ERR(pool))
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2305
return PTR_ERR(pool);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2306
eb->batch_pool = pool;
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2309
err = i915_gem_object_lock(pool->obj, &eb->ww);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2313
shadow = shadow_batch_pin(eb, pool->obj, eb->context->vm, PIN_USER);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2317
intel_gt_buffer_pool_mark_used(pool);
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2319
shadow->private = pool;
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2325
shadow = shadow_batch_pin(eb, pool->obj,
drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c
2331
shadow->private = pool;
drivers/gpu/drm/i915/gem/i915_gem_ttm.c
327
return ttm_pool_alloc(&bdev->pool, ttm, ctx);
drivers/gpu/drm/i915/gem/i915_gem_ttm.c
342
ttm_pool_free(&bdev->pool, ttm);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
103
struct intel_gt_buffer_pool *pool = node->pool;
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
104
struct intel_gt *gt = container_of(pool, struct intel_gt, buffer_pool);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
105
struct list_head *list = bucket_for_size(pool, node->obj->base.size);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
117
spin_lock_irqsave(&pool->lock, flags);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
120
spin_unlock_irqrestore(&pool->lock, flags);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
122
queue_delayed_work(gt->i915->unordered_wq, &pool->work,
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
140
node_create(struct intel_gt_buffer_pool *pool, size_t sz,
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
143
struct intel_gt *gt = container_of(pool, struct intel_gt, buffer_pool);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
15
bucket_for_size(struct intel_gt_buffer_pool *pool, size_t sz)
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
153
node->pool = pool;
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
175
struct intel_gt_buffer_pool *pool = &gt->buffer_pool;
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
181
list = bucket_for_size(pool, size);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
198
spin_lock_irq(&pool->lock);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
200
spin_unlock_irq(&pool->lock);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
207
node = node_create(pool, size, type);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
223
struct intel_gt_buffer_pool *pool = &gt->buffer_pool;
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
226
spin_lock_init(&pool->lock);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
227
for (n = 0; n < ARRAY_SIZE(pool->cache_list); n++)
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
228
INIT_LIST_HEAD(&pool->cache_list[n]);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
229
INIT_DELAYED_WORK(&pool->work, pool_free_work);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
234
struct intel_gt_buffer_pool *pool = &gt->buffer_pool;
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
237
while (pool_free_older_than(pool, 0))
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
239
} while (cancel_delayed_work_sync(&pool->work));
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
244
struct intel_gt_buffer_pool *pool = &gt->buffer_pool;
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
247
for (n = 0; n < ARRAY_SIZE(pool->cache_list); n++)
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
248
GEM_BUG_ON(!list_empty(&pool->cache_list[n]));
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
25
if (n >= ARRAY_SIZE(pool->cache_list))
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
26
n = ARRAY_SIZE(pool->cache_list) - 1;
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
28
return &pool->cache_list[n];
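bucket_for_size() above clamps the computed index so oversized requests share the last bucket instead of overrunning cache_list[]. A sketch of one plausible size-to-bucket mapping (log2 of the page count — the real i915 mapping may differ) with the same clamp applied:

	#include <stddef.h>
	#include <stdio.h>

	#define NUM_BUCKETS 4
	#define PAGE_SHIFT  12

	/* Hypothetical mapping: bucket index is log2 of the page count,
	 * clamped so anything too large lands in the last bucket. */
	static unsigned int bucket_index(size_t sz)
	{
		unsigned int n = 0;

		sz >>= PAGE_SHIFT;
		while (sz >>= 1)
			n++;
		if (n >= NUM_BUCKETS)
			n = NUM_BUCKETS - 1;
		return n;
	}

	int main(void)
	{
		printf("%u %u %u\n",
		       bucket_index(1 << 12),   /* one page    -> bucket 0 */
		       bucket_index(8 << 12),   /* eight pages -> bucket 3 */
		       bucket_index(1 << 30));  /* huge        -> clamped to 3 */
		return 0;
	}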
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
38
static bool pool_free_older_than(struct intel_gt_buffer_pool *pool, long keep)
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
45
for (n = 0; n < ARRAY_SIZE(pool->cache_list); n++) {
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
46
struct list_head *list = &pool->cache_list[n];
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
51
if (spin_trylock_irq(&pool->lock)) {
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
74
spin_unlock_irq(&pool->lock);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
90
struct intel_gt_buffer_pool *pool =
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
91
container_of(wrk, typeof(*pool), work.work);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
92
struct intel_gt *gt = container_of(pool, struct intel_gt, buffer_pool);
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
94
if (pool_free_older_than(pool, HZ))
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool.c
95
queue_delayed_work(gt->i915->unordered_wq, &pool->work,
drivers/gpu/drm/i915/gt/intel_gt_buffer_pool_types.h
27
struct intel_gt_buffer_pool *pool;
drivers/gpu/drm/i915/i915_gpu_error.c
1252
pool_free(&compress->pool, page_address(page));
drivers/gpu/drm/i915/i915_gpu_error.c
1756
pool_refill(&compress->pool, ALLOW_FAIL);
drivers/gpu/drm/i915/i915_gpu_error.c
248
struct folio_batch pool;
drivers/gpu/drm/i915/i915_gpu_error.c
257
if (pool_init(&c->pool, ALLOW_FAIL))
drivers/gpu/drm/i915/i915_gpu_error.c
264
pool_fini(&c->pool);
drivers/gpu/drm/i915/i915_gpu_error.c
270
c->tmp = pool_alloc(&c->pool, ALLOW_FAIL);
drivers/gpu/drm/i915/i915_gpu_error.c
292
page_addr = pool_alloc(&c->pool, ALLOW_FAIL);
drivers/gpu/drm/i915/i915_gpu_error.c
373
pool_free(&c->pool, c->tmp);
drivers/gpu/drm/i915/i915_gpu_error.c
374
pool_fini(&c->pool);
drivers/gpu/drm/i915/i915_gpu_error.c
385
struct folio_batch pool;
drivers/gpu/drm/i915/i915_gpu_error.c
390
return pool_init(&c->pool, ALLOW_FAIL) == 0;
drivers/gpu/drm/i915/i915_gpu_error.c
405
ptr = pool_alloc(&c->pool, ALLOW_FAIL);
drivers/gpu/drm/i915/i915_gpu_error.c
429
pool_fini(&c->pool);
drivers/gpu/drm/loongson/lsdc_ttm.c
126
return ttm_pool_alloc(&bdev->pool, ttm, ctx);
drivers/gpu/drm/loongson/lsdc_ttm.c
137
return ttm_pool_free(&bdev->pool, ttm);
drivers/gpu/drm/nouveau/nouveau_bo.c
1437
return ttm_pool_alloc(&drm->ttm.bdev.pool, ttm, ctx);
drivers/gpu/drm/nouveau/nouveau_bo.c
1454
return ttm_pool_free(&drm->ttm.bdev.pool, ttm);
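The i915, loongson and nouveau hooks above are one-line forwards to the shared TTM page pool embedded in the device. A trivial sketch of that delegation shape, with made-up names; the point is only that the driver callback carries no state of its own:

	#include <stdio.h>

	struct pool { int pages; };

	static int pool_alloc(struct pool *p) { p->pages++; return 0; }
	static void pool_free(struct pool *p) { p->pages--; }

	struct device {
		struct pool pool;  /* shared pool embedded in the device */
	};

	static int driver_populate(struct device *dev)
	{
		return pool_alloc(&dev->pool);  /* just delegate to the pool */
	}

	static void driver_unpopulate(struct device *dev)
	{
		pool_free(&dev->pool);
	}

	int main(void)
	{
		struct device dev = { .pool = { .pages = 0 } };

		driver_populate(&dev);
		driver_unpopulate(&dev);
		printf("%d\n", dev.pool.pages);  /* 0 */
		return 0;
	}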
drivers/gpu/drm/panthor/panthor_drv.c
1143
struct panthor_heap_pool *pool;
drivers/gpu/drm/panthor/panthor_drv.c
1151
pool = panthor_vm_get_heap_pool(vm, true);
drivers/gpu/drm/panthor/panthor_drv.c
1152
if (IS_ERR(pool)) {
drivers/gpu/drm/panthor/panthor_drv.c
1153
ret = PTR_ERR(pool);
drivers/gpu/drm/panthor/panthor_drv.c
1157
ret = panthor_heap_create(pool,
drivers/gpu/drm/panthor/panthor_drv.c
1174
panthor_heap_pool_put(pool);
drivers/gpu/drm/panthor/panthor_drv.c
1186
struct panthor_heap_pool *pool;
drivers/gpu/drm/panthor/panthor_drv.c
1197
pool = panthor_vm_get_heap_pool(vm, false);
drivers/gpu/drm/panthor/panthor_drv.c
1198
if (IS_ERR(pool)) {
drivers/gpu/drm/panthor/panthor_drv.c
1199
ret = PTR_ERR(pool);
drivers/gpu/drm/panthor/panthor_drv.c
1203
ret = panthor_heap_destroy(pool, args->handle & GENMASK(15, 0));
drivers/gpu/drm/panthor/panthor_drv.c
1204
panthor_heap_pool_put(pool);
drivers/gpu/drm/panthor/panthor_heap.c
114
static int panthor_get_heap_ctx_offset(struct panthor_heap_pool *pool, int id)
drivers/gpu/drm/panthor/panthor_heap.c
116
return panthor_heap_ctx_stride(pool->ptdev) * id;
drivers/gpu/drm/panthor/panthor_heap.c
119
static void *panthor_get_heap_ctx(struct panthor_heap_pool *pool, int id)
drivers/gpu/drm/panthor/panthor_heap.c
121
return pool->gpu_contexts->kmap +
drivers/gpu/drm/panthor/panthor_heap.c
122
panthor_get_heap_ctx_offset(pool, id);
drivers/gpu/drm/panthor/panthor_heap.c
125
static void panthor_free_heap_chunk(struct panthor_heap_pool *pool,
drivers/gpu/drm/panthor/panthor_heap.c
134
atomic_sub(heap->chunk_size, &pool->size);
drivers/gpu/drm/panthor/panthor_heap.c
140
static int panthor_alloc_heap_chunk(struct panthor_heap_pool *pool,
drivers/gpu/drm/panthor/panthor_heap.c
152
chunk->bo = panthor_kernel_bo_create(pool->ptdev, pool->vm, heap->chunk_size,
drivers/gpu/drm/panthor/panthor_heap.c
189
atomic_add(heap->chunk_size, &pool->size);
drivers/gpu/drm/panthor/panthor_heap.c
202
static void panthor_free_heap_chunks(struct panthor_heap_pool *pool,
drivers/gpu/drm/panthor/panthor_heap.c
208
panthor_free_heap_chunk(pool, heap, chunk);
drivers/gpu/drm/panthor/panthor_heap.c
211
static int panthor_alloc_heap_chunks(struct panthor_heap_pool *pool,
drivers/gpu/drm/panthor/panthor_heap.c
219
ret = panthor_alloc_heap_chunk(pool, heap, true);
drivers/gpu/drm/panthor/panthor_heap.c
228
panthor_heap_destroy_locked(struct panthor_heap_pool *pool, u32 handle)
drivers/gpu/drm/panthor/panthor_heap.c
232
heap = xa_erase(&pool->xa, handle);
drivers/gpu/drm/panthor/panthor_heap.c
236
panthor_free_heap_chunks(pool, heap);
drivers/gpu/drm/panthor/panthor_heap.c
247
int panthor_heap_destroy(struct panthor_heap_pool *pool, u32 handle)
drivers/gpu/drm/panthor/panthor_heap.c
251
down_write(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
252
ret = panthor_heap_destroy_locked(pool, handle);
drivers/gpu/drm/panthor/panthor_heap.c
253
up_write(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
274
int panthor_heap_create(struct panthor_heap_pool *pool,
drivers/gpu/drm/panthor/panthor_heap.c
298
down_read(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
299
vm = panthor_vm_get(pool->vm);
drivers/gpu/drm/panthor/panthor_heap.c
300
up_read(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
318
ret = panthor_alloc_heap_chunks(pool, heap, initial_chunk_count);
drivers/gpu/drm/panthor/panthor_heap.c
327
down_write(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
329
if (!pool->vm) {
drivers/gpu/drm/panthor/panthor_heap.c
332
ret = xa_alloc(&pool->xa, &id, heap,
drivers/gpu/drm/panthor/panthor_heap.c
335
void *gpu_ctx = panthor_get_heap_ctx(pool, id);
drivers/gpu/drm/panthor/panthor_heap.c
337
memset(gpu_ctx, 0, panthor_heap_ctx_stride(pool->ptdev));
drivers/gpu/drm/panthor/panthor_heap.c
338
*heap_ctx_gpu_va = panthor_kernel_bo_gpuva(pool->gpu_contexts) +
drivers/gpu/drm/panthor/panthor_heap.c
339
panthor_get_heap_ctx_offset(pool, id);
drivers/gpu/drm/panthor/panthor_heap.c
342
up_write(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
351
panthor_free_heap_chunks(pool, heap);
drivers/gpu/drm/panthor/panthor_heap.c
370
int panthor_heap_return_chunk(struct panthor_heap_pool *pool,
drivers/gpu/drm/panthor/panthor_heap.c
374
u64 offset = heap_gpu_va - panthor_kernel_bo_gpuva(pool->gpu_contexts);
drivers/gpu/drm/panthor/panthor_heap.c
375
u32 heap_id = (u32)offset / panthor_heap_ctx_stride(pool->ptdev);
drivers/gpu/drm/panthor/panthor_heap.c
383
down_read(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
384
heap = xa_load(&pool->xa, heap_id);
drivers/gpu/drm/panthor/panthor_heap.c
398
atomic_sub(heap->chunk_size, &pool->size);
drivers/gpu/drm/panthor/panthor_heap.c
413
up_read(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
432
int panthor_heap_grow(struct panthor_heap_pool *pool,
drivers/gpu/drm/panthor/panthor_heap.c
438
u64 offset = heap_gpu_va - panthor_kernel_bo_gpuva(pool->gpu_contexts);
drivers/gpu/drm/panthor/panthor_heap.c
439
u32 heap_id = (u32)offset / panthor_heap_ctx_stride(pool->ptdev);
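panthor maps a heap context id to a buffer offset by multiplying with a fixed per-context stride, and recovers the id from a GPU VA by the inverse division, as the panthor_heap_return_chunk()/panthor_heap_grow() excerpts above show. A small sketch of the round trip, with a hypothetical stride value:

	#include <stdint.h>
	#include <stdio.h>

	#define HEAP_CTX_STRIDE 64u  /* hypothetical per-context stride in bytes */

	/* Forward mapping: contexts sit back to back, so offset = stride * id. */
	static uint64_t heap_ctx_offset(uint32_t id)
	{
		return (uint64_t)HEAP_CTX_STRIDE * id;
	}

	/* Reverse mapping: the offset from the buffer base divided by the
	 * stride yields the id back. */
	static uint32_t heap_ctx_id(uint64_t gpu_va, uint64_t base_va)
	{
		return (uint32_t)((gpu_va - base_va) / HEAP_CTX_STRIDE);
	}

	int main(void)
	{
		uint64_t base = 0x100000;
		uint64_t va = base + heap_ctx_offset(3);

		printf("%u\n", heap_ctx_id(va, base));  /* prints 3 */
		return 0;
	}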
drivers/gpu/drm/panthor/panthor_heap.c
447
down_read(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
448
heap = xa_load(&pool->xa, heap_id);
drivers/gpu/drm/panthor/panthor_heap.c
476
ret = panthor_alloc_heap_chunk(pool, heap, false);
drivers/gpu/drm/panthor/panthor_heap.c
488
up_read(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
494
struct panthor_heap_pool *pool =
drivers/gpu/drm/panthor/panthor_heap.c
497
xa_destroy(&pool->xa);
drivers/gpu/drm/panthor/panthor_heap.c
498
kfree(pool);
drivers/gpu/drm/panthor/panthor_heap.c
505
void panthor_heap_pool_put(struct panthor_heap_pool *pool)
drivers/gpu/drm/panthor/panthor_heap.c
507
if (pool)
drivers/gpu/drm/panthor/panthor_heap.c
508
kref_put(&pool->refcount, panthor_heap_pool_release);
drivers/gpu/drm/panthor/panthor_heap.c
518
panthor_heap_pool_get(struct panthor_heap_pool *pool)
drivers/gpu/drm/panthor/panthor_heap.c
520
if (pool)
drivers/gpu/drm/panthor/panthor_heap.c
521
kref_get(&pool->refcount);
drivers/gpu/drm/panthor/panthor_heap.c
523
return pool;
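panthor_heap_pool_get()/panthor_heap_pool_put() above wrap kref_get()/kref_put(), and both tolerate a NULL pool. A userspace stand-in using C11 atomics, assuming a plain free() release step in place of panthor_heap_pool_release():

	#include <stdatomic.h>
	#include <stdlib.h>

	struct heap_pool {
		atomic_int refcount;
	};

	/* get/put both tolerate NULL, mirroring the panthor helpers. */
	static struct heap_pool *pool_get(struct heap_pool *pool)
	{
		if (pool)
			atomic_fetch_add(&pool->refcount, 1);
		return pool;
	}

	static void pool_put(struct heap_pool *pool)
	{
		/* fetch_sub returns the old value: old == 1 means we just
		 * dropped the last reference, so run the release step. */
		if (pool && atomic_fetch_sub(&pool->refcount, 1) == 1)
			free(pool);  /* release callback in the kernel version */
	}

	int main(void)
	{
		struct heap_pool *pool = malloc(sizeof(*pool));

		atomic_init(&pool->refcount, 1);
		pool_get(pool);  /* second reference */
		pool_put(pool);  /* drops to 1, no free */
		pool_put(pool);  /* drops to 0, freed */
		return 0;
	}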
drivers/gpu/drm/panthor/panthor_heap.c
541
struct panthor_heap_pool *pool;
drivers/gpu/drm/panthor/panthor_heap.c
544
pool = kzalloc_obj(*pool);
drivers/gpu/drm/panthor/panthor_heap.c
545
if (!pool)
drivers/gpu/drm/panthor/panthor_heap.c
551
pool->vm = vm;
drivers/gpu/drm/panthor/panthor_heap.c
552
pool->ptdev = ptdev;
drivers/gpu/drm/panthor/panthor_heap.c
553
init_rwsem(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
554
xa_init_flags(&pool->xa, XA_FLAGS_ALLOC);
drivers/gpu/drm/panthor/panthor_heap.c
555
kref_init(&pool->refcount);
drivers/gpu/drm/panthor/panthor_heap.c
557
pool->gpu_contexts = panthor_kernel_bo_create(ptdev, vm, bosize,
drivers/gpu/drm/panthor/panthor_heap.c
562
if (IS_ERR(pool->gpu_contexts)) {
drivers/gpu/drm/panthor/panthor_heap.c
563
ret = PTR_ERR(pool->gpu_contexts);
drivers/gpu/drm/panthor/panthor_heap.c
567
ret = panthor_kernel_bo_vmap(pool->gpu_contexts);
drivers/gpu/drm/panthor/panthor_heap.c
571
atomic_add(pool->gpu_contexts->obj->size, &pool->size);
drivers/gpu/drm/panthor/panthor_heap.c
573
return pool;
drivers/gpu/drm/panthor/panthor_heap.c
576
panthor_heap_pool_destroy(pool);
drivers/gpu/drm/panthor/panthor_heap.c
595
void panthor_heap_pool_destroy(struct panthor_heap_pool *pool)
drivers/gpu/drm/panthor/panthor_heap.c
600
if (!pool)
drivers/gpu/drm/panthor/panthor_heap.c
603
down_write(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
604
xa_for_each(&pool->xa, i, heap)
drivers/gpu/drm/panthor/panthor_heap.c
605
drm_WARN_ON(&pool->ptdev->base, panthor_heap_destroy_locked(pool, i));
drivers/gpu/drm/panthor/panthor_heap.c
607
if (!IS_ERR_OR_NULL(pool->gpu_contexts)) {
drivers/gpu/drm/panthor/panthor_heap.c
608
atomic_sub(pool->gpu_contexts->obj->size, &pool->size);
drivers/gpu/drm/panthor/panthor_heap.c
609
panthor_kernel_bo_destroy(pool->gpu_contexts);
drivers/gpu/drm/panthor/panthor_heap.c
613
pool->vm = NULL;
drivers/gpu/drm/panthor/panthor_heap.c
614
up_write(&pool->lock);
drivers/gpu/drm/panthor/panthor_heap.c
616
panthor_heap_pool_put(pool);
drivers/gpu/drm/panthor/panthor_heap.c
626
size_t panthor_heap_pool_size(struct panthor_heap_pool *pool)
drivers/gpu/drm/panthor/panthor_heap.c
628
if (!pool)
drivers/gpu/drm/panthor/panthor_heap.c
631
return atomic_read(&pool->size);
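Note: the panthor_heap.c hits above (source lines 494-523) outline a kref-managed pool lifetime. A minimal reconstruction under stated assumptions — the container_of step and the release callback's registration are inferred from the kref_put hit at source line 508, not shown verbatim in the listing:

static void panthor_heap_pool_release(struct kref *refcount)
{
	struct panthor_heap_pool *pool =
		container_of(refcount, struct panthor_heap_pool, refcount); /* inferred */

	xa_destroy(&pool->xa);	/* source line 497 */
	kfree(pool);		/* source line 498 */
}

void panthor_heap_pool_put(struct panthor_heap_pool *pool)
{
	if (pool)	/* NULL-tolerant, per source lines 507-508 */
		kref_put(&pool->refcount, panthor_heap_pool_release);
}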
drivers/gpu/drm/panthor/panthor_heap.h
13
int panthor_heap_create(struct panthor_heap_pool *pool,
drivers/gpu/drm/panthor/panthor_heap.h
20
int panthor_heap_destroy(struct panthor_heap_pool *pool, u32 handle);
drivers/gpu/drm/panthor/panthor_heap.h
24
void panthor_heap_pool_destroy(struct panthor_heap_pool *pool);
drivers/gpu/drm/panthor/panthor_heap.h
27
panthor_heap_pool_get(struct panthor_heap_pool *pool);
drivers/gpu/drm/panthor/panthor_heap.h
28
void panthor_heap_pool_put(struct panthor_heap_pool *pool);
drivers/gpu/drm/panthor/panthor_heap.h
30
size_t panthor_heap_pool_size(struct panthor_heap_pool *pool);
drivers/gpu/drm/panthor/panthor_heap.h
32
int panthor_heap_grow(struct panthor_heap_pool *pool,
drivers/gpu/drm/panthor/panthor_heap.h
37
int panthor_heap_return_chunk(struct panthor_heap_pool *pool,
drivers/gpu/drm/panthor/panthor_mmu.c
1460
struct panthor_vm_pool *pool,
drivers/gpu/drm/panthor/panthor_mmu.c
1477
ret = xa_alloc(&pool->xa, &id, vm,
drivers/gpu/drm/panthor/panthor_mmu.c
1501
panthor_heap_pool_destroy(vm->heaps.pool);
drivers/gpu/drm/panthor/panthor_mmu.c
1502
vm->heaps.pool = NULL;
drivers/gpu/drm/panthor/panthor_mmu.c
1526
int panthor_vm_pool_destroy_vm(struct panthor_vm_pool *pool, u32 handle)
drivers/gpu/drm/panthor/panthor_mmu.c
1530
vm = xa_erase(&pool->xa, handle);
drivers/gpu/drm/panthor/panthor_mmu.c
1545
panthor_vm_pool_get_vm(struct panthor_vm_pool *pool, u32 handle)
drivers/gpu/drm/panthor/panthor_mmu.c
1549
xa_lock(&pool->xa);
drivers/gpu/drm/panthor/panthor_mmu.c
1550
vm = panthor_vm_get(xa_load(&pool->xa, handle));
drivers/gpu/drm/panthor/panthor_mmu.c
1551
xa_unlock(&pool->xa);
drivers/gpu/drm/panthor/panthor_mmu.c
1877
if (drm_WARN_ON(&ptdev->base, vm->heaps.pool))
drivers/gpu/drm/panthor/panthor_mmu.c
1878
panthor_heap_pool_destroy(vm->heaps.pool);
drivers/gpu/drm/panthor/panthor_mmu.c
1958
struct panthor_heap_pool *pool;
drivers/gpu/drm/panthor/panthor_mmu.c
1961
if (!vm->heaps.pool && create) {
drivers/gpu/drm/panthor/panthor_mmu.c
1963
pool = ERR_PTR(-EINVAL);
drivers/gpu/drm/panthor/panthor_mmu.c
1965
pool = panthor_heap_pool_create(vm->ptdev, vm);
drivers/gpu/drm/panthor/panthor_mmu.c
1967
if (!IS_ERR(pool))
drivers/gpu/drm/panthor/panthor_mmu.c
1968
vm->heaps.pool = panthor_heap_pool_get(pool);
drivers/gpu/drm/panthor/panthor_mmu.c
1970
pool = panthor_heap_pool_get(vm->heaps.pool);
drivers/gpu/drm/panthor/panthor_mmu.c
1971
if (!pool)
drivers/gpu/drm/panthor/panthor_mmu.c
1972
pool = ERR_PTR(-ENOENT);
drivers/gpu/drm/panthor/panthor_mmu.c
1976
return pool;
drivers/gpu/drm/panthor/panthor_mmu.c
1998
size_t size = panthor_heap_pool_size(vm->heaps.pool);
drivers/gpu/drm/panthor/panthor_mmu.c
332
struct panthor_heap_pool *pool;
drivers/gpu/drm/panthor/panthor_mmu.h
63
struct panthor_vm_pool *pool,
drivers/gpu/drm/panthor/panthor_mmu.h
65
int panthor_vm_pool_destroy_vm(struct panthor_vm_pool *pool, u32 handle);
drivers/gpu/drm/panthor/panthor_mmu.h
66
struct panthor_vm *panthor_vm_pool_get_vm(struct panthor_vm_pool *pool, u32 handle);
drivers/gpu/drm/panthor/panthor_sched.c
3809
static struct panthor_group *group_from_handle(struct panthor_group_pool *pool,
drivers/gpu/drm/panthor/panthor_sched.c
3814
xa_lock(&pool->xa);
drivers/gpu/drm/panthor/panthor_sched.c
3815
group = group_get(xa_find(&pool->xa, &group_handle, group_handle, GROUP_REGISTERED));
drivers/gpu/drm/panthor/panthor_sched.c
3816
xa_unlock(&pool->xa);
drivers/gpu/drm/radeon/radeon_ttm.c
550
return ttm_pool_alloc(&rdev->mman.bdev.pool, ttm, ctx);
drivers/gpu/drm/radeon/radeon_ttm.c
570
return ttm_pool_free(&rdev->mman.bdev.pool, ttm);
drivers/gpu/drm/radeon/radeon_ttm.c
778
return ttm_pool_debugfs(&rdev->mman.bdev.pool, m);
drivers/gpu/drm/ttm/tests/ttm_bo_validate_test.c
541
ttm_pool_alloc(&priv->ttm_dev->pool, old_tt, &ctx);
drivers/gpu/drm/ttm/tests/ttm_device_test.c
154
struct ttm_pool *pool;
drivers/gpu/drm/ttm/tests/ttm_device_test.c
164
pool = &ttm_dev->pool;
drivers/gpu/drm/ttm/tests/ttm_device_test.c
165
KUNIT_ASSERT_NOT_NULL(test, pool);
drivers/gpu/drm/ttm/tests/ttm_device_test.c
166
KUNIT_EXPECT_PTR_EQ(test, pool->dev, priv->dev);
drivers/gpu/drm/ttm/tests/ttm_device_test.c
167
KUNIT_EXPECT_EQ(test, pool->alloc_flags, params->alloc_flags);
drivers/gpu/drm/ttm/tests/ttm_device_test.c
172
pt = pool->caching[i].orders[j];
drivers/gpu/drm/ttm/tests/ttm_device_test.c
173
KUNIT_EXPECT_PTR_EQ(test, pt.pool, pool);
drivers/gpu/drm/ttm/tests/ttm_device_test.c
177
if (ttm_pool_uses_dma_alloc(pool))
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
141
struct ttm_pool *pool;
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
151
pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
152
KUNIT_ASSERT_NOT_NULL(test, pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
154
ttm_pool_init(pool, devs->dev, NUMA_NO_NODE, params->alloc_flags);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
156
KUNIT_ASSERT_PTR_EQ(test, pool->dev, devs->dev);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
157
KUNIT_ASSERT_EQ(test, pool->nid, NUMA_NO_NODE);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
158
KUNIT_ASSERT_EQ(test, pool->alloc_flags, params->alloc_flags);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
160
err = ttm_pool_alloc(pool, tt, &simple_ctx);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
168
if (ttm_pool_uses_dma_alloc(pool)) {
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
175
if (ttm_pool_uses_dma_alloc(pool)) {
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
190
ttm_pool_free(pool, tt);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
192
ttm_pool_fini(pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
201
struct ttm_pool *pool;
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
218
pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
219
KUNIT_ASSERT_NOT_NULL(test, pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
221
ttm_pool_init(pool, devs->dev, NUMA_NO_NODE, TTM_ALLOCATION_POOL_USE_DMA_ALLOC);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
223
err = ttm_pool_alloc(pool, tt, &simple_ctx);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
233
ttm_pool_free(pool, tt);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
235
ttm_pool_fini(pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
241
struct ttm_pool *pool;
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
248
pool = ttm_pool_pre_populated(test, size, caching);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
250
pt = &pool->caching[caching].orders[order];
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
256
err = ttm_pool_alloc(pool, tt, &simple_ctx);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
261
ttm_pool_free(pool, tt);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
263
ttm_pool_fini(pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
269
struct ttm_pool *pool;
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
277
pool = ttm_pool_pre_populated(test, size, pool_caching);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
279
pt_pool = &pool->caching[pool_caching].orders[order];
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
280
pt_tt = &pool->caching[tt_caching].orders[order];
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
288
err = ttm_pool_alloc(pool, tt, &simple_ctx);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
291
ttm_pool_free(pool, tt);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
297
ttm_pool_fini(pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
303
struct ttm_pool *pool;
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
311
pool = ttm_pool_pre_populated(test, fst_size, caching);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
313
pt_pool = &pool->caching[caching].orders[order];
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
314
pt_tt = &pool->caching[caching].orders[0];
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
322
err = ttm_pool_alloc(pool, tt, &simple_ctx);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
325
ttm_pool_free(pool, tt);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
331
ttm_pool_fini(pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
339
struct ttm_pool *pool;
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
348
pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
349
KUNIT_ASSERT_NOT_NULL(test, pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
351
ttm_pool_init(pool, devs->dev, NUMA_NO_NODE, TTM_ALLOCATION_POOL_USE_DMA_ALLOC);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
352
ttm_pool_alloc(pool, tt, &simple_ctx);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
354
pt = &pool->caching[caching].orders[order];
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
357
ttm_pool_free(pool, tt);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
362
ttm_pool_fini(pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
370
struct ttm_pool *pool;
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
379
pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
380
KUNIT_ASSERT_NOT_NULL(test, pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
382
ttm_pool_init(pool, devs->dev, NUMA_NO_NODE, 0);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
383
ttm_pool_alloc(pool, tt, &simple_ctx);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
385
pt = &pool->caching[caching].orders[order];
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
388
ttm_pool_free(pool, tt);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
393
ttm_pool_fini(pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
398
struct ttm_pool *pool;
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
404
pool = ttm_pool_pre_populated(test, size, caching);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
405
pt = &pool->caching[caching].orders[order];
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
409
ttm_pool_fini(pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
80
struct ttm_pool *pool;
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
87
pool = kunit_kzalloc(test, sizeof(*pool), GFP_KERNEL);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
88
KUNIT_ASSERT_NOT_NULL(test, pool);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
90
ttm_pool_init(pool, devs->dev, NUMA_NO_NODE, TTM_ALLOCATION_POOL_USE_DMA_ALLOC);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
92
err = ttm_pool_alloc(pool, tt, &simple_ctx);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
95
ttm_pool_free(pool, tt);
drivers/gpu/drm/ttm/tests/ttm_pool_test.c
98
return pool;
drivers/gpu/drm/ttm/tests/ttm_tt_test.c
364
err = ttm_pool_alloc(&devs->ttm_dev->pool, tt, &ctx);
drivers/gpu/drm/ttm/ttm_bo.c
311
queue_work_node(bdev->pool.nid, bdev->wq, &bo->delayed_delete);
drivers/gpu/drm/ttm/ttm_device.c
237
ttm_pool_init(&bdev->pool, dev, nid, alloc_flags);
drivers/gpu/drm/ttm/ttm_device.c
273
ttm_pool_fini(&bdev->pool);
drivers/gpu/drm/ttm/ttm_pool.c
1010
order = ttm_pool_page_order(pool, page);
drivers/gpu/drm/ttm/ttm_pool.c
1013
ttm_pool_unmap(pool, tt->dma_address[i],
drivers/gpu/drm/ttm/ttm_pool.c
1028
if (ttm_pool_uses_dma32(pool))
drivers/gpu/drm/ttm/ttm_pool.c
1048
ttm_pool_split_for_swap(pool, page);
drivers/gpu/drm/ttm/ttm_pool.c
107
struct ttm_pool *pool;
drivers/gpu/drm/ttm/ttm_pool.c
1076
void ttm_pool_init(struct ttm_pool *pool, struct device *dev,
drivers/gpu/drm/ttm/ttm_pool.c
1081
WARN_ON(!dev && ttm_pool_uses_dma_alloc(pool));
drivers/gpu/drm/ttm/ttm_pool.c
1083
pool->dev = dev;
drivers/gpu/drm/ttm/ttm_pool.c
1084
pool->nid = nid;
drivers/gpu/drm/ttm/ttm_pool.c
1085
pool->alloc_flags = alloc_flags;
drivers/gpu/drm/ttm/ttm_pool.c
1092
pt = ttm_pool_select_type(pool, i, j);
drivers/gpu/drm/ttm/ttm_pool.c
1093
if (pt != &pool->caching[i].orders[j])
drivers/gpu/drm/ttm/ttm_pool.c
1096
ttm_pool_type_init(pt, pool, i, j);
drivers/gpu/drm/ttm/ttm_pool.c
1122
void ttm_pool_fini(struct ttm_pool *pool)
drivers/gpu/drm/ttm/ttm_pool.c
1130
pt = ttm_pool_select_type(pool, i, j);
drivers/gpu/drm/ttm/ttm_pool.c
1131
if (pt != &pool->caching[i].orders[j])
drivers/gpu/drm/ttm/ttm_pool.c
1247
int ttm_pool_debugfs(struct ttm_pool *pool, struct seq_file *m)
drivers/gpu/drm/ttm/ttm_pool.c
1251
if (!ttm_pool_uses_dma_alloc(pool) && pool->nid == NUMA_NO_NODE) {
drivers/gpu/drm/ttm/ttm_pool.c
1260
if (!ttm_pool_select_type(pool, i, 0))
drivers/gpu/drm/ttm/ttm_pool.c
1262
if (ttm_pool_uses_dma_alloc(pool))
drivers/gpu/drm/ttm/ttm_pool.c
1265
seq_printf(m, "N%d ", pool->nid);
drivers/gpu/drm/ttm/ttm_pool.c
1277
ttm_pool_debugfs_orders(pool->caching[i].orders, m);
drivers/gpu/drm/ttm/ttm_pool.c
136
static struct page *ttm_pool_alloc_page(struct ttm_pool *pool, gfp_t gfp_flags,
drivers/gpu/drm/ttm/ttm_pool.c
139
const unsigned int beneficial_order = ttm_pool_beneficial_order(pool);
drivers/gpu/drm/ttm/ttm_pool.c
160
if (!ttm_pool_uses_dma_alloc(pool)) {
drivers/gpu/drm/ttm/ttm_pool.c
161
p = alloc_pages_node(pool->nid, gfp_flags, order);
drivers/gpu/drm/ttm/ttm_pool.c
174
vaddr = dma_alloc_attrs(pool->dev, (1ULL << order) * PAGE_SIZE,
drivers/gpu/drm/ttm/ttm_pool.c
197
static void ttm_pool_free_page(struct ttm_pool *pool, enum ttm_caching caching,
drivers/gpu/drm/ttm/ttm_pool.c
212
if (!pool || !ttm_pool_uses_dma_alloc(pool)) {
drivers/gpu/drm/ttm/ttm_pool.c
222
dma_free_attrs(pool->dev, (1UL << order) * PAGE_SIZE, vaddr, dma->addr,
drivers/gpu/drm/ttm/ttm_pool.c
250
static int ttm_pool_map(struct ttm_pool *pool, unsigned int order,
drivers/gpu/drm/ttm/ttm_pool.c
255
if (ttm_pool_uses_dma_alloc(pool)) {
drivers/gpu/drm/ttm/ttm_pool.c
262
addr = dma_map_page(pool->dev, p, 0, size, DMA_BIDIRECTIONAL);
drivers/gpu/drm/ttm/ttm_pool.c
263
if (dma_mapping_error(pool->dev, addr))
drivers/gpu/drm/ttm/ttm_pool.c
273
static void ttm_pool_unmap(struct ttm_pool *pool, dma_addr_t dma_addr,
drivers/gpu/drm/ttm/ttm_pool.c
277
if (ttm_pool_uses_dma_alloc(pool))
drivers/gpu/drm/ttm/ttm_pool.c
280
dma_unmap_page(pool->dev, dma_addr, (long)num_pages << PAGE_SHIFT,
drivers/gpu/drm/ttm/ttm_pool.c
319
static void ttm_pool_type_init(struct ttm_pool_type *pt, struct ttm_pool *pool,
drivers/gpu/drm/ttm/ttm_pool.c
322
pt->pool = pool;
drivers/gpu/drm/ttm/ttm_pool.c
343
ttm_pool_free_page(pt->pool, pt->caching, pt->order, p);
drivers/gpu/drm/ttm/ttm_pool.c
347
static struct ttm_pool_type *ttm_pool_select_type(struct ttm_pool *pool,
drivers/gpu/drm/ttm/ttm_pool.c
351
if (ttm_pool_uses_dma_alloc(pool))
drivers/gpu/drm/ttm/ttm_pool.c
352
return &pool->caching[caching].orders[order];
drivers/gpu/drm/ttm/ttm_pool.c
357
if (pool->nid != NUMA_NO_NODE)
drivers/gpu/drm/ttm/ttm_pool.c
358
return &pool->caching[caching].orders[order];
drivers/gpu/drm/ttm/ttm_pool.c
360
if (ttm_pool_uses_dma32(pool))
drivers/gpu/drm/ttm/ttm_pool.c
365
if (pool->nid != NUMA_NO_NODE)
drivers/gpu/drm/ttm/ttm_pool.c
366
return &pool->caching[caching].orders[order];
drivers/gpu/drm/ttm/ttm_pool.c
368
if (ttm_pool_uses_dma32(pool))
drivers/gpu/drm/ttm/ttm_pool.c
395
ttm_pool_free_page(pt->pool, pt->caching, pt->order, p);
drivers/gpu/drm/ttm/ttm_pool.c
406
static unsigned int ttm_pool_page_order(struct ttm_pool *pool, struct page *p)
drivers/gpu/drm/ttm/ttm_pool.c
408
if (ttm_pool_uses_dma_alloc(pool)) {
drivers/gpu/drm/ttm/ttm_pool.c
422
static void ttm_pool_split_for_swap(struct ttm_pool *pool, struct page *p)
drivers/gpu/drm/ttm/ttm_pool.c
424
unsigned int order = ttm_pool_page_order(pool, p);
drivers/gpu/drm/ttm/ttm_pool.c
465
static pgoff_t ttm_pool_unmap_and_free(struct ttm_pool *pool, struct page *page,
drivers/gpu/drm/ttm/ttm_pool.c
472
if (pool) {
drivers/gpu/drm/ttm/ttm_pool.c
473
order = ttm_pool_page_order(pool, page);
drivers/gpu/drm/ttm/ttm_pool.c
476
ttm_pool_unmap(pool, *dma_addr, nr);
drivers/gpu/drm/ttm/ttm_pool.c
478
pt = ttm_pool_select_type(pool, caching, order);
drivers/gpu/drm/ttm/ttm_pool.c
487
ttm_pool_free_page(pool, caching, order, page);
drivers/gpu/drm/ttm/ttm_pool.c
559
ttm_pool_split_for_swap(restore->pool, p);
drivers/gpu/drm/ttm/ttm_pool.c
572
ttm_pool_unmap_and_free(restore->pool, restore->alloced_page,
drivers/gpu/drm/ttm/ttm_pool.c
591
ttm_pool_page_allocated_restore(struct ttm_pool *pool, unsigned int order,
drivers/gpu/drm/ttm/ttm_pool.c
598
restore->pool = pool;
drivers/gpu/drm/ttm/ttm_pool.c
614
static int ttm_pool_page_allocated(struct ttm_pool *pool, unsigned int order,
drivers/gpu/drm/ttm/ttm_pool.c
632
r = ttm_pool_map(pool, order, p, &first_dma);
drivers/gpu/drm/ttm/ttm_pool.c
638
ttm_pool_page_allocated_restore(pool, order, p, page_caching,
drivers/gpu/drm/ttm/ttm_pool.c
663
static void ttm_pool_free_range(struct ttm_pool *pool, struct ttm_tt *tt,
drivers/gpu/drm/ttm/ttm_pool.c
684
nr = ttm_pool_unmap_and_free(pool, p, dma_addr, caching);
drivers/gpu/drm/ttm/ttm_pool.c
709
static int __ttm_pool_alloc(struct ttm_pool *pool, struct ttm_tt *tt,
drivers/gpu/drm/ttm/ttm_pool.c
723
WARN_ON(alloc->dma_addr && !pool->dev);
drivers/gpu/drm/ttm/ttm_pool.c
731
if (ttm_pool_uses_dma32(pool))
drivers/gpu/drm/ttm/ttm_pool.c
745
pt = ttm_pool_select_type(pool, page_caching, order);
drivers/gpu/drm/ttm/ttm_pool.c
757
p = ttm_pool_alloc_page(pool, gfp_flags, order);
drivers/gpu/drm/ttm/ttm_pool.c
770
r = ttm_pool_page_allocated(pool, order, p, page_caching, alloc,
drivers/gpu/drm/ttm/ttm_pool.c
792
ttm_pool_free_page(pool, page_caching, order, p);
drivers/gpu/drm/ttm/ttm_pool.c
799
ttm_pool_free_range(pool, tt, tt->caching, 0, caching_divide);
drivers/gpu/drm/ttm/ttm_pool.c
800
ttm_pool_free_range(pool, tt, ttm_cached, caching_divide,
drivers/gpu/drm/ttm/ttm_pool.c
818
int ttm_pool_alloc(struct ttm_pool *pool, struct ttm_tt *tt,
drivers/gpu/drm/ttm/ttm_pool.c
828
return __ttm_pool_alloc(pool, tt, ctx, &alloc, NULL);
drivers/gpu/drm/ttm/ttm_pool.c
845
int ttm_pool_restore_and_alloc(struct ttm_pool *pool, struct ttm_tt *tt,
drivers/gpu/drm/ttm/ttm_pool.c
866
restore->pool = pool;
drivers/gpu/drm/ttm/ttm_pool.c
883
return __ttm_pool_alloc(pool, tt, ctx, &alloc, restore);
drivers/gpu/drm/ttm/ttm_pool.c
894
void ttm_pool_free(struct ttm_pool *pool, struct ttm_tt *tt)
drivers/gpu/drm/ttm/ttm_pool.c
896
ttm_pool_free_range(pool, tt, tt->caching, 0, tt->num_pages);
drivers/gpu/drm/ttm/ttm_pool.c
927
ttm_pool_unmap_and_free(restore->pool, restore->alloced_page,
drivers/gpu/drm/ttm/ttm_pool.c
942
ttm_pool_free_range(restore->pool, tt, tt->caching,
drivers/gpu/drm/ttm/ttm_pool.c
945
ttm_pool_free_range(restore->pool, tt, ttm_cached,
drivers/gpu/drm/ttm/ttm_pool.c
975
long ttm_pool_backup(struct ttm_pool *pool, struct ttm_tt *tt,
drivers/gpu/drm/ttm/ttm_pool.c
991
ttm_pool_uses_dma_alloc(pool) || ttm_tt_is_backed_up(tt))
drivers/gpu/drm/ttm/ttm_pool_internal.h
10
static inline bool ttm_pool_uses_dma_alloc(struct ttm_pool *pool)
drivers/gpu/drm/ttm/ttm_pool_internal.h
12
return pool->alloc_flags & TTM_ALLOCATION_POOL_USE_DMA_ALLOC;
drivers/gpu/drm/ttm/ttm_pool_internal.h
15
static inline bool ttm_pool_uses_dma32(struct ttm_pool *pool)
drivers/gpu/drm/ttm/ttm_pool_internal.h
17
return pool->alloc_flags & TTM_ALLOCATION_POOL_USE_DMA32;
drivers/gpu/drm/ttm/ttm_pool_internal.h
20
static inline unsigned int ttm_pool_beneficial_order(struct ttm_pool *pool)
drivers/gpu/drm/ttm/ttm_pool_internal.h
22
return pool->alloc_flags & 0xff;
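Note: the three ttm_pool_internal.h accessors above imply that alloc_flags packs boolean capabilities in high bits and a preferred ("beneficial") allocation order in the low byte, per the & 0xff mask. A hedged usage sketch — the order value 4 is an arbitrary example; only the flag names and the ttm_pool_init() signature come from the hits:

static void example_pool_setup(struct ttm_device *bdev, struct device *dev)
{
	/* low byte = preferred order that ttm_pool_beneficial_order()
	 * recovers; here order 4 = 64 KiB chunks on 4 KiB pages */
	unsigned int alloc_flags = TTM_ALLOCATION_POOL_USE_DMA32 | 4;

	ttm_pool_init(&bdev->pool, dev, NUMA_NO_NODE, alloc_flags);
}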
drivers/gpu/drm/ttm/ttm_resource.c
383
struct dmem_cgroup_pool_state *pool = NULL;
drivers/gpu/drm/ttm/ttm_resource.c
387
ret = dmem_cgroup_try_charge(man->cg, bo->base.size, &pool, ret_limit_pool);
drivers/gpu/drm/ttm/ttm_resource.c
394
if (pool)
drivers/gpu/drm/ttm/ttm_resource.c
395
dmem_cgroup_uncharge(pool, bo->base.size);
drivers/gpu/drm/ttm/ttm_resource.c
399
(*res_ptr)->css = pool;
drivers/gpu/drm/ttm/ttm_resource.c
411
struct dmem_cgroup_pool_state *pool;
drivers/gpu/drm/ttm/ttm_resource.c
420
pool = (*res)->css;
drivers/gpu/drm/ttm/ttm_resource.c
425
dmem_cgroup_uncharge(pool, bo->base.size);
drivers/gpu/drm/ttm/ttm_tt.c
289
ret = ttm_pool_backup(&bdev->pool, tt, &flags);
drivers/gpu/drm/ttm/ttm_tt.c
301
int ret = ttm_pool_restore_and_alloc(&bdev->pool, tt, ctx);
drivers/gpu/drm/ttm/ttm_tt.c
383
if (ttm_pool_uses_dma32(&bdev->pool))
drivers/gpu/drm/ttm/ttm_tt.c
402
ret = ttm_pool_alloc(&bdev->pool, ttm, ctx);
drivers/gpu/drm/ttm/ttm_tt.c
421
if (ttm_pool_uses_dma32(&bdev->pool))
drivers/gpu/drm/ttm/ttm_tt.c
440
ttm_pool_free(&bdev->pool, ttm);
drivers/gpu/drm/ttm/ttm_tt.c
444
if (ttm_pool_uses_dma32(&bdev->pool))
drivers/gpu/drm/ttm/ttm_tt.c
98
if (ttm_pool_uses_dma_alloc(&bdev->pool) &&
drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c
375
return ttm_pool_alloc(&bdev->pool, ttm, ctx);
drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c
397
ttm_pool_free(&bdev->pool, ttm);
drivers/gpu/drm/xe/xe_bo.c
571
err = ttm_pool_alloc(&ttm_dev->pool, tt, ctx);
drivers/gpu/drm/xe/xe_bo.c
593
ttm_pool_free(&ttm_dev->pool, tt);
drivers/gpu/drm/xe/xe_guc_relay.c
237
txn = mempool_alloc(&relay->pool, incoming ? GFP_ATOMIC : GFP_NOWAIT);
drivers/gpu/drm/xe/xe_guc_relay.c
288
mempool_free(txn, &relay->pool);
drivers/gpu/drm/xe/xe_guc_relay.c
330
mempool_exit(&relay->pool);
drivers/gpu/drm/xe/xe_guc_relay.c
361
err = mempool_init_kmalloc_pool(&relay->pool, XE_RELAY_MEMPOOL_MIN_NUM +
drivers/gpu/drm/xe/xe_guc_relay.c
367
relay_debug(relay, "using mempool with %d elements\n", relay->pool.min_nr);
drivers/gpu/drm/xe/xe_guc_relay.c
83
return mempool_initialized(&relay->pool);
drivers/gpu/drm/xe/xe_guc_relay_types.h
31
mempool_t pool;
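Note: taken together, the xe_guc_relay hits trace the full mempool_t lifecycle. A condensed sketch, assuming hypothetical MIN_NR and TXN_SZ constants in place of the arguments truncated at source line 361:

int relay_pool_demo(struct xe_guc_relay *relay, bool incoming)
{
	void *txn;
	int err;

	err = mempool_init_kmalloc_pool(&relay->pool, MIN_NR, TXN_SZ);
	if (err)
		return err;

	/* atomic when servicing incoming traffic, per source line 237 */
	txn = mempool_alloc(&relay->pool, incoming ? GFP_ATOMIC : GFP_NOWAIT);
	if (txn)
		mempool_free(txn, &relay->pool);	/* source line 288 */

	mempool_exit(&relay->pool);			/* source line 330 */
	return 0;
}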
drivers/hid/usbhid/hid-pidff.c
1469
PIDFF_FIND_FIELDS(pool, PID_POOL, 0);
drivers/hid/usbhid/hid-pidff.c
1573
if (pidff->pool[PID_SIMULTANEOUS_MAX].value)
drivers/hid/usbhid/hid-pidff.c
1575
pidff->pool[PID_SIMULTANEOUS_MAX].value[0]);
drivers/hid/usbhid/hid-pidff.c
1577
if (pidff->pool[PID_RAM_POOL_SIZE].value)
drivers/hid/usbhid/hid-pidff.c
1579
pidff->pool[PID_RAM_POOL_SIZE].value[0]);
drivers/hid/usbhid/hid-pidff.c
1581
if (pidff->pool[PID_DEVICE_MANAGED_POOL].value &&
drivers/hid/usbhid/hid-pidff.c
1582
pidff->pool[PID_DEVICE_MANAGED_POOL].value[0] == 0) {
drivers/hid/usbhid/hid-pidff.c
207
struct pidff_usage pool[ARRAY_SIZE(pidff_pool)];
drivers/hid/usbhid/hid-pidff.c
759
if (!pidff->pool[PID_SIMULTANEOUS_MAX].value)
drivers/hid/usbhid/hid-pidff.c
761
if (pidff->pool[PID_SIMULTANEOUS_MAX].value[0] >= 2)
drivers/hid/usbhid/hid-pidff.c
765
pidff->pool[PID_SIMULTANEOUS_MAX].value[0]);
drivers/hv/hv_kvp.c
379
__u8 pool = kvp_transaction.kvp_msg->kvp_hdr.pool;
drivers/hv/hv_kvp.c
393
message->kvp_hdr.pool = pool;
drivers/i3c/master.c
2808
void i3c_generic_ibi_free_pool(struct i3c_generic_ibi_pool *pool)
drivers/i3c/master.c
2813
while (!list_empty(&pool->free_slots)) {
drivers/i3c/master.c
2814
slot = list_first_entry(&pool->free_slots,
drivers/i3c/master.c
2824
WARN_ON(nslots != pool->num_slots);
drivers/i3c/master.c
2826
kfree(pool->payload_buf);
drivers/i3c/master.c
2827
kfree(pool->slots);
drivers/i3c/master.c
2828
kfree(pool);
drivers/i3c/master.c
2845
struct i3c_generic_ibi_pool *pool;
drivers/i3c/master.c
2850
pool = kzalloc_obj(*pool);
drivers/i3c/master.c
2851
if (!pool)
drivers/i3c/master.c
2854
spin_lock_init(&pool->lock);
drivers/i3c/master.c
2855
INIT_LIST_HEAD(&pool->free_slots);
drivers/i3c/master.c
2856
INIT_LIST_HEAD(&pool->pending);
drivers/i3c/master.c
2858
pool->slots = kzalloc_objs(*slot, req->num_slots);
drivers/i3c/master.c
2859
if (!pool->slots) {
drivers/i3c/master.c
2865
pool->payload_buf = kcalloc(req->num_slots,
drivers/i3c/master.c
2867
if (!pool->payload_buf) {
drivers/i3c/master.c
2874
slot = &pool->slots[i];
drivers/i3c/master.c
2878
slot->base.data = pool->payload_buf +
drivers/i3c/master.c
2881
list_add_tail(&slot->node, &pool->free_slots);
drivers/i3c/master.c
2882
pool->num_slots++;
drivers/i3c/master.c
2885
return pool;
drivers/i3c/master.c
2888
i3c_generic_ibi_free_pool(pool);
drivers/i3c/master.c
2904
i3c_generic_ibi_get_free_slot(struct i3c_generic_ibi_pool *pool)
drivers/i3c/master.c
2909
spin_lock_irqsave(&pool->lock, flags);
drivers/i3c/master.c
2910
slot = list_first_entry_or_null(&pool->free_slots,
drivers/i3c/master.c
2914
spin_unlock_irqrestore(&pool->lock, flags);
drivers/i3c/master.c
2928
void i3c_generic_ibi_recycle_slot(struct i3c_generic_ibi_pool *pool,
drivers/i3c/master.c
2938
spin_lock_irqsave(&pool->lock, flags);
drivers/i3c/master.c
2939
list_add_tail(&slot->node, &pool->free_slots);
drivers/i3c/master.c
2940
spin_unlock_irqrestore(&pool->lock, flags);
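Note: the i3c/master.c hits above define the generic IBI pool API that the mipi-i3c-hci hits below consume. A driver-side sketch of the round trip, with dev and req assumed to come from the surrounding IBI-setup context and error handling compressed:

	struct i3c_generic_ibi_pool *pool;
	struct i3c_ibi_slot *slot;

	pool = i3c_generic_ibi_alloc_pool(dev, req);
	if (IS_ERR(pool))
		return PTR_ERR(pool);

	slot = i3c_generic_ibi_get_free_slot(pool);	/* NULL when exhausted */
	if (slot) {
		/* ... fill slot->data with the IBI payload ... */
		i3c_generic_ibi_recycle_slot(pool, slot);
	}

	i3c_generic_ibi_free_pool(pool);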
drivers/i3c/master/mipi-i3c-hci/dma.c
144
struct i3c_generic_ibi_pool *pool;
drivers/i3c/master/mipi-i3c-hci/dma.c
668
struct i3c_generic_ibi_pool *pool;
drivers/i3c/master/mipi-i3c-hci/dma.c
674
pool = i3c_generic_ibi_alloc_pool(dev, req);
drivers/i3c/master/mipi-i3c-hci/dma.c
675
if (IS_ERR(pool)) {
drivers/i3c/master/mipi-i3c-hci/dma.c
677
return PTR_ERR(pool);
drivers/i3c/master/mipi-i3c-hci/dma.c
679
dev_ibi->pool = pool;
drivers/i3c/master/mipi-i3c-hci/dma.c
691
i3c_generic_ibi_free_pool(dev_ibi->pool);
drivers/i3c/master/mipi-i3c-hci/dma.c
702
i3c_generic_ibi_recycle_slot(dev_ibi->pool, slot);
drivers/i3c/master/mipi-i3c-hci/dma.c
806
slot = i3c_generic_ibi_get_free_slot(dev_ibi->pool);
drivers/i3c/master/mipi-i3c-hci/pio.c
1009
i3c_generic_ibi_recycle_slot(dev_ibi->pool, slot);
drivers/i3c/master/mipi-i3c-hci/pio.c
112
struct i3c_generic_ibi_pool *pool;
drivers/i3c/master/mipi-i3c-hci/pio.c
881
ibi->slot = i3c_generic_ibi_get_free_slot(dev_ibi->pool);
drivers/i3c/master/mipi-i3c-hci/pio.c
898
i3c_generic_ibi_recycle_slot(dev_ibi->pool, ibi->slot);
drivers/i3c/master/mipi-i3c-hci/pio.c
975
struct i3c_generic_ibi_pool *pool;
drivers/i3c/master/mipi-i3c-hci/pio.c
981
pool = i3c_generic_ibi_alloc_pool(dev, req);
drivers/i3c/master/mipi-i3c-hci/pio.c
982
if (IS_ERR(pool)) {
drivers/i3c/master/mipi-i3c-hci/pio.c
984
return PTR_ERR(pool);
drivers/i3c/master/mipi-i3c-hci/pio.c
986
dev_ibi->pool = pool;
drivers/i3c/master/mipi-i3c-hci/pio.c
998
i3c_generic_ibi_free_pool(dev_ibi->pool);
drivers/iio/industrialio-trigger.c
257
ret = bitmap_find_free_region(trig->pool,
drivers/iio/industrialio-trigger.c
270
clear_bit(irq - trig->subirq_base, trig->pool);
drivers/iio/industrialio-trigger.c
285
bitmap_empty(trig->pool, CONFIG_IIO_CONSUMERS_PER_TRIGGER);
drivers/iio/industrialio-trigger.c
337
bitmap_weight(trig->pool, CONFIG_IIO_CONSUMERS_PER_TRIGGER) == 1;
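Note: the industrialio-trigger.c hits use a plain bitmap as the irq pool: bitmap_find_free_region() claims an entry, clear_bit() returns it, and bitmap_empty()/bitmap_weight() answer occupancy queries. A minimal claim-and-release sketch, assuming order-0 (single-bit) regions as the truncated call at source line 257 suggests:

	/* claim one subirq slot; returns the bit index or a negative errno */
	int bit = bitmap_find_free_region(trig->pool,
					  CONFIG_IIO_CONSUMERS_PER_TRIGGER, 0);
	if (bit >= 0) {
		int irq = trig->subirq_base + bit;
		/* ... hand irq to the consumer ... */
		clear_bit(irq - trig->subirq_base, trig->pool);	/* release */
	}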
drivers/infiniband/hw/hfi1/pio.c
280
int pool;
drivers/infiniband/hw/hfi1/pio.c
312
pool = wildcard_to_pool(size);
drivers/infiniband/hw/hfi1/pio.c
313
if (pool == -1) { /* non-wildcard */
drivers/infiniband/hw/hfi1/pio.c
315
} else if (pool < NUM_SC_POOLS) { /* valid wildcard */
drivers/infiniband/hw/hfi1/pio.c
316
mem_pool_info[pool].count += count;
drivers/infiniband/hw/hfi1/pio.c
379
unsigned pool = wildcard_to_pool(dd->sc_sizes[i].size);
drivers/infiniband/hw/hfi1/pio.c
381
WARN_ON_ONCE(pool >= NUM_SC_POOLS);
drivers/infiniband/hw/hfi1/pio.c
382
dd->sc_sizes[i].size = mem_pool_info[pool].size;
drivers/infiniband/hw/hns/hns_roce_cmd.c
202
hr_dev->cmd.pool = dma_pool_create("hns_roce_cmd", hr_dev->dev,
drivers/infiniband/hw/hns/hns_roce_cmd.c
205
if (!hr_dev->cmd.pool)
drivers/infiniband/hw/hns/hns_roce_cmd.c
213
dma_pool_destroy(hr_dev->cmd.pool);
drivers/infiniband/hw/hns/hns_roce_cmd.c
262
dma_pool_alloc(hr_dev->cmd.pool, GFP_KERNEL, &mailbox->dma);
drivers/infiniband/hw/hns/hns_roce_cmd.c
277
dma_pool_free(hr_dev->cmd.pool, mailbox->buf, mailbox->dma);
drivers/infiniband/hw/hns/hns_roce_device.h
552
struct dma_pool *pool;
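Note: the hns_roce_cmd.c hits above (and the mthca hits below) share one coherent-DMA mailbox pattern. A sketch of the lifecycle; MBOX_SIZE and MBOX_ALIGN are placeholders for the arguments truncated in the listing:

	struct dma_pool *cmd_pool;
	dma_addr_t dma;
	void *buf;

	cmd_pool = dma_pool_create("cmd_mailbox", dev, MBOX_SIZE, MBOX_ALIGN, 0);
	if (!cmd_pool)
		return -ENOMEM;

	buf = dma_pool_alloc(cmd_pool, GFP_KERNEL, &dma);	/* coherent buffer + bus address */
	if (buf)
		dma_pool_free(cmd_pool, buf, dma);

	dma_pool_destroy(cmd_pool);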
drivers/infiniband/hw/mlx5/mlx5_ib.h
1125
mempool_t *pool;
drivers/infiniband/hw/mlx5/odp.c
1633
mempool_free(pfault, eq->pool);
drivers/infiniband/hw/mlx5/odp.c
1645
pfault = mempool_alloc(eq->pool, GFP_ATOMIC);
drivers/infiniband/hw/mlx5/odp.c
1782
static void mempool_refill(mempool_t *pool)
drivers/infiniband/hw/mlx5/odp.c
1784
while (pool->curr_nr < pool->min_nr)
drivers/infiniband/hw/mlx5/odp.c
1785
mempool_free(mempool_alloc(pool, GFP_KERNEL), pool);
drivers/infiniband/hw/mlx5/odp.c
1793
mempool_refill(eq->pool);
drivers/infiniband/hw/mlx5/odp.c
1817
eq->pool = mempool_create_kmalloc_pool(MLX5_IB_NUM_PF_DRAIN,
drivers/infiniband/hw/mlx5/odp.c
1819
if (!eq->pool) {
drivers/infiniband/hw/mlx5/odp.c
1856
mempool_destroy(eq->pool);
drivers/infiniband/hw/mlx5/odp.c
1873
mempool_destroy(eq->pool);
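Note: the mempool_refill() hit at source lines 1782-1785 is a small idiom worth reassembling: mempool_alloc(pool, GFP_KERNEL) may be satisfied from the page allocator rather than the reserve, and the paired mempool_free() then parks that fresh element in the reserve, so the loop tops curr_nr back up to min_nr after a burst of GFP_ATOMIC allocations has drained it:

static void mempool_refill(mempool_t *pool)
{
	/* each iteration can add one element to the reserve, raising
	 * curr_nr until it reaches min_nr again */
	while (pool->curr_nr < pool->min_nr)
		mempool_free(mempool_alloc(pool, GFP_KERNEL), pool);
}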
drivers/infiniband/hw/mthca/mthca_av.c
189
ah->av = dma_pool_zalloc(dev->av_table.pool,
drivers/infiniband/hw/mthca/mthca_av.c
253
dma_pool_free(dev->av_table.pool, ah->av, ah->avdma);
drivers/infiniband/hw/mthca/mthca_av.c
340
dev->av_table.pool = dma_pool_create("mthca_av", &dev->pdev->dev,
drivers/infiniband/hw/mthca/mthca_av.c
343
if (!dev->av_table.pool)
drivers/infiniband/hw/mthca/mthca_av.c
360
dma_pool_destroy(dev->av_table.pool);
drivers/infiniband/hw/mthca/mthca_av.c
374
dma_pool_destroy(dev->av_table.pool);
drivers/infiniband/hw/mthca/mthca_cmd.c
535
dev->cmd.pool = dma_pool_create("mthca_cmd", &dev->pdev->dev,
drivers/infiniband/hw/mthca/mthca_cmd.c
538
if (!dev->cmd.pool) {
drivers/infiniband/hw/mthca/mthca_cmd.c
548
dma_pool_destroy(dev->cmd.pool);
drivers/infiniband/hw/mthca/mthca_cmd.c
617
mailbox->buf = dma_pool_alloc(dev->cmd.pool, gfp_mask, &mailbox->dma);
drivers/infiniband/hw/mthca/mthca_cmd.c
631
dma_pool_free(dev->cmd.pool, mailbox->buf, mailbox->dma);
drivers/infiniband/hw/mthca/mthca_dev.h
120
struct dma_pool *pool;
drivers/infiniband/hw/mthca/mthca_dev.h
265
struct dma_pool *pool;
drivers/infiniband/sw/rxe/rxe_pool.c
100
pool->name = info->name;
drivers/infiniband/sw/rxe/rxe_pool.c
101
pool->type = type;
drivers/infiniband/sw/rxe/rxe_pool.c
102
pool->max_elem = info->max_elem;
drivers/infiniband/sw/rxe/rxe_pool.c
103
pool->elem_size = ALIGN(info->size, RXE_POOL_ALIGN);
drivers/infiniband/sw/rxe/rxe_pool.c
104
pool->elem_offset = info->elem_offset;
drivers/infiniband/sw/rxe/rxe_pool.c
105
pool->cleanup = info->cleanup;
drivers/infiniband/sw/rxe/rxe_pool.c
107
atomic_set(&pool->num_elem, 0);
drivers/infiniband/sw/rxe/rxe_pool.c
109
xa_init_flags(&pool->xa, XA_FLAGS_ALLOC);
drivers/infiniband/sw/rxe/rxe_pool.c
110
pool->limit.min = info->min_index;
drivers/infiniband/sw/rxe/rxe_pool.c
111
pool->limit.max = info->max_index;
drivers/infiniband/sw/rxe/rxe_pool.c
114
void rxe_pool_cleanup(struct rxe_pool *pool)
drivers/infiniband/sw/rxe/rxe_pool.c
116
WARN_ON(!xa_empty(&pool->xa));
drivers/infiniband/sw/rxe/rxe_pool.c
119
int __rxe_add_to_pool(struct rxe_pool *pool, struct rxe_pool_elem *elem,
drivers/infiniband/sw/rxe/rxe_pool.c
125
if (atomic_inc_return(&pool->num_elem) > pool->max_elem)
drivers/infiniband/sw/rxe/rxe_pool.c
128
elem->pool = pool;
drivers/infiniband/sw/rxe/rxe_pool.c
129
elem->obj = (u8 *)elem - pool->elem_offset;
drivers/infiniband/sw/rxe/rxe_pool.c
141
err = xa_alloc_cyclic(&pool->xa, &elem->index, NULL, pool->limit,
drivers/infiniband/sw/rxe/rxe_pool.c
142
&pool->next, gfp_flags);
drivers/infiniband/sw/rxe/rxe_pool.c
149
atomic_dec(&pool->num_elem);
drivers/infiniband/sw/rxe/rxe_pool.c
153
void *rxe_pool_get_index(struct rxe_pool *pool, u32 index)
drivers/infiniband/sw/rxe/rxe_pool.c
156
struct xarray *xa = &pool->xa;
drivers/infiniband/sw/rxe/rxe_pool.c
179
struct rxe_pool *pool = elem->pool;
drivers/infiniband/sw/rxe/rxe_pool.c
180
struct xarray *xa = &pool->xa;
drivers/infiniband/sw/rxe/rxe_pool.c
231
if (pool->cleanup)
drivers/infiniband/sw/rxe/rxe_pool.c
232
pool->cleanup(elem);
drivers/infiniband/sw/rxe/rxe_pool.c
234
atomic_dec(&pool->num_elem);
drivers/infiniband/sw/rxe/rxe_pool.c
253
xa_ret = xa_store(&elem->pool->xa, elem->index, elem, GFP_KERNEL);
drivers/infiniband/sw/rxe/rxe_pool.c
92
void rxe_pool_init(struct rxe_dev *rxe, struct rxe_pool *pool,
drivers/infiniband/sw/rxe/rxe_pool.c
97
memset(pool, 0, sizeof(*pool));
drivers/infiniband/sw/rxe/rxe_pool.c
99
pool->rxe = rxe;
drivers/infiniband/sw/rxe/rxe_pool.h
23
struct rxe_pool *pool;
drivers/infiniband/sw/rxe/rxe_pool.h
51
void rxe_pool_init(struct rxe_dev *rxe, struct rxe_pool *pool,
drivers/infiniband/sw/rxe/rxe_pool.h
55
void rxe_pool_cleanup(struct rxe_pool *pool);
drivers/infiniband/sw/rxe/rxe_pool.h
58
int __rxe_add_to_pool(struct rxe_pool *pool, struct rxe_pool_elem *elem,
drivers/infiniband/sw/rxe/rxe_pool.h
60
#define rxe_add_to_pool(pool, obj) __rxe_add_to_pool(pool, &(obj)->elem, true)
drivers/infiniband/sw/rxe/rxe_pool.h
61
#define rxe_add_to_pool_ah(pool, obj, sleepable) __rxe_add_to_pool(pool, \
drivers/infiniband/sw/rxe/rxe_pool.h
65
void *rxe_pool_get_index(struct rxe_pool *pool, u32 index);
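Note: the rxe_pool.c hits show an xarray-backed index pool: indices are reserved cyclically within [limit.min, limit.max] with a NULL entry, and the object is published only later via xa_store() (source line 253). A fragment of the reservation step as the hits at source lines 141-149 suggest, with the rollback on failure inferred:

	err = xa_alloc_cyclic(&pool->xa, &elem->index, NULL, pool->limit,
			      &pool->next, gfp_flags);
	if (err < 0)
		atomic_dec(&pool->num_elem);	/* undo the count taken earlier */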
drivers/infiniband/ulp/rtrs/rtrs-pri.h
340
struct rtrs_rdma_dev_pd *pool);
drivers/infiniband/ulp/rtrs/rtrs-pri.h
341
void rtrs_rdma_dev_pd_deinit(struct rtrs_rdma_dev_pd *pool);
drivers/infiniband/ulp/rtrs/rtrs-pri.h
344
struct rtrs_rdma_dev_pd *pool);
drivers/infiniband/ulp/rtrs/rtrs-pri.h
87
struct rtrs_rdma_dev_pd *pool;
drivers/infiniband/ulp/rtrs/rtrs.c
563
struct rtrs_rdma_dev_pd *pool)
drivers/infiniband/ulp/rtrs/rtrs.c
565
INIT_LIST_HEAD(&pool->list);
drivers/infiniband/ulp/rtrs/rtrs.c
566
mutex_init(&pool->mutex);
drivers/infiniband/ulp/rtrs/rtrs.c
567
pool->pd_flags = pd_flags;
drivers/infiniband/ulp/rtrs/rtrs.c
571
void rtrs_rdma_dev_pd_deinit(struct rtrs_rdma_dev_pd *pool)
drivers/infiniband/ulp/rtrs/rtrs.c
573
mutex_destroy(&pool->mutex);
drivers/infiniband/ulp/rtrs/rtrs.c
574
WARN_ON(!list_empty(&pool->list));
drivers/infiniband/ulp/rtrs/rtrs.c
580
struct rtrs_rdma_dev_pd *pool;
drivers/infiniband/ulp/rtrs/rtrs.c
584
pool = dev->pool;
drivers/infiniband/ulp/rtrs/rtrs.c
586
mutex_lock(&pool->mutex);
drivers/infiniband/ulp/rtrs/rtrs.c
588
mutex_unlock(&pool->mutex);
drivers/infiniband/ulp/rtrs/rtrs.c
590
if (pool->ops && pool->ops->deinit)
drivers/infiniband/ulp/rtrs/rtrs.c
591
pool->ops->deinit(dev);
drivers/infiniband/ulp/rtrs/rtrs.c
610
struct rtrs_rdma_dev_pd *pool)
drivers/infiniband/ulp/rtrs/rtrs.c
614
mutex_lock(&pool->mutex);
drivers/infiniband/ulp/rtrs/rtrs.c
615
list_for_each_entry(dev, &pool->list, entry) {
drivers/infiniband/ulp/rtrs/rtrs.c
620
mutex_unlock(&pool->mutex);
drivers/infiniband/ulp/rtrs/rtrs.c
626
dev->pool = pool;
drivers/infiniband/ulp/rtrs/rtrs.c
628
dev->ib_pd = ib_alloc_pd(ib_dev, pool->pd_flags);
drivers/infiniband/ulp/rtrs/rtrs.c
632
if (pool->ops && pool->ops->init && pool->ops->init(dev))
drivers/infiniband/ulp/rtrs/rtrs.c
635
mutex_lock(&pool->mutex);
drivers/infiniband/ulp/rtrs/rtrs.c
636
list_add(&dev->entry, &pool->list);
drivers/infiniband/ulp/rtrs/rtrs.c
638
mutex_unlock(&pool->mutex);
drivers/infiniband/ulp/srp/ib_srp.c
386
static void srp_destroy_fr_pool(struct srp_fr_pool *pool)
drivers/infiniband/ulp/srp/ib_srp.c
391
if (!pool)
drivers/infiniband/ulp/srp/ib_srp.c
394
for (i = 0, d = &pool->desc[0]; i < pool->size; i++, d++) {
drivers/infiniband/ulp/srp/ib_srp.c
398
kfree(pool);
drivers/infiniband/ulp/srp/ib_srp.c
412
struct srp_fr_pool *pool;
drivers/infiniband/ulp/srp/ib_srp.c
421
pool = kzalloc_flex(*pool, desc, pool_size);
drivers/infiniband/ulp/srp/ib_srp.c
422
if (!pool)
drivers/infiniband/ulp/srp/ib_srp.c
424
pool->size = pool_size;
drivers/infiniband/ulp/srp/ib_srp.c
425
pool->max_page_list_len = max_page_list_len;
drivers/infiniband/ulp/srp/ib_srp.c
426
spin_lock_init(&pool->lock);
drivers/infiniband/ulp/srp/ib_srp.c
427
INIT_LIST_HEAD(&pool->free_list);
drivers/infiniband/ulp/srp/ib_srp.c
434
for (i = 0, d = &pool->desc[0]; i < pool->size; i++, d++) {
drivers/infiniband/ulp/srp/ib_srp.c
444
list_add_tail(&d->entry, &pool->free_list);
drivers/infiniband/ulp/srp/ib_srp.c
448
return pool;
drivers/infiniband/ulp/srp/ib_srp.c
451
srp_destroy_fr_pool(pool);
drivers/infiniband/ulp/srp/ib_srp.c
454
pool = ERR_PTR(ret);
drivers/infiniband/ulp/srp/ib_srp.c
462
static struct srp_fr_desc *srp_fr_pool_get(struct srp_fr_pool *pool)
drivers/infiniband/ulp/srp/ib_srp.c
467
spin_lock_irqsave(&pool->lock, flags);
drivers/infiniband/ulp/srp/ib_srp.c
468
if (!list_empty(&pool->free_list)) {
drivers/infiniband/ulp/srp/ib_srp.c
469
d = list_first_entry(&pool->free_list, typeof(*d), entry);
drivers/infiniband/ulp/srp/ib_srp.c
472
spin_unlock_irqrestore(&pool->lock, flags);
drivers/infiniband/ulp/srp/ib_srp.c
486
static void srp_fr_pool_put(struct srp_fr_pool *pool, struct srp_fr_desc **desc,
drivers/infiniband/ulp/srp/ib_srp.c
492
spin_lock_irqsave(&pool->lock, flags);
drivers/infiniband/ulp/srp/ib_srp.c
494
list_add(&desc[i]->entry, &pool->free_list);
drivers/infiniband/ulp/srp/ib_srp.c
495
spin_unlock_irqrestore(&pool->lock, flags);
drivers/md/bcache/bset.c
1180
mempool_exit(&state->pool);
drivers/md/bcache/bset.c
1191
return mempool_init_page_pool(&state->pool, 1, page_order);
drivers/md/bcache/bset.c
1248
outp = mempool_alloc(&state->pool, GFP_NOIO);
drivers/md/bcache/bset.c
1282
mempool_free(virt_to_page(out), &state->pool);
drivers/md/bcache/bset.h
376
mempool_t pool;
drivers/md/dm-io.c
129
mempool_free(io, &io->client->pool);
drivers/md/dm-io.c
26
mempool_t pool;
drivers/md/dm-io.c
419
io = mempool_alloc(&client->pool, GFP_NOIO);
drivers/md/dm-io.c
59
ret = mempool_init_slab_pool(&client->pool, min_ios, _dm_io_cache);
drivers/md/dm-io.c
70
mempool_exit(&client->pool);
drivers/md/dm-io.c
78
mempool_exit(&client->pool);
drivers/md/dm-thin.c
1000
cell_error(pool, m->cell);
drivers/md/dm-thin.c
1011
metadata_operation_failed(pool, "dm_thin_insert_block", r);
drivers/md/dm-thin.c
1012
cell_error(pool, m->cell);
drivers/md/dm-thin.c
1026
inc_all_io_entry(tc->pool, m->cell->holder);
drivers/md/dm-thin.c
1033
mempool_free(m, &pool->mapping_pool);
drivers/md/dm-thin.c
1044
mempool_free(m, &tc->pool->mapping_pool);
drivers/md/dm-thin.c
1066
metadata_operation_failed(tc->pool, "dm_thin_remove_range", r);
drivers/md/dm-thin.c
1072
mempool_free(m, &tc->pool->mapping_pool);
drivers/md/dm-thin.c
1087
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1095
r = dm_pool_block_is_shared(pool->pmd, b, &shared);
drivers/md/dm-thin.c
1108
r = dm_pool_block_is_shared(pool->pmd, e, &shared);
drivers/md/dm-thin.c
1127
struct pool *pool = m->tc->pool;
drivers/md/dm-thin.c
1129
spin_lock_irqsave(&pool->lock, flags);
drivers/md/dm-thin.c
1130
list_add_tail(&m->list, &pool->prepared_discards_pt2);
drivers/md/dm-thin.c
1131
spin_unlock_irqrestore(&pool->lock, flags);
drivers/md/dm-thin.c
1132
wake_worker(pool);
drivers/md/dm-thin.c
1149
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1160
metadata_operation_failed(pool, "dm_thin_remove_range", r);
drivers/md/dm-thin.c
1163
mempool_free(m, &pool->mapping_pool);
drivers/md/dm-thin.c
1171
r = dm_pool_inc_data_range(pool->pmd, m->data_block, data_end);
drivers/md/dm-thin.c
1173
metadata_operation_failed(pool, "dm_pool_inc_data_range", r);
drivers/md/dm-thin.c
1176
mempool_free(m, &pool->mapping_pool);
drivers/md/dm-thin.c
1198
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1204
r = dm_pool_dec_data_range(pool->pmd, m->data_block,
drivers/md/dm-thin.c
1207
metadata_operation_failed(pool, "dm_pool_dec_data_range", r);
drivers/md/dm-thin.c
1213
mempool_free(m, &pool->mapping_pool);
drivers/md/dm-thin.c
1216
static void process_prepared(struct pool *pool, struct list_head *head,
drivers/md/dm-thin.c
1223
spin_lock_irq(&pool->lock);
drivers/md/dm-thin.c
1225
spin_unlock_irq(&pool->lock);
drivers/md/dm-thin.c
1234
static int io_overlaps_block(struct pool *pool, struct bio *bio)
drivers/md/dm-thin.c
1237
(pool->sectors_per_block << SECTOR_SHIFT);
drivers/md/dm-thin.c
1240
static int io_overwrites_block(struct pool *pool, struct bio *bio)
drivers/md/dm-thin.c
1243
io_overlaps_block(pool, bio);
drivers/md/dm-thin.c
1253
static int ensure_next_mapping(struct pool *pool)
drivers/md/dm-thin.c
1255
if (pool->next_mapping)
drivers/md/dm-thin.c
1258
pool->next_mapping = mempool_alloc(&pool->mapping_pool, GFP_ATOMIC);
drivers/md/dm-thin.c
1260
return pool->next_mapping ? 0 : -ENOMEM;
drivers/md/dm-thin.c
1263
static struct dm_thin_new_mapping *get_next_mapping(struct pool *pool)
drivers/md/dm-thin.c
1265
struct dm_thin_new_mapping *m = pool->next_mapping;
drivers/md/dm-thin.c
1267
BUG_ON(!pool->next_mapping);
drivers/md/dm-thin.c
1273
pool->next_mapping = NULL;
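Note: source lines 1253-1273 sketch dm-thin's one-deep pre-allocation: ensure_next_mapping() stocks pool->next_mapping ahead of time, and get_next_mapping() consumes it without allocating in the critical path. Reconstructed from the hits; the early return and the hand-off comment are inferred:

static int ensure_next_mapping(struct pool *pool)
{
	if (pool->next_mapping)
		return 0;	/* already stocked */

	pool->next_mapping = mempool_alloc(&pool->mapping_pool, GFP_ATOMIC);

	return pool->next_mapping ? 0 : -ENOMEM;
}

static struct dm_thin_new_mapping *get_next_mapping(struct pool *pool)
{
	struct dm_thin_new_mapping *m = pool->next_mapping;

	BUG_ON(!pool->next_mapping);
	pool->next_mapping = NULL;	/* caller now owns m */

	return m;
}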
drivers/md/dm-thin.c
1287
dm_kcopyd_zero(tc->pool->copier, 1, &to, 0, copy_complete, m);
drivers/md/dm-thin.c
1294
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1300
inc_all_io_entry(pool, bio);
drivers/md/dm-thin.c
1313
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1314
struct dm_thin_new_mapping *m = get_next_mapping(pool);
drivers/md/dm-thin.c
1329
if (!dm_deferred_set_add_work(pool->shared_read_ds, &m->list))
drivers/md/dm-thin.c
1338
if (io_overwrites_block(pool, bio))
drivers/md/dm-thin.c
1344
from.sector = data_origin * pool->sectors_per_block;
drivers/md/dm-thin.c
1348
to.sector = data_dest * pool->sectors_per_block;
drivers/md/dm-thin.c
1351
dm_kcopyd_copy(pool->copier, &from, 1, &to,
drivers/md/dm-thin.c
1357
if (len < pool->sectors_per_block && pool->pf.zero_new_blocks) {
drivers/md/dm-thin.c
1360
data_dest * pool->sectors_per_block + len,
drivers/md/dm-thin.c
1361
(data_dest + 1) * pool->sectors_per_block);
drivers/md/dm-thin.c
1374
tc->pool->sectors_per_block);
drivers/md/dm-thin.c
1381
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1382
struct dm_thin_new_mapping *m = get_next_mapping(pool);
drivers/md/dm-thin.c
1396
if (pool->pf.zero_new_blocks) {
drivers/md/dm-thin.c
1397
if (io_overwrites_block(pool, bio))
drivers/md/dm-thin.c
1400
ll_zero(tc, m, data_block * pool->sectors_per_block,
drivers/md/dm-thin.c
1401
(data_block + 1) * pool->sectors_per_block);
drivers/md/dm-thin.c
1411
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1412
sector_t virt_block_begin = virt_block * pool->sectors_per_block;
drivers/md/dm-thin.c
1413
sector_t virt_block_end = (virt_block + 1) * pool->sectors_per_block;
drivers/md/dm-thin.c
1418
pool->sectors_per_block);
drivers/md/dm-thin.c
1429
static void set_pool_mode(struct pool *pool, enum pool_mode new_mode);
drivers/md/dm-thin.c
1431
static void requeue_bios(struct pool *pool);
drivers/md/dm-thin.c
1438
static bool is_read_only(struct pool *pool)
drivers/md/dm-thin.c
1440
return is_read_only_pool_mode(get_pool_mode(pool));
drivers/md/dm-thin.c
1443
static void check_for_metadata_space(struct pool *pool)
drivers/md/dm-thin.c
1449
r = dm_pool_get_free_metadata_block_count(pool->pmd, &nr_free);
drivers/md/dm-thin.c
1455
if (ooms_reason && !is_read_only(pool)) {
drivers/md/dm-thin.c
1457
set_pool_mode(pool, PM_OUT_OF_METADATA_SPACE);
drivers/md/dm-thin.c
1461
static void check_for_data_space(struct pool *pool)
drivers/md/dm-thin.c
1466
if (get_pool_mode(pool) != PM_OUT_OF_DATA_SPACE)
drivers/md/dm-thin.c
1469
r = dm_pool_get_free_block_count(pool->pmd, &nr_free);
drivers/md/dm-thin.c
1474
set_pool_mode(pool, PM_WRITE);
drivers/md/dm-thin.c
1475
requeue_bios(pool);
drivers/md/dm-thin.c
1483
static int commit(struct pool *pool)
drivers/md/dm-thin.c
1487
if (get_pool_mode(pool) >= PM_OUT_OF_METADATA_SPACE)
drivers/md/dm-thin.c
1490
r = dm_pool_commit_metadata(pool->pmd);
drivers/md/dm-thin.c
1492
metadata_operation_failed(pool, "dm_pool_commit_metadata", r);
drivers/md/dm-thin.c
1494
check_for_metadata_space(pool);
drivers/md/dm-thin.c
1495
check_for_data_space(pool);
drivers/md/dm-thin.c
1501
static void check_low_water_mark(struct pool *pool, dm_block_t free_blocks)
drivers/md/dm-thin.c
1503
if (free_blocks <= pool->low_water_blocks && !pool->low_water_triggered) {
drivers/md/dm-thin.c
1505
dm_device_name(pool->pool_md));
drivers/md/dm-thin.c
1506
spin_lock_irq(&pool->lock);
drivers/md/dm-thin.c
1507
pool->low_water_triggered = true;
drivers/md/dm-thin.c
1508
spin_unlock_irq(&pool->lock);
drivers/md/dm-thin.c
1509
dm_table_event(pool->ti->table);
drivers/md/dm-thin.c
1517
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1519
if (WARN_ON(get_pool_mode(pool) != PM_WRITE))
drivers/md/dm-thin.c
1522
r = dm_pool_get_free_block_count(pool->pmd, &free_blocks);
drivers/md/dm-thin.c
1524
metadata_operation_failed(pool, "dm_pool_get_free_block_count", r);
drivers/md/dm-thin.c
1528
check_low_water_mark(pool, free_blocks);
drivers/md/dm-thin.c
1535
r = commit(pool);
drivers/md/dm-thin.c
1539
r = dm_pool_get_free_block_count(pool->pmd, &free_blocks);
drivers/md/dm-thin.c
1541
metadata_operation_failed(pool, "dm_pool_get_free_block_count", r);
drivers/md/dm-thin.c
1546
set_pool_mode(pool, PM_OUT_OF_DATA_SPACE);
drivers/md/dm-thin.c
1551
r = dm_pool_alloc_data_block(pool->pmd, result);
drivers/md/dm-thin.c
1554
set_pool_mode(pool, PM_OUT_OF_DATA_SPACE);
drivers/md/dm-thin.c
1556
metadata_operation_failed(pool, "dm_pool_alloc_data_block", r);
drivers/md/dm-thin.c
1560
r = dm_pool_get_free_metadata_block_count(pool->pmd, &free_blocks);
drivers/md/dm-thin.c
1562
metadata_operation_failed(pool, "dm_pool_get_free_metadata_block_count", r);
drivers/md/dm-thin.c
1568
r = commit(pool);
drivers/md/dm-thin.c
1590
static blk_status_t should_error_unserviceable_bio(struct pool *pool)
drivers/md/dm-thin.c
1592
enum pool_mode m = get_pool_mode(pool);
drivers/md/dm-thin.c
1601
return pool->pf.error_if_no_space ? BLK_STS_NOSPC : 0;
drivers/md/dm-thin.c
1614
static void handle_unserviceable_bio(struct pool *pool, struct bio *bio)
drivers/md/dm-thin.c
1616
blk_status_t error = should_error_unserviceable_bio(pool);
drivers/md/dm-thin.c
1625
static void retry_bios_on_resume(struct pool *pool, struct dm_bio_prison_cell *cell)
drivers/md/dm-thin.c
1631
error = should_error_unserviceable_bio(pool);
drivers/md/dm-thin.c
1633
cell_error_with_code(pool, cell, error);
drivers/md/dm-thin.c
1638
cell_release(pool, cell, &bios);
drivers/md/dm-thin.c
1647
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1648
struct dm_thin_new_mapping *m = get_next_mapping(pool);
drivers/md/dm-thin.c
1660
if (!dm_deferred_set_add_work(pool->all_io_ds, &m->list))
drivers/md/dm-thin.c
1661
pool->process_prepared_discard(m);
drivers/md/dm-thin.c
1667
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1694
r = ensure_next_mapping(pool);
drivers/md/dm-thin.c
1704
if (bio_detain(tc->pool, &data_key, NULL, &data_cell)) {
drivers/md/dm-thin.c
1714
m = get_next_mapping(pool);
drivers/md/dm-thin.c
1732
if (!dm_deferred_set_add_work(pool->all_io_ds, &m->list))
drivers/md/dm-thin.c
1733
pool->process_prepared_discard(m);
drivers/md/dm-thin.c
1785
if (bio_detain(tc->pool, &virt_key, bio, &virt_cell)) {
drivers/md/dm-thin.c
1796
tc->pool->process_discard_cell(tc, virt_cell);
drivers/md/dm-thin.c
1806
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1816
retry_bios_on_resume(pool, cell);
drivers/md/dm-thin.c
1822
cell_error(pool, cell);
drivers/md/dm-thin.c
1840
h->shared_read_entry = dm_deferred_entry_inc(info->tc->pool->shared_read_ds);
drivers/md/dm-thin.c
1841
inc_all_io_entry(info->tc->pool, bio);
drivers/md/dm-thin.c
1858
cell_visit_release(tc->pool, __remap_and_issue_shared_cell,
drivers/md/dm-thin.c
1874
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1882
if (bio_detain(pool, &key, bio, &data_cell)) {
drivers/md/dm-thin.c
1893
h->shared_read_entry = dm_deferred_entry_inc(pool->shared_read_ds);
drivers/md/dm-thin.c
1894
inc_all_io_entry(pool, bio);
drivers/md/dm-thin.c
1907
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1913
inc_all_io_entry(pool, bio);
drivers/md/dm-thin.c
1940
retry_bios_on_resume(pool, cell);
drivers/md/dm-thin.c
1946
cell_error(pool, cell);
drivers/md/dm-thin.c
1954
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
1960
cell_requeue(pool, cell);
drivers/md/dm-thin.c
1970
inc_all_io_entry(pool, bio);
drivers/md/dm-thin.c
1978
inc_all_io_entry(pool, bio);
drivers/md/dm-thin.c
2008
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
2018
if (bio_detain(pool, &key, bio, &cell))
drivers/md/dm-thin.c
2036
handle_unserviceable_bio(tc->pool, bio);
drivers/md/dm-thin.c
2040
inc_all_io_entry(tc->pool, bio);
drivers/md/dm-thin.c
2051
handle_unserviceable_bio(tc->pool, bio);
drivers/md/dm-thin.c
2056
inc_all_io_entry(tc->pool, bio);
drivers/md/dm-thin.c
2097
cell_success(tc->pool, cell);
drivers/md/dm-thin.c
2102
cell_error(tc->pool, cell);
drivers/md/dm-thin.c
2109
static int need_commit_due_to_time(struct pool *pool)
drivers/md/dm-thin.c
2111
return !time_in_range(jiffies, pool->last_commit_jiffies,
drivers/md/dm-thin.c
2112
pool->last_commit_jiffies + COMMIT_PERIOD);
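Note: the hit split across source lines 2109-2112 reassembles into a one-line periodic-commit predicate: commit once jiffies has left the window that opened at the last commit:

static int need_commit_due_to_time(struct pool *pool)
{
	/* true once COMMIT_PERIOD jiffies have elapsed (or jiffies has
	 * wrapped out of the window) since last_commit_jiffies */
	return !time_in_range(jiffies, pool->last_commit_jiffies,
			      pool->last_commit_jiffies + COMMIT_PERIOD);
}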
drivers/md/dm-thin.c
2181
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
2216
if (ensure_next_mapping(pool)) {
drivers/md/dm-thin.c
2225
pool->process_discard(tc, bio);
drivers/md/dm-thin.c
2227
pool->process_bio(tc, bio);
drivers/md/dm-thin.c
2230
throttle_work_update(&pool->throttle);
drivers/md/dm-thin.c
2231
dm_pool_issue_prefetches(pool->pmd);
drivers/md/dm-thin.c
2255
static unsigned int sort_cells(struct pool *pool, struct list_head *cells)
drivers/md/dm-thin.c
2264
pool->cell_sort_array[count++] = cell;
drivers/md/dm-thin.c
2268
sort(pool->cell_sort_array, count, sizeof(cell), cmp_cells, NULL);
drivers/md/dm-thin.c
2275
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
2290
count = sort_cells(tc->pool, &cells);
drivers/md/dm-thin.c
2293
cell = pool->cell_sort_array[i];
drivers/md/dm-thin.c
2301
if (ensure_next_mapping(pool)) {
drivers/md/dm-thin.c
2303
list_add(&pool->cell_sort_array[j]->user_list, &cells);
drivers/md/dm-thin.c
2312
pool->process_discard_cell(tc, cell);
drivers/md/dm-thin.c
2314
pool->process_cell(tc, cell);
drivers/md/dm-thin.c
2328
static struct thin_c *get_first_thin(struct pool *pool)
drivers/md/dm-thin.c
2333
tc = list_first_or_null_rcu(&pool->active_thins, struct thin_c, list);
drivers/md/dm-thin.c
2341
static struct thin_c *get_next_thin(struct pool *pool, struct thin_c *tc)
drivers/md/dm-thin.c
2346
list_for_each_entry_continue_rcu(tc, &pool->active_thins, list) {
drivers/md/dm-thin.c
2358
static void process_deferred_bios(struct pool *pool)
drivers/md/dm-thin.c
2364
tc = get_first_thin(pool);
drivers/md/dm-thin.c
2368
tc = get_next_thin(pool, tc);
drivers/md/dm-thin.c
2378
spin_lock_irq(&pool->lock);
drivers/md/dm-thin.c
2379
bio_list_merge(&bios, &pool->deferred_flush_bios);
drivers/md/dm-thin.c
2380
bio_list_init(&pool->deferred_flush_bios);
drivers/md/dm-thin.c
2382
bio_list_merge(&bio_completions, &pool->deferred_flush_completions);
drivers/md/dm-thin.c
2383
bio_list_init(&pool->deferred_flush_completions);
drivers/md/dm-thin.c
2384
spin_unlock_irq(&pool->lock);
drivers/md/dm-thin.c
2387
!(dm_pool_changed_this_transaction(pool->pmd) && need_commit_due_to_time(pool)))
drivers/md/dm-thin.c
2390
if (commit(pool)) {
drivers/md/dm-thin.c
2397
pool->last_commit_jiffies = jiffies;
drivers/md/dm-thin.c
2416
struct pool *pool = container_of(ws, struct pool, worker);
drivers/md/dm-thin.c
2418
throttle_work_start(&pool->throttle);
drivers/md/dm-thin.c
2419
dm_pool_issue_prefetches(pool->pmd);
drivers/md/dm-thin.c
2420
throttle_work_update(&pool->throttle);
drivers/md/dm-thin.c
2421
process_prepared(pool, &pool->prepared_mappings, &pool->process_prepared_mapping);
drivers/md/dm-thin.c
2422
throttle_work_update(&pool->throttle);
drivers/md/dm-thin.c
2423
process_prepared(pool, &pool->prepared_discards, &pool->process_prepared_discard);
drivers/md/dm-thin.c
2424
throttle_work_update(&pool->throttle);
drivers/md/dm-thin.c
2425
process_prepared(pool, &pool->prepared_discards_pt2, &pool->process_prepared_discard_pt2);
drivers/md/dm-thin.c
2426
throttle_work_update(&pool->throttle);
drivers/md/dm-thin.c
2427
process_deferred_bios(pool);
drivers/md/dm-thin.c
2428
throttle_work_complete(&pool->throttle);
drivers/md/dm-thin.c
2437
struct pool *pool = container_of(to_delayed_work(ws), struct pool, waker);
drivers/md/dm-thin.c
2439
wake_worker(pool);
drivers/md/dm-thin.c
2440
queue_delayed_work(pool->wq, &pool->waker, COMMIT_PERIOD);
drivers/md/dm-thin.c
2450
struct pool *pool = container_of(to_delayed_work(ws), struct pool,
drivers/md/dm-thin.c
2453
if (get_pool_mode(pool) == PM_OUT_OF_DATA_SPACE && !pool->pf.error_if_no_space) {
drivers/md/dm-thin.c
2454
pool->pf.error_if_no_space = true;
drivers/md/dm-thin.c
2455
notify_of_pool_mode_change(pool);
drivers/md/dm-thin.c
2456
error_retry_list_with_code(pool, BLK_STS_NOSPC);
drivers/md/dm-thin.c
2477
static void pool_work_wait(struct pool_work *pw, struct pool *pool,
drivers/md/dm-thin.c
2482
queue_work(pool->wq, &pw->worker);
drivers/md/dm-thin.c
2521
pool_work_wait(&w.pw, tc->pool, fn);
drivers/md/dm-thin.c
2526
static void set_discard_callbacks(struct pool *pool)
drivers/md/dm-thin.c
2528
struct pool_c *pt = pool->ti->private;
drivers/md/dm-thin.c
2531
pool->process_discard_cell = process_discard_cell_passdown;
drivers/md/dm-thin.c
2532
pool->process_prepared_discard = process_prepared_discard_passdown_pt1;
drivers/md/dm-thin.c
2533
pool->process_prepared_discard_pt2 = process_prepared_discard_passdown_pt2;
drivers/md/dm-thin.c
2535
pool->process_discard_cell = process_discard_cell_no_passdown;
drivers/md/dm-thin.c
2536
pool->process_prepared_discard = process_prepared_discard_no_passdown;
drivers/md/dm-thin.c
2540
static void set_pool_mode(struct pool *pool, enum pool_mode new_mode)
drivers/md/dm-thin.c
2542
struct pool_c *pt = pool->ti->private;
drivers/md/dm-thin.c
2543
bool needs_check = dm_pool_metadata_needs_check(pool->pmd);
drivers/md/dm-thin.c
2544
enum pool_mode old_mode = get_pool_mode(pool);
drivers/md/dm-thin.c
2553
dm_device_name(pool->pool_md));
drivers/md/dm-thin.c
2569
dm_pool_metadata_read_only(pool->pmd);
drivers/md/dm-thin.c
2570
pool->process_bio = process_bio_fail;
drivers/md/dm-thin.c
2571
pool->process_discard = process_bio_fail;
drivers/md/dm-thin.c
2572
pool->process_cell = process_cell_fail;
drivers/md/dm-thin.c
2573
pool->process_discard_cell = process_cell_fail;
drivers/md/dm-thin.c
2574
pool->process_prepared_mapping = process_prepared_mapping_fail;
drivers/md/dm-thin.c
2575
pool->process_prepared_discard = process_prepared_discard_fail;
drivers/md/dm-thin.c
2577
error_retry_list(pool);
drivers/md/dm-thin.c
2582
dm_pool_metadata_read_only(pool->pmd);
drivers/md/dm-thin.c
2583
pool->process_bio = process_bio_read_only;
drivers/md/dm-thin.c
2584
pool->process_discard = process_bio_success;
drivers/md/dm-thin.c
2585
pool->process_cell = process_cell_read_only;
drivers/md/dm-thin.c
2586
pool->process_discard_cell = process_cell_success;
drivers/md/dm-thin.c
2587
pool->process_prepared_mapping = process_prepared_mapping_fail;
drivers/md/dm-thin.c
2588
pool->process_prepared_discard = process_prepared_discard_success;
drivers/md/dm-thin.c
2590
error_retry_list(pool);
drivers/md/dm-thin.c
2602
pool->out_of_data_space = true;
drivers/md/dm-thin.c
2603
pool->process_bio = process_bio_read_only;
drivers/md/dm-thin.c
2604
pool->process_discard = process_discard_bio;
drivers/md/dm-thin.c
2605
pool->process_cell = process_cell_read_only;
drivers/md/dm-thin.c
2606
pool->process_prepared_mapping = process_prepared_mapping;
drivers/md/dm-thin.c
2607
set_discard_callbacks(pool);
drivers/md/dm-thin.c
2609
if (!pool->pf.error_if_no_space && no_space_timeout)
drivers/md/dm-thin.c
2610
queue_delayed_work(pool->wq, &pool->no_space_timeout, no_space_timeout);
drivers/md/dm-thin.c
2615
cancel_delayed_work_sync(&pool->no_space_timeout);
drivers/md/dm-thin.c
2616
pool->out_of_data_space = false;
drivers/md/dm-thin.c
2617
pool->pf.error_if_no_space = pt->requested_pf.error_if_no_space;
drivers/md/dm-thin.c
2618
dm_pool_metadata_read_write(pool->pmd);
drivers/md/dm-thin.c
2619
pool->process_bio = process_bio;
drivers/md/dm-thin.c
2620
pool->process_discard = process_discard_bio;
drivers/md/dm-thin.c
2621
pool->process_cell = process_cell;
drivers/md/dm-thin.c
2622
pool->process_prepared_mapping = process_prepared_mapping;
drivers/md/dm-thin.c
2623
set_discard_callbacks(pool);
drivers/md/dm-thin.c
2627
pool->pf.mode = new_mode;
drivers/md/dm-thin.c
2635
notify_of_pool_mode_change(pool);
drivers/md/dm-thin.c
2638
static void abort_transaction(struct pool *pool)
drivers/md/dm-thin.c
2640
const char *dev_name = dm_device_name(pool->pool_md);
drivers/md/dm-thin.c
2643
if (dm_pool_abort_metadata(pool->pmd)) {
drivers/md/dm-thin.c
2645
set_pool_mode(pool, PM_FAIL);
drivers/md/dm-thin.c
2648
if (dm_pool_metadata_set_needs_check(pool->pmd)) {
drivers/md/dm-thin.c
2650
set_pool_mode(pool, PM_FAIL);
drivers/md/dm-thin.c
2654
static void metadata_operation_failed(struct pool *pool, const char *op, int r)
drivers/md/dm-thin.c
2657
dm_device_name(pool->pool_md), op, r);
drivers/md/dm-thin.c
2659
abort_transaction(pool);
drivers/md/dm-thin.c
2660
set_pool_mode(pool, PM_READ_ONLY);
drivers/md/dm-thin.c
2674
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
2680
wake_worker(pool);
drivers/md/dm-thin.c
2685
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
2687
throttle_lock(&pool->throttle);
drivers/md/dm-thin.c
2689
throttle_unlock(&pool->throttle);
drivers/md/dm-thin.c
2694
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
2696
throttle_lock(&pool->throttle);
drivers/md/dm-thin.c
2700
throttle_unlock(&pool->throttle);
drivers/md/dm-thin.c
2702
wake_worker(pool);
drivers/md/dm-thin.c
2737
if (get_pool_mode(tc->pool) == PM_FAIL) {
drivers/md/dm-thin.c
2752
if (bio_detain(tc->pool, &key, bio, &virt_cell))
drivers/md/dm-thin.c
2782
if (bio_detain(tc->pool, &key, bio, &data_cell)) {
drivers/md/dm-thin.c
2787
inc_all_io_entry(tc->pool, bio);
drivers/md/dm-thin.c
2811
static void requeue_bios(struct pool *pool)
drivers/md/dm-thin.c
2816
list_for_each_entry_rcu(tc, &pool->active_thins, list) {
drivers/md/dm-thin.c
2841
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
2852
else if (data_limits->max_discard_sectors < pool->sectors_per_block)
drivers/md/dm-thin.c
2861
static int bind_control_target(struct pool *pool, struct dm_target *ti)
drivers/md/dm-thin.c
2868
enum pool_mode old_mode = get_pool_mode(pool);
drivers/md/dm-thin.c
2878
pool->ti = ti;
drivers/md/dm-thin.c
2879
pool->pf = pt->adjusted_pf;
drivers/md/dm-thin.c
2880
pool->low_water_blocks = pt->low_water_blocks;
drivers/md/dm-thin.c
2882
set_pool_mode(pool, new_mode);
drivers/md/dm-thin.c
2887
static void unbind_control_target(struct pool *pool, struct dm_target *ti)
drivers/md/dm-thin.c
2889
if (pool->ti == ti)
drivers/md/dm-thin.c
2890
pool->ti = NULL;
drivers/md/dm-thin.c
290
static void metadata_operation_failed(struct pool *pool, const char *op, int r);
drivers/md/dm-thin.c
2908
static void __pool_destroy(struct pool *pool)
drivers/md/dm-thin.c
2910
__pool_table_remove(pool);
drivers/md/dm-thin.c
2912
vfree(pool->cell_sort_array);
drivers/md/dm-thin.c
2913
if (dm_pool_metadata_close(pool->pmd) < 0)
drivers/md/dm-thin.c
2916
dm_bio_prison_destroy(pool->prison);
drivers/md/dm-thin.c
2917
dm_kcopyd_client_destroy(pool->copier);
drivers/md/dm-thin.c
2919
cancel_delayed_work_sync(&pool->waker);
drivers/md/dm-thin.c
292
static enum pool_mode get_pool_mode(struct pool *pool)
drivers/md/dm-thin.c
2920
cancel_delayed_work_sync(&pool->no_space_timeout);
drivers/md/dm-thin.c
2921
if (pool->wq)
drivers/md/dm-thin.c
2922
destroy_workqueue(pool->wq);
drivers/md/dm-thin.c
2924
if (pool->next_mapping)
drivers/md/dm-thin.c
2925
mempool_free(pool->next_mapping, &pool->mapping_pool);
drivers/md/dm-thin.c
2926
mempool_exit(&pool->mapping_pool);
drivers/md/dm-thin.c
2927
dm_deferred_set_destroy(pool->shared_read_ds);
drivers/md/dm-thin.c
2928
dm_deferred_set_destroy(pool->all_io_ds);
drivers/md/dm-thin.c
2929
kfree(pool);
drivers/md/dm-thin.c
2934
static struct pool *pool_create(struct mapped_device *pool_md,
drivers/md/dm-thin.c
294
return pool->pf.mode;
drivers/md/dm-thin.c
2942
struct pool *pool;
drivers/md/dm-thin.c
2952
pool = kzalloc_obj(*pool);
drivers/md/dm-thin.c
2953
if (!pool) {
drivers/md/dm-thin.c
2959
pool->pmd = pmd;
drivers/md/dm-thin.c
2960
pool->sectors_per_block = block_size;
drivers/md/dm-thin.c
2962
pool->sectors_per_block_shift = -1;
drivers/md/dm-thin.c
2964
pool->sectors_per_block_shift = __ffs(block_size);
drivers/md/dm-thin.c
2965
pool->low_water_blocks = 0;
drivers/md/dm-thin.c
2966
pool_features_init(&pool->pf);
drivers/md/dm-thin.c
2967
pool->prison = dm_bio_prison_create();
drivers/md/dm-thin.c
2968
if (!pool->prison) {
drivers/md/dm-thin.c
297
static void notify_of_pool_mode_change(struct pool *pool)
drivers/md/dm-thin.c
2974
pool->copier = dm_kcopyd_client_create(&dm_kcopyd_throttle);
drivers/md/dm-thin.c
2975
if (IS_ERR(pool->copier)) {
drivers/md/dm-thin.c
2976
r = PTR_ERR(pool->copier);
drivers/md/dm-thin.c
2986
pool->wq = alloc_ordered_workqueue("dm-" DM_MSG_PREFIX, WQ_MEM_RECLAIM);
drivers/md/dm-thin.c
2987
if (!pool->wq) {
drivers/md/dm-thin.c
2993
throttle_init(&pool->throttle);
drivers/md/dm-thin.c
2994
INIT_WORK(&pool->worker, do_worker);
drivers/md/dm-thin.c
2995
INIT_DELAYED_WORK(&pool->waker, do_waker);
drivers/md/dm-thin.c
2996
INIT_DELAYED_WORK(&pool->no_space_timeout, do_no_space_timeout);
drivers/md/dm-thin.c
2997
spin_lock_init(&pool->lock);
drivers/md/dm-thin.c
2998
bio_list_init(&pool->deferred_flush_bios);
drivers/md/dm-thin.c
2999
bio_list_init(&pool->deferred_flush_completions);
drivers/md/dm-thin.c
3000
INIT_LIST_HEAD(&pool->prepared_mappings);
drivers/md/dm-thin.c
3001
INIT_LIST_HEAD(&pool->prepared_discards);
drivers/md/dm-thin.c
3002
INIT_LIST_HEAD(&pool->prepared_discards_pt2);
drivers/md/dm-thin.c
3003
INIT_LIST_HEAD(&pool->active_thins);
drivers/md/dm-thin.c
3004
pool->low_water_triggered = false;
drivers/md/dm-thin.c
3005
pool->suspended = true;
drivers/md/dm-thin.c
3006
pool->out_of_data_space = false;
drivers/md/dm-thin.c
3008
pool->shared_read_ds = dm_deferred_set_create();
drivers/md/dm-thin.c
3009
if (!pool->shared_read_ds) {
drivers/md/dm-thin.c
3015
pool->all_io_ds = dm_deferred_set_create();
drivers/md/dm-thin.c
3016
if (!pool->all_io_ds) {
drivers/md/dm-thin.c
3022
pool->next_mapping = NULL;
drivers/md/dm-thin.c
3023
r = mempool_init_slab_pool(&pool->mapping_pool, MAPPING_POOL_SIZE,
drivers/md/dm-thin.c
3031
pool->cell_sort_array =
drivers/md/dm-thin.c
3033
sizeof(*pool->cell_sort_array));
drivers/md/dm-thin.c
3034
if (!pool->cell_sort_array) {
drivers/md/dm-thin.c
3040
pool->ref_count = 1;
drivers/md/dm-thin.c
3041
pool->last_commit_jiffies = jiffies;
drivers/md/dm-thin.c
3042
pool->pool_md = pool_md;
drivers/md/dm-thin.c
3043
pool->md_dev = metadata_dev;
drivers/md/dm-thin.c
3044
pool->data_dev = data_dev;
drivers/md/dm-thin.c
3045
__pool_table_insert(pool);
drivers/md/dm-thin.c
3047
return pool;
drivers/md/dm-thin.c
3050
mempool_exit(&pool->mapping_pool);
drivers/md/dm-thin.c
3052
dm_deferred_set_destroy(pool->all_io_ds);
drivers/md/dm-thin.c
3054
dm_deferred_set_destroy(pool->shared_read_ds);
drivers/md/dm-thin.c
3056
destroy_workqueue(pool->wq);
drivers/md/dm-thin.c
3058
dm_kcopyd_client_destroy(pool->copier);
drivers/md/dm-thin.c
3060
dm_bio_prison_destroy(pool->prison);
drivers/md/dm-thin.c
3062
kfree(pool);
drivers/md/dm-thin.c
307
enum pool_mode mode = get_pool_mode(pool);
drivers/md/dm-thin.c
3070
static void __pool_inc(struct pool *pool)
drivers/md/dm-thin.c
3073
pool->ref_count++;
drivers/md/dm-thin.c
3076
static void __pool_dec(struct pool *pool)
drivers/md/dm-thin.c
3079
BUG_ON(!pool->ref_count);
drivers/md/dm-thin.c
3080
if (!--pool->ref_count)
drivers/md/dm-thin.c
3081
__pool_destroy(pool);
drivers/md/dm-thin.c
3084
static struct pool *__pool_find(struct mapped_device *pool_md,
drivers/md/dm-thin.c
3090
struct pool *pool = __pool_table_lookup_metadata_dev(metadata_dev);
drivers/md/dm-thin.c
3092
if (pool) {
drivers/md/dm-thin.c
3093
if (pool->pool_md != pool_md) {
drivers/md/dm-thin.c
3097
if (pool->data_dev != data_dev) {
drivers/md/dm-thin.c
310
if (!pool->pf.error_if_no_space)
drivers/md/dm-thin.c
3101
__pool_inc(pool);
drivers/md/dm-thin.c
3104
pool = __pool_table_lookup(pool_md);
drivers/md/dm-thin.c
3105
if (pool) {
drivers/md/dm-thin.c
3106
if (pool->md_dev != metadata_dev || pool->data_dev != data_dev) {
drivers/md/dm-thin.c
3110
__pool_inc(pool);
drivers/md/dm-thin.c
3113
pool = pool_create(pool_md, metadata_dev, data_dev, block_size, read_only, error);
drivers/md/dm-thin.c
3118
return pool;
drivers/md/dm-thin.c
3132
unbind_control_target(pt->pool, ti);
drivers/md/dm-thin.c
3133
__pool_dec(pt->pool);
drivers/md/dm-thin.c
316
dm_table_event(pool->ti->table);
drivers/md/dm-thin.c
318
dm_device_name(pool->pool_md),
drivers/md/dm-thin.c
3193
struct pool *pool = context;
drivers/md/dm-thin.c
3196
dm_device_name(pool->pool_md));
drivers/md/dm-thin.c
3198
dm_table_event(pool->ti->table);
drivers/md/dm-thin.c
3214
struct pool *pool = context;
drivers/md/dm-thin.c
3216
return blkdev_issue_flush(pool->data_dev);
drivers/md/dm-thin.c
327
struct pool *pool;
drivers/md/dm-thin.c
3287
struct pool *pool;
drivers/md/dm-thin.c
3363
pool = __pool_find(dm_table_get_md(ti->table), metadata_dev->bdev, data_dev->bdev,
drivers/md/dm-thin.c
3365
if (IS_ERR(pool)) {
drivers/md/dm-thin.c
3366
r = PTR_ERR(pool);
drivers/md/dm-thin.c
3376
if (!pool_created && pf.discard_enabled != pool->pf.discard_enabled) {
drivers/md/dm-thin.c
3382
pt->pool = pool;
drivers/md/dm-thin.c
3408
r = dm_pool_register_metadata_threshold(pt->pool->pmd,
drivers/md/dm-thin.c
3411
pool);
drivers/md/dm-thin.c
3417
dm_pool_register_pre_commit_callback(pool->pmd,
drivers/md/dm-thin.c
3418
metadata_pre_commit_callback, pool);
drivers/md/dm-thin.c
3425
__pool_dec(pool);
drivers/md/dm-thin.c
3441
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
3446
spin_lock_irq(&pool->lock);
drivers/md/dm-thin.c
3448
spin_unlock_irq(&pool->lock);
drivers/md/dm-thin.c
3457
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
346
struct pool *pool;
drivers/md/dm-thin.c
3463
(void) sector_div(data_size, pool->sectors_per_block);
drivers/md/dm-thin.c
3465
r = dm_pool_get_data_dev_size(pool->pmd, &sb_data_size);
drivers/md/dm-thin.c
3468
dm_device_name(pool->pool_md));
drivers/md/dm-thin.c
3474
dm_device_name(pool->pool_md),
drivers/md/dm-thin.c
3479
if (dm_pool_metadata_needs_check(pool->pmd)) {
drivers/md/dm-thin.c
3481
dm_device_name(pool->pool_md));
drivers/md/dm-thin.c
3487
dm_device_name(pool->pool_md),
drivers/md/dm-thin.c
3489
r = dm_pool_resize_data_dev(pool->pmd, data_size);
drivers/md/dm-thin.c
3491
metadata_operation_failed(pool, "dm_pool_resize_data_dev", r);
drivers/md/dm-thin.c
3505
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
3510
metadata_dev_size = get_metadata_dev_size_in_blocks(pool->md_dev);
drivers/md/dm-thin.c
3512
r = dm_pool_get_metadata_dev_size(pool->pmd, &sb_metadata_dev_size);
drivers/md/dm-thin.c
3515
dm_device_name(pool->pool_md));
drivers/md/dm-thin.c
3521
dm_device_name(pool->pool_md),
drivers/md/dm-thin.c
3526
if (dm_pool_metadata_needs_check(pool->pmd)) {
drivers/md/dm-thin.c
3528
dm_device_name(pool->pool_md));
drivers/md/dm-thin.c
3532
warn_if_metadata_device_too_big(pool->md_dev);
drivers/md/dm-thin.c
3534
dm_device_name(pool->pool_md),
drivers/md/dm-thin.c
3537
if (get_pool_mode(pool) == PM_OUT_OF_METADATA_SPACE)
drivers/md/dm-thin.c
3538
set_pool_mode(pool, PM_WRITE);
drivers/md/dm-thin.c
3540
r = dm_pool_resize_metadata_dev(pool->pmd, metadata_dev_size);
drivers/md/dm-thin.c
3542
metadata_operation_failed(pool, "dm_pool_resize_metadata_dev", r);
drivers/md/dm-thin.c
3568
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
3573
r = bind_control_target(pool, ti);
drivers/md/dm-thin.c
3586
(void) commit(pool);
drivers/md/dm-thin.c
3593
if (r && get_pool_mode(pool) == PM_FAIL)
drivers/md/dm-thin.c
3599
static void pool_suspend_active_thins(struct pool *pool)
drivers/md/dm-thin.c
3604
tc = get_first_thin(pool);
drivers/md/dm-thin.c
3607
tc = get_next_thin(pool, tc);
drivers/md/dm-thin.c
3611
static void pool_resume_active_thins(struct pool *pool)
drivers/md/dm-thin.c
3616
tc = get_first_thin(pool);
drivers/md/dm-thin.c
3619
tc = get_next_thin(pool, tc);
drivers/md/dm-thin.c
3626
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
3632
requeue_bios(pool);
drivers/md/dm-thin.c
3633
pool_resume_active_thins(pool);
drivers/md/dm-thin.c
3635
spin_lock_irq(&pool->lock);
drivers/md/dm-thin.c
3636
pool->low_water_triggered = false;
drivers/md/dm-thin.c
3637
pool->suspended = false;
drivers/md/dm-thin.c
3638
spin_unlock_irq(&pool->lock);
drivers/md/dm-thin.c
3640
do_waker(&pool->waker.work);
drivers/md/dm-thin.c
3646
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
3648
spin_lock_irq(&pool->lock);
drivers/md/dm-thin.c
3649
pool->suspended = true;
drivers/md/dm-thin.c
3650
spin_unlock_irq(&pool->lock);
drivers/md/dm-thin.c
3652
pool_suspend_active_thins(pool);
drivers/md/dm-thin.c
3658
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
3660
pool_resume_active_thins(pool);
drivers/md/dm-thin.c
3662
spin_lock_irq(&pool->lock);
drivers/md/dm-thin.c
3663
pool->suspended = false;
drivers/md/dm-thin.c
3664
spin_unlock_irq(&pool->lock);
drivers/md/dm-thin.c
367
static bool block_size_is_power_of_two(struct pool *pool)
drivers/md/dm-thin.c
3670
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
3672
cancel_delayed_work_sync(&pool->waker);
drivers/md/dm-thin.c
3673
cancel_delayed_work_sync(&pool->no_space_timeout);
drivers/md/dm-thin.c
3674
flush_workqueue(pool->wq);
drivers/md/dm-thin.c
3675
(void) commit(pool);
drivers/md/dm-thin.c
369
return pool->sectors_per_block_shift >= 0;
drivers/md/dm-thin.c
3701
static int process_create_thin_mesg(unsigned int argc, char **argv, struct pool *pool)
drivers/md/dm-thin.c
3714
r = dm_pool_create_thin(pool->pmd, dev_id);
drivers/md/dm-thin.c
372
static sector_t block_to_sectors(struct pool *pool, dm_block_t b)
drivers/md/dm-thin.c
3724
static int process_create_snap_mesg(unsigned int argc, char **argv, struct pool *pool)
drivers/md/dm-thin.c
374
return block_size_is_power_of_two(pool) ?
drivers/md/dm-thin.c
3742
r = dm_pool_create_snap(pool->pmd, dev_id, origin_dev_id);
drivers/md/dm-thin.c
375
(b << pool->sectors_per_block_shift) :
drivers/md/dm-thin.c
3752
static int process_delete_mesg(unsigned int argc, char **argv, struct pool *pool)
drivers/md/dm-thin.c
376
(b * pool->sectors_per_block);
drivers/md/dm-thin.c
3765
r = dm_pool_delete_thin_device(pool->pmd, dev_id);
drivers/md/dm-thin.c
3772
static int process_set_transaction_id_mesg(unsigned int argc, char **argv, struct pool *pool)
drivers/md/dm-thin.c
3791
r = dm_pool_set_metadata_transaction_id(pool->pmd, old_id, new_id);
drivers/md/dm-thin.c
3801
static int process_reserve_metadata_snap_mesg(unsigned int argc, char **argv, struct pool *pool)
drivers/md/dm-thin.c
3809
(void) commit(pool);
drivers/md/dm-thin.c
3811
r = dm_pool_reserve_metadata_snap(pool->pmd);
drivers/md/dm-thin.c
3818
static int process_release_metadata_snap_mesg(unsigned int argc, char **argv, struct pool *pool)
drivers/md/dm-thin.c
3826
r = dm_pool_release_metadata_snap(pool->pmd);
drivers/md/dm-thin.c
3847
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
3849
if (get_pool_mode(pool) >= PM_OUT_OF_METADATA_SPACE) {
drivers/md/dm-thin.c
3851
dm_device_name(pool->pool_md));
drivers/md/dm-thin.c
3856
r = process_create_thin_mesg(argc, argv, pool);
drivers/md/dm-thin.c
3859
r = process_create_snap_mesg(argc, argv, pool);
drivers/md/dm-thin.c
3862
r = process_delete_mesg(argc, argv, pool);
drivers/md/dm-thin.c
3865
r = process_set_transaction_id_mesg(argc, argv, pool);
drivers/md/dm-thin.c
3868
r = process_reserve_metadata_snap_mesg(argc, argv, pool);
drivers/md/dm-thin.c
3871
r = process_release_metadata_snap_mesg(argc, argv, pool);
drivers/md/dm-thin.c
3877
(void) commit(pool);
drivers/md/dm-thin.c
3927
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
3931
if (get_pool_mode(pool) == PM_FAIL) {
drivers/md/dm-thin.c
3938
(void) commit(pool);
drivers/md/dm-thin.c
3940
r = dm_pool_get_metadata_transaction_id(pool->pmd, &transaction_id);
drivers/md/dm-thin.c
3943
dm_device_name(pool->pool_md), r);
drivers/md/dm-thin.c
3947
r = dm_pool_get_free_metadata_block_count(pool->pmd, &nr_free_blocks_metadata);
drivers/md/dm-thin.c
3950
dm_device_name(pool->pool_md), r);
drivers/md/dm-thin.c
3954
r = dm_pool_get_metadata_dev_size(pool->pmd, &nr_blocks_metadata);
drivers/md/dm-thin.c
3957
dm_device_name(pool->pool_md), r);
drivers/md/dm-thin.c
3961
r = dm_pool_get_free_block_count(pool->pmd, &nr_free_blocks_data);
drivers/md/dm-thin.c
3964
dm_device_name(pool->pool_md), r);
drivers/md/dm-thin.c
3968
r = dm_pool_get_data_dev_size(pool->pmd, &nr_blocks_data);
drivers/md/dm-thin.c
3971
dm_device_name(pool->pool_md), r);
drivers/md/dm-thin.c
3975
r = dm_pool_get_metadata_snap(pool->pmd, &held_root);
drivers/md/dm-thin.c
3978
dm_device_name(pool->pool_md), r);
drivers/md/dm-thin.c
3994
mode = get_pool_mode(pool);
drivers/md/dm-thin.c
4002
if (!pool->pf.discard_enabled)
drivers/md/dm-thin.c
4004
else if (pool->pf.discard_passdown)
drivers/md/dm-thin.c
4009
if (pool->pf.error_if_no_space)
drivers/md/dm-thin.c
401
sector_t s = block_to_sectors(tc->pool, data_b);
drivers/md/dm-thin.c
4014
if (dm_pool_metadata_needs_check(pool->pmd))
drivers/md/dm-thin.c
402
sector_t len = block_to_sectors(tc->pool, data_e - data_b);
drivers/md/dm-thin.c
4027
(unsigned long)pool->sectors_per_block,
drivers/md/dm-thin.c
4053
struct pool *pool = pt->pool;
drivers/md/dm-thin.c
4065
if (limits->max_sectors < pool->sectors_per_block) {
drivers/md/dm-thin.c
4066
while (!is_factor(pool->sectors_per_block, limits->max_sectors)) {
drivers/md/dm-thin.c
4077
if (io_opt_sectors < pool->sectors_per_block ||
drivers/md/dm-thin.c
4078
!is_factor(io_opt_sectors, pool->sectors_per_block)) {
drivers/md/dm-thin.c
4079
if (is_factor(pool->sectors_per_block, limits->max_sectors))
drivers/md/dm-thin.c
4082
limits->io_min = pool->sectors_per_block << SECTOR_SHIFT;
drivers/md/dm-thin.c
4083
limits->io_opt = pool->sectors_per_block << SECTOR_SHIFT;
drivers/md/dm-thin.c
4149
spin_lock_irq(&tc->pool->lock);
drivers/md/dm-thin.c
4151
spin_unlock_irq(&tc->pool->lock);
drivers/md/dm-thin.c
4159
__pool_dec(tc->pool);
drivers/md/dm-thin.c
4244
tc->pool = __pool_table_lookup(pool_md);
drivers/md/dm-thin.c
4245
if (!tc->pool) {
drivers/md/dm-thin.c
4250
__pool_inc(tc->pool);
drivers/md/dm-thin.c
4252
if (get_pool_mode(tc->pool) == PM_FAIL) {
drivers/md/dm-thin.c
4258
r = dm_pool_open_thin_device(tc->pool->pmd, tc->dev_id, &tc->td);
drivers/md/dm-thin.c
4264
r = dm_set_target_max_io_len(ti, tc->pool->sectors_per_block);
drivers/md/dm-thin.c
4275
if (tc->pool->pf.discard_enabled) {
drivers/md/dm-thin.c
4283
spin_lock_irq(&tc->pool->lock);
drivers/md/dm-thin.c
4284
if (tc->pool->suspended) {
drivers/md/dm-thin.c
4285
spin_unlock_irq(&tc->pool->lock);
drivers/md/dm-thin.c
4293
list_add_tail_rcu(&tc->list, &tc->pool->active_thins);
drivers/md/dm-thin.c
4294
spin_unlock_irq(&tc->pool->lock);
drivers/md/dm-thin.c
4310
__pool_dec(tc->pool);
drivers/md/dm-thin.c
4340
struct pool *pool = h->tc->pool;
drivers/md/dm-thin.c
4346
spin_lock_irqsave(&pool->lock, flags);
drivers/md/dm-thin.c
4351
spin_unlock_irqrestore(&pool->lock, flags);
drivers/md/dm-thin.c
4358
spin_lock_irqsave(&pool->lock, flags);
drivers/md/dm-thin.c
436
static void wake_worker(struct pool *pool)
drivers/md/dm-thin.c
4360
list_add_tail(&m->list, &pool->prepared_discards);
drivers/md/dm-thin.c
4361
spin_unlock_irqrestore(&pool->lock, flags);
drivers/md/dm-thin.c
4362
wake_worker(pool);
drivers/md/dm-thin.c
438
queue_work(pool->wq, &pool->worker);
drivers/md/dm-thin.c
4410
if (get_pool_mode(tc->pool) == PM_FAIL) {
drivers/md/dm-thin.c
443
static int bio_detain(struct pool *pool, struct dm_cell_key *key, struct bio *bio,
drivers/md/dm-thin.c
4432
DMEMIT("%llu ", mapped * tc->pool->sectors_per_block);
drivers/md/dm-thin.c
4435
tc->pool->sectors_per_block) - 1);
drivers/md/dm-thin.c
4465
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
4471
if (!pool->ti)
drivers/md/dm-thin.c
4474
blocks = pool->ti->len;
drivers/md/dm-thin.c
4475
(void) sector_div(blocks, pool->sectors_per_block);
drivers/md/dm-thin.c
4477
return fn(ti, tc->pool_dev, 0, pool->sectors_per_block * blocks, data);
drivers/md/dm-thin.c
4485
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
4487
if (pool->pf.discard_enabled) {
drivers/md/dm-thin.c
4488
limits->discard_granularity = pool->sectors_per_block << SECTOR_SHIFT;
drivers/md/dm-thin.c
4489
limits->max_hw_discard_sectors = pool->sectors_per_block * BIO_PRISON_MAX_RANGE;
drivers/md/dm-thin.c
453
cell_prealloc = dm_bio_prison_alloc_cell(pool->prison, GFP_NOIO);
drivers/md/dm-thin.c
455
r = dm_bio_detain(pool->prison, key, bio, cell_prealloc, cell_result);
drivers/md/dm-thin.c
461
dm_bio_prison_free_cell(pool->prison, cell_prealloc);
drivers/md/dm-thin.c
467
static void cell_release(struct pool *pool,
drivers/md/dm-thin.c
471
dm_cell_release(pool->prison, cell, bios);
drivers/md/dm-thin.c
472
dm_bio_prison_free_cell(pool->prison, cell);
drivers/md/dm-thin.c
475
static void cell_visit_release(struct pool *pool,
drivers/md/dm-thin.c
480
dm_cell_visit_release(pool->prison, fn, context, cell);
drivers/md/dm-thin.c
481
dm_bio_prison_free_cell(pool->prison, cell);
drivers/md/dm-thin.c
484
static void cell_release_no_holder(struct pool *pool,
drivers/md/dm-thin.c
488
dm_cell_release_no_holder(pool->prison, cell, bios);
drivers/md/dm-thin.c
489
dm_bio_prison_free_cell(pool->prison, cell);
drivers/md/dm-thin.c
492
static void cell_error_with_code(struct pool *pool,
drivers/md/dm-thin.c
495
dm_cell_error(pool->prison, cell, error_code);
drivers/md/dm-thin.c
496
dm_bio_prison_free_cell(pool->prison, cell);
drivers/md/dm-thin.c
499
static blk_status_t get_pool_io_error_code(struct pool *pool)
drivers/md/dm-thin.c
501
return pool->out_of_data_space ? BLK_STS_NOSPC : BLK_STS_IOERR;
drivers/md/dm-thin.c
504
static void cell_error(struct pool *pool, struct dm_bio_prison_cell *cell)
drivers/md/dm-thin.c
506
cell_error_with_code(pool, cell, get_pool_io_error_code(pool));
drivers/md/dm-thin.c
509
static void cell_success(struct pool *pool, struct dm_bio_prison_cell *cell)
drivers/md/dm-thin.c
511
cell_error_with_code(pool, cell, 0);
drivers/md/dm-thin.c
514
static void cell_requeue(struct pool *pool, struct dm_bio_prison_cell *cell)
drivers/md/dm-thin.c
516
cell_error_with_code(pool, cell, BLK_STS_DM_REQUEUE);
drivers/md/dm-thin.c
540
static void __pool_table_insert(struct pool *pool)
drivers/md/dm-thin.c
543
list_add(&pool->list, &dm_thin_pool_table.pools);
drivers/md/dm-thin.c
546
static void __pool_table_remove(struct pool *pool)
drivers/md/dm-thin.c
549
list_del(&pool->list);
drivers/md/dm-thin.c
552
static struct pool *__pool_table_lookup(struct mapped_device *md)
drivers/md/dm-thin.c
554
struct pool *pool = NULL, *tmp;
drivers/md/dm-thin.c
560
pool = tmp;
drivers/md/dm-thin.c
565
return pool;
drivers/md/dm-thin.c
568
static struct pool *__pool_table_lookup_metadata_dev(struct block_device *md_dev)
drivers/md/dm-thin.c
570
struct pool *pool = NULL, *tmp;
drivers/md/dm-thin.c
576
pool = tmp;
drivers/md/dm-thin.c
581
return pool;
drivers/md/dm-thin.c
621
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
632
cell_requeue(pool, cell);
drivers/md/dm-thin.c
650
static void error_retry_list_with_code(struct pool *pool, blk_status_t error)
drivers/md/dm-thin.c
655
list_for_each_entry_rcu(tc, &pool->active_thins, list)
drivers/md/dm-thin.c
660
static void error_retry_list(struct pool *pool)
drivers/md/dm-thin.c
662
error_retry_list_with_code(pool, get_pool_io_error_code(pool));
drivers/md/dm-thin.c
674
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
677
if (block_size_is_power_of_two(pool))
drivers/md/dm-thin.c
678
block_nr >>= pool->sectors_per_block_shift;
drivers/md/dm-thin.c
680
(void) sector_div(block_nr, pool->sectors_per_block);
drivers/md/dm-thin.c
691
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
695
b += pool->sectors_per_block - 1ull; /* so we round up */
drivers/md/dm-thin.c
697
if (block_size_is_power_of_two(pool)) {
drivers/md/dm-thin.c
698
b >>= pool->sectors_per_block_shift;
drivers/md/dm-thin.c
699
e >>= pool->sectors_per_block_shift;
drivers/md/dm-thin.c
701
(void) sector_div(b, pool->sectors_per_block);
drivers/md/dm-thin.c
702
(void) sector_div(e, pool->sectors_per_block);
drivers/md/dm-thin.c
716
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
720
if (block_size_is_power_of_two(pool)) {
drivers/md/dm-thin.c
722
(block << pool->sectors_per_block_shift) |
drivers/md/dm-thin.c
723
(bi_sector & (pool->sectors_per_block - 1));
drivers/md/dm-thin.c
725
bio->bi_iter.bi_sector = (block * pool->sectors_per_block) +
drivers/md/dm-thin.c
726
sector_div(bi_sector, pool->sectors_per_block);
drivers/md/dm-thin.c
741
static void inc_all_io_entry(struct pool *pool, struct bio *bio)
drivers/md/dm-thin.c
749
h->all_io_entry = dm_deferred_entry_inc(pool->all_io_ds);
drivers/md/dm-thin.c
754
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
775
spin_lock_irq(&pool->lock);
drivers/md/dm-thin.c
776
bio_list_add(&pool->deferred_flush_bios, bio);
drivers/md/dm-thin.c
777
spin_unlock_irq(&pool->lock);
drivers/md/dm-thin.c
829
struct pool *pool = m->tc->pool;
drivers/md/dm-thin.c
832
list_add_tail(&m->list, &pool->prepared_mappings);
drivers/md/dm-thin.c
833
wake_worker(pool);
drivers/md/dm-thin.c
840
struct pool *pool = m->tc->pool;
drivers/md/dm-thin.c
842
spin_lock_irqsave(&pool->lock, flags);
drivers/md/dm-thin.c
844
spin_unlock_irqrestore(&pool->lock, flags);
drivers/md/dm-thin.c
882
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
887
cell_release_no_holder(pool, cell, &bios);
drivers/md/dm-thin.c
893
wake_worker(pool);
drivers/md/dm-thin.c
915
inc_all_io_entry(info->tc->pool, bio);
drivers/md/dm-thin.c
943
cell_visit_release(tc->pool, __inc_remap_and_issue_cell,
drivers/md/dm-thin.c
955
cell_error(m->tc->pool, m->cell);
drivers/md/dm-thin.c
957
mempool_free(m, &m->tc->pool->mapping_pool);
drivers/md/dm-thin.c
962
struct pool *pool = tc->pool;
drivers/md/dm-thin.c
987
spin_lock_irq(&pool->lock);
drivers/md/dm-thin.c
988
bio_list_add(&pool->deferred_flush_completions, bio);
drivers/md/dm-thin.c
989
spin_unlock_irq(&pool->lock);
drivers/md/dm-thin.c
995
struct pool *pool = tc->pool;
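
The dm-thin.c hits above all trace one lifecycle: a struct pool is created per metadata device (pool_create), found and reference-counted when pool and thin targets bind to it (__pool_find, __pool_inc), and destroyed on the last put (__pool_dec into __pool_destroy). A minimal sketch of that inc/dec-under-table-lock pattern, using hypothetical names (my_pool, my_pool_destroy) rather than the driver's API:

    #include <linux/bug.h>
    #include <linux/slab.h>

    struct my_pool {
            unsigned int ref_count;         /* protected by a global table mutex */
    };

    static void my_pool_destroy(struct my_pool *p)
    {
            kfree(p);                       /* stand-in for freeing all pool resources */
    }

    static void my_pool_inc(struct my_pool *p)
    {
            p->ref_count++;                 /* caller holds the table mutex */
    }

    static void my_pool_dec(struct my_pool *p)
    {
            BUG_ON(!p->ref_count);          /* more puts than gets is a bug */
            if (!--p->ref_count)
                    my_pool_destroy(p);     /* last user tears the pool down */
    }
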
drivers/md/dm-vdo/block-map.c
2718
cursors->pool = cursors->zone->vio_pool;
drivers/md/dm-vdo/block-map.c
2733
acquire_vio_from_pool(cursors->pool, &cursor->waiter);
drivers/md/dm-vdo/block-map.c
99
struct vio_pool *pool;
drivers/md/dm-vdo/data-vio.c
1010
void drain_data_vio_pool(struct data_vio_pool *pool, struct vdo_completion *completion)
drivers/md/dm-vdo/data-vio.c
1013
vdo_start_draining(&pool->state, VDO_ADMIN_STATE_SUSPENDING, completion,
drivers/md/dm-vdo/data-vio.c
1022
void resume_data_vio_pool(struct data_vio_pool *pool, struct vdo_completion *completion)
drivers/md/dm-vdo/data-vio.c
1025
vdo_continue_completion(completion, vdo_resume_if_quiescent(&pool->state));
drivers/md/dm-vdo/data-vio.c
1042
void dump_data_vio_pool(struct data_vio_pool *pool, bool dump_vios)
drivers/md/dm-vdo/data-vio.c
1051
if (pool == NULL)
drivers/md/dm-vdo/data-vio.c
1054
spin_lock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
1055
dump_limiter("data_vios", &pool->limiter);
drivers/md/dm-vdo/data-vio.c
1056
dump_limiter("discard permits", &pool->discard_limiter);
drivers/md/dm-vdo/data-vio.c
1061
for (i = 0; i < pool->limiter.limit; i++) {
drivers/md/dm-vdo/data-vio.c
1062
struct data_vio *data_vio = &pool->data_vios[i];
drivers/md/dm-vdo/data-vio.c
1069
spin_unlock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
1072
spin_lock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
1077
spin_unlock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
1080
data_vio_count_t get_data_vio_pool_active_requests(struct data_vio_pool *pool)
drivers/md/dm-vdo/data-vio.c
1082
return READ_ONCE(pool->limiter.busy);
drivers/md/dm-vdo/data-vio.c
1085
data_vio_count_t get_data_vio_pool_request_limit(struct data_vio_pool *pool)
drivers/md/dm-vdo/data-vio.c
1087
return READ_ONCE(pool->limiter.limit);
drivers/md/dm-vdo/data-vio.c
1090
data_vio_count_t get_data_vio_pool_maximum_requests(struct data_vio_pool *pool)
drivers/md/dm-vdo/data-vio.c
1092
return READ_ONCE(pool->limiter.max_busy);
drivers/md/dm-vdo/data-vio.c
1263
struct data_vio_pool *pool = completion->vdo->data_vio_pool;
drivers/md/dm-vdo/data-vio.c
1265
vdo_funnel_queue_put(pool->queue, &completion->work_queue_entry_link);
drivers/md/dm-vdo/data-vio.c
1266
schedule_releases(pool);
drivers/md/dm-vdo/data-vio.c
130
struct data_vio_pool *pool;
drivers/md/dm-vdo/data-vio.c
232
static bool check_for_drain_complete_locked(struct data_vio_pool *pool)
drivers/md/dm-vdo/data-vio.c
234
if (pool->limiter.busy > 0)
drivers/md/dm-vdo/data-vio.c
237
VDO_ASSERT_LOG_ONLY((pool->discard_limiter.busy == 0),
drivers/md/dm-vdo/data-vio.c
240
return (bio_list_empty(&pool->limiter.new_waiters) &&
drivers/md/dm-vdo/data-vio.c
241
bio_list_empty(&pool->discard_limiter.new_waiters));
drivers/md/dm-vdo/data-vio.c
583
launch_bio(limiter->pool->completion.vdo, data_vio, bio);
drivers/md/dm-vdo/data-vio.c
605
static inline struct data_vio *get_available_data_vio(struct data_vio_pool *pool)
drivers/md/dm-vdo/data-vio.c
608
list_first_entry(&pool->available, struct data_vio, pool_entry);
drivers/md/dm-vdo/data-vio.c
616
assign_data_vio(limiter, get_available_data_vio(limiter->pool));
drivers/md/dm-vdo/data-vio.c
653
static void schedule_releases(struct data_vio_pool *pool)
drivers/md/dm-vdo/data-vio.c
657
if (atomic_cmpxchg(&pool->processing, false, true))
drivers/md/dm-vdo/data-vio.c
660
pool->completion.requeue = true;
drivers/md/dm-vdo/data-vio.c
661
vdo_launch_completion_with_priority(&pool->completion,
drivers/md/dm-vdo/data-vio.c
665
static void reuse_or_release_resources(struct data_vio_pool *pool,
drivers/md/dm-vdo/data-vio.c
670
if (bio_list_empty(&pool->discard_limiter.waiters)) {
drivers/md/dm-vdo/data-vio.c
672
pool->discard_limiter.release_count++;
drivers/md/dm-vdo/data-vio.c
674
assign_discard_permit(&pool->discard_limiter);
drivers/md/dm-vdo/data-vio.c
678
if (pool->limiter.arrival < pool->discard_limiter.arrival) {
drivers/md/dm-vdo/data-vio.c
679
assign_data_vio(&pool->limiter, data_vio);
drivers/md/dm-vdo/data-vio.c
680
} else if (pool->discard_limiter.arrival < U64_MAX) {
drivers/md/dm-vdo/data-vio.c
681
assign_data_vio(&pool->discard_limiter, data_vio);
drivers/md/dm-vdo/data-vio.c
684
pool->limiter.release_count++;
drivers/md/dm-vdo/data-vio.c
694
struct data_vio_pool *pool = as_data_vio_pool(completion);
drivers/md/dm-vdo/data-vio.c
702
spin_lock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
703
get_waiters(&pool->discard_limiter);
drivers/md/dm-vdo/data-vio.c
704
get_waiters(&pool->limiter);
drivers/md/dm-vdo/data-vio.c
705
spin_unlock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
707
if (pool->limiter.arrival == U64_MAX) {
drivers/md/dm-vdo/data-vio.c
708
struct bio *bio = bio_list_peek(&pool->limiter.waiters);
drivers/md/dm-vdo/data-vio.c
711
pool->limiter.arrival = get_arrival_time(bio);
drivers/md/dm-vdo/data-vio.c
716
struct funnel_queue_entry *entry = vdo_funnel_queue_poll(pool->queue);
drivers/md/dm-vdo/data-vio.c
724
reuse_or_release_resources(pool, data_vio, &returned);
drivers/md/dm-vdo/data-vio.c
727
spin_lock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
734
update_limiter(&pool->discard_limiter);
drivers/md/dm-vdo/data-vio.c
735
list_splice(&returned, &pool->available);
drivers/md/dm-vdo/data-vio.c
736
update_limiter(&pool->limiter);
drivers/md/dm-vdo/data-vio.c
737
to_wake = pool->limiter.wake_count;
drivers/md/dm-vdo/data-vio.c
738
pool->limiter.wake_count = 0;
drivers/md/dm-vdo/data-vio.c
739
discards_to_wake = pool->discard_limiter.wake_count;
drivers/md/dm-vdo/data-vio.c
740
pool->discard_limiter.wake_count = 0;
drivers/md/dm-vdo/data-vio.c
742
atomic_set(&pool->processing, false);
drivers/md/dm-vdo/data-vio.c
746
reschedule = !vdo_is_funnel_queue_empty(pool->queue);
drivers/md/dm-vdo/data-vio.c
748
vdo_is_state_draining(&pool->state) &&
drivers/md/dm-vdo/data-vio.c
749
check_for_drain_complete_locked(pool));
drivers/md/dm-vdo/data-vio.c
750
spin_unlock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
753
wake_up_nr(&pool->limiter.blocked_threads, to_wake);
drivers/md/dm-vdo/data-vio.c
756
wake_up_nr(&pool->discard_limiter.blocked_threads, discards_to_wake);
drivers/md/dm-vdo/data-vio.c
759
schedule_releases(pool);
drivers/md/dm-vdo/data-vio.c
761
vdo_finish_draining(&pool->state);
drivers/md/dm-vdo/data-vio.c
764
static void initialize_limiter(struct limiter *limiter, struct data_vio_pool *pool,
drivers/md/dm-vdo/data-vio.c
767
limiter->pool = pool;
drivers/md/dm-vdo/data-vio.c
842
struct data_vio_pool *pool;
drivers/md/dm-vdo/data-vio.c
846
__func__, &pool);
drivers/md/dm-vdo/data-vio.c
852
initialize_limiter(&pool->discard_limiter, pool, assign_discard_permit,
drivers/md/dm-vdo/data-vio.c
854
pool->discard_limiter.permitted_waiters = &pool->permitted_discards;
drivers/md/dm-vdo/data-vio.c
855
initialize_limiter(&pool->limiter, pool, assign_data_vio_to_waiter, pool_size);
drivers/md/dm-vdo/data-vio.c
856
pool->limiter.permitted_waiters = &pool->limiter.waiters;
drivers/md/dm-vdo/data-vio.c
857
INIT_LIST_HEAD(&pool->available);
drivers/md/dm-vdo/data-vio.c
858
spin_lock_init(&pool->lock);
drivers/md/dm-vdo/data-vio.c
859
vdo_set_admin_state_code(&pool->state, VDO_ADMIN_STATE_NORMAL_OPERATION);
drivers/md/dm-vdo/data-vio.c
860
vdo_initialize_completion(&pool->completion, vdo, VDO_DATA_VIO_POOL_COMPLETION);
drivers/md/dm-vdo/data-vio.c
861
vdo_prepare_completion(&pool->completion, process_release_callback,
drivers/md/dm-vdo/data-vio.c
865
result = vdo_make_funnel_queue(&pool->queue);
drivers/md/dm-vdo/data-vio.c
867
free_data_vio_pool(vdo_forget(pool));
drivers/md/dm-vdo/data-vio.c
872
struct data_vio *data_vio = &pool->data_vios[i];
drivers/md/dm-vdo/data-vio.c
877
free_data_vio_pool(pool);
drivers/md/dm-vdo/data-vio.c
881
list_add(&data_vio->pool_entry, &pool->available);
drivers/md/dm-vdo/data-vio.c
884
*pool_ptr = pool;
drivers/md/dm-vdo/data-vio.c
894
void free_data_vio_pool(struct data_vio_pool *pool)
drivers/md/dm-vdo/data-vio.c
898
if (pool == NULL)
drivers/md/dm-vdo/data-vio.c
906
BUG_ON(atomic_read(&pool->processing));
drivers/md/dm-vdo/data-vio.c
908
spin_lock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
909
VDO_ASSERT_LOG_ONLY((pool->limiter.busy == 0),
drivers/md/dm-vdo/data-vio.c
911
pool->limiter.busy);
drivers/md/dm-vdo/data-vio.c
912
VDO_ASSERT_LOG_ONLY((bio_list_empty(&pool->limiter.waiters) &&
drivers/md/dm-vdo/data-vio.c
913
bio_list_empty(&pool->limiter.new_waiters)),
drivers/md/dm-vdo/data-vio.c
915
VDO_ASSERT_LOG_ONLY((bio_list_empty(&pool->discard_limiter.waiters) &&
drivers/md/dm-vdo/data-vio.c
916
bio_list_empty(&pool->discard_limiter.new_waiters)),
drivers/md/dm-vdo/data-vio.c
918
spin_unlock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
920
list_for_each_entry_safe(data_vio, tmp, &pool->available, pool_entry) {
drivers/md/dm-vdo/data-vio.c
925
vdo_free_funnel_queue(vdo_forget(pool->queue));
drivers/md/dm-vdo/data-vio.c
926
vdo_free(pool);
drivers/md/dm-vdo/data-vio.c
941
__releases(&limiter->pool->lock)
drivers/md/dm-vdo/data-vio.c
948
spin_unlock(&limiter->pool->lock);
drivers/md/dm-vdo/data-vio.c
960
void vdo_launch_bio(struct data_vio_pool *pool, struct bio *bio)
drivers/md/dm-vdo/data-vio.c
964
VDO_ASSERT_LOG_ONLY(!vdo_is_state_quiescent(&pool->state),
drivers/md/dm-vdo/data-vio.c
968
spin_lock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
970
!acquire_permit(&pool->discard_limiter)) {
drivers/md/dm-vdo/data-vio.c
971
wait_permit(&pool->discard_limiter, bio);
drivers/md/dm-vdo/data-vio.c
975
if (!acquire_permit(&pool->limiter)) {
drivers/md/dm-vdo/data-vio.c
976
wait_permit(&pool->limiter, bio);
drivers/md/dm-vdo/data-vio.c
980
data_vio = get_available_data_vio(pool);
drivers/md/dm-vdo/data-vio.c
981
spin_unlock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
982
launch_bio(pool->completion.vdo, data_vio, bio);
drivers/md/dm-vdo/data-vio.c
989
struct data_vio_pool *pool = container_of(state, struct data_vio_pool, state);
drivers/md/dm-vdo/data-vio.c
991
spin_lock(&pool->lock);
drivers/md/dm-vdo/data-vio.c
992
drained = check_for_drain_complete_locked(pool);
drivers/md/dm-vdo/data-vio.c
993
spin_unlock(&pool->lock);
drivers/md/dm-vdo/data-vio.h
333
void free_data_vio_pool(struct data_vio_pool *pool);
drivers/md/dm-vdo/data-vio.h
334
void vdo_launch_bio(struct data_vio_pool *pool, struct bio *bio);
drivers/md/dm-vdo/data-vio.h
335
void drain_data_vio_pool(struct data_vio_pool *pool, struct vdo_completion *completion);
drivers/md/dm-vdo/data-vio.h
336
void resume_data_vio_pool(struct data_vio_pool *pool, struct vdo_completion *completion);
drivers/md/dm-vdo/data-vio.h
338
void dump_data_vio_pool(struct data_vio_pool *pool, bool dump_vios);
drivers/md/dm-vdo/data-vio.h
339
data_vio_count_t get_data_vio_pool_active_requests(struct data_vio_pool *pool);
drivers/md/dm-vdo/data-vio.h
340
data_vio_count_t get_data_vio_pool_request_limit(struct data_vio_pool *pool);
drivers/md/dm-vdo/data-vio.h
341
data_vio_count_t get_data_vio_pool_maximum_requests(struct data_vio_pool *pool);
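
The data-vio.c hits sketch a pool throttled by two limiters (data_vios and discard permits): submitters take a permit under pool->lock or park their bio on a wait list, and the release callback batch-returns permits and wakes that many blocked submitters with wake_up_nr(). A hedged sketch of just the permit accounting, with generic names:

    #include <linux/spinlock.h>
    #include <linux/wait.h>

    struct limiter {
            spinlock_t *lock;               /* points at the owning pool's lock */
            unsigned int busy, limit;
            wait_queue_head_t blocked_threads;
    };

    /* Take one permit; caller already holds *lock. */
    static bool limiter_try_acquire(struct limiter *lim)
    {
            if (lim->busy >= lim->limit)
                    return false;           /* caller should sleep on blocked_threads */
            lim->busy++;
            return true;
    }

    /* Return permits and wake exactly that many waiting submitters. */
    static void limiter_release(struct limiter *lim, unsigned int count)
    {
            spin_lock(lim->lock);
            lim->busy -= count;
            spin_unlock(lim->lock);
            wake_up_nr(&lim->blocked_threads, count);
    }
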
drivers/md/dm-vdo/physical-zone.c
214
static void return_pbn_lock_to_pool(struct pbn_lock_pool *pool, struct pbn_lock *lock)
drivers/md/dm-vdo/physical-zone.c
223
list_add_tail(&idle->entry, &pool->idle_list);
drivers/md/dm-vdo/physical-zone.c
225
VDO_ASSERT_LOG_ONLY(pool->borrowed > 0, "shouldn't return more than borrowed");
drivers/md/dm-vdo/physical-zone.c
226
pool->borrowed -= 1;
drivers/md/dm-vdo/physical-zone.c
240
struct pbn_lock_pool *pool;
drivers/md/dm-vdo/physical-zone.c
244
__func__, &pool);
drivers/md/dm-vdo/physical-zone.c
248
pool->capacity = capacity;
drivers/md/dm-vdo/physical-zone.c
249
pool->borrowed = capacity;
drivers/md/dm-vdo/physical-zone.c
250
INIT_LIST_HEAD(&pool->idle_list);
drivers/md/dm-vdo/physical-zone.c
253
return_pbn_lock_to_pool(pool, &pool->locks[i].lock);
drivers/md/dm-vdo/physical-zone.c
255
*pool_ptr = pool;
drivers/md/dm-vdo/physical-zone.c
266
static void free_pbn_lock_pool(struct pbn_lock_pool *pool)
drivers/md/dm-vdo/physical-zone.c
268
if (pool == NULL)
drivers/md/dm-vdo/physical-zone.c
271
VDO_ASSERT_LOG_ONLY(pool->borrowed == 0,
drivers/md/dm-vdo/physical-zone.c
273
pool->borrowed);
drivers/md/dm-vdo/physical-zone.c
274
vdo_free(pool);
drivers/md/dm-vdo/physical-zone.c
289
static int __must_check borrow_pbn_lock_from_pool(struct pbn_lock_pool *pool,
drivers/md/dm-vdo/physical-zone.c
297
if (pool->borrowed >= pool->capacity)
drivers/md/dm-vdo/physical-zone.c
300
pool->borrowed += 1;
drivers/md/dm-vdo/physical-zone.c
302
result = VDO_ASSERT(!list_empty(&pool->idle_list),
drivers/md/dm-vdo/physical-zone.c
307
idle_entry = pool->idle_list.prev;
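
physical-zone.c shows a strictly bounded borrow/return pool for PBN locks: a fixed capacity, a borrowed count, and an idle list, with assertions that returns never outnumber borrows. A sketch of the same bookkeeping under hypothetical names:

    #include <linux/bug.h>
    #include <linux/errno.h>
    #include <linux/list.h>

    struct lock_pool {
            size_t capacity, borrowed;
            struct list_head idle_list;
    };

    static int lock_pool_borrow(struct lock_pool *pool, struct list_head **out)
    {
            if (pool->borrowed >= pool->capacity)
                    return -ENOMEM;                 /* every lock is out on loan */
            pool->borrowed++;
            /* invariant: borrowed < capacity implies a non-empty idle list */
            *out = pool->idle_list.prev;            /* take from the tail */
            list_del_init(*out);
            return 0;
    }

    static void lock_pool_return(struct lock_pool *pool, struct list_head *entry)
    {
            list_add_tail(entry, &pool->idle_list);
            WARN_ON(pool->borrowed == 0);           /* return without a borrow */
            pool->borrowed--;
    }
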
drivers/md/dm-vdo/slab-depot.c
2369
struct vio_pool *pool = slab->allocator->refcount_big_vio_pool;
drivers/md/dm-vdo/slab-depot.c
2371
if (!pool) {
drivers/md/dm-vdo/slab-depot.c
2372
pool = slab->allocator->vio_pool;
drivers/md/dm-vdo/slab-depot.c
2382
acquire_vio_from_pool(pool, waiter);
drivers/md/dm-vdo/vio.c
325
struct vio_pool *pool;
drivers/md/dm-vdo/vio.c
331
__func__, &pool);
drivers/md/dm-vdo/vio.c
335
pool->thread_id = thread_id;
drivers/md/dm-vdo/vio.c
336
INIT_LIST_HEAD(&pool->available);
drivers/md/dm-vdo/vio.c
337
INIT_LIST_HEAD(&pool->busy);
drivers/md/dm-vdo/vio.c
340
"VIO pool buffer", &pool->buffer);
drivers/md/dm-vdo/vio.c
342
free_vio_pool(pool);
drivers/md/dm-vdo/vio.c
346
ptr = pool->buffer;
drivers/md/dm-vdo/vio.c
347
for (pool->size = 0; pool->size < pool_size; pool->size++, ptr += per_vio_size) {
drivers/md/dm-vdo/vio.c
348
struct pooled_vio *pooled = &pool->vios[pool->size];
drivers/md/dm-vdo/vio.c
353
free_vio_pool(pool);
drivers/md/dm-vdo/vio.c
358
pooled->pool = pool;
drivers/md/dm-vdo/vio.c
359
list_add_tail(&pooled->pool_entry, &pool->available);
drivers/md/dm-vdo/vio.c
362
*pool_ptr = pool;
drivers/md/dm-vdo/vio.c
370
void free_vio_pool(struct vio_pool *pool)
drivers/md/dm-vdo/vio.c
374
if (pool == NULL)
drivers/md/dm-vdo/vio.c
378
VDO_ASSERT_LOG_ONLY(!vdo_waitq_has_waiters(&pool->waiting),
drivers/md/dm-vdo/vio.c
380
VDO_ASSERT_LOG_ONLY((pool->busy_count == 0),
drivers/md/dm-vdo/vio.c
382
pool->busy_count);
drivers/md/dm-vdo/vio.c
383
VDO_ASSERT_LOG_ONLY(list_empty(&pool->busy),
drivers/md/dm-vdo/vio.c
386
list_for_each_entry_safe(pooled, tmp, &pool->available, pool_entry) {
drivers/md/dm-vdo/vio.c
389
pool->size--;
drivers/md/dm-vdo/vio.c
392
VDO_ASSERT_LOG_ONLY(pool->size == 0,
drivers/md/dm-vdo/vio.c
395
vdo_free(vdo_forget(pool->buffer));
drivers/md/dm-vdo/vio.c
396
vdo_free(pool);
drivers/md/dm-vdo/vio.c
405
bool is_vio_pool_busy(struct vio_pool *pool)
drivers/md/dm-vdo/vio.c
407
return (pool->busy_count != 0);
drivers/md/dm-vdo/vio.c
415
void acquire_vio_from_pool(struct vio_pool *pool, struct vdo_waiter *waiter)
drivers/md/dm-vdo/vio.c
419
VDO_ASSERT_LOG_ONLY((pool->thread_id == vdo_get_callback_thread_id()),
drivers/md/dm-vdo/vio.c
422
if (list_empty(&pool->available)) {
drivers/md/dm-vdo/vio.c
423
vdo_waitq_enqueue_waiter(&pool->waiting, waiter);
drivers/md/dm-vdo/vio.c
427
pooled = list_first_entry(&pool->available, struct pooled_vio, pool_entry);
drivers/md/dm-vdo/vio.c
428
pool->busy_count++;
drivers/md/dm-vdo/vio.c
429
list_move_tail(&pooled->pool_entry, &pool->busy);
drivers/md/dm-vdo/vio.c
439
struct vio_pool *pool = vio->pool;
drivers/md/dm-vdo/vio.c
441
VDO_ASSERT_LOG_ONLY((pool->thread_id == vdo_get_callback_thread_id()),
drivers/md/dm-vdo/vio.c
446
if (vdo_waitq_has_waiters(&pool->waiting)) {
drivers/md/dm-vdo/vio.c
447
vdo_waitq_notify_next_waiter(&pool->waiting, NULL, vio);
drivers/md/dm-vdo/vio.c
451
list_move_tail(&vio->pool_entry, &pool->available);
drivers/md/dm-vdo/vio.c
452
--pool->busy_count;
drivers/md/dm-vdo/vio.h
201
void free_vio_pool(struct vio_pool *pool);
drivers/md/dm-vdo/vio.h
202
bool __must_check is_vio_pool_busy(struct vio_pool *pool);
drivers/md/dm-vdo/vio.h
203
void acquire_vio_from_pool(struct vio_pool *pool, struct vdo_waiter *waiter);
drivers/md/dm-vdo/vio.h
34
struct vio_pool *pool;
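
vio.c keeps a pool that is only ever touched from one vdo thread (note the thread_id assertions): acquire pops the head of the available list and moves it to busy, and when the list is empty the caller enqueues a vdo_waiter that is handed the next returned vio directly. A minimal single-threaded sketch without the waiter machinery, generic names:

    #include <linux/list.h>

    struct obj_pool {
            struct list_head available, busy;
            size_t busy_count;
    };

    static struct list_head *obj_pool_acquire(struct obj_pool *pool)
    {
            struct list_head *entry;

            if (list_empty(&pool->available))
                    return NULL;            /* real code queues a waiter here */

            entry = pool->available.next;
            pool->busy_count++;
            list_move_tail(entry, &pool->busy);
            return entry;
    }

    static void obj_pool_release(struct obj_pool *pool, struct list_head *entry)
    {
            list_move_tail(entry, &pool->available);
            pool->busy_count--;
    }
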
drivers/media/platform/amphion/vpu_windsor.c
451
struct vpu_enc_mem_pool *pool;
drivers/media/platform/amphion/vpu_windsor.c
523
return hcs->ctrls[instance].pool;
drivers/media/platform/amphion/vpu_windsor.c
620
hcs->ctrls[i].pool = rpc->virt + offset;
drivers/media/platform/amphion/vpu_windsor.c
818
struct vpu_enc_mem_pool *pool;
drivers/media/platform/amphion/vpu_windsor.c
824
pool = get_mem_pool(shared, instance);
drivers/media/platform/amphion/vpu_windsor.c
828
if (index >= ARRAY_SIZE(pool->enc_frames))
drivers/media/platform/amphion/vpu_windsor.c
830
res = &pool->enc_frames[index];
drivers/media/platform/amphion/vpu_windsor.c
833
if (index >= ARRAY_SIZE(pool->ref_frames))
drivers/media/platform/amphion/vpu_windsor.c
835
res = &pool->ref_frames[index];
drivers/media/platform/amphion/vpu_windsor.c
840
res = &pool->act_frame;
drivers/media/platform/chips-media/coda/coda-common.c
3148
struct gen_pool *pool;
drivers/media/platform/chips-media/coda/coda-common.c
3215
pool = of_gen_pool_get(np, "iram", 0);
drivers/media/platform/chips-media/coda/coda-common.c
3216
if (!pool) {
drivers/media/platform/chips-media/coda/coda-common.c
3220
dev->iram_pool = pool;
drivers/media/platform/renesas/vsp1/vsp1_clu.c
224
vsp1_dl_body_pool_destroy(clu->pool);
drivers/media/platform/renesas/vsp1/vsp1_clu.c
262
clu->pool = vsp1_dl_body_pool_create(clu->entity.vsp1, 3, CLU_SIZE + 1,
drivers/media/platform/renesas/vsp1/vsp1_clu.c
264
if (!clu->pool)
drivers/media/platform/renesas/vsp1/vsp1_clu.c
48
dlb = vsp1_dl_body_get(clu->pool);
drivers/media/platform/renesas/vsp1/vsp1_clu.h
35
struct vsp1_dl_body_pool *pool;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
1109
return vsp1_dl_body_get(dlm->pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
111
struct vsp1_dl_body_pool *pool;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
1149
dlm->pool = vsp1_dl_body_pool_create(vsp1, prealloc + 1,
drivers/media/platform/renesas/vsp1/vsp1_dl.c
1151
if (!dlm->pool)
drivers/media/platform/renesas/vsp1/vsp1_dl.c
1197
vsp1_dl_body_pool_destroy(dlm->pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
232
struct vsp1_dl_body_pool *pool;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
258
struct vsp1_dl_body_pool *pool;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
262
pool = kzalloc_obj(*pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
263
if (!pool)
drivers/media/platform/renesas/vsp1/vsp1_dl.c
266
pool->vsp1 = vsp1;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
275
pool->size = dlb_size * num_bodies;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
277
pool->bodies = kzalloc_objs(*pool->bodies, num_bodies);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
278
if (!pool->bodies) {
drivers/media/platform/renesas/vsp1/vsp1_dl.c
279
kfree(pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
283
pool->mem = dma_alloc_wc(vsp1->bus_master, pool->size, &pool->dma,
drivers/media/platform/renesas/vsp1/vsp1_dl.c
285
if (!pool->mem) {
drivers/media/platform/renesas/vsp1/vsp1_dl.c
286
kfree(pool->bodies);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
287
kfree(pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
291
spin_lock_init(&pool->lock);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
292
INIT_LIST_HEAD(&pool->free);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
295
struct vsp1_dl_body *dlb = &pool->bodies[i];
drivers/media/platform/renesas/vsp1/vsp1_dl.c
297
dlb->pool = pool;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
300
dlb->dma = pool->dma + i * dlb_size;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
301
dlb->entries = pool->mem + i * dlb_size;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
303
list_add_tail(&dlb->free, &pool->free);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
306
return pool;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
315
void vsp1_dl_body_pool_destroy(struct vsp1_dl_body_pool *pool)
drivers/media/platform/renesas/vsp1/vsp1_dl.c
317
if (!pool)
drivers/media/platform/renesas/vsp1/vsp1_dl.c
320
if (pool->mem)
drivers/media/platform/renesas/vsp1/vsp1_dl.c
321
dma_free_wc(pool->vsp1->bus_master, pool->size, pool->mem,
drivers/media/platform/renesas/vsp1/vsp1_dl.c
322
pool->dma);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
324
kfree(pool->bodies);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
325
kfree(pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
336
struct vsp1_dl_body *vsp1_dl_body_get(struct vsp1_dl_body_pool *pool)
drivers/media/platform/renesas/vsp1/vsp1_dl.c
341
spin_lock_irqsave(&pool->lock, flags);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
343
if (!list_empty(&pool->free)) {
drivers/media/platform/renesas/vsp1/vsp1_dl.c
344
dlb = list_first_entry(&pool->free, struct vsp1_dl_body, free);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
349
spin_unlock_irqrestore(&pool->lock, flags);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
372
spin_lock_irqsave(&dlb->pool->lock, flags);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
373
list_add_tail(&dlb->free, &dlb->pool->free);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
374
spin_unlock_irqrestore(&dlb->pool->lock, flags);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
433
struct vsp1_dl_cmd_pool *pool;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
437
pool = kzalloc_obj(*pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
438
if (!pool)
drivers/media/platform/renesas/vsp1/vsp1_dl.c
441
pool->vsp1 = vsp1;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
443
spin_lock_init(&pool->lock);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
444
INIT_LIST_HEAD(&pool->free);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
446
pool->cmds = kzalloc_objs(*pool->cmds, num_cmds);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
447
if (!pool->cmds) {
drivers/media/platform/renesas/vsp1/vsp1_dl.c
448
kfree(pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
456
pool->size = cmd_size * num_cmds;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
457
pool->mem = dma_alloc_wc(vsp1->bus_master, pool->size, &pool->dma,
drivers/media/platform/renesas/vsp1/vsp1_dl.c
459
if (!pool->mem) {
drivers/media/platform/renesas/vsp1/vsp1_dl.c
460
kfree(pool->cmds);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
461
kfree(pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
466
struct vsp1_dl_ext_cmd *cmd = &pool->cmds[i];
drivers/media/platform/renesas/vsp1/vsp1_dl.c
472
cmd->pool = pool;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
480
cmd->cmds = pool->mem + cmd_offset;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
481
cmd->cmd_dma = pool->dma + cmd_offset;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
483
cmd->data = pool->mem + data_offset;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
484
cmd->data_dma = pool->dma + data_offset;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
486
list_add_tail(&cmd->free, &pool->free);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
489
return pool;
drivers/media/platform/renesas/vsp1/vsp1_dl.c
493
struct vsp1_dl_ext_cmd *vsp1_dl_ext_cmd_get(struct vsp1_dl_cmd_pool *pool)
drivers/media/platform/renesas/vsp1/vsp1_dl.c
498
spin_lock_irqsave(&pool->lock, flags);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
500
if (!list_empty(&pool->free)) {
drivers/media/platform/renesas/vsp1/vsp1_dl.c
501
cmd = list_first_entry(&pool->free, struct vsp1_dl_ext_cmd,
drivers/media/platform/renesas/vsp1/vsp1_dl.c
506
spin_unlock_irqrestore(&pool->lock, flags);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
521
spin_lock_irqsave(&cmd->pool->lock, flags);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
522
list_add_tail(&cmd->free, &cmd->pool->free);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
523
spin_unlock_irqrestore(&cmd->pool->lock, flags);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
526
static void vsp1_dl_ext_cmd_pool_destroy(struct vsp1_dl_cmd_pool *pool)
drivers/media/platform/renesas/vsp1/vsp1_dl.c
528
if (!pool)
drivers/media/platform/renesas/vsp1/vsp1_dl.c
531
if (pool->mem)
drivers/media/platform/renesas/vsp1/vsp1_dl.c
532
dma_free_wc(pool->vsp1->bus_master, pool->size, pool->mem,
drivers/media/platform/renesas/vsp1/vsp1_dl.c
533
pool->dma);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
535
kfree(pool->cmds);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
536
kfree(pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.c
568
dl->body0 = vsp1_dl_body_get(dlm->pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.h
38
struct vsp1_dl_cmd_pool *pool;
drivers/media/platform/renesas/vsp1/vsp1_dl.h
71
void vsp1_dl_body_pool_destroy(struct vsp1_dl_body_pool *pool);
drivers/media/platform/renesas/vsp1/vsp1_dl.h
72
struct vsp1_dl_body *vsp1_dl_body_get(struct vsp1_dl_body_pool *pool);
drivers/media/platform/renesas/vsp1/vsp1_lut.c
185
vsp1_dl_body_pool_destroy(lut->pool);
drivers/media/platform/renesas/vsp1/vsp1_lut.c
222
lut->pool = vsp1_dl_body_pool_create(vsp1, 3, LUT_SIZE, 0);
drivers/media/platform/renesas/vsp1/vsp1_lut.c
223
if (!lut->pool)
drivers/media/platform/renesas/vsp1/vsp1_lut.c
45
dlb = vsp1_dl_body_get(lut->pool);
drivers/media/platform/renesas/vsp1/vsp1_lut.h
32
struct vsp1_dl_body_pool *pool;
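
The vsp1 entries all implement one idea: allocate a single write-combined DMA buffer with dma_alloc_wc(), slice it into equally sized display-list bodies, and recycle the slices through a spinlock-protected free list. A sketch of the carve-up step, assuming hypothetical chunk names; error unwinding is omitted:

    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/list.h>

    struct chunk {
            struct list_head free;
            dma_addr_t dma;
            void *mem;
    };

    static int chunk_pool_init(struct device *dev, struct chunk *chunks,
                               unsigned int num, size_t chunk_size,
                               struct list_head *free_list,
                               void **mem, dma_addr_t *dma)
    {
            unsigned int i;

            *mem = dma_alloc_wc(dev, num * chunk_size, dma, GFP_KERNEL);
            if (!*mem)
                    return -ENOMEM;

            INIT_LIST_HEAD(free_list);
            for (i = 0; i < num; i++) {
                    chunks[i].mem = *mem + i * chunk_size;  /* void * arithmetic, as in-kernel */
                    chunks[i].dma = *dma + i * chunk_size;
                    list_add_tail(&chunks[i].free, free_list);
            }
            return 0;
    }
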
drivers/misc/sram-exec.c
73
void *sram_exec_copy(struct gen_pool *pool, void *dst, void *src,
drivers/misc/sram-exec.c
84
if (p->pool == pool)
drivers/misc/sram-exec.c
92
if (!gen_pool_has_addr(pool, (unsigned long)dst, size))
drivers/misc/sram.c
121
if (block->pool) {
drivers/misc/sram.c
160
if (part->pool &&
drivers/misc/sram.c
161
gen_pool_avail(part->pool) < gen_pool_size(part->pool))
drivers/misc/sram.c
224
block->pool = of_property_read_bool(child, "pool");
drivers/misc/sram.c
227
if ((block->export || block->pool || block->protect_exec) &&
drivers/misc/sram.c
291
if ((block->export || block->pool || block->protect_exec) &&
drivers/misc/sram.c
314
if (sram->pool) {
drivers/misc/sram.c
318
ret = gen_pool_add_virt(sram->pool,
drivers/misc/sram.c
405
sram->pool = devm_gen_pool_create(sram->dev, ilog2(SRAM_GRANULARITY),
drivers/misc/sram.c
407
if (IS_ERR(sram->pool))
drivers/misc/sram.c
408
return PTR_ERR(sram->pool);
drivers/misc/sram.c
428
if (sram->pool)
drivers/misc/sram.c
430
gen_pool_size(sram->pool) / 1024, sram->virt_base);
drivers/misc/sram.c
446
if (sram->pool && gen_pool_avail(sram->pool) < gen_pool_size(sram->pool))
drivers/misc/sram.c
62
part->pool = devm_gen_pool_create(sram->dev, ilog2(SRAM_GRANULARITY),
drivers/misc/sram.c
64
if (IS_ERR(part->pool))
drivers/misc/sram.c
65
return PTR_ERR(part->pool);
drivers/misc/sram.c
67
ret = gen_pool_add_virt(part->pool, (unsigned long)part->base, start,
drivers/misc/sram.h
16
struct gen_pool *pool;
drivers/misc/sram.h
29
struct gen_pool *pool;
drivers/misc/sram.h
41
bool pool;
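
The sram.c and atmel nand-controller.c hits lean on the kernel's genalloc API instead of a hand-rolled pool: devm_gen_pool_create() makes a pool with a fixed allocation granularity, gen_pool_add_virt() donates a virt/phys region, and gen_pool_alloc()/gen_pool_free() carve it up. A small usage sketch; the device, region and sizes are placeholders:

    #include <linux/err.h>
    #include <linux/genalloc.h>
    #include <linux/log2.h>

    static int sram_pool_example(struct device *my_dev, void __iomem *virt,
                                 phys_addr_t phys, size_t size)
    {
            struct gen_pool *pool;
            unsigned long chunk;
            int ret;

            /* 32-byte granularity, matching SRAM_GRANULARITY in sram.c */
            pool = devm_gen_pool_create(my_dev, ilog2(32), NUMA_NO_NODE, NULL);
            if (IS_ERR(pool))
                    return PTR_ERR(pool);

            ret = gen_pool_add_virt(pool, (unsigned long)virt, phys, size,
                                    NUMA_NO_NODE);
            if (ret)
                    return ret;

            chunk = gen_pool_alloc(pool, 256);      /* 256 bytes of SRAM */
            if (!chunk)
                    return -ENOMEM;

            gen_pool_free(pool, chunk, 256);
            return 0;
    }
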
drivers/mtd/nand/raw/atmel/nand-controller.c
2305
nc->sram.pool = of_gen_pool_get(nc->base.dev->of_node,
drivers/mtd/nand/raw/atmel/nand-controller.c
2307
if (!nc->sram.pool)
drivers/mtd/nand/raw/atmel/nand-controller.c
2310
nc->sram.virt = (void __iomem *)gen_pool_dma_alloc(nc->sram.pool,
drivers/mtd/nand/raw/atmel/nand-controller.c
2336
if (hsmc_nc->sram.pool)
drivers/mtd/nand/raw/atmel/nand-controller.c
2337
gen_pool_free(hsmc_nc->sram.pool,
drivers/mtd/nand/raw/atmel/nand-controller.c
256
struct gen_pool *pool;
drivers/mtd/ubi/fastmap-wl.c
111
struct ubi_fm_pool *pool = &ubi->fm_pool;
drivers/mtd/ubi/fastmap-wl.c
126
free += pool->size - pool->used + wl_pool->size - wl_pool->used;
drivers/mtd/ubi/fastmap-wl.c
171
struct ubi_fm_pool *pool = &ubi->fm_pool;
drivers/mtd/ubi/fastmap-wl.c
172
int pool_need = pool->max_size - pool->size +
drivers/mtd/ubi/fastmap-wl.c
188
struct ubi_fm_pool *pool = &ubi->fm_pool;
drivers/mtd/ubi/fastmap-wl.c
202
return_unused_pool_pebs(ubi, pool);
drivers/mtd/ubi/fastmap-wl.c
205
pool->size = 0;
drivers/mtd/ubi/fastmap-wl.c
222
if (pool->size < pool->max_size) {
drivers/mtd/ubi/fastmap-wl.c
230
pool->pebs[pool->size] = e->pnum;
drivers/mtd/ubi/fastmap-wl.c
231
pool->size++;
drivers/mtd/ubi/fastmap-wl.c
257
pool->used = 0;
drivers/mtd/ubi/fastmap-wl.c
297
struct ubi_fm_pool *pool = &ubi->fm_pool;
drivers/mtd/ubi/fastmap-wl.c
306
if (pool->used == pool->size || wl_pool->used == wl_pool->size) {
drivers/mtd/ubi/fastmap-wl.c
319
if (pool->used == pool->size) {
drivers/mtd/ubi/fastmap-wl.c
336
ubi_assert(pool->used < pool->size);
drivers/mtd/ubi/fastmap-wl.c
337
ret = pool->pebs[pool->used++];
drivers/mtd/ubi/fastmap-wl.c
354
struct ubi_fm_pool *pool = &ubi->fm_wl_pool;
drivers/mtd/ubi/fastmap-wl.c
357
if (pool->used == pool->size) {
drivers/mtd/ubi/fastmap-wl.c
371
pnum = pool->pebs[pool->used];
drivers/mtd/ubi/fastmap-wl.c
415
struct ubi_fm_pool *pool = &ubi->fm_wl_pool;
drivers/mtd/ubi/fastmap-wl.c
420
if (pool->used == pool->size) {
drivers/mtd/ubi/fastmap-wl.c
431
pnum = pool->pebs[pool->used++];
drivers/mtd/ubi/fastmap-wl.c
55
struct ubi_fm_pool *pool)
drivers/mtd/ubi/fastmap-wl.c
60
for (i = pool->used; i < pool->size; i++) {
drivers/mtd/ubi/fastmap-wl.c
61
e = ubi->lookuptbl[pool->pebs[i]];
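
The UBI fastmap pools are plain arrays with a cursor: pool->pebs[pool->used++] hands out the next physical erase block, and a refill resets the cursor and repopulates up to max_size after returning anything unused. A sketch of that cursor logic with generic names and a caller-supplied source of free PEBs:

    struct peb_pool {
            int pebs[128];                  /* capacity placeholder */
            int used, size, max_size;
    };

    /* Next PEB from the pool, or -1 when a refill is needed first. */
    static int peb_pool_take(struct peb_pool *pool)
    {
            if (pool->used == pool->size)
                    return -1;
            return pool->pebs[pool->used++];
    }

    static void peb_pool_refill(struct peb_pool *pool, int (*get_free_peb)(void))
    {
            int pnum;

            pool->size = 0;
            while (pool->size < pool->max_size) {
                    pnum = get_free_peb();
                    if (pnum < 0)
                            break;          /* no more free PEBs right now */
                    pool->pebs[pool->size++] = pnum;
            }
            pool->used = 0;
    }
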
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
568
struct page_pool *pool;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
580
pool = page_pool_create(&pp);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
581
if (IS_ERR(pool))
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
582
return PTR_ERR(pool);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
583
rxr->page_pool = pool;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
585
rxr->need_head_pool = page_pool_is_unreadable(pool);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
589
pool = page_pool_create(&pp);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
590
if (IS_ERR(pool))
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
593
page_pool_get(pool);
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
595
rxr->head_pool = pool;
drivers/net/ethernet/broadcom/bnge/bnge_netdev.c
601
return PTR_ERR(pool);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
552
struct bnx2x_alloc_pool *pool = &fp->page_pool;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
555
if (!pool->page) {
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
556
pool->page = alloc_pages(gfp_mask, PAGES_PER_SGE_SHIFT);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
557
if (unlikely(!pool->page))
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
560
pool->offset = 0;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
563
mapping = dma_map_page(&bp->pdev->dev, pool->page,
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
564
pool->offset, SGE_PAGE_SIZE, DMA_FROM_DEVICE);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
570
sw_buf->page = pool->page;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
571
sw_buf->offset = pool->offset;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
578
pool->offset += SGE_PAGE_SIZE;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
579
if (PAGE_SIZE - pool->offset >= SGE_PAGE_SIZE)
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
580
get_page(pool->page);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.c
582
pool->page = NULL;
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.h
1003
struct bnx2x_alloc_pool *pool)
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.h
1005
put_page(pool->page);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_cmn.h
1007
pool->page = NULL;
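
bnx2x_cmn.c shows the page-splitting variant: allocate one multi-page chunk, hand out fixed-size fragments by advancing an offset, and take an extra page reference for every fragment except the last so that each consumer's eventual put_page() is balanced. A hedged sketch; POOL_ALLOC_SIZE and FRAG_SIZE are placeholders:

    #include <linux/gfp.h>
    #include <linux/mm.h>

    #define POOL_ALLOC_SIZE 8192            /* one order-1 allocation */
    #define FRAG_SIZE       4096

    struct frag_pool {
            struct page *page;
            unsigned int offset;
    };

    static struct page *frag_alloc(struct frag_pool *pool, unsigned int *offset)
    {
            struct page *page;

            if (!pool->page) {
                    pool->page = alloc_pages(GFP_ATOMIC,
                                             get_order(POOL_ALLOC_SIZE));
                    if (!pool->page)
                            return NULL;
                    pool->offset = 0;
            }

            page = pool->page;
            *offset = pool->offset;

            pool->offset += FRAG_SIZE;
            if (POOL_ALLOC_SIZE - pool->offset >= FRAG_SIZE)
                    get_page(page);         /* pool keeps its own reference */
            else
                    pool->page = NULL;      /* last fragment: reference moves to caller */

            return page;
    }
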
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3852
struct page_pool *pool;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3868
pool = page_pool_create(&pp);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3869
if (IS_ERR(pool))
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3870
return PTR_ERR(pool);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3871
rxr->page_pool = pool;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3873
rxr->need_head_pool = page_pool_is_unreadable(pool);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3880
pool = page_pool_create(&pp);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3881
if (IS_ERR(pool))
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3884
page_pool_get(pool);
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3886
rxr->head_pool = pool;
drivers/net/ethernet/broadcom/bnxt/bnxt.c
3893
return PTR_ERR(pool);
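
[Editor's note: the bnge and bnxt hits above all follow the same page_pool lifecycle — fill a struct page_pool_params, call page_pool_create(), check the ERR_PTR-encoded return, and stash the pool on the RX ring. A minimal sketch of that pattern follows; my_ring and my_ring_create_pool are illustrative names, not the driver's structures.]

#include <linux/device.h>
#include <linux/err.h>
#include <linux/numa.h>
#include <net/page_pool/helpers.h>

/* Hypothetical ring context; real drivers hang the pool off their RX ring. */
struct my_ring {
	struct page_pool *page_pool;
};

static int my_ring_create_pool(struct my_ring *ring, struct device *dev,
			       unsigned int ring_size)
{
	struct page_pool_params pp = {
		.pool_size = ring_size,
		.nid       = NUMA_NO_NODE,
		.dev       = dev,
		.dma_dir   = DMA_FROM_DEVICE,
		.flags     = PP_FLAG_DMA_MAP,
	};
	struct page_pool *pool;

	pool = page_pool_create(&pp);
	if (IS_ERR(pool))	/* failure is ERR_PTR-encoded, never NULL */
		return PTR_ERR(pool);

	ring->page_pool = pool;
	return 0;
}

[Teardown pairs with page_pool_destroy(); note that the error path above must not call page_pool_destroy() on the ERR_PTR value.]
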
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
121
struct cxgbi_ppm_pool *pool;
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
125
if (!ppm->pool)
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
129
pool = per_cpu_ptr(ppm->pool, cpu);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
130
spin_lock_bh(&pool->lock);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
133
i = ppm_find_unused_entries(pool->bmap, ppm->pool_index_max,
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
134
pool->next, count, 0);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
136
pool->next = 0;
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
137
spin_unlock_bh(&pool->lock);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
141
pool->next = i + count;
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
142
if (pool->next >= ppm->pool_index_max)
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
143
pool->next = 0;
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
145
spin_unlock_bh(&pool->lock);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
149
pool->next);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
197
struct cxgbi_ppm_pool *pool;
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
202
pool = per_cpu_ptr(ppm->pool, cpu);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
203
spin_lock_bh(&pool->lock);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
204
bitmap_clear(pool->bmap, i, count);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
206
if (i < pool->next)
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
207
pool->next = i;
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
208
spin_unlock_bh(&pool->lock);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
211
__func__, cpu, i, pool->next);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
329
free_percpu(ppm->pool);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
395
struct cxgbi_ppm_pool __percpu *pool = NULL;
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
429
pool = ppm_alloc_cpu_pool(&ppmax_pool, &pool_index_max);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
430
if (!pool) {
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
484
ppm->pool = pool;
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c
511
free_percpu(pool);
drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.h
141
struct cxgbi_ppm_pool __percpu *pool;
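
[Editor's note: libcxgb_ppm keeps one small index pool per CPU, each guarded by its own spin_lock_bh() and searched with bitmap helpers; pool->next is a rotor remembering where the last search ended. A hedged sketch of that per-CPU bitmap idiom follows — the names are illustrative, and bitmap_find_next_zero_area() stands in for the driver's own ppm_find_unused_entries().]

#include <linux/bitmap.h>
#include <linux/errno.h>
#include <linux/percpu.h>
#include <linux/spinlock.h>

struct idx_pool {			/* illustrative, one instance per CPU */
	spinlock_t lock;
	unsigned int next;		/* rotor for the next search */
	unsigned long bmap[];		/* one bit per index */
};

static int idx_pool_get(struct idx_pool __percpu *pools, unsigned int cpu,
			unsigned int max, unsigned int count)
{
	struct idx_pool *pool = per_cpu_ptr(pools, cpu);
	unsigned long i;

	spin_lock_bh(&pool->lock);
	i = bitmap_find_next_zero_area(pool->bmap, max, pool->next, count, 0);
	if (i >= max) {
		pool->next = 0;		/* wrap; caller may retry or move on */
		spin_unlock_bh(&pool->lock);
		return -ENOSPC;
	}
	bitmap_set(pool->bmap, i, count);
	pool->next = (i + count >= max) ? 0 : i + count;
	spin_unlock_bh(&pool->lock);
	return i;
}

[Freeing is the mirror image seen above: bitmap_clear() under the same lock, then pull pool->next back if the freed index is lower.]
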
drivers/net/ethernet/cisco/enic/enic.h
186
struct page_pool *pool;
drivers/net/ethernet/cisco/enic/enic_main.c
1715
enic->rq[i].pool = page_pool_create(&pp_params);
drivers/net/ethernet/cisco/enic/enic_main.c
1716
if (IS_ERR(enic->rq[i].pool)) {
drivers/net/ethernet/cisco/enic/enic_main.c
1717
err = PTR_ERR(enic->rq[i].pool);
drivers/net/ethernet/cisco/enic/enic_main.c
1718
enic->rq[i].pool = NULL;
drivers/net/ethernet/cisco/enic/enic_main.c
1764
page_pool_destroy(enic->rq[i].pool);
drivers/net/ethernet/cisco/enic/enic_main.c
1765
enic->rq[i].pool = NULL;
drivers/net/ethernet/cisco/enic/enic_main.c
1826
page_pool_destroy(enic->rq[i].pool);
drivers/net/ethernet/cisco/enic/enic_main.c
1827
enic->rq[i].pool = NULL;
drivers/net/ethernet/cisco/enic/enic_rq.c
291
page = page_pool_dev_alloc(erq->pool, &offset, &truesize);
drivers/net/ethernet/cisco/enic/enic_rq.c
312
page_pool_put_full_page(erq->pool, (struct page *)buf->os_buf, true);
drivers/net/ethernet/engleder/tsnep.h
232
struct xsk_buff_pool *pool, u16 queue_id);
drivers/net/ethernet/engleder/tsnep.h
261
int tsnep_enable_xsk(struct tsnep_queue *queue, struct xsk_buff_pool *pool);
drivers/net/ethernet/engleder/tsnep_main.c
2096
int tsnep_enable_xsk(struct tsnep_queue *queue, struct xsk_buff_pool *pool)
drivers/net/ethernet/engleder/tsnep_main.c
2101
frame_size = xsk_pool_get_rx_frame_size(pool);
drivers/net/ethernet/engleder/tsnep_main.c
2118
xsk_pool_set_rxq_info(pool, &queue->rx->xdp_rxq_zc);
drivers/net/ethernet/engleder/tsnep_main.c
2123
queue->tx->xsk_pool = pool;
drivers/net/ethernet/engleder/tsnep_main.c
2124
queue->rx->xsk_pool = pool;
drivers/net/ethernet/engleder/tsnep_main.c
2297
return tsnep_xdp_setup_pool(adapter, bpf->xsk.pool,
drivers/net/ethernet/engleder/tsnep_xdp.c
22
struct xsk_buff_pool *pool, u16 queue_id)
drivers/net/ethernet/engleder/tsnep_xdp.c
40
retval = xsk_pool_dma_map(pool, adapter->dmadev,
drivers/net/ethernet/engleder/tsnep_xdp.c
48
retval = tsnep_enable_xsk(queue, pool);
drivers/net/ethernet/engleder/tsnep_xdp.c
50
xsk_pool_dma_unmap(pool, DMA_ATTR_SKIP_CPU_SYNC);
drivers/net/ethernet/engleder/tsnep_xdp.c
60
struct xsk_buff_pool *pool;
drivers/net/ethernet/engleder/tsnep_xdp.c
67
pool = xsk_get_pool_from_qid(adapter->netdev, queue_id);
drivers/net/ethernet/engleder/tsnep_xdp.c
68
if (!pool)
drivers/net/ethernet/engleder/tsnep_xdp.c
75
xsk_pool_dma_unmap(pool, DMA_ATTR_SKIP_CPU_SYNC);
drivers/net/ethernet/engleder/tsnep_xdp.c
81
struct xsk_buff_pool *pool, u16 queue_id)
drivers/net/ethernet/engleder/tsnep_xdp.c
83
return pool ? tsnep_xdp_enable_pool(adapter, pool, queue_id) :
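
[Editor's note: the tsnep hits show the dispatch shape nearly every AF_XDP driver uses for XDP_SETUP_XSK_POOL — a NULL pool means "disable", and enable is xsk_pool_dma_map() plus queue rewiring, unwound with xsk_pool_dma_unmap() on failure. A condensed sketch, assuming a hypothetical my_enable_queue() for the driver-specific rewiring:]

#include <linux/dma-mapping.h>
#include <net/xdp_sock_drv.h>

static int my_enable_queue(struct net_device *dev, struct xsk_buff_pool *pool,
			   u16 queue_id);	/* hypothetical driver hook */

static int my_xsk_setup_pool(struct net_device *dev, struct device *dma_dev,
			     struct xsk_buff_pool *pool, u16 queue_id)
{
	int err;

	if (!pool) {				/* disable path */
		pool = xsk_get_pool_from_qid(dev, queue_id);
		if (!pool)
			return -EINVAL;
		xsk_pool_dma_unmap(pool, DMA_ATTR_SKIP_CPU_SYNC);
		return 0;
	}

	err = xsk_pool_dma_map(pool, dma_dev, DMA_ATTR_SKIP_CPU_SYNC);
	if (err)
		return err;

	err = my_enable_queue(dev, pool, queue_id);
	if (err)				/* unwind the DMA mapping */
		xsk_pool_dma_unmap(pool, DMA_ATTR_SKIP_CPU_SYNC);
	return err;
}
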
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
1357
err = bman_release(dpaa_bp->pool, bmb, cnt);
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
556
dpaa_bp->pool = bman_new_pool();
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
557
if (!dpaa_bp->pool) {
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
563
dpaa_bp->bpid = (u8)bman_get_bpid(dpaa_bp->pool);
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
578
bman_free_pool(dpaa_bp->pool);
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
593
ret = bman_acquire(bp->pool, bmb, num);
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
632
bman_free_pool(bp->pool);
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
793
u32 pool;
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
796
ret = qman_alloc_pool(&pool);
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
799
rx_pool_channel = pool;
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
814
u32 pool = QM_SDQCR_CHANNELS_POOL_CONV(channel);
drivers/net/ethernet/freescale/dpaa/dpaa_eth.c
821
qman_p_static_dequeue_add(portal, pool);
drivers/net/ethernet/freescale/dpaa/dpaa_eth.h
71
struct bman_pool *pool;
drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c
2807
return dpaa2_xsk_setup_pool(dev, xdp->xsk.pool, xdp->xsk.queue_id);
drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.h
847
int dpaa2_xsk_setup_pool(struct net_device *dev, struct xsk_buff_pool *pool, u16 qid);
drivers/net/ethernet/freescale/dpaa2/dpaa2-xsk.c
175
struct xsk_buff_pool *pool = xsk_get_pool_from_qid(dev, qid);
drivers/net/ethernet/freescale/dpaa2/dpaa2-xsk.c
190
xsk_pool_dma_unmap(pool, 0);
drivers/net/ethernet/freescale/dpaa2/dpaa2-xsk.c
221
struct xsk_buff_pool *pool,
drivers/net/ethernet/freescale/dpaa2/dpaa2-xsk.c
244
err = xsk_pool_dma_map(pool, priv->net_dev->dev.parent, 0);
drivers/net/ethernet/freescale/dpaa2/dpaa2-xsk.c
257
xsk_pool_set_rxq_info(pool, &ch->xdp_rxq);
drivers/net/ethernet/freescale/dpaa2/dpaa2-xsk.c
265
ch->xsk_pool = pool;
drivers/net/ethernet/freescale/dpaa2/dpaa2-xsk.c
295
xsk_pool_dma_unmap(pool, 0);
drivers/net/ethernet/freescale/dpaa2/dpaa2-xsk.c
303
int dpaa2_xsk_setup_pool(struct net_device *dev, struct xsk_buff_pool *pool, u16 qid)
drivers/net/ethernet/freescale/dpaa2/dpaa2-xsk.c
305
return pool ? dpaa2_xsk_enable_pool(dev, pool, qid) :
drivers/net/ethernet/freescale/dpaa2/dpni-cmd.h
132
struct dpni_cmd_pool pool[DPNI_MAX_DPBP];
drivers/net/ethernet/freescale/dpaa2/dpni.c
178
cmd_params->pool[i].dpbp_id =
drivers/net/ethernet/freescale/dpaa2/dpni.c
180
cmd_params->pool[i].priority_mask =
drivers/net/ethernet/freescale/fec_main.c
1483
struct xsk_buff_pool *pool,
drivers/net/ethernet/freescale/fec_main.c
1487
struct xdp_desc *xsk_desc = pool->tx_descs;
drivers/net/ethernet/freescale/fec_main.c
1506
batch = xsk_tx_peek_release_desc_batch(pool, budget);
drivers/net/ethernet/freescale/fec_main.c
1512
dma = xsk_buff_raw_get_dma(pool, xsk_desc[i].addr);
drivers/net/ethernet/freescale/fec_main.c
1513
xsk_buff_raw_dma_sync_for_device(pool, dma, xsk_desc[i].len);
drivers/net/ethernet/freescale/fec_main.c
1720
struct xsk_buff_pool *pool = txq->xsk_pool;
drivers/net/ethernet/freescale/fec_main.c
1723
xsk_tx_completed(pool, xsk_cnt);
drivers/net/ethernet/freescale/fec_main.c
1725
if (xsk_uses_need_wakeup(pool))
drivers/net/ethernet/freescale/fec_main.c
1726
xsk_set_tx_need_wakeup(pool);
drivers/net/ethernet/freescale/fec_main.c
1732
if (!fec_enet_xsk_xmit(fep, pool, queue))
drivers/net/ethernet/freescale/fec_main.c
4421
struct xsk_buff_pool *pool)
drivers/net/ethernet/freescale/fec_main.c
4435
rxq->xsk_pool = pool;
drivers/net/ethernet/freescale/fec_main.c
4439
buf[i].xdp = xsk_buff_alloc(pool);
drivers/net/ethernet/freescale/fec_main.c
4523
struct xsk_buff_pool *pool,
drivers/net/ethernet/freescale/fec_main.c
4535
rxq = pool ? fec_alloc_new_rxq_xsk(fep, queue, pool) :
drivers/net/ethernet/freescale/fec_main.c
4545
fec_init_rxq_bd_buffers(rxq, !!pool);
drivers/net/ethernet/freescale/fec_main.c
4546
txq->xsk_pool = pool;
drivers/net/ethernet/freescale/fec_main.c
4562
struct xsk_buff_pool *pool,
drivers/net/ethernet/freescale/fec_main.c
4567
err = xsk_pool_dma_map(pool, &fep->pdev->dev, 0);
drivers/net/ethernet/freescale/fec_main.c
4577
rxq->xsk_pool = pool;
drivers/net/ethernet/freescale/fec_main.c
4578
txq->xsk_pool = pool;
drivers/net/ethernet/freescale/fec_main.c
4583
err = fec_xsk_restart_napi(fep, pool, queue);
drivers/net/ethernet/freescale/fec_main.c
4585
xsk_pool_dma_unmap(pool, 0);
drivers/net/ethernet/freescale/fec_main.c
4619
struct xsk_buff_pool *pool,
drivers/net/ethernet/freescale/fec_main.c
4625
return pool ? fec_enable_xsk_pool(fep, pool, queue) :
drivers/net/ethernet/freescale/fec_main.c
4667
return fec_setup_xsk_pool(fep, bpf->xsk.pool,
drivers/net/ethernet/freescale/fman/fman_muram.c
108
vaddr = gen_pool_alloc(muram->pool, size);
drivers/net/ethernet/freescale/fman/fman_muram.c
13
struct gen_pool *pool;
drivers/net/ethernet/freescale/fman/fman_muram.c
130
gen_pool_free(muram->pool, addr, size);
drivers/net/ethernet/freescale/fman/fman_muram.c
47
muram->pool = gen_pool_create(ilog2(64), -1);
drivers/net/ethernet/freescale/fman/fman_muram.c
48
if (!muram->pool) {
drivers/net/ethernet/freescale/fman/fman_muram.c
59
ret = gen_pool_add_virt(muram->pool, (unsigned long)vaddr,
drivers/net/ethernet/freescale/fman/fman_muram.c
74
gen_pool_destroy(muram->pool);
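
[Editor's note: fman_muram wraps its on-chip memory in a genalloc pool — gen_pool_create() with a minimum allocation order, gen_pool_add_virt() to seed it with the virtual/physical range, then gen_pool_alloc()/gen_pool_free() at runtime. A minimal sketch of that lifecycle under assumed addresses:]

#include <linux/genalloc.h>
#include <linux/log2.h>

static struct gen_pool *muram_like_pool_init(void *vaddr, phys_addr_t paddr,
					     size_t size)
{
	struct gen_pool *pool;

	/* Smallest allocation is 64 bytes; -1 means no NUMA affinity. */
	pool = gen_pool_create(ilog2(64), -1);
	if (!pool)
		return NULL;

	if (gen_pool_add_virt(pool, (unsigned long)vaddr, paddr, size, -1)) {
		gen_pool_destroy(pool);
		return NULL;
	}
	return pool;
}

[At runtime, gen_pool_alloc(pool, size) returns 0 on exhaustion, and gen_pool_free(pool, addr, size) must be handed back the same size that was allocated.]
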
drivers/net/ethernet/google/gve/gve_main.c
1190
struct xsk_buff_pool *pool, u16 qid)
drivers/net/ethernet/google/gve/gve_main.c
1198
MEM_TYPE_XSK_BUFF_POOL, pool);
drivers/net/ethernet/google/gve/gve_main.c
1204
rx->xsk_pool = pool;
drivers/net/ethernet/google/gve/gve_main.c
1207
priv->tx[tx_qid].xsk_pool = pool;
drivers/net/ethernet/google/gve/gve_main.c
1589
struct xsk_buff_pool *pool,
drivers/net/ethernet/google/gve/gve_main.c
1599
if (xsk_pool_get_rx_frame_size(pool) <
drivers/net/ethernet/google/gve/gve_main.c
1605
err = xsk_pool_dma_map(pool, &priv->pdev->dev,
drivers/net/ethernet/google/gve/gve_main.c
1616
err = gve_reg_xsk_pool(priv, dev, pool, qid);
drivers/net/ethernet/google/gve/gve_main.c
1632
xsk_pool_dma_unmap(pool,
drivers/net/ethernet/google/gve/gve_main.c
1644
struct xsk_buff_pool *pool;
drivers/net/ethernet/google/gve/gve_main.c
1653
pool = xsk_get_pool_from_qid(dev, qid);
drivers/net/ethernet/google/gve/gve_main.c
1654
if (pool)
drivers/net/ethernet/google/gve/gve_main.c
1655
xsk_pool_dma_unmap(pool,
drivers/net/ethernet/google/gve/gve_main.c
1775
if (xdp->xsk.pool)
drivers/net/ethernet/google/gve/gve_main.c
1776
return gve_xsk_pool_enable(dev, xdp->xsk.pool, xdp->xsk.queue_id);
drivers/net/ethernet/google/gve/gve_rx_dqo.c
220
struct page_pool *pool;
drivers/net/ethernet/google/gve/gve_rx_dqo.c
277
pool = gve_rx_create_page_pool(priv, rx, cfg->xdp);
drivers/net/ethernet/google/gve/gve_rx_dqo.c
278
if (IS_ERR(pool))
drivers/net/ethernet/google/gve/gve_rx_dqo.c
281
rx->dqo.page_pool = pool;
drivers/net/ethernet/google/gve/gve_tx_dqo.c
1074
struct xsk_buff_pool *pool = tx->xsk_pool;
drivers/net/ethernet/google/gve/gve_tx_dqo.c
1091
if (!xsk_tx_peek_desc(pool, &desc))
drivers/net/ethernet/google/gve/gve_tx_dqo.c
1099
addr = xsk_buff_raw_get_dma(pool, desc.addr);
drivers/net/ethernet/google/gve/gve_tx_dqo.c
1100
xsk_buff_raw_dma_sync_for_device(pool, addr, desc.len);
drivers/net/ethernet/google/gve/gve_tx_dqo.c
1115
xsk_tx_release(pool);
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
1005
size = priv->pool[i].count * sizeof(struct hix5hd2_desc);
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
1011
priv->pool[i].size = size;
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
1012
priv->pool[i].desc = virt_addr;
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
1013
priv->pool[i].phys_addr = phys_addr;
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
242
struct hix5hd2_desc_sw pool[QUEUE_NUMS];
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
243
#define rx_fq pool[0]
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
244
#define rx_bq pool[1]
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
245
#define tx_bq pool[2]
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
246
#define tx_rq pool[3]
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
983
if (priv->pool[i].desc) {
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
984
dma_free_coherent(priv->dev, priv->pool[i].size,
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
985
priv->pool[i].desc,
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
986
priv->pool[i].phys_addr);
drivers/net/ethernet/hisilicon/hix5hd2_gmac.c
987
priv->pool[i].desc = NULL;
drivers/net/ethernet/ibm/ibmveth.c
151
static void ibmveth_init_buffer_pool(struct ibmveth_buff_pool *pool,
drivers/net/ethernet/ibm/ibmveth.c
155
pool->size = pool_size;
drivers/net/ethernet/ibm/ibmveth.c
156
pool->index = pool_index;
drivers/net/ethernet/ibm/ibmveth.c
157
pool->buff_size = buff_size;
drivers/net/ethernet/ibm/ibmveth.c
158
pool->threshold = pool_size * 7 / 8;
drivers/net/ethernet/ibm/ibmveth.c
159
pool->active = pool_active;
drivers/net/ethernet/ibm/ibmveth.c
163
static int ibmveth_alloc_buffer_pool(struct ibmveth_buff_pool *pool)
drivers/net/ethernet/ibm/ibmveth.c
167
pool->free_map = kmalloc_array(pool->size, sizeof(u16), GFP_KERNEL);
drivers/net/ethernet/ibm/ibmveth.c
169
if (!pool->free_map)
drivers/net/ethernet/ibm/ibmveth.c
172
pool->dma_addr = kzalloc_objs(dma_addr_t, pool->size);
drivers/net/ethernet/ibm/ibmveth.c
173
if (!pool->dma_addr) {
drivers/net/ethernet/ibm/ibmveth.c
174
kfree(pool->free_map);
drivers/net/ethernet/ibm/ibmveth.c
175
pool->free_map = NULL;
drivers/net/ethernet/ibm/ibmveth.c
179
pool->skbuff = kcalloc(pool->size, sizeof(void *), GFP_KERNEL);
drivers/net/ethernet/ibm/ibmveth.c
181
if (!pool->skbuff) {
drivers/net/ethernet/ibm/ibmveth.c
182
kfree(pool->dma_addr);
drivers/net/ethernet/ibm/ibmveth.c
183
pool->dma_addr = NULL;
drivers/net/ethernet/ibm/ibmveth.c
185
kfree(pool->free_map);
drivers/net/ethernet/ibm/ibmveth.c
186
pool->free_map = NULL;
drivers/net/ethernet/ibm/ibmveth.c
190
for (i = 0; i < pool->size; ++i)
drivers/net/ethernet/ibm/ibmveth.c
191
pool->free_map[i] = i;
drivers/net/ethernet/ibm/ibmveth.c
193
atomic_set(&pool->available, 0);
drivers/net/ethernet/ibm/ibmveth.c
194
pool->producer_index = 0;
drivers/net/ethernet/ibm/ibmveth.c
1945
struct ibmveth_buff_pool *pool = container_of(kobj,
drivers/net/ethernet/ibm/ibmveth.c
195
pool->consumer_index = 0;
drivers/net/ethernet/ibm/ibmveth.c
1950
return sprintf(buf, "%d\n", pool->active);
drivers/net/ethernet/ibm/ibmveth.c
1952
return sprintf(buf, "%d\n", pool->size);
drivers/net/ethernet/ibm/ibmveth.c
1954
return sprintf(buf, "%d\n", pool->buff_size);
drivers/net/ethernet/ibm/ibmveth.c
1981
struct ibmveth_buff_pool *pool = container_of(kobj,
drivers/net/ethernet/ibm/ibmveth.c
1998
oldbuff_size = pool->buff_size;
drivers/net/ethernet/ibm/ibmveth.c
1999
oldactive = pool->active;
drivers/net/ethernet/ibm/ibmveth.c
2000
oldsize = pool->size;
drivers/net/ethernet/ibm/ibmveth.c
2016
if (pool == &adapter->rx_buff_pool[i])
drivers/net/ethernet/ibm/ibmveth.c
2057
pool->active = newactive;
drivers/net/ethernet/ibm/ibmveth.c
2058
pool->buff_size = newbuff_size;
drivers/net/ethernet/ibm/ibmveth.c
2059
pool->size = newsize;
drivers/net/ethernet/ibm/ibmveth.c
2064
pool->active = oldactive;
drivers/net/ethernet/ibm/ibmveth.c
2065
pool->buff_size = oldbuff_size;
drivers/net/ethernet/ibm/ibmveth.c
2066
pool->size = oldsize;
drivers/net/ethernet/ibm/ibmveth.c
212
struct ibmveth_buff_pool *pool)
drivers/net/ethernet/ibm/ibmveth.c
215
u32 remaining = pool->size - atomic_read(&pool->available);
drivers/net/ethernet/ibm/ibmveth.c
2185
struct ibmveth_buff_pool *pool;
drivers/net/ethernet/ibm/ibmveth.c
2198
pool = &adapter->rx_buff_pool[0];
drivers/net/ethernet/ibm/ibmveth.c
2199
pool->skbuff = kunit_kcalloc(test, pool->size, sizeof(void *), GFP_KERNEL);
drivers/net/ethernet/ibm/ibmveth.c
2200
KUNIT_ASSERT_NOT_ERR_OR_NULL(test, pool->skbuff);
drivers/net/ethernet/ibm/ibmveth.c
2211
pool->skbuff[0] = NULL;
drivers/net/ethernet/ibm/ibmveth.c
2232
struct ibmveth_buff_pool *pool;
drivers/net/ethernet/ibm/ibmveth.c
2251
pool = &adapter->rx_buff_pool[0];
drivers/net/ethernet/ibm/ibmveth.c
2252
pool->skbuff = kunit_kcalloc(test, pool->size, sizeof(void *), GFP_KERNEL);
drivers/net/ethernet/ibm/ibmveth.c
2253
KUNIT_ASSERT_NOT_ERR_OR_NULL(test, pool->skbuff);
drivers/net/ethernet/ibm/ibmveth.c
2261
pool->skbuff[0] = skb;
drivers/net/ethernet/ibm/ibmveth.c
233
unsigned int free_index = pool->consumer_index;
drivers/net/ethernet/ibm/ibmveth.c
237
index = pool->free_map[free_index];
drivers/net/ethernet/ibm/ibmveth.c
247
if (!pool->skbuff[index]) {
drivers/net/ethernet/ibm/ibmveth.c
251
pool->buff_size);
drivers/net/ethernet/ibm/ibmveth.c
259
pool->buff_size,
drivers/net/ethernet/ibm/ibmveth.c
267
pool->dma_addr[index] = dma_addr;
drivers/net/ethernet/ibm/ibmveth.c
268
pool->skbuff[index] = skb;
drivers/net/ethernet/ibm/ibmveth.c
271
dma_addr = pool->dma_addr[index];
drivers/net/ethernet/ibm/ibmveth.c
278
len = min(pool->buff_size, len);
drivers/net/ethernet/ibm/ibmveth.c
279
ibmveth_flush_buffer(pool->skbuff[index]->data,
drivers/net/ethernet/ibm/ibmveth.c
284
pool->buff_size;
drivers/net/ethernet/ibm/ibmveth.c
287
correlators[filled] = ((u64)pool->index << 32) | index;
drivers/net/ethernet/ibm/ibmveth.c
288
*(u64 *)pool->skbuff[index]->data = correlators[filled];
drivers/net/ethernet/ibm/ibmveth.c
291
if (free_index >= pool->size)
drivers/net/ethernet/ibm/ibmveth.c
322
free_index = pool->consumer_index;
drivers/net/ethernet/ibm/ibmveth.c
323
pool->free_map[free_index] = IBM_VETH_INVALID_MAP;
drivers/net/ethernet/ibm/ibmveth.c
325
pool->consumer_index++;
drivers/net/ethernet/ibm/ibmveth.c
326
if (pool->consumer_index >= pool->size)
drivers/net/ethernet/ibm/ibmveth.c
327
pool->consumer_index = 0;
drivers/net/ethernet/ibm/ibmveth.c
341
dma_addr = pool->dma_addr[index];
drivers/net/ethernet/ibm/ibmveth.c
343
if (pool->skbuff[index]) {
drivers/net/ethernet/ibm/ibmveth.c
347
pool->buff_size,
drivers/net/ethernet/ibm/ibmveth.c
350
dev_kfree_skb_any(pool->skbuff[index]);
drivers/net/ethernet/ibm/ibmveth.c
351
pool->skbuff[index] = NULL;
drivers/net/ethernet/ibm/ibmveth.c
379
atomic_add(buffers_added, &(pool->available));
drivers/net/ethernet/ibm/ibmveth.c
402
struct ibmveth_buff_pool *pool = &adapter->rx_buff_pool[i];
drivers/net/ethernet/ibm/ibmveth.c
404
if (pool->active &&
drivers/net/ethernet/ibm/ibmveth.c
405
(atomic_read(&pool->available) < pool->threshold))
drivers/net/ethernet/ibm/ibmveth.c
406
ibmveth_replenish_buffer_pool(adapter, pool);
drivers/net/ethernet/ibm/ibmveth.c
414
struct ibmveth_buff_pool *pool)
drivers/net/ethernet/ibm/ibmveth.c
418
kfree(pool->free_map);
drivers/net/ethernet/ibm/ibmveth.c
419
pool->free_map = NULL;
drivers/net/ethernet/ibm/ibmveth.c
421
if (pool->skbuff && pool->dma_addr) {
drivers/net/ethernet/ibm/ibmveth.c
422
for (i = 0; i < pool->size; ++i) {
drivers/net/ethernet/ibm/ibmveth.c
423
struct sk_buff *skb = pool->skbuff[i];
drivers/net/ethernet/ibm/ibmveth.c
426
pool->dma_addr[i],
drivers/net/ethernet/ibm/ibmveth.c
427
pool->buff_size,
drivers/net/ethernet/ibm/ibmveth.c
430
pool->skbuff[i] = NULL;
drivers/net/ethernet/ibm/ibmveth.c
435
if (pool->dma_addr) {
drivers/net/ethernet/ibm/ibmveth.c
436
kfree(pool->dma_addr);
drivers/net/ethernet/ibm/ibmveth.c
437
pool->dma_addr = NULL;
drivers/net/ethernet/ibm/ibmveth.c
440
if (pool->skbuff) {
drivers/net/ethernet/ibm/ibmveth.c
441
kfree(pool->skbuff);
drivers/net/ethernet/ibm/ibmveth.c
442
pool->skbuff = NULL;
drivers/net/ethernet/ibm/ibmveth.c
460
unsigned int pool = correlator >> 32;
drivers/net/ethernet/ibm/ibmveth.c
465
if (WARN_ON(pool >= IBMVETH_NUM_BUFF_POOLS) ||
drivers/net/ethernet/ibm/ibmveth.c
466
WARN_ON(index >= adapter->rx_buff_pool[pool].size)) {
drivers/net/ethernet/ibm/ibmveth.c
471
skb = adapter->rx_buff_pool[pool].skbuff[index];
drivers/net/ethernet/ibm/ibmveth.c
485
adapter->rx_buff_pool[pool].skbuff[index] = NULL;
drivers/net/ethernet/ibm/ibmveth.c
488
adapter->rx_buff_pool[pool].dma_addr[index],
drivers/net/ethernet/ibm/ibmveth.c
489
adapter->rx_buff_pool[pool].buff_size,
drivers/net/ethernet/ibm/ibmveth.c
493
free_index = adapter->rx_buff_pool[pool].producer_index;
drivers/net/ethernet/ibm/ibmveth.c
494
adapter->rx_buff_pool[pool].producer_index++;
drivers/net/ethernet/ibm/ibmveth.c
495
if (adapter->rx_buff_pool[pool].producer_index >=
drivers/net/ethernet/ibm/ibmveth.c
496
adapter->rx_buff_pool[pool].size)
drivers/net/ethernet/ibm/ibmveth.c
497
adapter->rx_buff_pool[pool].producer_index = 0;
drivers/net/ethernet/ibm/ibmveth.c
498
adapter->rx_buff_pool[pool].free_map[free_index] = index;
drivers/net/ethernet/ibm/ibmveth.c
502
atomic_dec(&(adapter->rx_buff_pool[pool].available));
drivers/net/ethernet/ibm/ibmveth.c
511
unsigned int pool = correlator >> 32;
drivers/net/ethernet/ibm/ibmveth.c
514
if (WARN_ON(pool >= IBMVETH_NUM_BUFF_POOLS) ||
drivers/net/ethernet/ibm/ibmveth.c
515
WARN_ON(index >= adapter->rx_buff_pool[pool].size)) {
drivers/net/ethernet/ibm/ibmveth.c
520
return adapter->rx_buff_pool[pool].skbuff[index];
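
[Editor's note: the ibmveth pools above are classic index recyclers — free_map is a ring of free slot indices, consumer_index hands slots out when buffers are posted, and producer_index puts them back when the hypervisor returns a correlator (pool number in the high 32 bits, slot index in the low 32). A reduced sketch of the two ring operations, with a struct cut down from ibmveth_buff_pool:]

#include <linux/types.h>

struct buff_pool {			/* reduced from ibmveth_buff_pool */
	u32 size;
	u16 *free_map;
	u32 producer_index;
	u32 consumer_index;
};

/* Return slot 'index' to the pool once the hypervisor is done with it. */
static void pool_recycle_slot(struct buff_pool *pool, u16 index)
{
	pool->free_map[pool->producer_index] = index;
	if (++pool->producer_index >= pool->size)
		pool->producer_index = 0;	/* wrap the ring */
}

/* Take the next free slot index for a buffer about to be posted. */
static u16 pool_take_slot(struct buff_pool *pool)
{
	u16 index = pool->free_map[pool->consumer_index];

	if (++pool->consumer_index >= pool->size)
		pool->consumer_index = 0;	/* wrap the ring */
	return index;
}

[The real driver also poisons the consumed map entry with IBM_VETH_INVALID_MAP, as seen at line 323 above, so stale reuse is detectable.]
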
drivers/net/ethernet/ibm/ibmvnic.c
3511
struct ibmvnic_rx_pool *pool = &adapter->rx_pool[rx_buff->pool_index];
drivers/net/ethernet/ibm/ibmvnic.c
3515
pool->free_map[pool->next_alloc] = (int)(rx_buff - pool->rx_buff);
drivers/net/ethernet/ibm/ibmvnic.c
3516
pool->next_alloc = (pool->next_alloc + 1) % pool->size;
drivers/net/ethernet/ibm/ibmvnic.c
3518
atomic_dec(&pool->available);
drivers/net/ethernet/ibm/ibmvnic.c
4254
unsigned int pool = scrq->pool_index;
drivers/net/ethernet/ibm/ibmvnic.c
4260
tx_pool = &adapter->tso_pool[pool];
drivers/net/ethernet/ibm/ibmvnic.c
4263
tx_pool = &adapter->tx_pool[pool];
drivers/net/ethernet/ibm/ibmvnic.c
771
struct ibmvnic_rx_pool *pool)
drivers/net/ethernet/ibm/ibmvnic.c
773
int count = pool->size - atomic_read(&pool->available);
drivers/net/ethernet/ibm/ibmvnic.c
774
u64 handle = adapter->rx_scrq[pool->index]->handle;
drivers/net/ethernet/ibm/ibmvnic.c
790
if (!pool->active)
drivers/net/ethernet/ibm/ibmvnic.c
793
rx_scrq = adapter->rx_scrq[pool->index];
drivers/net/ethernet/ibm/ibmvnic.c
803
bufidx = pool->free_map[pool->next_free];
drivers/net/ethernet/ibm/ibmvnic.c
810
skb = pool->rx_buff[bufidx].skb;
drivers/net/ethernet/ibm/ibmvnic.c
813
pool->buff_size);
drivers/net/ethernet/ibm/ibmvnic.c
821
pool->free_map[pool->next_free] = IBMVNIC_INVALID_MAP;
drivers/net/ethernet/ibm/ibmvnic.c
822
pool->next_free = (pool->next_free + 1) % pool->size;
drivers/net/ethernet/ibm/ibmvnic.c
825
map_rxpool_buf_to_ltb(pool, bufidx, &ltb, &offset);
drivers/net/ethernet/ibm/ibmvnic.c
827
memset(dst, 0, pool->buff_size);
drivers/net/ethernet/ibm/ibmvnic.c
831
pool->rx_buff[bufidx].data = dst;
drivers/net/ethernet/ibm/ibmvnic.c
832
pool->rx_buff[bufidx].dma = dma_addr;
drivers/net/ethernet/ibm/ibmvnic.c
833
pool->rx_buff[bufidx].skb = skb;
drivers/net/ethernet/ibm/ibmvnic.c
834
pool->rx_buff[bufidx].pool_index = pool->index;
drivers/net/ethernet/ibm/ibmvnic.c
835
pool->rx_buff[bufidx].size = pool->buff_size;
drivers/net/ethernet/ibm/ibmvnic.c
842
cpu_to_be64((u64)&pool->rx_buff[bufidx]);
drivers/net/ethernet/ibm/ibmvnic.c
854
sub_crq->rx_add.len = cpu_to_be32(pool->buff_size << shift);
drivers/net/ethernet/ibm/ibmvnic.c
870
atomic_add(buffers_added, &pool->available);
drivers/net/ethernet/ibm/ibmvnic.c
887
pool->next_free = pool->next_free == 0 ?
drivers/net/ethernet/ibm/ibmvnic.c
888
pool->size - 1 : pool->next_free - 1;
drivers/net/ethernet/ibm/ibmvnic.c
892
bufidx = (int)(rx_buff - pool->rx_buff);
drivers/net/ethernet/ibm/ibmvnic.c
893
pool->free_map[pool->next_free] = bufidx;
drivers/net/ethernet/ibm/ibmvnic.c
894
dev_kfree_skb_any(pool->rx_buff[bufidx].skb);
drivers/net/ethernet/ibm/ibmvnic.c
895
pool->rx_buff[bufidx].skb = NULL;
drivers/net/ethernet/ibm/ibmvnic.c
898
atomic_add(buffers_added, &pool->available);
drivers/net/ethernet/intel/i40e/i40e_main.c
13581
return i40e_xsk_pool_setup(vsi, xdp->xsk.pool,
drivers/net/ethernet/intel/i40e/i40e_xsk.c
100
err = xsk_pool_dma_map(pool, &vsi->back->pdev->dev, I40E_RX_DMA_ATTR);
drivers/net/ethernet/intel/i40e/i40e_xsk.c
141
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/i40e/i40e_xsk.c
145
pool = xsk_get_pool_from_qid(netdev, qid);
drivers/net/ethernet/intel/i40e/i40e_xsk.c
146
if (!pool)
drivers/net/ethernet/intel/i40e/i40e_xsk.c
158
xsk_pool_dma_unmap(pool, I40E_RX_DMA_ATTR);
drivers/net/ethernet/intel/i40e/i40e_xsk.c
183
int i40e_xsk_pool_setup(struct i40e_vsi *vsi, struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/i40e/i40e_xsk.c
186
return pool ? i40e_xsk_pool_enable(vsi, pool, qid) :
drivers/net/ethernet/intel/i40e/i40e_xsk.c
83
struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/i40e/i40e_xsk.h
24
int i40e_xsk_pool_setup(struct i40e_vsi *vsi, struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/ice/ice.h
795
struct xsk_buff_pool *pool = xsk_get_pool_from_qid(vsi->netdev, qid);
drivers/net/ethernet/intel/ice/ice.h
800
return (pool && pool->dev) ? pool : NULL;
drivers/net/ethernet/intel/ice/ice_main.c
3074
ret = ice_xsk_pool_setup(vsi, xdp->xsk.pool, xdp->xsk.queue_id);
drivers/net/ethernet/intel/ice/ice_xsk.c
126
struct xsk_buff_pool *pool = xsk_get_pool_from_qid(vsi->netdev, qid);
drivers/net/ethernet/intel/ice/ice_xsk.c
128
if (!pool)
drivers/net/ethernet/intel/ice/ice_xsk.c
131
xsk_pool_dma_unmap(pool, ICE_RX_DMA_ATTR);
drivers/net/ethernet/intel/ice/ice_xsk.c
145
ice_xsk_pool_enable(struct ice_vsi *vsi, struct xsk_buff_pool *pool, u16 qid)
drivers/net/ethernet/intel/ice/ice_xsk.c
156
err = xsk_pool_dma_map(pool, ice_pf_to_dev(vsi->back),
drivers/net/ethernet/intel/ice/ice_xsk.c
197
int ice_xsk_pool_setup(struct ice_vsi *vsi, struct xsk_buff_pool *pool, u16 qid)
drivers/net/ethernet/intel/ice/ice_xsk.c
200
bool if_running, pool_present = !!pool;
drivers/net/ethernet/intel/ice/ice_xsk.c
224
pool_failure = pool_present ? ice_xsk_pool_enable(vsi, pool, qid) :
drivers/net/ethernet/intel/ice/ice_xsk.c
260
static u16 ice_fill_rx_descs(struct xsk_buff_pool *pool, struct xdp_buff **xdp,
drivers/net/ethernet/intel/ice/ice_xsk.c
267
buffs = xsk_buff_alloc_batch(pool, xdp, count);
drivers/net/ethernet/intel/ice/ice_xsk.h
13
int ice_xsk_pool_setup(struct ice_vsi *vsi, struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/ice/ice_xsk.h
42
struct xsk_buff_pool __always_unused *pool,
drivers/net/ethernet/intel/idpf/idpf_txrx.h
564
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/idpf/idpf_txrx.h
660
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/idpf/idpf_txrx.h
765
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/idpf/xdp.c
69
rxq->pool);
drivers/net/ethernet/intel/idpf/xsk.c
137
rxq->pool = NULL;
drivers/net/ethernet/intel/idpf/xsk.c
144
bufq->pool = NULL;
drivers/net/ethernet/intel/idpf/xsk.c
15
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/idpf/xsk.c
17
pool = xsk_get_pool_from_qid(vport->netdev, rxq->idx);
drivers/net/ethernet/intel/idpf/xsk.c
174
.dev = xdpsq->pool->dev,
drivers/net/ethernet/intel/idpf/xsk.c
18
if (!pool || !pool->dev || !xsk_buff_can_alloc(pool, 1))
drivers/net/ethernet/intel/idpf/xsk.c
197
xsk_tx_completed(xdpsq->pool, xsk_frames);
drivers/net/ethernet/intel/idpf/xsk.c
208
.dev = xdpsq->pool->dev,
drivers/net/ethernet/intel/idpf/xsk.c
21
rxq->pool = pool;
drivers/net/ethernet/intel/idpf/xsk.c
263
xsk_tx_completed(xdpsq->pool, xsk_frames);
drivers/net/ethernet/intel/idpf/xsk.c
282
.pool = xdpsq->pool,
drivers/net/ethernet/intel/idpf/xsk.c
30
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/idpf/xsk.c
300
.pool = xdpsq->pool,
drivers/net/ethernet/intel/idpf/xsk.c
326
return libeth_xsk_xmit_do_bulk(xsksq->pool, xsksq,
drivers/net/ethernet/intel/idpf/xsk.c
358
.pool = bufq->pool,
drivers/net/ethernet/intel/idpf/xsk.c
392
.pool = bufq->pool,
drivers/net/ethernet/intel/idpf/xsk.c
409
netdev_err(bufq->pool->netdev,
drivers/net/ethernet/intel/idpf/xsk.c
411
bufq->pool->queue_id);
drivers/net/ethernet/intel/idpf/xsk.c
45
pool = xsk_get_pool_from_qid(vport->netdev, qid);
drivers/net/ethernet/intel/idpf/xsk.c
46
if (!pool || !pool->dev || !xsk_buff_can_alloc(pool, 1))
drivers/net/ethernet/intel/idpf/xsk.c
477
wake = xsk_uses_need_wakeup(rxq->pool);
drivers/net/ethernet/intel/idpf/xsk.c
479
xsk_clear_rx_need_wakeup(rxq->pool);
drivers/net/ethernet/intel/idpf/xsk.c
49
bufq->pool = pool;
drivers/net/ethernet/intel/idpf/xsk.c
549
xsk_set_rx_need_wakeup(rxq->pool);
drivers/net/ethernet/intel/idpf/xsk.c
556
struct xsk_buff_pool *pool = bpf->xsk.pool;
drivers/net/ethernet/intel/idpf/xsk.c
561
if (pool && !IS_ALIGNED(xsk_pool_get_rx_frame_size(pool),
drivers/net/ethernet/intel/idpf/xsk.c
567
xsk_pool_get_rx_frame_size(pool));
drivers/net/ethernet/intel/idpf/xsk.c
57
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/idpf/xsk.c
573
goto pool;
drivers/net/ethernet/intel/idpf/xsk.c
584
pool:
drivers/net/ethernet/intel/idpf/xsk.c
585
ret = libeth_xsk_setup_pool(vport->netdev, qid, pool);
drivers/net/ethernet/intel/idpf/xsk.c
67
pool = xsk_get_pool_from_qid(vport->netdev, qid);
drivers/net/ethernet/intel/idpf/xsk.c
68
if (!pool || !pool->dev)
drivers/net/ethernet/intel/idpf/xsk.c
71
txq->pool = pool;
drivers/net/ethernet/intel/idpf/xsk.c
75
idpf_queue_assign(NOIRQ, txq, xsk_uses_need_wakeup(pool));
drivers/net/ethernet/intel/idpf/xsk.c
82
const struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/idpf/xsk.c
93
pool = xsk_get_pool_from_qid(vport->netdev, qid);
drivers/net/ethernet/intel/idpf/xsk.c
94
if (!pool || !pool->dev)
drivers/net/ethernet/intel/igb/igb.h
862
struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/igb/igb_main.c
2950
return igb_xsk_pool_setup(adapter, xdp->xsk.pool,
drivers/net/ethernet/intel/igb/igb_xsk.c
113
err = xsk_pool_dma_map(pool, &adapter->pdev->dev, IGB_RX_DMA_ATTR);
drivers/net/ethernet/intel/igb/igb_xsk.c
131
xsk_pool_dma_unmap(pool, IGB_RX_DMA_ATTR);
drivers/net/ethernet/intel/igb/igb_xsk.c
141
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/igb/igb_xsk.c
146
pool = xsk_get_pool_from_qid(adapter->netdev, qid);
drivers/net/ethernet/intel/igb/igb_xsk.c
147
if (!pool)
drivers/net/ethernet/intel/igb/igb_xsk.c
155
xsk_pool_dma_unmap(pool, IGB_RX_DMA_ATTR);
drivers/net/ethernet/intel/igb/igb_xsk.c
169
struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/igb/igb_xsk.c
172
return pool ? igb_xsk_pool_enable(adapter, pool, qid) :
drivers/net/ethernet/intel/igb/igb_xsk.c
176
static u16 igb_fill_rx_descs(struct xsk_buff_pool *pool, struct xdp_buff **xdp,
drivers/net/ethernet/intel/igb/igb_xsk.c
187
buffs = xsk_buff_alloc_batch(pool, xdp, count);
drivers/net/ethernet/intel/igb/igb_xsk.c
87
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/igb/igb_xsk.c
89
pool = xsk_get_pool_from_qid(adapter->netdev, qid);
drivers/net/ethernet/intel/igb/igb_xsk.c
94
return (pool && pool->dev) ? pool : NULL;
drivers/net/ethernet/intel/igb/igb_xsk.c
98
struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/igc/igc_main.c
3039
struct xsk_buff_pool *pool = ring->xsk_pool;
drivers/net/ethernet/intel/igc/igc_main.c
3063
while (budget >= 4 && xsk_tx_peek_desc(pool, &xdp_desc)) {
drivers/net/ethernet/intel/igc/igc_main.c
3076
dma = xsk_buff_raw_get_dma(pool, xdp_desc.addr);
drivers/net/ethernet/intel/igc/igc_main.c
3077
meta = xsk_buff_get_metadata(pool, xdp_desc.addr);
drivers/net/ethernet/intel/igc/igc_main.c
3078
xsk_buff_raw_dma_sync_for_device(pool, dma, xdp_desc.len);
drivers/net/ethernet/intel/igc/igc_main.c
3121
xsk_tx_release(pool);
drivers/net/ethernet/intel/igc/igc_main.c
6864
return igc_xdp_setup_pool(adapter, bpf->xsk.pool,
drivers/net/ethernet/intel/igc/igc_xdp.c
110
xsk_pool_dma_unmap(pool, IGC_RX_DMA_ATTR);
drivers/net/ethernet/intel/igc/igc_xdp.c
121
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/igc/igc_xdp.c
129
pool = xsk_get_pool_from_qid(adapter->netdev, queue_id);
drivers/net/ethernet/intel/igc/igc_xdp.c
130
if (!pool)
drivers/net/ethernet/intel/igc/igc_xdp.c
146
xsk_pool_dma_unmap(pool, IGC_RX_DMA_ATTR);
drivers/net/ethernet/intel/igc/igc_xdp.c
159
int igc_xdp_setup_pool(struct igc_adapter *adapter, struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/igc/igc_xdp.c
162
return pool ? igc_xdp_enable_pool(adapter, pool, queue_id) :
drivers/net/ethernet/intel/igc/igc_xdp.c
57
struct xsk_buff_pool *pool, u16 queue_id)
drivers/net/ethernet/intel/igc/igc_xdp.c
71
frame_size = xsk_pool_get_rx_frame_size(pool);
drivers/net/ethernet/intel/igc/igc_xdp.c
81
err = xsk_pool_dma_map(pool, dev, IGC_RX_DMA_ATTR);
drivers/net/ethernet/intel/igc/igc_xdp.h
9
int igc_xdp_setup_pool(struct igc_adapter *adapter, struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/ixgbe/ixgbe.h
335
int pool;
drivers/net/ethernet/intel/ixgbe/ixgbe.h
594
u16 pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
113
if (ddp->pool) {
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
114
dma_pool_free(ddp->pool, ddp->udl, ddp->udp);
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
115
ddp->pool = NULL;
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
183
if (!ddp_pool->pool) {
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
196
ddp->udl = dma_pool_alloc(ddp_pool->pool, GFP_ATOMIC, &ddp->udp);
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
201
ddp->pool = ddp_pool->pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
320
dma_pool_free(ddp->pool, ddp->udl, ddp->udp);
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
603
dma_pool_destroy(ddp_pool->pool);
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
604
ddp_pool->pool = NULL;
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
612
struct dma_pool *pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
617
pool = dma_pool_create(pool_name, dev, IXGBE_FCPTR_MAX,
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
619
if (!pool)
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c
623
ddp_pool->pool = pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.h
42
struct dma_pool *pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.h
47
struct dma_pool *pool;
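
[Editor's note: the ixgbe FCoE hits trace the dma_pool lifecycle for small coherent buffers — dma_pool_create() once per pool, dma_pool_alloc() returning both a CPU pointer and a DMA handle, and the matching dma_pool_free()/dma_pool_destroy(). A minimal sketch; the size, alignment, and name are placeholders, not the ixgbe values:]

#include <linux/device.h>
#include <linux/dmapool.h>
#include <linux/errno.h>

#define MY_DESC_SIZE	256		/* placeholder, not IXGBE_FCPTR_MAX */

static int my_dma_pool_demo(struct device *dev)
{
	struct dma_pool *pool;
	dma_addr_t handle;
	void *buf;

	/* args: name, device, block size, alignment, boundary (0 = none) */
	pool = dma_pool_create("my_pool", dev, MY_DESC_SIZE, 16, 0);
	if (!pool)
		return -ENOMEM;

	buf = dma_pool_alloc(pool, GFP_ATOMIC, &handle);
	if (!buf) {
		dma_pool_destroy(pool);
		return -ENOMEM;
	}

	/* ... program 'handle' into the device, touch 'buf' from the CPU ... */

	dma_pool_free(pool, buf, handle);
	dma_pool_destroy(pool);
	return 0;
}
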
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
191
u16 reg_idx, pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
199
pool = 0;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
209
pool++;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
213
adapter->rx_ring[i]->netdev = pool ? NULL : adapter->netdev;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
24
u16 reg_idx, pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
37
for (i = 0, pool = 0; i < adapter->num_rx_queues; i++, reg_idx++) {
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
40
pool++;
drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c
44
adapter->rx_ring[i]->netdev = pool ? NULL : adapter->netdev;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10585
u16 pool = VMDQ_P(0);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10587
if (netdev_uc_count(dev) >= ixgbe_available_rars(adapter, pool))
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10715
int pool, err;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10736
pool = find_first_zero_bit(adapter->fwd_bitmask, adapter->num_rx_pools);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10737
if (pool == adapter->num_rx_pools) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10783
if (pool >= adapter->num_rx_pools)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10791
set_bit(pool, adapter->fwd_bitmask);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10792
netdev_set_sb_channel(vdev, pool);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10793
accel->pool = pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10815
VMDQ_P(accel->pool));
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10838
clear_bit(accel->pool, adapter->fwd_bitmask);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
10965
return ixgbe_xsk_pool_setup(adapter, xdp->xsk.pool,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4296
u16 pool = adapter->num_rx_pools;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4302
while (pool--)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4304
IXGBE_PFVFRETA(i >> 2, VMDQ_P(pool)),
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4347
u16 pool = adapter->num_rx_pools;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4349
while (pool--)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4351
IXGBE_PFVFRSSRK(i, VMDQ_P(pool)),
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4422
u16 pool = adapter->num_rx_pools;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4433
while (pool--)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4435
IXGBE_PFVFMRQC(VMDQ_P(pool)),
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4602
u16 pool = adapter->num_rx_pools;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4619
while (pool--)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4620
IXGBE_WRITE_REG(hw, IXGBE_PSRTYPE(VMDQ_P(pool)), psrtype);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4626
u16 pool = adapter->num_rx_pools;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4645
while (pool--)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
4646
IXGBE_WRITE_REG(hw, IXGBE_VMOLR(VMDQ_P(pool)), vmolr);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5174
mac_table->pool,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5197
mac_table->pool,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5218
static int ixgbe_available_rars(struct ixgbe_adapter *adapter, u16 pool)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5231
if (mac_table->pool != pool)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5248
mac_table->pool = VMDQ_P(0);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5252
hw->mac.ops.set_rar(hw, 0, mac_table->addr, mac_table->pool,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5257
const u8 *addr, u16 pool)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5271
mac_table->pool = pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5285
const u8 *addr, u16 pool)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5300
if (mac_table->pool != pool)
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5802
baseq = accel->pool * adapter->num_rx_queues_per_pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5804
accel->pool, adapter->num_rx_pools,
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5827
VMDQ_P(accel->pool));
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
5843
clear_bit(accel->pool, adapter->fwd_bitmask);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9819
int pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9831
pool = find_first_zero_bit(adapter->fwd_bitmask, adapter->num_rx_pools);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9832
if (pool < adapter->num_rx_pools) {
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9833
set_bit(pool, adapter->fwd_bitmask);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c
9834
accel->pool = pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_txrx_common.h
36
struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/ixgbe/ixgbe_x550.c
3538
unsigned int pool)
drivers/net/ethernet/intel/ixgbe/ixgbe_x550.c
3543
if (pool > 63)
drivers/net/ethernet/intel/ixgbe/ixgbe_x550.c
3550
pfflp |= (1ULL << pool);
drivers/net/ethernet/intel/ixgbe/ixgbe_x550.c
3552
pfflp &= ~(1ULL << pool);
drivers/net/ethernet/intel/ixgbe/ixgbe_x550.h
16
unsigned int pool);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
24
struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
38
err = xsk_pool_dma_map(pool, &adapter->pdev->dev, IXGBE_RX_DMA_ATTR);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
393
struct xsk_buff_pool *pool = xdp_ring->xsk_pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
410
if (!xsk_tx_peek_desc(pool, &desc))
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
413
dma = xsk_buff_raw_get_dma(pool, desc.addr);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
414
xsk_buff_raw_dma_sync_for_device(pool, dma, desc.len);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
442
xsk_tx_release(pool);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
463
struct xsk_buff_pool *pool = tx_ring->xsk_pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
503
xsk_tx_completed(pool, xsk_frames);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
505
if (xsk_uses_need_wakeup(pool))
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
506
xsk_set_tx_need_wakeup(pool);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
545
struct xsk_buff_pool *pool = tx_ring->xsk_pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
565
xsk_tx_completed(pool, xsk_frames);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
57
xsk_pool_dma_unmap(pool, IXGBE_RX_DMA_ATTR);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
67
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
70
pool = xsk_get_pool_from_qid(adapter->netdev, qid);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
71
if (!pool)
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
81
xsk_pool_dma_unmap(pool, IXGBE_RX_DMA_ATTR);
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
90
struct xsk_buff_pool *pool,
drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c
93
return pool ? ixgbe_xsk_pool_enable(adapter, pool, qid) :
drivers/net/ethernet/intel/libeth/rx.c
164
struct page_pool *pool;
drivers/net/ethernet/intel/libeth/rx.c
176
pool = page_pool_create(&pp);
drivers/net/ethernet/intel/libeth/rx.c
177
if (IS_ERR(pool))
drivers/net/ethernet/intel/libeth/rx.c
178
return PTR_ERR(pool);
drivers/net/ethernet/intel/libeth/rx.c
186
ret = xdp_reg_page_pool(pool);
drivers/net/ethernet/intel/libeth/rx.c
191
fq->pp = pool;
drivers/net/ethernet/intel/libeth/rx.c
198
page_pool_destroy(pool);
drivers/net/ethernet/intel/libeth/xsk.c
140
if (xsk_uses_need_wakeup(xsk->pool) && ret == -ENOBUFS)
drivers/net/ethernet/intel/libeth/xsk.c
169
fq->buf_len = xsk_pool_get_rx_frame_size(fq->pool);
drivers/net/ethernet/intel/libeth/xsk.c
170
fq->truesize = xsk_pool_get_rx_frag_step(fq->pool);
drivers/net/ethernet/intel/libeth/xsk.c
258
struct xsk_buff_pool *pool;
drivers/net/ethernet/intel/libeth/xsk.c
260
pool = xsk_get_pool_from_qid(dev, qid);
drivers/net/ethernet/intel/libeth/xsk.c
261
if (!pool)
drivers/net/ethernet/intel/libeth/xsk.c
265
return xsk_pool_dma_map(pool, dev->dev.parent,
drivers/net/ethernet/intel/libeth/xsk.c
268
xsk_pool_dma_unmap(pool, LIBETH_XSK_DMA_ATTR);
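
[Editor's note: the ixgbe_xsk and libeth hits trace the standard zero-copy TX loop — peek descriptors with xsk_tx_peek_desc(), translate desc.addr to DMA with xsk_buff_raw_get_dma(), sync for device, release the peek, and set the need_wakeup flag. A skeletal version, with the hardware-specific descriptor write left as a hypothetical my_hw_post():]

#include <net/xdp_sock_drv.h>

/* Hypothetical HW post; a real driver writes a TX descriptor here. */
static void my_hw_post(dma_addr_t dma, u32 len);

static bool my_xsk_tx(struct xsk_buff_pool *pool, unsigned int budget)
{
	struct xdp_desc desc;
	unsigned int sent = 0;

	while (sent < budget && xsk_tx_peek_desc(pool, &desc)) {
		dma_addr_t dma = xsk_buff_raw_get_dma(pool, desc.addr);

		xsk_buff_raw_dma_sync_for_device(pool, dma, desc.len);
		my_hw_post(dma, desc.len);
		sent++;
	}

	if (sent)
		xsk_tx_release(pool);		/* flush the peeked batch */

	if (xsk_uses_need_wakeup(pool))
		xsk_set_tx_need_wakeup(pool);

	return sent < budget;			/* true = TX ring drained */
}

[Completion-side accounting is separate: once the hardware reports the send done, the driver calls xsk_tx_completed(pool, n), as the ixgbe_xsk.c hits at lines 503 and 565 show.]
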
drivers/net/ethernet/marvell/mvneta.c
184
#define MVNETA_CAUSE_BMU_ALLOC_ERR_MASK(pool) (1 << (MVNETA_CAUSE_BMU_ALLOC_ERR_SHIFT + (pool)))
drivers/net/ethernet/marvell/mvneta.c
2395
mvneta_swbm_build_skb(struct mvneta_port *pp, struct page_pool *pool,
drivers/net/ethernet/marvell/mvneta.c
64
#define MVNETA_PORT_POOL_BUFFER_SZ_REG(pool) (0x1700 + ((pool) << 2))
drivers/net/ethernet/marvell/mvneta_bm.h
33
#define MVNETA_BM_XBAR_POOL_REG(pool) \
drivers/net/ethernet/marvell/mvneta_bm.h
34
(((pool) < 2) ? MVNETA_BM_XBAR_01_REG : MVNETA_BM_XBAR_23_REG)
drivers/net/ethernet/marvell/mvneta_bm.h
35
#define MVNETA_BM_TARGET_ID_OFFS(pool) (((pool) & 1) ? 16 : 0)
drivers/net/ethernet/marvell/mvneta_bm.h
36
#define MVNETA_BM_TARGET_ID_MASK(pool) \
drivers/net/ethernet/marvell/mvneta_bm.h
37
(0xf << MVNETA_BM_TARGET_ID_OFFS(pool))
drivers/net/ethernet/marvell/mvneta_bm.h
38
#define MVNETA_BM_TARGET_ID_VAL(pool, id) \
drivers/net/ethernet/marvell/mvneta_bm.h
39
((id) << MVNETA_BM_TARGET_ID_OFFS(pool))
drivers/net/ethernet/marvell/mvneta_bm.h
40
#define MVNETA_BM_XBAR_ATTR_OFFS(pool) (((pool) & 1) ? 20 : 4)
drivers/net/ethernet/marvell/mvneta_bm.h
41
#define MVNETA_BM_XBAR_ATTR_MASK(pool) \
drivers/net/ethernet/marvell/mvneta_bm.h
42
(0xff << MVNETA_BM_XBAR_ATTR_OFFS(pool))
drivers/net/ethernet/marvell/mvneta_bm.h
43
#define MVNETA_BM_XBAR_ATTR_VAL(pool, attr) \
drivers/net/ethernet/marvell/mvneta_bm.h
44
((attr) << MVNETA_BM_XBAR_ATTR_OFFS(pool))
drivers/net/ethernet/marvell/mvneta_bm.h
47
#define MVNETA_BM_POOL_BASE_REG(pool) (0x10 + ((pool) << 4))
drivers/net/ethernet/marvell/mvneta_bm.h
51
#define MVNETA_BM_POOL_READ_PTR_REG(pool) (0x14 + ((pool) << 4))
drivers/net/ethernet/marvell/mvneta_bm.h
57
#define MVNETA_BM_POOL_WRITE_PTR_REG(pool) (0x18 + ((pool) << 4))
drivers/net/ethernet/marvell/mvneta_bm.h
64
#define MVNETA_BM_POOL_SIZE_REG(pool) (0x1c + ((pool) << 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
302
#define MVPP2_BM_POOL_BASE_REG(pool) (0x6000 + ((pool) * 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
304
#define MVPP2_BM_POOL_SIZE_REG(pool) (0x6040 + ((pool) * 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
306
#define MVPP2_BM_POOL_READ_PTR_REG(pool) (0x6080 + ((pool) * 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
308
#define MVPP2_BM_POOL_PTRS_NUM_REG(pool) (0x60c0 + ((pool) * 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
310
#define MVPP2_BM_BPPI_READ_PTR_REG(pool) (0x6100 + ((pool) * 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
311
#define MVPP2_BM_BPPI_PTRS_NUM_REG(pool) (0x6140 + ((pool) * 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
315
#define MVPP2_BM_POOL_CTRL_REG(pool) (0x6200 + ((pool) * 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
331
#define MVPP2_BM_INTR_CAUSE_REG(pool) (0x6240 + ((pool) * 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
337
#define MVPP2_BM_INTR_MASK_REG(pool) (0x6280 + ((pool) * 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
338
#define MVPP2_BM_PHY_ALLOC_REG(pool) (0x6400 + ((pool) * 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
345
#define MVPP2_BM_PHY_RLS_REG(pool) (0x6480 + ((pool) * 4))
drivers/net/ethernet/marvell/mvpp2/mvpp2.h
45
#define MVPP2_POOL_BUF_SIZE_REG(pool) (0x180 + 4 * (pool))
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1000
static inline void mvpp2_bm_pool_put(struct mvpp2_port *port, int pool,
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1034
MVPP2_BM_PHY_RLS_REG(pool), buf_dma_addr);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1099
mvpp2_bm_pool_use(struct mvpp2_port *port, unsigned pool, int pkt_size)
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1101
struct mvpp2_bm_pool *new_pool = &port->priv->bm_pools[pool];
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1104
if ((port->priv->percpu_pools && pool > mvpp2_get_nrxqs(port->priv) * 2) ||
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1105
(!port->priv->percpu_pools && pool >= MVPP2_BM_POOLS_NUM)) {
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1106
netdev_err(port->dev, "Invalid pool %d\n", pool);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1122
if (pool < port->nrxqs)
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1127
pkts_num = mvpp2_pools[pool].buf_num;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1156
unsigned int pool, int pkt_size)
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1158
struct mvpp2_bm_pool *new_pool = &port->priv->bm_pools[pool];
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1161
if (pool > port->nrxqs * 2) {
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
1162
netdev_err(port->dev, "Invalid pool %d\n", pool);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3041
int pool;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3043
pool = (status & MVPP2_RXD_BM_POOL_ID_MASK) >>
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3046
mvpp2_bm_pool_put(port, pool,
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3589
struct page_pool *page_pool, int pool)
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
360
static void *mvpp2_frag_alloc(const struct mvpp2_bm_pool *pool,
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3600
mvpp2_bm_pool_put(port, pool, dma_addr, phys_addr);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
366
if (likely(pool->frag_size <= PAGE_SIZE))
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
367
return netdev_alloc_frag(pool->frag_size);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
369
return kmalloc(pool->frag_size, GFP_ATOMIC);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
372
static void mvpp2_frag_free(const struct mvpp2_bm_pool *pool,
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
377
else if (likely(pool->frag_size <= PAGE_SIZE))
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3866
int pool, u32 rx_status)
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3886
mvpp2_bm_pool_put(port, pool, dma_addr, phys_addr);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3923
int pool, rx_bytes, err, ret;
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3938
pool = (rx_status & MVPP2_RXD_BM_POOL_ID_MASK) >>
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3940
bm_pool = &port->priv->bm_pools[pool];
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3943
pp = port->priv->page_pool[pool];
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
3990
err = mvpp2_rx_refill(port, bm_pool, pp, pool);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4022
err = mvpp2_rx_refill(port, bm_pool, pp, pool);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4054
mvpp2_buff_hdr_pool_put(port, rx_desc, pool, rx_status);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
4056
mvpp2_bm_pool_put(port, pool, dma_addr, phys_addr);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
901
struct mvpp2_bm_pool *pool,
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
920
val = mvpp2_cm3_read(port->priv, MSS_BUF_POOL_REG(pool->id));
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
926
mvpp2_cm3_write(port->priv, MSS_BUF_POOL_REG(pool->id), val);
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
929
val = mvpp2_cm3_read(port->priv, MSS_BUF_POOL_REG(pool->id));
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
935
if (!pool->buf_num) {
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c
940
mvpp2_cm3_write(port->priv, MSS_BUF_POOL_REG(pool->id), val);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
182
struct npa_cn20k_pool_s *pool = &rsp->pool;
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
184
seq_printf(m, "W0: Stack base\t\t%llx\n", pool->stack_base);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
187
pool->ena, pool->nat_align);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
189
pool->stack_caching);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
191
pool->buf_offset, pool->buf_size);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
194
pool->stack_max_pages, pool->stack_pages);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
197
pool->stack_offset, pool->shift, pool->avg_level);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
199
pool->avg_con, pool->fc_ena, pool->fc_stype);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
201
pool->fc_hyst_bits, pool->fc_up_crossing);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
202
seq_printf(m, "W4: update_time\t\t%d\n", pool->update_time);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
204
seq_printf(m, "W5: fc_addr\t\t%llx\n", pool->fc_addr);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
206
seq_printf(m, "W6: ptr_start\t\t%llx\n", pool->ptr_start);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
208
seq_printf(m, "W7: ptr_end\t\t%llx\n", pool->ptr_end);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
211
pool->err_int, pool->err_int_ena);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
212
seq_printf(m, "W8: thresh_int\t\t%d\n", pool->thresh_int);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
214
pool->thresh_int_ena, pool->thresh_up);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
216
pool->thresh_qint_idx, pool->err_qint_idx);
drivers/net/ethernet/marvell/octeontx2/af/cn20k/debugfs.c
217
seq_printf(m, "W8: fc_msh_dst\t\t%d\n", pool->fc_msh_dst);
drivers/net/ethernet/marvell/octeontx2/af/mbox.h
818
struct npa_pool_s pool;
drivers/net/ethernet/marvell/octeontx2/af/mbox.h
835
struct npa_pool_s pool;
drivers/net/ethernet/marvell/octeontx2/af/mbox.h
851
struct npa_cn20k_pool_s pool;
drivers/net/ethernet/marvell/octeontx2/af/mbox.h
868
struct npa_cn20k_pool_s pool;
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1156
struct npa_pool_s *pool = &rsp->pool;
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1164
seq_printf(m, "W0: Stack base\t\t%llx\n", pool->stack_base);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1167
pool->ena, pool->nat_align);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1169
pool->stack_caching, pool->stack_way_mask);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1171
pool->buf_offset, pool->buf_size);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1174
pool->stack_max_pages, pool->stack_pages);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1176
seq_printf(m, "W3: op_pc \t\t%llu\n", (u64)pool->op_pc);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1179
pool->stack_offset, pool->shift, pool->avg_level);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1181
pool->avg_con, pool->fc_ena, pool->fc_stype);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1183
pool->fc_hyst_bits, pool->fc_up_crossing);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1185
seq_printf(m, "W4: fc_be\t\t%d\n", pool->fc_be);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1186
seq_printf(m, "W4: update_time\t\t%d\n", pool->update_time);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1188
seq_printf(m, "W5: fc_addr\t\t%llx\n", pool->fc_addr);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1190
seq_printf(m, "W6: ptr_start\t\t%llx\n", pool->ptr_start);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1192
seq_printf(m, "W7: ptr_end\t\t%llx\n", pool->ptr_end);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1195
pool->err_int, pool->err_int_ena);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1196
seq_printf(m, "W8: thresh_int\t\t%d\n", pool->thresh_int);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1198
pool->thresh_int_ena, pool->thresh_up);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1200
pool->thresh_qint_idx, pool->err_qint_idx);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
1202
seq_printf(m, "W8: fc_msh_dst\t\t%d\n", pool->fc_msh_dst);
drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c
125
memcpy(ctx, &req->pool, sizeof(struct npa_pool_s));
drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c
139
memcpy(ctx, &req->pool, sizeof(struct npa_pool_s));
drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c
181
if (req->op == NPA_AQ_INSTOP_INIT && req->pool.ena)
drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c
184
ena = (req->pool.ena & req->pool_mask.ena) |
drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c
202
memcpy(&rsp->pool, ctx,
drivers/net/ethernet/marvell/octeontx2/af/rvu_npa.c
225
aq_req.pool.ena = 0;
drivers/net/ethernet/marvell/octeontx2/nic/cn10k.c
129
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/cn10k.c
133
pool = &pfvf->qset.pool[cq->cq_idx];
drivers/net/ethernet/marvell/octeontx2/nic/cn10k.c
144
ptrs[num_ptrs] = pool->xsk_pool ?
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
258
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
262
pool = &pfvf->qset.pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
267
if (!pool->fc_addr) {
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
268
err = qmem_alloc(pfvf->dev, &pool->fc_addr, 1, OTX2_ALIGN);
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
296
aq->aura.fc_addr = pool->fc_addr->iova;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
337
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
340
pool = &pfvf->qset.pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
342
err = qmem_alloc(pfvf->dev, &pool->stack,
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
347
pool->rbsize = buf_size;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
355
qmem_free(pfvf->dev, pool->stack);
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
360
qmem_free(pfvf->dev, pool->stack);
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
366
aq->pool.stack_base = pool->stack->iova;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
367
aq->pool.stack_caching = 1;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
368
aq->pool.ena = 1;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
369
aq->pool.buf_size = buf_size / 128;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
370
aq->pool.stack_max_pages = stack_pages;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
371
aq->pool.shift = ilog2(numptrs) - 8;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
372
aq->pool.ptr_start = 0;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
373
aq->pool.ptr_end = ~0ULL;
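
The cn20k.c lines above program the NPA pool context before enabling it: the hardware takes the buffer size in 128-byte units and a pool shift that appears to be relative to a 2^8-pointer baseline, while ptr_start/ptr_end are left wide open so any pointer the driver frees into the pool is accepted. A minimal self-contained sketch of the two derived encodings follows; the function and variable names are illustrative, and only the buf_size / 128 and ilog2(numptrs) - 8 formulas come from the listing itself.

    #include <stdint.h>
    #include <stdio.h>

    /* floor(log2(v)) for v > 0, standing in for the kernel's ilog2() */
    static unsigned int ilog2_u32(uint32_t v)
    {
        unsigned int r = 0;

        while (v >>= 1)
            r++;
        return r;
    }

    int main(void)
    {
        uint32_t buf_size = 2048; /* receive-buffer size in bytes (illustrative) */
        uint32_t numptrs = 1024;  /* pointers the pool can hold (illustrative)   */

        /* hardware consumes the buffer size in 128-byte units ... */
        uint32_t enc_buf_size = buf_size / 128;
        /* ... and the pool shift relative to a 2^8-pointer baseline */
        uint32_t enc_shift = ilog2_u32(numptrs) - 8;

        printf("pool.buf_size = %u, pool.shift = %u\n", enc_buf_size, enc_shift);
        return 0;
    }

The same encodings recur verbatim in the otx2_common.c pool setup further down, which is why both call sites compute them from the caller's numptrs and buf_size rather than caching them.
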
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
380
pool->page_pool = NULL;
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
391
pool->page_pool = page_pool_create(&pp_params);
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
392
if (IS_ERR(pool->page_pool)) {
drivers/net/ethernet/marvell/octeontx2/nic/cn20k.c
394
return PTR_ERR(pool->page_pool);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1029
sq->aura_fc_addr = pool->fc_addr->base;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1057
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1068
pool = &qset->pool[qidx];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1069
if (pool->xsk_pool) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1073
xsk_pool_set_rxq_info(pool->xsk_pool, &cq->xdp_rxq);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1074
} else if (pool->page_pool) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1077
pool->page_pool);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1111
cq->rbpool = &qset->pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1290
void otx2_free_bufs(struct otx2_nic *pfvf, struct otx2_pool *pool,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1298
if (pool->page_pool) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1299
page_pool_put_full_page(pool->page_pool, page, true);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1300
} else if (pool->xsk_pool) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1314
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1331
pool = &pfvf->qset.pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1336
otx2_free_bufs(pfvf, pool, iova, size);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1340
for (idx = 0 ; idx < pool->xdp_cnt; idx++) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1341
if (!pool->xdp[idx])
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1344
xsk_buff_free(pool->xdp[idx]);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1351
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1354
if (!pfvf->qset.pool)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1358
pool = &pfvf->qset.pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1359
qmem_free(pfvf->dev, pool->stack);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1360
qmem_free(pfvf->dev, pool->fc_addr);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1361
page_pool_destroy(pool->page_pool);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1362
devm_kfree(pfvf->dev, pool->xdp);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1363
pool->xsk_pool = NULL;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1365
devm_kfree(pfvf->dev, pfvf->qset.pool);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1366
pfvf->qset.pool = NULL;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1380
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1383
pool = &pfvf->qset.pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1388
if (!pool->fc_addr) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1389
err = qmem_alloc(pfvf->dev, &pool->fc_addr, 1, OTX2_ALIGN);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1416
aq->aura.fc_addr = pool->fc_addr->iova;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1466
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1469
pool = &pfvf->qset.pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1471
err = qmem_alloc(pfvf->dev, &pool->stack,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1476
pool->rbsize = buf_size;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1484
qmem_free(pfvf->dev, pool->stack);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1489
qmem_free(pfvf->dev, pool->stack);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1495
aq->pool.stack_base = pool->stack->iova;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1496
aq->pool.stack_caching = 1;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1497
aq->pool.ena = 1;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1498
aq->pool.buf_size = buf_size / 128;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1499
aq->pool.stack_max_pages = stack_pages;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1500
aq->pool.shift = ilog2(numptrs) - 8;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1501
aq->pool.ptr_start = 0;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1502
aq->pool.ptr_end = ~0ULL;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1518
pool->page_pool = page_pool_create(&pp_params);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1519
if (IS_ERR(pool->page_pool)) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1521
return PTR_ERR(pool->page_pool);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1529
pool->xsk_pool = xsk_pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1530
pool->xdp_cnt = numptrs;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1531
pool->xdp = devm_kcalloc(pfvf->dev,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1533
if (!pool->xdp)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1546
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1584
pool = &pfvf->qset.pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1595
err = otx2_alloc_rbuf(pfvf, pool, &bufptr, pool_id, ptr);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1597
if (pool->xsk_pool) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1600
xsk_buff_free(pool->xdp[ptr]);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1625
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1655
pool = &pfvf->qset.pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1658
err = otx2_alloc_rbuf(pfvf, pool, &bufptr, pool_id, ptr);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1660
if (pool->xsk_pool) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1662
xsk_buff_free(pool->xdp[--ptr]);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1668
pool->xsk_pool ? bufptr :
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1693
qset->pool = devm_kcalloc(pfvf->dev, hw->pool_cnt,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
1695
if (!qset->pool)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
531
static int otx2_alloc_pool_buf(struct otx2_nic *pfvf, struct otx2_pool *pool,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
538
sz = SKB_DATA_ALIGN(pool->rbsize);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
541
page = page_pool_alloc_frag(pool->page_pool, &offset, sz, GFP_ATOMIC);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
549
static int __otx2_alloc_rbuf(struct otx2_nic *pfvf, struct otx2_pool *pool,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
554
if (pool->xsk_pool)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
555
return otx2_xsk_pool_alloc_buf(pfvf, pool, dma, idx);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
557
if (pool->page_pool)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
558
return otx2_alloc_pool_buf(pfvf, pool, dma);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
560
buf = napi_alloc_frag_align(pool->rbsize, OTX2_ALIGN);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
564
*dma = dma_map_single_attrs(pfvf->dev, buf, pool->rbsize,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
574
int otx2_alloc_rbuf(struct otx2_nic *pfvf, struct otx2_pool *pool,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
580
ret = __otx2_alloc_rbuf(pfvf, pool, dma, qidx, idx);
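
Taken together, the __otx2_alloc_rbuf() lines above show a strict three-way priority for sourcing receive buffers: an AF_XDP pool when one is bound to the queue, else the queue's page_pool, else a plain napi frag that the driver DMA-maps by hand. A hedged stand-alone sketch of that dispatch order, with demo_* stand-ins for the otx2 helpers:

    #include <stdbool.h>
    #include <stdio.h>

    struct demo_pool {
        bool has_xsk;       /* AF_XDP buffer pool bound to this queue */
        bool has_page_pool; /* page_pool created at queue init        */
    };

    /* stand-ins that only report which backend was chosen */
    static int demo_alloc_from_xsk(void)       { return 0; }
    static int demo_alloc_from_page_pool(void) { return 1; }
    static int demo_alloc_frag(void)           { return 2; }

    static int demo_alloc_rbuf(const struct demo_pool *pool)
    {
        /* zero-copy AF_XDP buffers take priority when a UMEM is bound */
        if (pool->has_xsk)
            return demo_alloc_from_xsk();

        /* otherwise recycle through the page_pool if one exists */
        if (pool->has_page_pool)
            return demo_alloc_from_page_pool();

        /* last resort: a plain frag the caller must DMA-map itself */
        return demo_alloc_frag();
    }

    int main(void)
    {
        struct demo_pool pp_only = { .has_page_pool = true };

        printf("chosen backend: %d\n", demo_alloc_rbuf(&pp_only)); /* prints 1 */
        return 0;
    }
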
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
964
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.c
968
pool = &pfvf->qset.pool[sqb_aura];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.h
1036
int otx2_alloc_rbuf(struct otx2_nic *pfvf, struct otx2_pool *pool,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_common.h
1147
void otx2_free_bufs(struct otx2_nic *pfvf, struct otx2_pool *pool,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_pf.c
2863
return otx2_xsk_pool_setup(pf, xdp->xsk.pool, xdp->xsk.queue_id);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
1279
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
1285
pool = &pfvf->qset.pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
1288
if (pool->page_pool)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
1310
otx2_free_bufs(pfvf, pool, iova, pfvf->rbsize);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
1496
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
1504
pool = &pfvf->qset.pool[qidx];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
1506
if (pool->xsk_pool) {
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
1507
xsk_buff = pool->xdp[--cq->rbpool->xdp_top];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
1575
page_pool_recycle_direct(pool->page_pool, page);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
569
struct otx2_pool *pool = NULL;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
610
pool = &pfvf->qset.pool[cq->cq_idx];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
627
if (pool->xsk_pool)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
628
xsk_set_rx_need_wakeup(pool->xsk_pool);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
632
if (pool && pool->xsk_pool)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.c
633
xsk_clear_rx_need_wakeup(pool->xsk_pool);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_txrx.h
165
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
106
pool = &pfvf->qset.pool[qidx];
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
110
otx2_free_bufs(pfvf, pool, iova, pfvf->rbsize);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
119
int otx2_xsk_pool_enable(struct otx2_nic *pf, struct xsk_buff_pool *pool, u16 qidx)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
128
err = xsk_pool_dma_map(pool, pf->dev, DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
143
struct xsk_buff_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
146
pool = xsk_get_pool_from_qid(netdev, qidx);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
147
if (!pool)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
154
xsk_pool_dma_unmap(pool, DMA_ATTR_SKIP_CPU_SYNC | DMA_ATTR_WEAK_ORDERING);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
161
int otx2_xsk_pool_setup(struct otx2_nic *pf, struct xsk_buff_pool *pool, u16 qidx)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
163
if (pool)
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
164
return otx2_xsk_pool_enable(pf, pool, qidx);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
17
int otx2_xsk_pool_alloc_buf(struct otx2_nic *pfvf, struct otx2_pool *pool,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
228
void otx2_zc_napi_handler(struct otx2_nic *pfvf, struct xsk_buff_pool *pool,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
23
xdp = xsk_buff_alloc(pool->xsk_pool);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
231
struct xdp_desc *xdp_desc = pool->tx_descs;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
235
batch = xsk_tx_peek_release_desc_batch(pool, budget);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
242
dma_addr = xsk_buff_raw_get_dma(pool, xdp_desc[i].addr);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
27
pool->xdp[pool->xdp_top++] = xdp;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
78
pool_aq->pool.ena = 0;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.c
95
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.h
14
int otx2_xsk_pool_setup(struct otx2_nic *pf, struct xsk_buff_pool *pool, u16 qid);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.h
15
int otx2_xsk_pool_enable(struct otx2_nic *pf, struct xsk_buff_pool *pool, u16 qid);
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.h
17
int otx2_xsk_pool_alloc_buf(struct otx2_nic *pfvf, struct otx2_pool *pool,
drivers/net/ethernet/marvell/octeontx2/nic/otx2_xsk.h
20
void otx2_zc_napi_handler(struct otx2_nic *pfvf, struct xsk_buff_pool *pool,
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
109
qmem_free(pfvf->dev, pool->stack);
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
111
qmem_free(pfvf->dev, pool->fc_addr);
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
21
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
212
pool_aq->pool.ena = 0;
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
23
if (!pfvf->qset.pool)
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
26
pool = &pfvf->qset.pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
27
qmem_free(pfvf->dev, pool->stack);
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
28
qmem_free(pfvf->dev, pool->fc_addr);
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
29
pool->stack = NULL;
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
30
pool->fc_addr = NULL;
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
39
struct otx2_pool *pool;
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
57
pool = &pfvf->qset.pool[pool_id];
drivers/net/ethernet/marvell/octeontx2/nic/qos_sq.c
85
err = otx2_alloc_rbuf(pfvf, pool, &bufptr, pool_id, ptr);
drivers/net/ethernet/mellanox/mlx4/cmd.c
2525
if (!priv->cmd.pool) {
drivers/net/ethernet/mellanox/mlx4/cmd.c
2526
priv->cmd.pool = dma_pool_create("mlx4_cmd",
drivers/net/ethernet/mellanox/mlx4/cmd.c
2530
if (!priv->cmd.pool)
drivers/net/ethernet/mellanox/mlx4/cmd.c
2592
if (priv->cmd.pool && (cleanup_mask & MLX4_CMD_CLEANUP_POOL)) {
drivers/net/ethernet/mellanox/mlx4/cmd.c
2593
dma_pool_destroy(priv->cmd.pool);
drivers/net/ethernet/mellanox/mlx4/cmd.c
2594
priv->cmd.pool = NULL;
drivers/net/ethernet/mellanox/mlx4/cmd.c
2693
mailbox->buf = dma_pool_zalloc(mlx4_priv(dev)->cmd.pool, GFP_KERNEL,
drivers/net/ethernet/mellanox/mlx4/cmd.c
2710
dma_pool_free(mlx4_priv(dev)->cmd.pool, mailbox->buf, mailbox->dma);
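
The mlx4 command path above keeps all mailbox buffers in one dma_pool, so every mailbox comes back DMA-coherent, zeroed, and correctly aligned. A kernel-style sketch of the same create/zalloc/free/destroy lifecycle using the stock dmapool API; the demo_mbox names and sizes are illustrative, not mlx4 symbols:

    #include <linux/device.h>
    #include <linux/dmapool.h>
    #include <linux/errno.h>
    #include <linux/gfp.h>

    #define DEMO_MBOX_SIZE  256 /* illustrative; not the mlx4 mailbox size */
    #define DEMO_MBOX_ALIGN 16

    static struct dma_pool *demo_mbox_pool;

    static int demo_mbox_pool_setup(struct device *dev)
    {
        demo_mbox_pool = dma_pool_create("demo_mbox", dev, DEMO_MBOX_SIZE,
                                         DEMO_MBOX_ALIGN, 0);
        return demo_mbox_pool ? 0 : -ENOMEM;
    }

    /* returns a zeroed, DMA-coherent buffer carved out of the pool */
    static void *demo_mbox_get(dma_addr_t *dma)
    {
        return dma_pool_zalloc(demo_mbox_pool, GFP_KERNEL, dma);
    }

    static void demo_mbox_put(void *buf, dma_addr_t dma)
    {
        dma_pool_free(demo_mbox_pool, buf, dma);
    }

    static void demo_mbox_pool_teardown(void)
    {
        dma_pool_destroy(demo_mbox_pool); /* NULL-safe */
        demo_mbox_pool = NULL;
    }

Because dma_pool hands back coherent memory, the command layer never needs per-mailbox dma_map/unmap calls; the mlx5 cmd.c lines that follow use the identical pattern.
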
drivers/net/ethernet/mellanox/mlx4/en_tx.c
354
struct page_pool *pool = ring->recycle_ring->pp;
drivers/net/ethernet/mellanox/mlx4/en_tx.c
357
page_pool_put_full_page(pool, tx_info->page, !!napi_mode);
drivers/net/ethernet/mellanox/mlx4/mlx4.h
631
struct dma_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
1443
mailbox->buf = dma_pool_zalloc(dev->cmd.pool, flags,
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
1458
dma_pool_free(dev->cmd.pool, mailbox->buf, mailbox->dma);
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
2501
cmd->pool = dma_pool_create("mlx5_cmd", mlx5_core_dma_dev(dev), size, align, 0);
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
2502
if (!cmd->pool) {
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
2538
dma_pool_destroy(cmd->pool);
drivers/net/ethernet/mellanox/mlx5/core/cmd.c
2552
dma_pool_destroy(cmd->pool);
drivers/net/ethernet/mellanox/mlx5/core/en/port.c
151
MLX5_SET(sbpr_reg, in, pool, pool_idx);
drivers/net/ethernet/mellanox/mlx5/core/en/port.c
164
MLX5_SET(sbpr_reg, in, pool, pool_idx);
drivers/net/ethernet/mellanox/mlx5/core/en/port.c
211
MLX5_SET(sbcm_reg, in, pool, pool_idx);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
10
struct xsk_buff_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
124
err = mlx5e_open_xsk(priv, params, &xsk, pool, c);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
14
return xsk_pool_dma_map(pool, dev, DMA_ATTR_SKIP_CPU_SYNC);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
146
mlx5e_xsk_unmap_pool(priv, pool);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
164
struct xsk_buff_pool *pool = mlx5e_xsk_get_pool(&priv->channels.params,
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
168
if (unlikely(!pool))
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
18
struct xsk_buff_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
191
mlx5e_xsk_unmap_pool(priv, pool);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
196
static int mlx5e_xsk_enable_pool(struct mlx5e_priv *priv, struct xsk_buff_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
20
return xsk_pool_dma_unmap(pool, DMA_ATTR_SKIP_CPU_SYNC);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
202
err = mlx5e_xsk_enable_locked(priv, pool, ix);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
219
int mlx5e_xsk_setup_pool(struct net_device *dev, struct xsk_buff_pool *pool, u16 qid)
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
227
return pool ? mlx5e_xsk_enable_pool(priv, pool, qid) :
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
45
static int mlx5e_xsk_add_pool(struct mlx5e_xsk *xsk, struct xsk_buff_pool *pool, u16 ix)
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
53
xsk->pools[ix] = pool;
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
64
static bool mlx5e_xsk_is_pool_sane(struct xsk_buff_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
66
return xsk_pool_get_headroom(pool) <= 0xffff &&
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
67
xsk_pool_get_chunk_size(pool) <= 0xffff;
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
70
void mlx5e_build_xsk_param(struct xsk_buff_pool *pool, struct mlx5e_xsk_param *xsk)
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
72
xsk->headroom = xsk_pool_get_headroom(pool);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
73
xsk->chunk_size = xsk_pool_get_chunk_size(pool);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
74
xsk->unaligned = pool->unaligned;
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
78
struct xsk_buff_pool *pool, u16 ix)
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
88
if (unlikely(!mlx5e_xsk_is_pool_sane(pool)))
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
91
err = mlx5e_xsk_map_pool(mlx5_sd_ch_ix_get_dev(priv->mdev, ix), pool);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
95
err = mlx5e_xsk_add_pool(&priv->xsk, pool, ix);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.c
99
mlx5e_build_xsk_param(pool, &xsk);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.h
22
void mlx5e_build_xsk_param(struct xsk_buff_pool *pool, struct mlx5e_xsk_param *xsk);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/pool.h
25
int mlx5e_xsk_setup_pool(struct net_device *dev, struct xsk_buff_pool *pool, u16 qid);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/setup.c
101
err = mlx5e_init_xsk_rq(c, params, pool, xsk, xskrq);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/setup.c
114
struct mlx5e_xsk_param *xsk, struct xsk_buff_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/setup.c
137
err = mlx5e_open_xsk_rq(c, params, &cparam->rq, pool, xsk);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/setup.c
152
err = mlx5e_open_xdpsq(c, params, &cparam->xdp_sq, pool, &c->xsksq, true);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/setup.c
62
struct xsk_buff_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/setup.c
82
rq->xsk_pool = pool;
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/setup.c
94
struct mlx5e_rq_param *rq_params, struct xsk_buff_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/setup.h
15
struct mlx5e_xsk_param *xsk, struct xsk_buff_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/tx.c
100
xdptxd.data = xsk_buff_raw_get_data(pool, desc.addr);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/tx.c
102
meta = xsk_buff_get_metadata(pool, desc.addr);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/tx.c
104
xsk_buff_raw_dma_sync_for_device(pool, xdptxd.dma_addr, xdptxd.len);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/tx.c
136
xsk_tx_release(pool);
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/tx.c
68
struct xsk_buff_pool *pool = sq->xsk_pool;
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/tx.c
90
if (!xsk_tx_peek_desc(pool, &desc)) {
drivers/net/ethernet/mellanox/mlx5/core/en/xsk/tx.c
99
xdptxd.dma_addr = xsk_buff_raw_get_dma(pool, desc.addr);
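
The en/xsk/tx.c lines above are the core of mlx5's zero-copy TX loop: peek a descriptor from the AF_XDP pool, resolve its DMA address and data pointer, sync the frame for the device, post it, and release the consumed slots back to user space in one batch. A kernel-style sketch under the same xdp_sock_drv.h helpers; hw_queue_xmit() is a hypothetical stand-in for the driver's descriptor-posting routine, and the failure unwind is deliberately simplified:

    #include <net/xdp_sock_drv.h>

    /* hypothetical driver hook that posts one frame to the TX queue */
    bool hw_queue_xmit(void *data, dma_addr_t dma, u32 len);

    static int demo_zc_tx_poll(struct xsk_buff_pool *pool, int budget)
    {
        struct xdp_desc desc;
        int sent = 0;

        while (sent < budget && xsk_tx_peek_desc(pool, &desc)) {
            dma_addr_t dma = xsk_buff_raw_get_dma(pool, desc.addr);
            void *data = xsk_buff_raw_get_data(pool, desc.addr);

            /* make the frame visible to the device before posting it */
            xsk_buff_raw_dma_sync_for_device(pool, dma, desc.len);

            if (!hw_queue_xmit(data, dma, desc.len))
                break; /* ring full; a real driver must unwind or defer */
            sent++;
        }

        if (sent)
            xsk_tx_release(pool); /* hand completed slots back to user space */
        return sent;
    }
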
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
280
struct mlx5e_tls_tx_pool *pool =
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
287
bulk_async = mlx5e_bulk_async_init(pool->mdev, MLX5E_TLS_TX_POOL_BULK);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
292
obj = mlx5e_tls_priv_tx_init(pool->mdev, pool->sw_stats, &bulk_async->arr[i]);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
306
atomic64_add(i, &pool->sw_stats->tx_tls_pool_alloc);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
311
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
312
if (pool->size + MLX5E_TLS_TX_POOL_BULK >= MLX5E_TLS_TX_POOL_HIGH) {
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
313
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
316
list_splice(&local_list, &pool->list);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
317
pool->size += MLX5E_TLS_TX_POOL_BULK;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
318
if (pool->size <= MLX5E_TLS_TX_POOL_LOW)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
319
queue_work(pool->wq, work);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
320
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
324
mlx5e_tls_priv_tx_list_cleanup(pool->mdev, &local_list, i);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
325
atomic64_add(i, &pool->sw_stats->tx_tls_pool_free);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
330
struct mlx5e_tls_tx_pool *pool =
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
336
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
337
if (pool->size < MLX5E_TLS_TX_POOL_HIGH) {
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
338
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
342
list_for_each_entry(obj, &pool->list, list_node)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
346
list_cut_position(&local_list, &pool->list, &obj->list_node);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
347
pool->size -= MLX5E_TLS_TX_POOL_BULK;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
348
if (pool->size >= MLX5E_TLS_TX_POOL_HIGH)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
349
queue_work(pool->wq, work);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
350
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
352
mlx5e_tls_priv_tx_list_cleanup(pool->mdev, &local_list, MLX5E_TLS_TX_POOL_BULK);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
353
atomic64_add(MLX5E_TLS_TX_POOL_BULK, &pool->sw_stats->tx_tls_pool_free);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
359
struct mlx5e_tls_tx_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
363
pool = kvzalloc_obj(*pool);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
364
if (!pool)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
367
pool->wq = create_singlethread_workqueue("mlx5e_tls_tx_pool");
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
368
if (!pool->wq)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
371
INIT_LIST_HEAD(&pool->list);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
372
mutex_init(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
374
INIT_WORK(&pool->create_work, create_work);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
375
INIT_WORK(&pool->destroy_work, destroy_work);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
377
pool->mdev = mdev;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
378
pool->sw_stats = sw_stats;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
380
return pool;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
383
kvfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
387
static void mlx5e_tls_tx_pool_list_cleanup(struct mlx5e_tls_tx_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
389
while (pool->size > MLX5E_TLS_TX_POOL_BULK) {
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
394
list_for_each_entry(obj, &pool->list, list_node)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
398
list_cut_position(&local_list, &pool->list, &obj->list_node);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
399
mlx5e_tls_priv_tx_list_cleanup(pool->mdev, &local_list, MLX5E_TLS_TX_POOL_BULK);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
400
atomic64_add(MLX5E_TLS_TX_POOL_BULK, &pool->sw_stats->tx_tls_pool_free);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
401
pool->size -= MLX5E_TLS_TX_POOL_BULK;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
403
if (pool->size) {
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
404
mlx5e_tls_priv_tx_list_cleanup(pool->mdev, &pool->list, pool->size);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
405
atomic64_add(pool->size, &pool->sw_stats->tx_tls_pool_free);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
409
static void mlx5e_tls_tx_pool_cleanup(struct mlx5e_tls_tx_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
411
mlx5e_tls_tx_pool_list_cleanup(pool);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
412
destroy_workqueue(pool->wq);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
413
kvfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
416
static void pool_push(struct mlx5e_tls_tx_pool *pool, struct mlx5e_ktls_offload_context_tx *obj)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
418
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
419
list_add(&obj->list_node, &pool->list);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
420
if (++pool->size == MLX5E_TLS_TX_POOL_HIGH)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
421
queue_work(pool->wq, &pool->destroy_work);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
422
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
425
static struct mlx5e_ktls_offload_context_tx *pool_pop(struct mlx5e_tls_tx_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
429
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
430
if (unlikely(pool->size == 0)) {
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
435
queue_work(pool->wq, &pool->create_work);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
436
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
437
obj = mlx5e_tls_priv_tx_init(pool->mdev, pool->sw_stats, NULL);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
439
atomic64_inc(&pool->sw_stats->tx_tls_pool_alloc);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
443
obj = list_first_entry(&pool->list, struct mlx5e_ktls_offload_context_tx,
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
446
if (--pool->size == MLX5E_TLS_TX_POOL_LOW)
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
447
queue_work(pool->wq, &pool->create_work);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
448
mutex_unlock(&pool->lock);
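
pool_push() and pool_pop() above implement a watermarked object cache: crossing MLX5E_TLS_TX_POOL_HIGH on push schedules background trimming, draining to MLX5E_TLS_TX_POOL_LOW on pop schedules background refill, and an empty pool makes the caller allocate synchronously. A self-contained userspace sketch of that protocol; the node type, watermark values, and request_*() hooks stand in for the kTLS pool's work-queue items:

    #include <pthread.h>
    #include <stddef.h>
    #include <stdio.h>

    #define POOL_LOW  4  /* illustrative watermarks */
    #define POOL_HIGH 16

    struct node { struct node *next; };

    static struct node *pool_head;
    static size_t pool_size;
    static pthread_mutex_t pool_lock = PTHREAD_MUTEX_INITIALIZER;

    /* stand-ins for queue_work(pool->wq, &pool->create/destroy_work) */
    static void request_refill(void) { puts("schedule create work"); }
    static void request_trim(void)   { puts("schedule destroy work"); }

    static void demo_pool_push(struct node *obj)
    {
        pthread_mutex_lock(&pool_lock);
        obj->next = pool_head;
        pool_head = obj;
        if (++pool_size == POOL_HIGH) /* too much cached: trim in background */
            request_trim();
        pthread_mutex_unlock(&pool_lock);
    }

    static struct node *demo_pool_pop(void)
    {
        struct node *obj;

        pthread_mutex_lock(&pool_lock);
        obj = pool_head;
        if (obj) {
            pool_head = obj->next;
            if (--pool_size == POOL_LOW) /* running dry: refill in background */
                request_refill();
        } else {
            request_refill(); /* empty: caller falls back to a sync alloc */
        }
        pthread_mutex_unlock(&pool_lock);
        return obj;
    }

    int main(void)
    {
        static struct node n[2];

        demo_pool_push(&n[0]);
        demo_pool_push(&n[1]);
        printf("popped %p\n", (void *)demo_pool_pop());
        return 0;
    }

Deferring the reaction to background work keeps push and pop O(1) under the lock, which is why the real pool queues work only at the exact watermark crossings rather than on every call.
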
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
458
struct mlx5e_tls_tx_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
466
pool = priv->tls->tx_pool;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
468
priv_tx = pool_pop(pool);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
506
pool_push(pool, priv_tx);
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
513
struct mlx5e_tls_tx_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
518
pool = priv->tls->tx_pool;
drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c
522
pool_push(pool, priv_tx);
drivers/net/ethernet/mellanox/mlx5/core/en_main.c
5204
return mlx5e_xsk_setup_pool(dev, xdp->xsk.pool,
drivers/net/ethernet/mellanox/mlx5/core/en_stats.c
496
struct page_pool *pool = c->rq.page_pool;
drivers/net/ethernet/mellanox/mlx5/core/en_stats.c
499
if (!page_pool_get_stats(pool, &stats))
drivers/net/ethernet/mellanox/mlx5/core/eq.c
875
struct mlx5_irq_pool *pool = mlx5_irq_table_get_comp_irq_pool(dev);
drivers/net/ethernet/mellanox/mlx5/core/eq.c
881
if (!mlx5_irq_pool_is_sf_pool(pool))
drivers/net/ethernet/mellanox/mlx5/core/eq.c
891
irq = mlx5_irq_affinity_request(dev, pool, af_desc);
drivers/net/ethernet/mellanox/mlx5/core/eq.c
898
mlx5_core_dbg(pool->dev, "IRQ %u mapped to cpu %*pbl, %u EQs on this irq\n",
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
62
void mlx5_fs_pool_init(struct mlx5_fs_pool *pool, struct mlx5_core_dev *dev,
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
67
pool->dev = dev;
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
68
pool->pool_ctx = pool_ctx;
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
69
mutex_init(&pool->pool_lock);
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
70
INIT_LIST_HEAD(&pool->fully_used);
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
71
INIT_LIST_HEAD(&pool->partially_used);
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
72
INIT_LIST_HEAD(&pool->unused);
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
73
pool->available_units = 0;
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
74
pool->used_units = 0;
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
75
pool->threshold = 0;
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
76
pool->ops = ops;
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
79
void mlx5_fs_pool_cleanup(struct mlx5_fs_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
81
struct mlx5_core_dev *dev = pool->dev;
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
85
list_for_each_entry_safe(bulk, tmp, &pool->fully_used, pool_list)
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
86
pool->ops->bulk_destroy(dev, bulk);
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
87
list_for_each_entry_safe(bulk, tmp, &pool->partially_used, pool_list)
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
88
pool->ops->bulk_destroy(dev, bulk);
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
89
list_for_each_entry_safe(bulk, tmp, &pool->unused, pool_list)
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.c
90
pool->ops->bulk_destroy(dev, bulk);
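
mlx5_fs_pool_init() and mlx5_fs_pool_cleanup() above manage bulks on three occupancy lists, with destruction delegated through an ops vtable. A kernel-style sketch of that layout, assuming <linux/list.h>; the demo_* types are illustrative, and the bulk_destroy callback is simplified to take the pool rather than the mlx5_core_dev:

    #include <linux/list.h>
    #include <linux/mutex.h>

    struct demo_fs_pool;

    struct demo_bulk {
        struct list_head pool_list;
    };

    struct demo_fs_pool_ops {
        /* simplified: the real callback takes the mlx5_core_dev */
        void (*bulk_destroy)(struct demo_fs_pool *pool, struct demo_bulk *bulk);
    };

    struct demo_fs_pool {
        struct mutex pool_lock;
        struct list_head fully_used;     /* no free units left in the bulk */
        struct list_head partially_used; /* some units free, some taken    */
        struct list_head unused;         /* whole bulk free; trim target   */
        int available_units;
        int used_units;
        int threshold;
        const struct demo_fs_pool_ops *ops;
    };

    static void demo_fs_pool_init(struct demo_fs_pool *pool,
                                  const struct demo_fs_pool_ops *ops)
    {
        mutex_init(&pool->pool_lock);
        INIT_LIST_HEAD(&pool->fully_used);
        INIT_LIST_HEAD(&pool->partially_used);
        INIT_LIST_HEAD(&pool->unused);
        pool->available_units = 0;
        pool->used_units = 0;
        pool->threshold = 0;
        pool->ops = ops;
    }

    static void demo_fs_pool_cleanup(struct demo_fs_pool *pool)
    {
        struct demo_bulk *bulk, *tmp;

        /* every bulk lives on exactly one of the three lists */
        list_for_each_entry_safe(bulk, tmp, &pool->fully_used, pool_list)
            pool->ops->bulk_destroy(pool, bulk);
        list_for_each_entry_safe(bulk, tmp, &pool->partially_used, pool_list)
            pool->ops->bulk_destroy(pool, bulk);
        list_for_each_entry_safe(bulk, tmp, &pool->unused, pool_list)
            pool->ops->bulk_destroy(pool, bulk);
    }
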
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.h
26
void (*update_threshold)(struct mlx5_fs_pool *pool);
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.h
48
void mlx5_fs_pool_init(struct mlx5_fs_pool *pool, struct mlx5_core_dev *dev,
drivers/net/ethernet/mellanox/mlx5/core/fs_pool.h
50
void mlx5_fs_pool_cleanup(struct mlx5_fs_pool *pool);
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
10
pool->irqs_per_cpu[cpu]--;
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
100
int end = pool->xa_num_irqs.max;
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
106
lockdep_assert_held(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
107
xa_for_each_range(&pool->irqs, index, iter, start, end) {
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
114
if (iter_refcount < pool->min_threshold)
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
13
static void cpu_get(struct mlx5_irq_pool *pool, int cpu)
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
137
mlx5_irq_affinity_request(struct mlx5_core_dev *dev, struct mlx5_irq_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
143
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
144
least_loaded_irq = irq_pool_find_least_loaded(pool, &af_desc->mask);
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
146
mlx5_irq_read_locked(least_loaded_irq) < pool->min_threshold)
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
149
new_irq = irq_pool_request_irq(pool, af_desc);
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
15
pool->irqs_per_cpu[cpu]++;
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
153
mlx5_core_err(pool->dev, "Didn't find a matching IRQ. err = %pe\n",
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
155
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
167
if (mlx5_irq_read_locked(least_loaded_irq) > pool->max_threshold)
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
168
mlx5_core_dbg(pool->dev, "IRQ %u overloaded, pool_name: %s, %u EQs on this irq\n",
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
169
pci_irq_vector(pool->dev->pdev,
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
170
mlx5_irq_get_index(least_loaded_irq)), pool->name,
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
173
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
174
if (mlx5_irq_pool_is_sf_pool(pool)) {
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
189
struct mlx5_irq_pool *pool = mlx5_irq_get_pool(irq);
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
19
static int cpu_get_least_loaded(struct mlx5_irq_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
193
synchronize_irq(pci_irq_vector(pool->dev->pdev,
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
195
if (mlx5_irq_pool_is_sf_pool(pool))
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
199
if (pool->irqs_per_cpu)
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
200
cpu_put(pool, cpu);
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
27
if (!pool->irqs_per_cpu[cpu]) {
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
33
if (pool->irqs_per_cpu[cpu] < pool->irqs_per_cpu[best_cpu])
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
38
mlx5_core_err(pool->dev, "NO online CPUs in req_mask (%*pbl)\n",
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
42
pool->irqs_per_cpu[best_cpu]++;
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
48
irq_pool_request_irq(struct mlx5_irq_pool *pool, struct irq_affinity_desc *af_desc)
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
59
err = xa_alloc(&pool->irqs, &irq_index, NULL, pool->xa_num_irqs, GFP_KERNEL);
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
65
if (pool->irqs_per_cpu) {
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
70
cpumask_set_cpu(cpu_get_least_loaded(pool, &af_desc->mask),
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
73
cpu_get(pool, cpumask_first(&af_desc->mask));
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
76
irq = mlx5_irq_alloc(pool, irq_index,
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
8
static void cpu_put(struct mlx5_irq_pool *pool, int cpu)
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
80
xa_erase(&pool->irqs, irq_index);
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
97
irq_pool_find_least_loaded(struct mlx5_irq_pool *pool, const struct cpumask *req_mask)
drivers/net/ethernet/mellanox/mlx5/core/irq_affinity.c
99
int start = pool->xa_num_irqs.min;
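
The irq_affinity.c lines above keep a per-CPU count of assigned IRQs (cpu_get()/cpu_put()) and pick the emptiest CPU out of the requested mask, short-circuiting on a completely idle one. A self-contained sketch of that selection; the fixed-width mask and NCPUS are simplifications of cpumask_t:

    #include <stdint.h>
    #include <stdio.h>

    #define NCPUS 8 /* fixed-width stand-in for cpumask_t */

    static unsigned int irqs_per_cpu[NCPUS];

    static int demo_cpu_get_least_loaded(uint32_t req_mask)
    {
        int best_cpu = -1;
        int cpu;

        for (cpu = 0; cpu < NCPUS; cpu++) {
            if (!(req_mask & (1u << cpu)))
                continue;
            /* a completely idle CPU wins immediately */
            if (!irqs_per_cpu[cpu]) {
                best_cpu = cpu;
                break;
            }
            if (best_cpu < 0 || irqs_per_cpu[cpu] < irqs_per_cpu[best_cpu])
                best_cpu = cpu;
        }
        if (best_cpu >= 0)
            irqs_per_cpu[best_cpu]++; /* the cpu_get() side of the ledger */
        return best_cpu;
    }

    int main(void)
    {
        irqs_per_cpu[0] = 2;
        irqs_per_cpu[1] = 1;
        printf("least loaded in {0,1}: cpu %d\n",
               demo_cpu_get_least_loaded(0x3)); /* prints cpu 1 */
        return 0;
    }
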
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
19
#define MLX5_CRYPTO_DEK_POOL_CALC_FREED(pool) MLX5_CRYPTO_DEK_CALC_FREED(pool)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
288
mlx5_crypto_dek_bulk_create(struct mlx5_crypto_dek_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
290
struct mlx5_crypto_dek_priv *dek_priv = pool->mdev->mlx5e_res.dek_priv;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
291
struct mlx5_core_dev *mdev = pool->mdev;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
313
err = mlx5_crypto_create_dek_bulk(mdev, pool->key_purpose,
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
334
mlx5_crypto_dek_pool_add_bulk(struct mlx5_crypto_dek_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
338
bulk = mlx5_crypto_dek_bulk_create(pool);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
342
pool->avail_deks += bulk->num_deks;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
343
pool->num_deks += bulk->num_deks;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
344
list_add(&bulk->entry, &pool->partial_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
357
static void mlx5_crypto_dek_pool_remove_bulk(struct mlx5_crypto_dek_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
361
pool->num_deks -= bulk->num_deks;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
362
pool->avail_deks -= bulk->avail_deks;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
363
pool->in_use_deks -= bulk->in_use_deks;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
370
mlx5_crypto_dek_pool_pop(struct mlx5_crypto_dek_pool *pool, u32 *obj_offset)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
375
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
376
bulk = list_first_entry_or_null(&pool->partial_list,
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
383
mlx5_core_err(pool->mdev, "Wrong DEK bulk avail_start.\n");
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
388
bulk = list_first_entry_or_null(&pool->avail_list,
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
392
list_move(&bulk->entry, &pool->partial_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
394
bulk = mlx5_crypto_dek_pool_add_bulk(pool);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
407
list_move(&bulk->entry, &pool->full_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
412
pool->avail_deks--;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
413
pool->in_use_deks++;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
416
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
420
static bool mlx5_crypto_dek_need_sync(struct mlx5_crypto_dek_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
422
return !pool->syncing &&
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
423
MLX5_CRYPTO_DEK_POOL_CALC_FREED(pool) > MLX5_CRYPTO_DEK_POOL_SYNC_THRESH;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
426
static int mlx5_crypto_dek_free_locked(struct mlx5_crypto_dek_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
441
pool->in_use_deks--;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
444
list_move(&bulk->entry, &pool->sync_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
446
if (mlx5_crypto_dek_need_sync(pool) && schedule_work(&pool->sync_work))
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
447
pool->syncing = true;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
454
static int mlx5_crypto_dek_pool_push(struct mlx5_crypto_dek_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
459
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
460
if (pool->syncing)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
461
list_add(&dek->entry, &pool->wait_for_free);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
463
err = mlx5_crypto_dek_free_locked(pool, dek);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
464
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
479
static void mlx5_crypto_dek_bulk_reset_synced(struct mlx5_crypto_dek_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
496
pool->avail_deks += reused;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
510
static bool mlx5_crypto_dek_bulk_handle_avail(struct mlx5_crypto_dek_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
514
if (list_empty(&pool->avail_list)) {
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
515
list_move(&bulk->entry, &pool->avail_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
519
mlx5_crypto_dek_pool_remove_bulk(pool, bulk, true);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
524
static void mlx5_crypto_dek_pool_splice_destroy_list(struct mlx5_crypto_dek_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
528
spin_lock(&pool->destroy_lock);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
530
spin_unlock(&pool->destroy_lock);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
533
static void mlx5_crypto_dek_pool_free_wait_keys(struct mlx5_crypto_dek_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
537
list_for_each_entry_safe(dek, next, &pool->wait_for_free, entry) {
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
539
mlx5_crypto_dek_free_locked(pool, dek);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
548
static void mlx5_crypto_dek_pool_reset_synced(struct mlx5_crypto_dek_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
553
list_for_each_entry_safe(bulk, tmp, &pool->partial_list, entry) {
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
554
mlx5_crypto_dek_bulk_reset_synced(pool, bulk);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
556
mlx5_crypto_dek_bulk_handle_avail(pool, bulk, &destroy_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
559
list_for_each_entry_safe(bulk, tmp, &pool->full_list, entry) {
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
560
mlx5_crypto_dek_bulk_reset_synced(pool, bulk);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
566
mlx5_crypto_dek_bulk_handle_avail(pool, bulk, &destroy_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
568
list_move(&bulk->entry, &pool->partial_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
571
list_for_each_entry_safe(bulk, tmp, &pool->sync_list, entry) {
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
573
pool->avail_deks += bulk->num_deks;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
574
if (mlx5_crypto_dek_bulk_handle_avail(pool, bulk, &destroy_list)) {
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
580
mlx5_crypto_dek_pool_free_wait_keys(pool);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
583
mlx5_crypto_dek_pool_splice_destroy_list(pool, &destroy_list,
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
584
&pool->destroy_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
585
schedule_work(&pool->destroy_work);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
591
struct mlx5_crypto_dek_pool *pool =
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
595
err = mlx5_crypto_cmd_sync_crypto(pool->mdev, BIT(pool->key_purpose));
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
596
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
598
mlx5_crypto_dek_pool_reset_synced(pool);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
599
pool->syncing = false;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
600
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
672
struct mlx5_crypto_dek_pool *pool =
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
676
mlx5_crypto_dek_pool_splice_destroy_list(pool, &pool->destroy_list,
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
684
struct mlx5_crypto_dek_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
686
pool = kzalloc_obj(*pool);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
687
if (!pool)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
690
pool->mdev = mdev;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
691
pool->key_purpose = key_purpose;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
693
mutex_init(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
694
INIT_LIST_HEAD(&pool->avail_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
695
INIT_LIST_HEAD(&pool->partial_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
696
INIT_LIST_HEAD(&pool->full_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
697
INIT_LIST_HEAD(&pool->sync_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
698
INIT_LIST_HEAD(&pool->wait_for_free);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
699
INIT_WORK(&pool->sync_work, mlx5_crypto_dek_sync_work_fn);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
700
spin_lock_init(&pool->destroy_lock);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
701
INIT_LIST_HEAD(&pool->destroy_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
702
INIT_WORK(&pool->destroy_work, mlx5_crypto_dek_destroy_work_fn);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
704
return pool;
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
707
void mlx5_crypto_dek_pool_destroy(struct mlx5_crypto_dek_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
711
cancel_work_sync(&pool->sync_work);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
712
cancel_work_sync(&pool->destroy_work);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
714
mlx5_crypto_dek_pool_free_wait_keys(pool);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
716
list_for_each_entry_safe(bulk, tmp, &pool->avail_list, entry)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
717
mlx5_crypto_dek_pool_remove_bulk(pool, bulk, false);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
719
list_for_each_entry_safe(bulk, tmp, &pool->full_list, entry)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
720
mlx5_crypto_dek_pool_remove_bulk(pool, bulk, false);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
722
list_for_each_entry_safe(bulk, tmp, &pool->sync_list, entry)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
723
mlx5_crypto_dek_pool_remove_bulk(pool, bulk, false);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
725
list_for_each_entry_safe(bulk, tmp, &pool->partial_list, entry)
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
726
mlx5_crypto_dek_pool_remove_bulk(pool, bulk, false);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
728
mlx5_crypto_dek_free_destroy_list(&pool->destroy_list);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
730
mutex_destroy(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.c
732
kfree(pool);
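
mlx5_crypto_dek_pool_pop() above picks a bulk in a fixed order: a partially used bulk first, then a fully free one from avail_list (which the pop itself demotes to partial_list), and only then a freshly created bulk; a bulk whose last DEK is handed out moves to full_list. A kernel-style sketch of that selection and accounting, with illustrative demo_* names and the locking and error handling trimmed:

    #include <linux/list.h>

    struct demo_bulk {
        struct list_head entry;
        int avail_deks;
        int num_deks;
    };

    struct demo_dek_pool {
        struct list_head partial_list; /* some DEKs handed out        */
        struct list_head avail_list;   /* untouched, fully free bulks */
        struct list_head full_list;    /* every DEK handed out        */
        int avail_deks;
        int in_use_deks;
    };

    struct demo_bulk *demo_bulk_create(struct demo_dek_pool *pool);

    static struct demo_bulk *demo_pool_pick_bulk(struct demo_dek_pool *pool)
    {
        struct demo_bulk *bulk;

        bulk = list_first_entry_or_null(&pool->partial_list,
                                        struct demo_bulk, entry);
        if (bulk)
            return bulk;

        bulk = list_first_entry_or_null(&pool->avail_list,
                                        struct demo_bulk, entry);
        if (bulk) {
            /* its first DEK is about to go out: now partially used */
            list_move(&bulk->entry, &pool->partial_list);
            return bulk;
        }

        return demo_bulk_create(pool); /* grow the pool on demand */
    }

    static void demo_pool_account_pop(struct demo_dek_pool *pool,
                                      struct demo_bulk *bulk)
    {
        if (--bulk->avail_deks == 0) /* exhausted: park it on full_list */
            list_move(&bulk->entry, &pool->full_list);
        pool->avail_deks--;
        pool->in_use_deks++;
    }
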
drivers/net/ethernet/mellanox/mlx5/core/lib/crypto.h
26
void mlx5_crypto_dek_pool_destroy(struct mlx5_crypto_dek_pool *pool);
drivers/net/ethernet/mellanox/mlx5/core/mlx5_irq.h
48
mlx5_irq_affinity_request(struct mlx5_core_dev *dev, struct mlx5_irq_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/mlx5_irq.h
60
mlx5_irq_affinity_request(struct mlx5_core_dev *dev, struct mlx5_irq_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
156
struct mlx5_irq_pool *pool = irq->pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
167
rmap = mlx5_eq_table_get_rmap(pool->dev);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
173
if (irq->map.index && pci_msix_can_alloc_dyn(pool->dev->pdev))
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
174
pci_msix_free_irq(pool->dev->pdev, irq->map);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
179
struct mlx5_irq_pool *pool = irq->pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
181
xa_erase(&pool->irqs, irq->pool_index);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
189
struct mlx5_irq_pool *pool = irq->pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
192
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
198
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
204
lockdep_assert_held(&irq->pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
210
lockdep_assert_held(&irq->pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
221
mutex_lock(&irq->pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
223
mutex_unlock(&irq->pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
233
static void irq_sf_set_name(struct mlx5_irq_pool *pool, char *name, int vecidx)
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
235
snprintf(name, MLX5_MAX_IRQ_NAME, "%s%d", pool->name, vecidx);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
238
static void irq_set_name(struct mlx5_irq_pool *pool, char *name, int vecidx)
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
240
if (!pool->xa_num_irqs.max) {
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
255
struct mlx5_irq *mlx5_irq_alloc(struct mlx5_irq_pool *pool, int i,
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
259
struct mlx5_core_dev *dev = pool->dev;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
293
if (!mlx5_irq_pool_is_sf_pool(pool))
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
294
irq_set_name(pool, name, i);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
296
irq_sf_set_name(pool, name, i);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
311
irq->pool = pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
314
err = xa_err(xa_store(&pool->irqs, irq->pool_index, irq, GFP_KERNEL));
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
34
struct mlx5_irq_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
381
return irq->pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
388
irq_pool_request_vector(struct mlx5_irq_pool *pool, int vecidx,
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
394
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
395
irq = xa_load(&pool->irqs, vecidx);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
400
irq = mlx5_irq_alloc(pool, vecidx, af_desc, rmap);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
402
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
421
struct mlx5_irq_pool *pool = NULL;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
424
pool = sf_comp_irq_pool_get(irq_table);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
429
return pool ? pool : irq_table->pcif_pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
435
struct mlx5_irq_pool *pool = NULL;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
438
pool = sf_ctrl_irq_pool_get(irq_table);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
443
return pool ? pool : irq_table->pcif_pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
470
struct mlx5_irq_pool *pool = ctrl_irq_pool_get(dev);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
480
if (!mlx5_irq_pool_is_sf_pool(pool)) {
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
484
if (!pool->xa_num_irqs.max) {
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
490
irq = irq_pool_request_vector(pool, 0, af_desc, NULL);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
492
irq = mlx5_irq_affinity_request(dev, pool, af_desc);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
515
struct mlx5_irq_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
518
pool = irq_table->pcif_pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
519
irq = irq_pool_request_vector(pool, vecidx, af_desc, rmap);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
554
struct mlx5_irq_pool *pool = table->pcif_pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
563
if (!pool->xa_num_irqs.max)
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
581
struct mlx5_irq_pool *pool = kvzalloc_obj(*pool);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
583
if (!pool)
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
585
pool->dev = dev;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
586
mutex_init(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
587
xa_init_flags(&pool->irqs, XA_FLAGS_ALLOC);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
588
pool->xa_num_irqs.min = start;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
589
pool->xa_num_irqs.max = start + size - 1;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
591
snprintf(pool->name, MLX5_MAX_IRQ_NAME - MLX5_MAX_IRQ_IDX_CHARS,
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
593
pool->min_threshold = min_threshold * MLX5_EQ_REFS_PER_IRQ;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
594
pool->max_threshold = max_threshold * MLX5_EQ_REFS_PER_IRQ;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
597
return pool;
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
600
static void irq_pool_free(struct mlx5_irq_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
609
xa_for_each(&pool->irqs, index, irq)
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
611
xa_destroy(&pool->irqs);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
612
mutex_destroy(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
613
kfree(pool->irqs_per_cpu);
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
614
kvfree(pool);
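
irq_pool_alloc() and irq_pool_free() above show the pool's backbone: an allocating xarray bounded to [start, start + size - 1], a mutex, and a teardown that releases every entry still indexed before destroying the xarray. A kernel-style sketch of that lifecycle; the demo_irq_pool type and demo_irq_release() are illustrative:

    #include <linux/mutex.h>
    #include <linux/slab.h>
    #include <linux/xarray.h>

    struct demo_irq_pool {
        struct xarray irqs; /* pool_index -> irq object */
        struct { int min, max; } xa_num_irqs;
        struct mutex lock;
    };

    void demo_irq_release(void *irq); /* illustrative per-entry teardown */

    static struct demo_irq_pool *demo_irq_pool_alloc(int start, int size)
    {
        struct demo_irq_pool *pool = kvzalloc(sizeof(*pool), GFP_KERNEL);

        if (!pool)
            return NULL;
        mutex_init(&pool->lock);
        xa_init_flags(&pool->irqs, XA_FLAGS_ALLOC);
        pool->xa_num_irqs.min = start;
        pool->xa_num_irqs.max = start + size - 1; /* inclusive bound */
        return pool;
    }

    static void demo_irq_pool_free(struct demo_irq_pool *pool)
    {
        unsigned long index;
        void *irq;

        /* release anything still indexed before the xarray goes away */
        xa_for_each(&pool->irqs, index, irq)
            demo_irq_release(irq);
        xa_destroy(&pool->irqs);
        mutex_destroy(&pool->lock);
        kvfree(pool);
    }
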
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
695
static void mlx5_irq_pool_free_irqs(struct mlx5_irq_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.c
700
xa_for_each(&pool->irqs, index, irq)
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.h
31
static inline bool mlx5_irq_pool_is_sf_pool(struct mlx5_irq_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.h
33
return !strncmp("mlx5_sf", pool->name, strlen("mlx5_sf"));
drivers/net/ethernet/mellanox/mlx5/core/pci_irq.h
36
struct mlx5_irq *mlx5_irq_alloc(struct mlx5_irq_pool *pool, int i,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action.c
1612
table_ste->pool = mlx5hws_pool_create(ctx, &pool_attr);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action.c
1613
if (!table_ste->pool) {
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action.c
1621
ste_pool = table_ste->pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action.c
1672
mlx5hws_pool_destroy(table_ste->pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action.c
1687
mlx5hws_pool_destroy(table_ste->pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action.c
1838
stc_attr.ste_table.ste_pool = table_ste->pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action.c
246
struct mlx5hws_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action.c
262
pool = stc_attr->ste_table.ste_pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action.c
264
base_id = mlx5hws_pool_get_base_id(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action.c
266
base_id = mlx5hws_pool_get_base_mirror_id(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action.h
122
struct mlx5hws_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
106
mlx5hws_cmd_rtc_destroy(action_tbl->pool->ctx->mdev,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
108
mlx5hws_cmd_rtc_destroy(action_tbl->pool->ctx->mdev,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
121
stc_attr.ste_table.ste_pool = action_tbl->pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
169
mlx5hws_pool_get_base_id(action_tbl->pool),
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
170
mlx5hws_pool_get_base_mirror_id(action_tbl->pool),
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
179
mlx5hws_pool_destroy(action_tbl->pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
194
mlx5hws_pool_get_base_id(action_tbl->pool),
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
195
mlx5hws_pool_get_base_mirror_id(action_tbl->pool),
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
202
mlx5hws_pool_destroy(action_tbl->pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
235
struct mlx5hws_action_ste_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
240
mutex_init(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
249
err = hws_action_ste_pool_element_init(ctx, &pool->elems[opt],
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
253
pool->elems[opt].parent_pool = pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
260
hws_action_ste_pool_element_destroy(&pool->elems[opt]);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
265
static void hws_action_ste_pool_destroy(struct mlx5hws_action_ste_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
271
hws_action_ste_pool_element_destroy(&pool->elems[opt]);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
284
if (mlx5hws_pool_full(action_tbl->pool) &&
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
328
struct mlx5hws_action_ste_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
332
pool = kzalloc_objs(*pool, queues);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
333
if (!pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
337
err = hws_action_ste_pool_init(ctx, &pool[i]);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
34
action_tbl->pool = mlx5hws_pool_create(ctx, &pool_attr);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
342
ctx->action_ste_pool = pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
35
if (!action_tbl->pool) {
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
354
hws_action_ste_pool_destroy(&pool[i]);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
355
kfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
374
hws_action_ste_choose_elem(struct mlx5hws_action_ste_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
378
return &pool->elems[MLX5HWS_POOL_OPTIMIZE_MIRROR];
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
381
return &pool->elems[MLX5HWS_POOL_OPTIMIZE_ORIG];
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
383
return &pool->elems[MLX5HWS_POOL_OPTIMIZE_NONE];
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
392
err = mlx5hws_pool_chunk_alloc(action_tbl->pool, &chunk->ste);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
402
int mlx5hws_action_ste_chunk_alloc(struct mlx5hws_action_ste_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
414
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
416
elem = hws_action_ste_choose_elem(pool, skip_rx, skip_tx);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
442
if (mlx5hws_pool_empty(action_tbl->pool))
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
448
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
457
mlx5hws_dbg(chunk->action_tbl->pool->ctx,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
462
mlx5hws_pool_chunk_free(chunk->action_tbl->pool, &chunk->ste);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
63
mlx5hws_pool_get_base_mirror_id(action_tbl->pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.c
71
rtc_attr.ste_base = mlx5hws_pool_get_base_id(action_tbl->pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.h
19
struct mlx5hws_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/action_ste_pool.h
63
int mlx5hws_action_ste_chunk_alloc(struct mlx5hws_action_ste_pool *pool,
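The action_ste_pool.c entries show one pool element per optimization flavor (original, mirror, none), selected by the rule's rx/tx skip flags under the pool mutex. A sketch of that selection, assuming the branch order implied by lines 374-383 (which element each flag maps to is an assumption):

#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/types.h>

enum demo_pool_optimize {
	DEMO_POOL_OPTIMIZE_NONE,
	DEMO_POOL_OPTIMIZE_ORIG,
	DEMO_POOL_OPTIMIZE_MIRROR,
	DEMO_POOL_OPTIMIZE_MAX,
};

struct demo_pool_element {
	struct list_head available;   /* action tables with free STEs */
};

struct demo_action_ste_pool {
	struct mutex lock;            /* held while choosing and allocating */
	struct demo_pool_element elems[DEMO_POOL_OPTIMIZE_MAX];
};

static struct demo_pool_element *
demo_choose_elem(struct demo_action_ste_pool *pool, bool skip_rx, bool skip_tx)
{
	/* rx-skipping rules need only the mirror copy, tx-skipping rules
	 * only the original; everything else uses the unoptimized element.
	 */
	if (skip_rx)
		return &pool->elems[DEMO_POOL_OPTIMIZE_MIRROR];
	if (skip_tx)
		return &pool->elems[DEMO_POOL_OPTIMIZE_ORIG];
	return &pool->elems[DEMO_POOL_OPTIMIZE_NONE];
}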
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/debug.c
392
int ste_0_id = mlx5hws_pool_get_base_id(action_tbl->pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/debug.c
393
int ste_1_id = mlx5hws_pool_get_base_mirror_id(action_tbl->pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/debug.c
403
struct mlx5hws_action_ste_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/debug.c
408
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/debug.c
411
list_for_each_entry(action_tbl, &pool->elems[opt].available,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/debug.c
416
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
111
xa_for_each(&hws_pool->mh_pools, i, pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
112
mlx5_fs_destroy_mh_pool(pool, &hws_pool->mh_pools, i);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
114
xa_for_each(&hws_pool->el2tol2tnl_pools, i, pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
115
mlx5_fs_destroy_pr_pool(pool, &hws_pool->el2tol2tnl_pools, i);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
117
xa_for_each(&hws_pool->el2tol3tnl_pools, i, pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
118
mlx5_fs_destroy_pr_pool(pool, &hws_pool->el2tol3tnl_pools, i);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1258
mlx5_fs_destroy_pr_pool(struct mlx5_fs_pool *pool, struct xarray *pr_pools,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1262
mlx5_fs_hws_pr_pool_cleanup(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1263
kfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1427
struct mlx5_fs_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1430
pool = kzalloc_obj(*pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1431
if (!pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1433
err = mlx5_fs_hws_mh_pool_init(pool, dev, pattern);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1436
err = xa_insert(mh_pools, index, pool, GFP_KERNEL);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1439
return pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1442
mlx5_fs_hws_mh_pool_cleanup(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1444
kfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1449
mlx5_fs_destroy_mh_pool(struct mlx5_fs_pool *pool, struct xarray *mh_pools,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1453
mlx5_fs_hws_mh_pool_cleanup(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1454
kfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1466
struct mlx5_fs_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1475
xa_for_each(&hws_pool->mh_pools, i, pool) {
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1476
if (mlx5_fs_hws_mh_pool_match(pool, &pattern)) {
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1484
pool = mlx5_fs_create_mh_pool(ns->dev, &pattern,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1486
if (IS_ERR(pool))
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1487
return PTR_ERR(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1489
mh_data = mlx5_fs_hws_mh_pool_acquire_mh(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1501
modify_hdr->fs_hws_action.fs_pool = pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1508
mlx5_fs_hws_mh_pool_release_mh(pool, mh_data);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1511
mlx5_fs_destroy_mh_pool(pool, &hws_pool->mh_pools, cnt);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1519
struct mlx5_fs_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1528
pool = modify_hdr->fs_hws_action.fs_pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
1529
mlx5_fs_hws_mh_pool_release_mh(pool, mh_data);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
17
mlx5_fs_destroy_pr_pool(struct mlx5_fs_pool *pool, struct xarray *pr_pools,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
20
mlx5_fs_destroy_mh_pool(struct mlx5_fs_pool *pool, struct xarray *mh_pools,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/fs_hws.c
95
struct mlx5_fs_pool *pool;
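The fs_hws.c entries (lines 1427-1511) follow a find-or-create discipline: scan an xarray of pools for one matching the requested pattern, otherwise allocate, initialize, and insert a new one, unwinding on failure. A self-contained sketch of the same shape; note it uses xa_alloc() to pick an index, whereas the listed code uses xa_insert() with a caller-chosen index:

#include <linux/err.h>
#include <linux/slab.h>
#include <linux/xarray.h>

struct demo_pool { int pattern; };

static struct demo_pool *demo_get_pool(struct xarray *pools, int pattern)
{
	struct demo_pool *pool;
	unsigned long i;
	u32 id;

	xa_for_each(pools, i, pool)       /* reuse a matching pool */
		if (pool->pattern == pattern)
			return pool;

	pool = kzalloc(sizeof(*pool), GFP_KERNEL);
	if (!pool)
		return ERR_PTR(-ENOMEM);
	pool->pattern = pattern;

	/* pools must be initialized with xa_init_flags(pools, XA_FLAGS_ALLOC) */
	if (xa_alloc(pools, &id, pool, xa_limit_32b, GFP_KERNEL)) {
		kfree(pool);              /* unwind: nothing references it yet */
		return ERR_PTR(-ENOMEM);
	}
	return pool;
}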
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
101
mlx5hws_err(pool->ctx, "Failed to allocate mirrored resource\n");
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
103
pool->resource = NULL;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
106
pool->mirror_resource = mirror_resource;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
11
mlx5hws_cmd_ste_destroy(resource->pool->ctx->mdev, resource->base_id);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
112
static int hws_pool_buddy_init(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
116
buddy = mlx5hws_buddy_create(pool->alloc_log_sz);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
118
mlx5hws_err(pool->ctx, "Failed to create buddy order: %zu\n",
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
119
pool->alloc_log_sz);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
123
if (hws_pool_resource_alloc(pool) != 0) {
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
124
mlx5hws_err(pool->ctx, "Failed to create resource type: %d size %zu\n",
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
125
pool->type, pool->alloc_log_sz);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
131
pool->db.buddy = buddy;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
136
static int hws_pool_buddy_db_get_chunk(struct mlx5hws_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
139
struct mlx5hws_buddy_mem *buddy = pool->db.buddy;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
14
mlx5hws_cmd_stc_destroy(resource->pool->ctx->mdev, resource->base_id);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
142
mlx5hws_err(pool->ctx, "Bad buddy state\n");
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
153
static void hws_pool_buddy_db_put_chunk(struct mlx5hws_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
158
buddy = pool->db.buddy;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
160
mlx5hws_err(pool->ctx, "Bad buddy state\n");
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
167
static void hws_pool_buddy_db_uninit(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
171
buddy = pool->db.buddy;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
175
pool->db.buddy = NULL;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
179
static int hws_pool_buddy_db_init(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
183
ret = hws_pool_buddy_init(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
187
pool->p_db_uninit = &hws_pool_buddy_db_uninit;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
188
pool->p_get_chunk = &hws_pool_buddy_db_get_chunk;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
189
pool->p_put_chunk = &hws_pool_buddy_db_put_chunk;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
207
static int hws_pool_bitmap_init(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
211
bitmap = hws_pool_create_and_init_bitmap(pool->alloc_log_sz);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
213
mlx5hws_err(pool->ctx, "Failed to create bitmap order: %zu\n",
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
214
pool->alloc_log_sz);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
218
if (hws_pool_resource_alloc(pool) != 0) {
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
219
mlx5hws_err(pool->ctx, "Failed to create resource type: %d: size %zu\n",
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
220
pool->type, pool->alloc_log_sz);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
225
pool->db.bitmap = bitmap;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
23
static void hws_pool_resource_free(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
230
static int hws_pool_bitmap_db_get_chunk(struct mlx5hws_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
236
mlx5hws_err(pool->ctx, "Pool only supports order 0 allocs\n");
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
240
bitmap = pool->db.bitmap;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
242
mlx5hws_err(pool->ctx, "Bad bitmap state\n");
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
246
size = 1 << pool->alloc_log_sz;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
25
hws_pool_free_one_resource(pool->resource);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
257
static void hws_pool_bitmap_db_put_chunk(struct mlx5hws_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
26
pool->resource = NULL;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
262
bitmap = pool->db.bitmap;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
264
mlx5hws_err(pool->ctx, "Bad bitmap state\n");
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
271
static void hws_pool_bitmap_db_uninit(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
275
bitmap = pool->db.bitmap;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
278
pool->db.bitmap = NULL;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
28
if (pool->tbl_type == MLX5HWS_TABLE_TYPE_FDB) {
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
282
static int hws_pool_bitmap_db_init(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
286
ret = hws_pool_bitmap_init(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
29
hws_pool_free_one_resource(pool->mirror_resource);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
290
pool->p_db_uninit = &hws_pool_bitmap_db_uninit;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
291
pool->p_get_chunk = &hws_pool_bitmap_db_get_chunk;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
292
pool->p_put_chunk = &hws_pool_bitmap_db_put_chunk;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
297
static int hws_pool_db_init(struct mlx5hws_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
30
pool->mirror_resource = NULL;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
303
ret = hws_pool_bitmap_db_init(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
305
ret = hws_pool_buddy_db_init(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
308
mlx5hws_err(pool->ctx, "Failed to init pool type: %d (ret: %d)\n",
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
316
static void hws_pool_db_unint(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
318
pool->p_db_uninit(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
321
int mlx5hws_pool_chunk_alloc(struct mlx5hws_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
326
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
327
ret = pool->p_get_chunk(pool, chunk);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
329
pool->available_elems -= 1 << chunk->order;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
330
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
335
void mlx5hws_pool_chunk_free(struct mlx5hws_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
338
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
339
pool->p_put_chunk(pool, chunk);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
340
pool->available_elems += 1 << chunk->order;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
341
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
348
struct mlx5hws_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
35
hws_pool_create_one_resource(struct mlx5hws_pool *pool, u32 log_range,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
350
pool = kzalloc_obj(*pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
351
if (!pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
354
pool->ctx = ctx;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
355
pool->type = pool_attr->pool_type;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
356
pool->alloc_log_sz = pool_attr->alloc_log_sz;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
357
pool->flags = pool_attr->flags;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
358
pool->tbl_type = pool_attr->table_type;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
359
pool->opt_type = pool_attr->opt_type;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
361
if (pool->flags & MLX5HWS_POOL_FLAG_BUDDY)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
366
pool->alloc_log_sz = pool_attr->alloc_log_sz;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
367
pool->available_elems = 1 << pool_attr->alloc_log_sz;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
369
if (hws_pool_db_init(pool, res_db_type))
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
372
mutex_init(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
374
return pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
377
kfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
381
void mlx5hws_pool_destroy(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
383
mutex_destroy(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
385
if (pool->available_elems != 1 << pool->alloc_log_sz)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
386
mlx5hws_err(pool->ctx, "Attempting to destroy non-empty pool\n");
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
388
if (pool->resource)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
389
hws_pool_resource_free(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
391
hws_pool_db_unint(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
393
kfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
48
switch (pool->type) {
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
52
ret = mlx5hws_cmd_ste_create(pool->ctx->mdev, &ste_attr, &obj_id);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
57
ret = mlx5hws_cmd_stc_create(pool->ctx->mdev, &stc_attr, &obj_id);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
66
resource->pool = pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
77
static int hws_pool_resource_alloc(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
82
fw_ft_type = mlx5hws_table_get_res_fw_ft_type(pool->tbl_type, false);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
83
opt_log_range = pool->opt_type == MLX5HWS_POOL_OPTIMIZE_MIRROR ?
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
84
0 : pool->alloc_log_sz;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
85
resource = hws_pool_create_one_resource(pool, opt_log_range, fw_ft_type);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
87
mlx5hws_err(pool->ctx, "Failed to allocate resource\n");
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
9
switch (resource->pool->type) {
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
91
pool->resource = resource;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
93
if (pool->tbl_type == MLX5HWS_TABLE_TYPE_FDB) {
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
96
fw_ft_type = mlx5hws_table_get_res_fw_ft_type(pool->tbl_type, true);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
97
opt_log_range = pool->opt_type == MLX5HWS_POOL_OPTIMIZE_ORIG ?
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
98
0 : pool->alloc_log_sz;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.c
99
mirror_resource = hws_pool_create_one_resource(pool, opt_log_range, fw_ft_type);
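The pool.c "db" entries (lines 179-189 and 282-308) show a small dispatch layer: init picks a bitmap or buddy backend and installs uninit/get/put callbacks that the chunk alloc/free paths later invoke through the pool. A compact sketch of that indirection, with stub backends standing in for the real ones:

#include <linux/errno.h>
#include <linux/types.h>

struct demo_chunk { int offset; u8 order; };
struct demo_pool;

typedef int  (*demo_get_chunk)(struct demo_pool *pool, struct demo_chunk *chunk);
typedef void (*demo_put_chunk)(struct demo_pool *pool, struct demo_chunk *chunk);
typedef void (*demo_uninit_db)(struct demo_pool *pool);

struct demo_pool {
	bool use_buddy;                   /* MLX5HWS_POOL_FLAG_BUDDY analogue */
	demo_get_chunk p_get_chunk;
	demo_put_chunk p_put_chunk;
	demo_uninit_db p_db_uninit;
};

static int demo_bitmap_get_chunk(struct demo_pool *pool, struct demo_chunk *chunk)
{
	return chunk->order ? -EOPNOTSUPP : 0;  /* bitmap path is order-0 only */
}

static int demo_buddy_get_chunk(struct demo_pool *pool, struct demo_chunk *chunk)
{
	return 0;                               /* buddy path handles any order */
}

static void demo_noop_put(struct demo_pool *pool, struct demo_chunk *chunk) { }
static void demo_noop_uninit(struct demo_pool *pool) { }

static int demo_pool_db_init(struct demo_pool *pool)
{
	pool->p_get_chunk = pool->use_buddy ? demo_buddy_get_chunk
					    : demo_bitmap_get_chunk;
	pool->p_put_chunk = demo_noop_put;
	pool->p_db_uninit = demo_noop_uninit;
	return 0;
}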
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
101
return pool->resource->base_id;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
104
static inline u32 mlx5hws_pool_get_base_mirror_id(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
106
return pool->mirror_resource->base_id;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
110
mlx5hws_pool_empty(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
114
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
115
ret = pool->available_elems == 0;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
116
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
122
mlx5hws_pool_full(struct mlx5hws_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
126
mutex_lock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
127
ret = pool->available_elems == (1 << pool->alloc_log_sz);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
128
mutex_unlock(&pool->lock);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
20
struct mlx5hws_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
63
typedef int (*mlx5hws_pool_db_get_chunk)(struct mlx5hws_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
65
typedef void (*mlx5hws_pool_db_put_chunk)(struct mlx5hws_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
67
typedef void (*mlx5hws_pool_unint_db)(struct mlx5hws_pool *pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
91
void mlx5hws_pool_destroy(struct mlx5hws_pool *pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
93
int mlx5hws_pool_chunk_alloc(struct mlx5hws_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
96
void mlx5hws_pool_chunk_free(struct mlx5hws_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/hws/pool.h
99
static inline u32 mlx5hws_pool_get_base_id(struct mlx5hws_pool *pool)
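The chunk paths in pool.c:321-341 and the empty/full tests in pool.h:110-128 share one invariant: available_elems is adjusted by 1 << order under the pool mutex, so "full" means no chunk is outstanding. A minimal sketch of that accounting with simplified names:

#include <linux/mutex.h>
#include <linux/types.h>

struct demo_pool {
	struct mutex lock;
	size_t alloc_log_sz;          /* pool holds 1 << alloc_log_sz elements */
	int available_elems;
};

static void demo_chunk_account_alloc(struct demo_pool *pool, u8 order)
{
	mutex_lock(&pool->lock);
	pool->available_elems -= 1 << order;  /* a chunk covers 2^order elems */
	mutex_unlock(&pool->lock);
}

static void demo_chunk_account_free(struct demo_pool *pool, u8 order)
{
	mutex_lock(&pool->lock);
	pool->available_elems += 1 << order;
	mutex_unlock(&pool->lock);
}

static bool demo_pool_full(struct demo_pool *pool)
{
	bool ret;

	mutex_lock(&pool->lock);
	ret = pool->available_elems == (1 << pool->alloc_log_sz);
	mutex_unlock(&pool->lock);
	return ret;
}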
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
102
mutex_lock(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
103
if (list_empty(&pool->free_list)) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
104
ret = dr_arg_pool_alloc_objs(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
109
arg_obj = list_first_entry_or_null(&pool->free_list,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
118
mutex_unlock(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
122
static void dr_arg_pool_put_arg_obj(struct dr_arg_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
125
mutex_lock(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
126
list_add(&arg_obj->list_node, &pool->free_list);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
127
mutex_unlock(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
133
struct dr_arg_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
135
pool = kzalloc_obj(*pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
136
if (!pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
139
pool->dmn = dmn;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
141
INIT_LIST_HEAD(&pool->free_list);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
142
mutex_init(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
144
pool->log_chunk_size = chunk_size;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
145
if (dr_arg_pool_alloc_objs(pool))
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
148
return pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
151
kfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
156
static void dr_arg_pool_destroy(struct dr_arg_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
160
list_for_each_entry_safe(arg_obj, tmp_arg, &pool->free_list, list_node) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
163
mlx5dr_cmd_destroy_modify_header_arg(pool->dmn->mdev, arg_obj->obj_id);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
167
mutex_destroy(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
168
kfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
31
static int dr_arg_pool_alloc_objs(struct dr_arg_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
43
pool->dmn->info.caps.log_header_modify_argument_granularity;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
46
max_t(u32, pool->dmn->info.caps.log_header_modify_argument_granularity,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
49
min_t(u32, pool->dmn->info.caps.log_header_modify_argument_max_alloc,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
52
if (pool->log_chunk_size > object_range) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
53
mlx5dr_err(pool->dmn, "Required chunk size (%d) is not supported\n",
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
54
pool->log_chunk_size);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
58
num_of_objects = (1 << (object_range - pool->log_chunk_size));
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
60
ret = mlx5dr_cmd_create_modify_header_arg(pool->dmn->mdev,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
62
pool->dmn->pdn,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
65
mlx5dr_err(pool->dmn, "failed allocating object with range: %d:\n",
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
77
arg_obj->log_chunk_size = pool->log_chunk_size;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
82
arg_obj->obj_offset = i * (1 << pool->log_chunk_size);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
84
list_splice_tail_init(&cur_list, &pool->free_list);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
89
mlx5dr_cmd_destroy_modify_header_arg(pool->dmn->mdev, obj_id);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_arg.c
97
static struct mlx5dr_arg_obj *dr_arg_pool_get_arg_obj(struct dr_arg_pool *pool)
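The dr_arg.c get/put pair (lines 97-127) is a classic mutex-protected free list with bulk refill: if the list runs dry, allocate a batch, then pop the head; release pushes entries back. A sketch of the same discipline, with plain kzalloc standing in for the real firmware object creation:

#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/slab.h>

struct demo_obj {
	struct list_head list_node;
};

struct demo_arg_pool {
	struct mutex mutex;
	struct list_head free_list;
};

static int demo_pool_refill(struct demo_arg_pool *pool, int n)
{
	while (n--) {
		struct demo_obj *obj = kzalloc(sizeof(*obj), GFP_KERNEL);

		if (!obj)
			return -ENOMEM;
		list_add_tail(&obj->list_node, &pool->free_list);
	}
	return 0;
}

static struct demo_obj *demo_pool_get(struct demo_arg_pool *pool)
{
	struct demo_obj *obj = NULL;

	mutex_lock(&pool->mutex);
	if (list_empty(&pool->free_list) && demo_pool_refill(pool, 16))
		goto out;
	obj = list_first_entry_or_null(&pool->free_list,
				       struct demo_obj, list_node);
	if (obj)
		list_del(&obj->list_node);
out:
	mutex_unlock(&pool->mutex);
	return obj;
}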
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
102
chunk->buddy_mem->pool->icm_type);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
111
dr_icm_pool_mr_create(struct mlx5dr_icm_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
113
struct mlx5_core_dev *mdev = pool->dmn->mdev;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
123
icm_mr->dmn = pool->dmn;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
125
icm_mr->dm.length = mlx5dr_icm_pool_chunk_size_to_byte(pool->max_log_chunk_sz,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
126
pool->icm_type);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
128
switch (pool->icm_type) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
144
WARN_ON(pool->icm_type);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
153
mlx5dr_err(pool->dmn, "Failed to allocate SW ICM memory, err (%d)\n", err);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
158
err = dr_icm_create_dm_mkey(mdev, pool->dmn->pdn,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
164
mlx5dr_err(pool->dmn, "Failed to create SW ICM MKEY, err (%d)\n", err);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
171
mlx5dr_err(pool->dmn, "Failed to get Aligned ICM mem (asked: %zu)\n",
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
228
mlx5dr_icm_pool_chunk_size_to_entries(buddy->pool->max_log_chunk_sz);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
262
static int dr_icm_buddy_create(struct mlx5dr_icm_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
267
icm_mr = dr_icm_pool_mr_create(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
275
if (mlx5dr_buddy_init(buddy, pool->max_log_chunk_sz))
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
279
buddy->pool = pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
281
if (pool->icm_type == DR_ICM_TYPE_STE) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
288
list_add(&buddy->list_node, &pool->buddy_mem_list);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
290
pool->dmn->num_buddies[pool->icm_type]++;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
305
enum mlx5dr_icm_type icm_type = buddy->pool->icm_type;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
314
buddy->pool->dmn->num_buddies[icm_type]--;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
321
struct mlx5dr_icm_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
332
if (pool->icm_type == DR_ICM_TYPE_STE) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
333
offset = mlx5dr_icm_pool_dm_type_to_entry_size(pool->icm_type) * seg;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
340
static bool dr_icm_pool_is_sync_required(struct mlx5dr_icm_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
342
return pool->hot_memory_size > pool->th;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
345
static void dr_icm_pool_clear_hot_chunks_arr(struct mlx5dr_icm_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
350
for (i = 0; i < pool->hot_chunks_num; i++) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
351
hot_chunk = &pool->hot_chunks_arr[i];
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
357
pool->icm_type);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
360
pool->hot_chunks_num = 0;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
361
pool->hot_memory_size = 0;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
364
static int dr_icm_pool_sync_all_buddy_pools(struct mlx5dr_icm_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
369
err = mlx5dr_cmd_sync_steering(pool->dmn->mdev);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
371
mlx5dr_err(pool->dmn, "Failed to sync to HW (err: %d)\n", err);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
375
dr_icm_pool_clear_hot_chunks_arr(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
377
list_for_each_entry_safe(buddy, tmp_buddy, &pool->buddy_mem_list, list_node) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
378
if (!buddy->used_memory && pool->icm_type == DR_ICM_TYPE_STE)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
385
static int dr_icm_handle_buddies_get_mem(struct mlx5dr_icm_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
396
list_for_each_entry(buddy_mem_pool, &pool->buddy_mem_list, list_node) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
404
mlx5dr_err(pool->dmn,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
412
err = dr_icm_buddy_create(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
414
mlx5dr_err(pool->dmn,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
434
mlx5dr_icm_alloc_chunk(struct mlx5dr_icm_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
442
if (chunk_size > pool->max_log_chunk_sz)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
445
mutex_lock(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
447
ret = dr_icm_handle_buddies_get_mem(pool, chunk_size, &buddy, &seg);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
451
chunk = kmem_cache_alloc(pool->chunks_kmem_cache, GFP_KERNEL);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
455
dr_icm_chunk_init(chunk, pool, chunk_size, buddy, seg);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
462
mutex_unlock(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
469
struct mlx5dr_icm_pool *pool = buddy->pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
473
chunks_cache = pool->chunks_kmem_cache;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
476
mutex_lock(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
478
pool->hot_memory_size += mlx5dr_icm_pool_get_chunk_byte_size(chunk);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
480
hot_chunk = &pool->hot_chunks_arr[pool->hot_chunks_num++];
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
488
if (dr_icm_pool_is_sync_required(pool))
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
489
dr_icm_pool_sync_all_buddy_pools(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
491
mutex_unlock(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
494
struct mlx5dr_ste_htbl *mlx5dr_icm_pool_alloc_htbl(struct mlx5dr_icm_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
496
return kmem_cache_alloc(pool->dmn->htbls_kmem_cache, GFP_KERNEL);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
499
void mlx5dr_icm_pool_free_htbl(struct mlx5dr_icm_pool *pool, struct mlx5dr_ste_htbl *htbl)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
501
kmem_cache_free(pool->dmn->htbls_kmem_cache, htbl);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
508
struct mlx5dr_icm_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
511
pool = kvzalloc_obj(*pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
512
if (!pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
515
pool->dmn = dmn;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
516
pool->icm_type = icm_type;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
517
pool->chunks_kmem_cache = dmn->chunks_kmem_cache;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
519
INIT_LIST_HEAD(&pool->buddy_mem_list);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
520
mutex_init(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
524
pool->max_log_chunk_sz = dmn->info.max_log_sw_icm_sz;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
525
max_hot_size = mlx5dr_icm_pool_chunk_size_to_byte(pool->max_log_chunk_sz,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
526
pool->icm_type) *
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
530
pool->max_log_chunk_sz = dmn->info.max_log_action_icm_sz;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
531
max_hot_size = mlx5dr_icm_pool_chunk_size_to_byte(pool->max_log_chunk_sz,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
532
pool->icm_type) *
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
536
pool->max_log_chunk_sz = dmn->info.max_log_modify_hdr_pattern_icm_sz;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
537
max_hot_size = mlx5dr_icm_pool_chunk_size_to_byte(pool->max_log_chunk_sz,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
538
pool->icm_type) *
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
545
entry_size = mlx5dr_icm_pool_dm_type_to_entry_size(pool->icm_type);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
548
pool->th = max_hot_size;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
550
pool->hot_chunks_arr = kvzalloc_objs(struct mlx5dr_icm_hot_chunk,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
552
if (!pool->hot_chunks_arr)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
555
return pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
558
kvfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
562
void mlx5dr_icm_pool_destroy(struct mlx5dr_icm_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
566
dr_icm_pool_clear_hot_chunks_arr(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
568
list_for_each_entry_safe(buddy, tmp_buddy, &pool->buddy_mem_list, list_node)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
571
kvfree(pool->hot_chunks_arr);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
572
mutex_destroy(&pool->mutex);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
573
kvfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
82
u32 offset = mlx5dr_icm_pool_dm_type_to_entry_size(chunk->buddy_mem->pool->icm_type);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_icm_pool.c
94
u32 size = mlx5dr_icm_pool_dm_type_to_entry_size(chunk->buddy_mem->pool->icm_type);
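The dr_icm_pool.c free path (lines 469-491, with the threshold test at 340-342) does not recycle ICM chunks immediately: freed chunks are parked in a "hot" array because the hardware may still reference them, and only once hot_memory_size crosses the pool threshold does the code sync steering and recycle everything at once. A sketch of that deferral, with a stand-in for mlx5dr_cmd_sync_steering():

#include <linux/mutex.h>
#include <linux/types.h>

struct demo_hot_chunk { int seg; u32 byte_size; };

struct demo_icm_pool {
	struct mutex mutex;
	struct demo_hot_chunk *hot_chunks_arr;
	size_t hot_chunks_num;
	u64 hot_memory_size;
	u64 th;                           /* sync threshold in bytes */
};

static void demo_sync_steering(void)
{
	/* stand-in: the real code issues a firmware sync command */
}

static void demo_icm_free_chunk(struct demo_icm_pool *pool, int seg, u32 bytes)
{
	struct demo_hot_chunk *hot;

	mutex_lock(&pool->mutex);
	pool->hot_memory_size += bytes;
	hot = &pool->hot_chunks_arr[pool->hot_chunks_num++];
	hot->seg = seg;
	hot->byte_size = bytes;

	/* HW may still walk freed STEs; recycle only after a sync */
	if (pool->hot_memory_size > pool->th) {
		demo_sync_steering();
		pool->hot_chunks_num = 0;     /* clear the hot array */
		pool->hot_memory_size = 0;
	}
	mutex_unlock(&pool->mutex);
}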
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
103
kfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
114
struct mlx5dr_send_info_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
117
pool = kzalloc_obj(*pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
118
if (!pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
121
INIT_LIST_HEAD(&pool->free_list);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
123
ret = dr_send_info_pool_fill(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
125
kfree(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
129
return pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
152
struct mlx5dr_send_info_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
155
pool = nic_type == DR_DOMAIN_NIC_TYPE_RX ? dmn->send_info_pool_rx :
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
158
if (unlikely(list_empty(&pool->free_list))) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
159
ret = dr_send_info_pool_fill(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
164
pool_obj = list_first_entry_or_null(&pool->free_list,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
186
list_add(&pool_obj->list_node, &pool_obj->pool->free_list);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
61
struct mlx5dr_send_info_pool *pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
69
static int dr_send_info_pool_fill(struct mlx5dr_send_info_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
79
pool_obj->pool = pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
80
list_add_tail(&pool_obj->list_node, &pool->free_list);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
86
list_for_each_entry_safe(pool_obj, tmp_pool_obj, &pool->free_list, list_node) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
94
static void dr_send_info_pool_destroy(struct mlx5dr_send_info_pool *pool)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_send.c
98
list_for_each_entry_safe(pool_obj, tmp_pool_obj, &pool->free_list, list_node) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_ste.c
496
struct mlx5dr_ste_htbl *mlx5dr_ste_htbl_alloc(struct mlx5dr_icm_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_ste.c
505
htbl = mlx5dr_icm_pool_alloc_htbl(pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_ste.c
509
chunk = mlx5dr_icm_alloc_chunk(pool, chunk_size);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_ste.c
534
mlx5dr_icm_pool_free_htbl(pool, htbl);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_ste.c
540
struct mlx5dr_icm_pool *pool = htbl->chunk->buddy_mem->pool;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_ste.c
546
mlx5dr_icm_pool_free_htbl(pool, htbl);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_types.h
1226
struct mlx5dr_ste_htbl *mlx5dr_icm_pool_alloc_htbl(struct mlx5dr_icm_pool *pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_types.h
1227
void mlx5dr_icm_pool_free_htbl(struct mlx5dr_icm_pool *pool, struct mlx5dr_ste_htbl *htbl);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_types.h
1389
void mlx5dr_icm_pool_destroy(struct mlx5dr_icm_pool *pool);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_types.h
1392
mlx5dr_icm_alloc_chunk(struct mlx5dr_icm_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_types.h
229
mlx5dr_ste_htbl_alloc(struct mlx5dr_icm_pool *pool,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/mlx5dr.h
174
struct mlx5dr_icm_pool *pool;
drivers/net/ethernet/mellanox/mlxsw/reg.h
12593
MLXSW_ITEM32(reg, sbpr, pool, 0x00, 0, 4);
drivers/net/ethernet/mellanox/mlxsw/reg.h
12619
static inline void mlxsw_reg_sbpr_pack(char *payload, u8 pool,
drivers/net/ethernet/mellanox/mlxsw/reg.h
12625
mlxsw_reg_sbpr_pool_set(payload, pool);
drivers/net/ethernet/mellanox/mlxsw/reg.h
12701
MLXSW_ITEM32(reg, sbcm, pool, 0x24, 0, 4);
drivers/net/ethernet/mellanox/mlxsw/reg.h
12706
bool infi_max, u8 pool)
drivers/net/ethernet/mellanox/mlxsw/reg.h
12715
mlxsw_reg_sbcm_pool_set(payload, pool);
drivers/net/ethernet/mellanox/mlxsw/reg.h
12741
MLXSW_ITEM32(reg, sbpm, pool, 0x00, 8, 4);
drivers/net/ethernet/mellanox/mlxsw/reg.h
12789
static inline void mlxsw_reg_sbpm_pack(char *payload, u16 local_port, u8 pool,
drivers/net/ethernet/mellanox/mlxsw/reg.h
12795
mlxsw_reg_sbpm_pool_set(payload, pool);
drivers/net/ethernet/mellanox/mlxsw/reg.h
12849
MLXSW_ITEM32(reg, sbmm, pool, 0x24, 0, 4);
drivers/net/ethernet/mellanox/mlxsw/reg.h
12852
u32 max_buff, u8 pool)
drivers/net/ethernet/mellanox/mlxsw/reg.h
12858
mlxsw_reg_sbmm_pool_set(payload, pool);
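For orientation, MLXSW_ITEM32(reg, sbpr, pool, 0x00, 0, 4) declares a 4-bit "pool" field in bits 3:0 of the big-endian 32-bit word at byte offset 0x00 of the register payload, and the macro generates mlxsw_reg_sbpr_pool_set()/get() accessors. An open-coded sketch of what the generated setter amounts to; this is an illustration of the bit layout, not the macro's actual expansion:

#include <linux/bits.h>
#include <linux/kernel.h>

static void demo_sbpr_pool_set(char *payload, u8 pool)
{
	__be32 *word = (__be32 *)(payload + 0x00);  /* item's byte offset */
	u32 v = be32_to_cpu(*word);

	v &= ~GENMASK(3, 0);          /* clear bits 3:0 (shift 0, width 4) */
	v |= pool & GENMASK(3, 0);
	*word = cpu_to_be32(v);
}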
drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c
1151
des->pool);
drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c
191
mlxsw_reg_sbpr_pack(sbpr_pl, des->pool, des->dir, mode,
drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c
231
min_buff, max_buff, infi_max, des->pool);
drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c
259
mlxsw_reg_sbpm_pack(sbpm_pl, local_port, des->pool, des->dir, false,
drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c
282
mlxsw_reg_sbpm_pack(sbpm_pl, local_port, des->pool, des->dir,
drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c
310
mlxsw_reg_sbpm_pack(sbpm_pl, local_port, des->pool, des->dir,
drivers/net/ethernet/mellanox/mlxsw/spectrum_buffers.c
54
u8 pool;
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
104
for (i = 0; i < pool->sub_pools_count; i++) {
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
105
sub_pool = &pool->sub_pools[i];
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
115
const struct mlxsw_sp_counter_pool *pool = priv;
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
117
return atomic_read(&pool->active_entries_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
124
struct mlxsw_sp_counter_pool *pool;
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
127
pool = kzalloc_flex(*pool, sub_pools, sub_pools_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
128
if (!pool)
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
130
mlxsw_sp->counter_pool = pool;
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
131
pool->sub_pools_count = sub_pools_count;
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
132
memcpy(pool->sub_pools, mlxsw_sp_counter_sub_pools,
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
133
flex_array_size(pool, sub_pools, pool->sub_pools_count));
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
134
spin_lock_init(&pool->counter_pool_lock);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
135
atomic_set(&pool->active_entries_count, 0);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
138
&pool->pool_size);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
142
mlxsw_sp_counter_pool_occ_get, pool);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
144
pool->usage = bitmap_zalloc(pool->pool_size, GFP_KERNEL);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
145
if (!pool->usage) {
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
157
bitmap_free(pool->usage);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
162
kfree(pool);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
168
struct mlxsw_sp_counter_pool *pool = mlxsw_sp->counter_pool;
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
172
WARN_ON(!bitmap_empty(pool->usage, pool->pool_size));
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
173
WARN_ON(atomic_read(&pool->active_entries_count));
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
174
bitmap_free(pool->usage);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
177
kfree(pool);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
184
struct mlxsw_sp_counter_pool *pool = mlxsw_sp->counter_pool;
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
190
sub_pool = &pool->sub_pools[sub_pool_id];
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
194
spin_lock(&pool->counter_pool_lock);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
195
entry_index = find_next_zero_bit(pool->usage, stop_index, entry_index);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
208
__set_bit(entry_index + i, pool->usage);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
209
spin_unlock(&pool->counter_pool_lock);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
213
atomic_add(sub_pool->entry_size, &pool->active_entries_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
217
spin_unlock(&pool->counter_pool_lock);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
225
struct mlxsw_sp_counter_pool *pool = mlxsw_sp->counter_pool;
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
229
if (WARN_ON(counter_index >= pool->pool_size))
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
231
sub_pool = &pool->sub_pools[sub_pool_id];
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
232
spin_lock(&pool->counter_pool_lock);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
234
__clear_bit(counter_index + i, pool->usage);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
235
spin_unlock(&pool->counter_pool_lock);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
237
atomic_sub(sub_pool->entry_size, &pool->active_entries_count);
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
54
struct mlxsw_sp_counter_pool *pool = mlxsw_sp->counter_pool;
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
62
for (i = 0; i < pool->sub_pools_count; i++) {
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
63
sub_pool = &pool->sub_pools[i];
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
89
sub_pool = &pool->sub_pools[i];
drivers/net/ethernet/mellanox/mlxsw/spectrum_cnt.c
99
struct mlxsw_sp_counter_pool *pool = mlxsw_sp->counter_pool;
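The spectrum_cnt.c allocation path (lines 184-217) combines a usage bitmap under a spinlock with an atomic occupancy counter updated outside it. A sketch with the sub-pool bounds collapsed to [0, pool_size); because every allocation is an entry_size-aligned block of exactly entry_size bits, the ALIGN after find_next_zero_bit lands on a fully free block, as in the real code:

#include <linux/atomic.h>
#include <linux/bitmap.h>
#include <linux/bitops.h>
#include <linux/kernel.h>
#include <linux/spinlock.h>

struct demo_counter_pool {
	spinlock_t lock;
	unsigned long *usage;         /* bitmap_zalloc(pool_size, GFP_KERNEL) */
	unsigned int pool_size;
	atomic_t active_entries_count;
};

static int demo_counter_alloc(struct demo_counter_pool *pool,
			      unsigned int entry_size, unsigned int *index)
{
	unsigned int entry_index, i;

	spin_lock(&pool->lock);
	entry_index = find_next_zero_bit(pool->usage, pool->pool_size, 0);
	entry_index = ALIGN(entry_index, entry_size);
	if (entry_index + entry_size > pool->pool_size) {
		spin_unlock(&pool->lock);
		return -ENOBUFS;
	}
	for (i = 0; i < entry_size; i++)
		__set_bit(entry_index + i, pool->usage);
	spin_unlock(&pool->lock);

	/* occupancy is reported via this atomic, outside the spinlock */
	atomic_add(entry_size, &pool->active_entries_count);
	*index = entry_index;
	return 0;
}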
drivers/net/ethernet/microchip/sparx5/sparx5_main.h
587
int sparx5_pool_put(struct sparx5_pool_entry *pool, int size, u32 id);
drivers/net/ethernet/microchip/sparx5/sparx5_main.h
588
int sparx5_pool_get(struct sparx5_pool_entry *pool, int size, u32 *id);
drivers/net/ethernet/microchip/sparx5/sparx5_main.h
589
int sparx5_pool_get_with_idx(struct sparx5_pool_entry *pool, int size, u32 idx,
drivers/net/ethernet/microchip/sparx5/sparx5_pool.c
23
int sparx5_pool_put(struct sparx5_pool_entry *pool, int size, u32 id)
drivers/net/ethernet/microchip/sparx5/sparx5_pool.c
27
e_itr = (pool + sparx5_pool_id_to_idx(id));
drivers/net/ethernet/microchip/sparx5/sparx5_pool.c
37
int sparx5_pool_get(struct sparx5_pool_entry *pool, int size, u32 *id)
drivers/net/ethernet/microchip/sparx5/sparx5_pool.c
42
for (i = 0, e_itr = pool; i < size; i++, e_itr++) {
drivers/net/ethernet/microchip/sparx5/sparx5_pool.c
55
int sparx5_pool_get_with_idx(struct sparx5_pool_entry *pool, int size, u32 idx,
drivers/net/ethernet/microchip/sparx5/sparx5_pool.c
61
for (i = 0, e_itr = pool; i < size; i++, e_itr++) {
drivers/net/ethernet/microchip/sparx5/sparx5_pool.c
75
e_itr = (pool + ret);
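The sparx5_pool.c helpers operate on a fixed array of entries indexed by id: "get" scans for a free slot (or, with the idx variant, bumps an existing one), and "put" releases it. A sketch of that shape; the entry layout below (a bare refcount) is a guess for illustration, not the driver's struct sparx5_pool_entry:

#include <linux/errno.h>
#include <linux/types.h>

struct demo_pool_entry {
	u16 ref_cnt;                  /* 0 means the slot is free */
};

static int demo_pool_get(struct demo_pool_entry *pool, int size, u32 *id)
{
	int i;

	for (i = 0; i < size; i++) {
		if (pool[i].ref_cnt == 0) {
			pool[i].ref_cnt++;
			*id = i;      /* real code maps array idx <-> id */
			return 0;
		}
	}
	return -ENOSPC;
}

static int demo_pool_put(struct demo_pool_entry *pool, int size, u32 id)
{
	if (id >= (u32)size || pool[id].ref_cnt == 0)
		return -EINVAL;
	pool[id].ref_cnt--;
	return 0;
}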
drivers/net/ethernet/netronome/nfp/nfd3/xsk.c
21
struct xsk_buff_pool *pool = r_vec->xsk_pool;
drivers/net/ethernet/netronome/nfp/nfd3/xsk.c
29
xsk_buff_raw_dma_sync_for_device(pool, xrxbuf->dma_addr + pkt_off,
drivers/net/ethernet/netronome/nfp/nfp_abi.h
85
__le32 pool;
drivers/net/ethernet/netronome/nfp/nfp_net_common.c
2230
return nfp_net_xsk_setup_pool(netdev, xdp->xsk.pool,
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
100
struct xsk_buff_pool *pool)
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
102
return xsk_pool_dma_unmap(pool, 0);
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
105
static int nfp_net_xsk_pool_map(struct device *dev, struct xsk_buff_pool *pool)
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
107
return xsk_pool_dma_map(pool, dev, 0);
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
111
struct xsk_buff_pool *pool, u16 queue_id)
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
130
if (pool) {
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
131
err = nfp_net_xsk_pool_map(nn->dp.dev, pool);
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
144
dp->xsk_pools[queue_id] = pool;
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
156
if (pool)
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
157
nfp_net_xsk_pool_unmap(nn->dp.dev, pool);
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
60
struct xsk_buff_pool *pool = r_vec->xsk_pool;
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.c
67
xdp = xsk_buff_alloc(pool);
drivers/net/ethernet/netronome/nfp/nfp_net_xsk.h
32
int nfp_net_xsk_setup_pool(struct net_device *netdev, struct xsk_buff_pool *pool,
drivers/net/ethernet/netronome/nfp/nfp_shared_buf.c
32
.pool = cpu_to_le32(pool_index),
drivers/net/ethernet/netronome/nfp/nfp_shared_buf.c
63
.pool = cpu_to_le32(pool_index),
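The nfp_net_xsk.c entries (lines 100-157) split AF_XDP pool setup into DMA map on install and unmap on removal, with the pool pointer stored per queue. A sketch of that split, omitting the ring reconfiguration the real driver performs between the two steps:

#include <net/xsk_buff_pool.h>

static int demo_xsk_setup_pool(struct device *dev,
			       struct xsk_buff_pool **qid_pools,
			       struct xsk_buff_pool *pool, u16 queue_id)
{
	if (pool) {
		/* map the UMEM for device DMA; attrs = 0 as in the listing */
		int err = xsk_pool_dma_map(pool, dev, 0);

		if (err)
			return err;
	} else if (qid_pools[queue_id]) {
		xsk_pool_dma_unmap(qid_pools[queue_id], 0);
	}

	qid_pools[queue_id] = pool;   /* NULL disables AF_XDP on this queue */
	return 0;
}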
drivers/net/ethernet/stmicro/stmmac/stmmac_main.c
2652
struct xsk_buff_pool *pool = tx_q->xsk_pool;
drivers/net/ethernet/stmicro/stmmac/stmmac_main.c
2679
if (!xsk_tx_peek_desc(pool, &xdp_desc))
drivers/net/ethernet/stmicro/stmmac/stmmac_main.c
2696
dma_addr = xsk_buff_raw_get_dma(pool, xdp_desc.addr);
drivers/net/ethernet/stmicro/stmmac/stmmac_main.c
2697
meta = xsk_buff_get_metadata(pool, xdp_desc.addr);
drivers/net/ethernet/stmicro/stmmac/stmmac_main.c
2698
xsk_buff_raw_dma_sync_for_device(pool, dma_addr, xdp_desc.len);
drivers/net/ethernet/stmicro/stmmac/stmmac_main.c
2756
xsk_tx_release(pool);
drivers/net/ethernet/stmicro/stmmac/stmmac_main.c
6881
return stmmac_xdp_setup_pool(priv, bpf->xsk.pool,
drivers/net/ethernet/stmicro/stmmac/stmmac_xdp.c
10
struct xsk_buff_pool *pool, u16 queue)
drivers/net/ethernet/stmicro/stmmac/stmmac_xdp.c
21
frame_size = xsk_pool_get_rx_frame_size(pool);
drivers/net/ethernet/stmicro/stmmac/stmmac_xdp.c
28
err = xsk_pool_dma_map(pool, priv->device, STMMAC_RX_DMA_ATTR);
drivers/net/ethernet/stmicro/stmmac/stmmac_xdp.c
61
struct xsk_buff_pool *pool;
drivers/net/ethernet/stmicro/stmmac/stmmac_xdp.c
68
pool = xsk_get_pool_from_qid(priv->dev, queue);
drivers/net/ethernet/stmicro/stmmac/stmmac_xdp.c
69
if (!pool)
drivers/net/ethernet/stmicro/stmmac/stmmac_xdp.c
81
xsk_pool_dma_unmap(pool, STMMAC_RX_DMA_ATTR);
drivers/net/ethernet/stmicro/stmmac/stmmac_xdp.c
95
int stmmac_xdp_setup_pool(struct stmmac_priv *priv, struct xsk_buff_pool *pool,
drivers/net/ethernet/stmicro/stmmac/stmmac_xdp.c
98
return pool ? stmmac_xdp_enable_pool(priv, pool, queue) :
drivers/net/ethernet/stmicro/stmmac/stmmac_xdp.h
9
int stmmac_xdp_setup_pool(struct stmmac_priv *priv, struct xsk_buff_pool *pool,
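The stmmac_main.c TX entries (lines 2652-2756) trace the canonical AF_XDP zero-copy transmit loop: peek a descriptor from the pool, translate its UMEM address to DMA, sync the buffer for the device, program the hardware (elided here), and release the consumed batch at the end. A minimal sketch of that loop:

#include <net/xdp_sock_drv.h>

static void demo_xsk_tx(struct xsk_buff_pool *pool, unsigned int budget)
{
	struct xdp_desc xdp_desc;

	while (budget--) {
		dma_addr_t dma_addr;

		if (!xsk_tx_peek_desc(pool, &xdp_desc))
			break;                  /* no more frames queued */

		dma_addr = xsk_buff_raw_get_dma(pool, xdp_desc.addr);
		xsk_buff_raw_dma_sync_for_device(pool, dma_addr, xdp_desc.len);

		/* ...fill and kick the hardware TX descriptor here... */
	}

	xsk_tx_release(pool);   /* flush the completion batch to userspace */
}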
drivers/net/ethernet/ti/am65-cpsw-nuss.c
571
struct page_pool *pool;
drivers/net/ethernet/ti/am65-cpsw-nuss.c
577
pool = page_pool_create(&pp_params);
drivers/net/ethernet/ti/am65-cpsw-nuss.c
578
if (IS_ERR(pool)) {
drivers/net/ethernet/ti/am65-cpsw-nuss.c
579
ret = PTR_ERR(pool);
drivers/net/ethernet/ti/am65-cpsw-nuss.c
583
flow->page_pool = pool;
drivers/net/ethernet/ti/am65-cpsw-nuss.c
601
pool);
drivers/net/ethernet/ti/am65-cpts.c
1182
INIT_LIST_HEAD(&cpts->pool);
drivers/net/ethernet/ti/am65-cpts.c
1187
list_add(&cpts->pool_data[i].list, &cpts->pool);
drivers/net/ethernet/ti/am65-cpts.c
169
struct list_head pool;
drivers/net/ethernet/ti/am65-cpts.c
243
list_add(&event->list, &cpts->pool);
drivers/net/ethernet/ti/am65-cpts.c
298
event = list_first_entry_or_null(&cpts->pool,
drivers/net/ethernet/ti/am65-cpts.c
876
list_splice_tail(&events_free, &cpts->pool);
drivers/net/ethernet/ti/am65-cpts.c
933
list_move(&event->list, &cpts->pool);
drivers/net/ethernet/ti/am65-cpts.c
944
list_move(&event->list, &cpts->pool);
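The cpts and am65-cpts entries show a zero-allocation event pool: a fixed array of event slots is threaded onto a free list at init, events are taken from the head, and consumed or expired events are linked back. A sketch of that pattern (the real fetch path first purges stale events when the pool is empty):

#include <linux/list.h>

#define DEMO_MAX_EVENTS 32

struct demo_event {
	struct list_head list;
};

struct demo_cpts {
	struct list_head pool;        /* free event slots */
	struct demo_event pool_data[DEMO_MAX_EVENTS];
};

static void demo_cpts_init_pool(struct demo_cpts *cpts)
{
	int i;

	INIT_LIST_HEAD(&cpts->pool);
	for (i = 0; i < DEMO_MAX_EVENTS; i++)
		list_add(&cpts->pool_data[i].list, &cpts->pool);
}

static struct demo_event *demo_cpts_get_event(struct demo_cpts *cpts)
{
	struct demo_event *event;

	event = list_first_entry_or_null(&cpts->pool, struct demo_event, list);
	if (event)
		list_del_init(&event->list);
	return event;
}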
drivers/net/ethernet/ti/cpsw.c
375
struct page_pool *pool;
drivers/net/ethernet/ti/cpsw.c
387
pool = cpsw->page_pool[ch];
drivers/net/ethernet/ti/cpsw.c
403
page_pool_recycle_direct(pool, page);
drivers/net/ethernet/ti/cpsw.c
407
new_page = page_pool_dev_alloc_pages(pool);
drivers/net/ethernet/ti/cpsw.c
441
page_pool_recycle_direct(pool, page);
drivers/net/ethernet/ti/cpsw.c
473
page_pool_recycle_direct(pool, new_page);
drivers/net/ethernet/ti/cpsw_new.c
311
struct page_pool *pool;
drivers/net/ethernet/ti/cpsw_new.c
331
pool = cpsw->page_pool[ch];
drivers/net/ethernet/ti/cpsw_new.c
347
page_pool_recycle_direct(pool, page);
drivers/net/ethernet/ti/cpsw_new.c
351
new_page = page_pool_dev_alloc_pages(pool);
drivers/net/ethernet/ti/cpsw_new.c
384
page_pool_recycle_direct(pool, page);
drivers/net/ethernet/ti/cpsw_new.c
417
page_pool_recycle_direct(pool, new_page);
drivers/net/ethernet/ti/cpsw_priv.c
1086
struct page_pool *pool;
drivers/net/ethernet/ti/cpsw_priv.c
1093
pool = cpsw->page_pool[ch];
drivers/net/ethernet/ti/cpsw_priv.c
1096
page = page_pool_dev_alloc_pages(pool);
drivers/net/ethernet/ti/cpsw_priv.c
1115
page_pool_recycle_direct(pool, page);
drivers/net/ethernet/ti/cpsw_priv.c
1131
struct page_pool *pool;
drivers/net/ethernet/ti/cpsw_priv.c
1140
pool = page_pool_create(&pp_params);
drivers/net/ethernet/ti/cpsw_priv.c
1141
if (IS_ERR(pool))
drivers/net/ethernet/ti/cpsw_priv.c
1144
return pool;
drivers/net/ethernet/ti/cpsw_priv.c
1149
struct page_pool *pool;
drivers/net/ethernet/ti/cpsw_priv.c
1153
pool = cpsw_create_page_pool(cpsw, pool_size);
drivers/net/ethernet/ti/cpsw_priv.c
1154
if (IS_ERR(pool))
drivers/net/ethernet/ti/cpsw_priv.c
1155
ret = PTR_ERR(pool);
drivers/net/ethernet/ti/cpsw_priv.c
1157
cpsw->page_pool[ch] = pool;
drivers/net/ethernet/ti/cpsw_priv.c
1166
struct page_pool *pool;
drivers/net/ethernet/ti/cpsw_priv.c
1169
pool = cpsw->page_pool[ch];
drivers/net/ethernet/ti/cpsw_priv.c
1176
ret = xdp_rxq_info_reg_mem_model(rxq, MEM_TYPE_PAGE_POOL, pool);
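The cpsw_priv.c entries (lines 1131-1176) wire a page_pool to a queue and register it as that queue's XDP memory model, so XDP_DROP and recycled frames go straight back to the pool. A sketch of the create-and-register step; the parameter choices below are illustrative, not the driver's exact values:

#include <linux/dma-mapping.h>
#include <net/page_pool/helpers.h>
#include <net/xdp.h>

static struct page_pool *demo_create_pool(struct device *dev,
					  struct xdp_rxq_info *rxq, int size)
{
	struct page_pool_params pp_params = {
		.order		= 0,
		.flags		= PP_FLAG_DMA_MAP,  /* pool handles DMA mapping */
		.pool_size	= size,             /* typically the RX ring size */
		.nid		= NUMA_NO_NODE,
		.dev		= dev,
		.dma_dir	= DMA_BIDIRECTIONAL,
	};
	struct page_pool *pool;
	int err;

	pool = page_pool_create(&pp_params);
	if (IS_ERR(pool))
		return pool;

	err = xdp_rxq_info_reg_mem_model(rxq, MEM_TYPE_PAGE_POOL, pool);
	if (err) {
		page_pool_destroy(pool);
		return ERR_PTR(err);
	}
	return pool;
}

With this in place the fast path is page_pool_dev_alloc_pages(pool) to refill the ring and page_pool_recycle_direct(pool, page) on drop, as in the cpsw.c entries above.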
drivers/net/ethernet/ti/cpts.c
120
if (list_empty(&cpts->pool) && cpts_purge_events(cpts)) {
drivers/net/ethernet/ti/cpts.c
125
event = list_first_entry(&cpts->pool, struct cpts_event, list);
drivers/net/ethernet/ti/cpts.c
388
list_splice_tail(&events_free, &cpts->pool);
drivers/net/ethernet/ti/cpts.c
472
list_add(&event->list, &cpts->pool);
drivers/net/ethernet/ti/cpts.c
484
list_add(&event->list, &cpts->pool);
drivers/net/ethernet/ti/cpts.c
555
INIT_LIST_HEAD(&cpts->pool);
drivers/net/ethernet/ti/cpts.c
557
list_add(&cpts->pool_data[i].list, &cpts->pool);
drivers/net/ethernet/ti/cpts.c
73
list_add(&event->list, &cpts->pool);
drivers/net/ethernet/ti/cpts.h
114
struct list_head pool;
drivers/net/ethernet/ti/davinci_cpdma.c
100
struct cpdma_desc_pool *pool;
drivers/net/ethernet/ti/davinci_cpdma.c
1031
desc = cpdma_desc_alloc(ctlr->pool);
drivers/net/ethernet/ti/davinci_cpdma.c
1052
cpdma_desc_free(ctlr->pool, desc, 1);
drivers/net/ethernet/ti/davinci_cpdma.c
1182
struct cpdma_desc_pool *pool = ctlr->pool;
drivers/net/ethernet/ti/davinci_cpdma.c
1188
gen_pool_avail(pool->gen_pool);
drivers/net/ethernet/ti/davinci_cpdma.c
1198
struct cpdma_desc_pool *pool = ctlr->pool;
drivers/net/ethernet/ti/davinci_cpdma.c
1215
cpdma_desc_free(pool, desc, 1);
drivers/net/ethernet/ti/davinci_cpdma.c
1225
struct cpdma_desc_pool *pool = ctlr->pool;
drivers/net/ethernet/ti/davinci_cpdma.c
1237
desc_dma = desc_phys(pool, desc);
drivers/net/ethernet/ti/davinci_cpdma.c
1253
chan->head = desc_from_phys(pool, desc_read(desc, hw_next));
drivers/net/ethernet/ti/davinci_cpdma.c
1260
chan_write(chan, hdp, desc_phys(pool, chan->head));
drivers/net/ethernet/ti/davinci_cpdma.c
1315
struct cpdma_desc_pool *pool = ctlr->pool;
drivers/net/ethernet/ti/davinci_cpdma.c
1359
chan->head = desc_from_phys(pool, next_dma);
drivers/net/ethernet/ti/davinci_cpdma.c
1434
ctlr->num_tx_desc = ctlr->pool->num_desc - ctlr->num_rx_desc;
drivers/net/ethernet/ti/davinci_cpdma.c
1438
ctlr->num_tx_desc = ctlr->pool->num_desc - ctlr->num_rx_desc;
drivers/net/ethernet/ti/davinci_cpdma.c
192
struct cpdma_desc_pool *pool = ctlr->pool;
drivers/net/ethernet/ti/davinci_cpdma.c
194
if (!pool)
drivers/net/ethernet/ti/davinci_cpdma.c
197
WARN(gen_pool_size(pool->gen_pool) != gen_pool_avail(pool->gen_pool),
drivers/net/ethernet/ti/davinci_cpdma.c
199
gen_pool_size(pool->gen_pool),
drivers/net/ethernet/ti/davinci_cpdma.c
200
gen_pool_avail(pool->gen_pool));
drivers/net/ethernet/ti/davinci_cpdma.c
201
if (pool->cpumap)
drivers/net/ethernet/ti/davinci_cpdma.c
202
dma_free_coherent(ctlr->dev, pool->mem_size, pool->cpumap,
drivers/net/ethernet/ti/davinci_cpdma.c
203
pool->phys);
drivers/net/ethernet/ti/davinci_cpdma.c
215
struct cpdma_desc_pool *pool;
drivers/net/ethernet/ti/davinci_cpdma.c
218
pool = devm_kzalloc(ctlr->dev, sizeof(*pool), GFP_KERNEL);
drivers/net/ethernet/ti/davinci_cpdma.c
219
if (!pool)
drivers/net/ethernet/ti/davinci_cpdma.c
221
ctlr->pool = pool;
drivers/net/ethernet/ti/davinci_cpdma.c
223
pool->mem_size = cpdma_params->desc_mem_size;
drivers/net/ethernet/ti/davinci_cpdma.c
224
pool->desc_size = ALIGN(sizeof(struct cpdma_desc),
drivers/net/ethernet/ti/davinci_cpdma.c
226
pool->num_desc = pool->mem_size / pool->desc_size;
drivers/net/ethernet/ti/davinci_cpdma.c
234
pool->num_desc = cpdma_params->descs_pool_size;
drivers/net/ethernet/ti/davinci_cpdma.c
235
pool->mem_size = pool->desc_size * pool->num_desc;
drivers/net/ethernet/ti/davinci_cpdma.c
236
if (pool->mem_size > cpdma_params->desc_mem_size)
drivers/net/ethernet/ti/davinci_cpdma.c
240
pool->gen_pool = devm_gen_pool_create(ctlr->dev, ilog2(pool->desc_size),
drivers/net/ethernet/ti/davinci_cpdma.c
242
if (IS_ERR(pool->gen_pool)) {
drivers/net/ethernet/ti/davinci_cpdma.c
243
ret = PTR_ERR(pool->gen_pool);
drivers/net/ethernet/ti/davinci_cpdma.c
249
pool->phys = cpdma_params->desc_mem_phys;
drivers/net/ethernet/ti/davinci_cpdma.c
250
pool->iomap = devm_ioremap(ctlr->dev, pool->phys,
drivers/net/ethernet/ti/davinci_cpdma.c
251
pool->mem_size);
drivers/net/ethernet/ti/davinci_cpdma.c
252
pool->hw_addr = cpdma_params->desc_hw_addr;
drivers/net/ethernet/ti/davinci_cpdma.c
254
pool->cpumap = dma_alloc_coherent(ctlr->dev, pool->mem_size,
drivers/net/ethernet/ti/davinci_cpdma.c
255
&pool->hw_addr, GFP_KERNEL);
drivers/net/ethernet/ti/davinci_cpdma.c
256
pool->iomap = (void __iomem __force *)pool->cpumap;
drivers/net/ethernet/ti/davinci_cpdma.c
257
pool->phys = pool->hw_addr; /* assumes no IOMMU, don't use this value */
drivers/net/ethernet/ti/davinci_cpdma.c
260
if (!pool->iomap)
drivers/net/ethernet/ti/davinci_cpdma.c
263
ret = gen_pool_add_virt(pool->gen_pool, (unsigned long)pool->iomap,
drivers/net/ethernet/ti/davinci_cpdma.c
264
pool->phys, pool->mem_size, -1);
drivers/net/ethernet/ti/davinci_cpdma.c
275
ctlr->pool = NULL;
drivers/net/ethernet/ti/davinci_cpdma.c
279
static inline dma_addr_t desc_phys(struct cpdma_desc_pool *pool,
drivers/net/ethernet/ti/davinci_cpdma.c
284
return pool->hw_addr + (__force long)desc - (__force long)pool->iomap;
drivers/net/ethernet/ti/davinci_cpdma.c
288
desc_from_phys(struct cpdma_desc_pool *pool, dma_addr_t dma)
drivers/net/ethernet/ti/davinci_cpdma.c
290
return dma ? pool->iomap + dma - pool->hw_addr : NULL;
drivers/net/ethernet/ti/davinci_cpdma.c
294
cpdma_desc_alloc(struct cpdma_desc_pool *pool)
drivers/net/ethernet/ti/davinci_cpdma.c
297
gen_pool_alloc(pool->gen_pool, pool->desc_size);
drivers/net/ethernet/ti/davinci_cpdma.c
300
static void cpdma_desc_free(struct cpdma_desc_pool *pool,
drivers/net/ethernet/ti/davinci_cpdma.c
303
gen_pool_free(pool->gen_pool, (unsigned long)desc, pool->desc_size);
drivers/net/ethernet/ti/davinci_cpdma.c
378
struct cpdma_desc_pool *pool = ctlr->pool;
drivers/net/ethernet/ti/davinci_cpdma.c
393
chan_write(chan, hdp, desc_phys(pool, chan->head));
drivers/net/ethernet/ti/davinci_cpdma.c
527
ctlr->num_tx_desc = ctlr->pool->num_desc / 2;
drivers/net/ethernet/ti/davinci_cpdma.c
528
ctlr->num_rx_desc = ctlr->pool->num_desc - ctlr->num_tx_desc;
drivers/net/ethernet/ti/davinci_cpdma.c
985
struct cpdma_desc_pool *pool = ctlr->pool;
drivers/net/ethernet/ti/davinci_cpdma.c
989
desc_dma = desc_phys(pool, desc);
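The cpdma pool above layers gen_pool over a single coherent DMA block: the block is mapped once, registered with gen_pool_add_virt() so virtual/DMA translation stays a constant offset, and descriptors are then handed out one desc_size chunk at a time. A hedged sketch of that setup; my_desc_pool_init() is a hypothetical condensation of the probe path:

#include <linux/dma-mapping.h>
#include <linux/genalloc.h>
#include <linux/log2.h>

static struct gen_pool *my_desc_pool_init(struct device *dev, size_t desc_size,
                                          size_t mem_size, void **cpumem,
                                          dma_addr_t *hw_addr)
{
        struct gen_pool *gp;
        int ret;

        /* min_alloc_order = ilog2(desc_size): every chunk is one descriptor */
        gp = devm_gen_pool_create(dev, ilog2(desc_size), -1, NULL);
        if (IS_ERR(gp))
                return gp;

        *cpumem = dma_alloc_coherent(dev, mem_size, hw_addr, GFP_KERNEL);
        if (!*cpumem)
                return ERR_PTR(-ENOMEM);

        /* register the virtual<->DMA window; descriptors then come from
         * gen_pool_alloc(gp, desc_size) / gen_pool_free(gp, addr, desc_size) */
        ret = gen_pool_add_virt(gp, (unsigned long)*cpumem, *hw_addr,
                                mem_size, -1);
        return ret ? ERR_PTR(ret) : gp;
}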
drivers/net/ethernet/ti/icssg/icssg_common.c
100
struct xsk_buff_pool *pool = tx_chn->xsk_pool;
drivers/net/ethernet/ti/icssg/icssg_common.c
1023
struct page_pool *pool;
drivers/net/ethernet/ti/icssg/icssg_common.c
1031
pool = rx_chn->pg_pool;
drivers/net/ethernet/ti/icssg/icssg_common.c
1053
page_pool_dma_sync_for_cpu(pool, page, 0, PAGE_SIZE);
drivers/net/ethernet/ti/icssg/icssg_common.c
1064
new_page = page_pool_dev_alloc_pages(pool);
drivers/net/ethernet/ti/icssg/icssg_common.c
1078
page_pool_recycle_direct(pool, page);
drivers/net/ethernet/ti/icssg/icssg_common.c
1094
page_pool_recycle_direct(pool, page);
drivers/net/ethernet/ti/icssg/icssg_common.c
1122
page_pool_recycle_direct(pool, new_page);
drivers/net/ethernet/ti/icssg/icssg_common.c
1135
struct page_pool *pool;
drivers/net/ethernet/ti/icssg/icssg_common.c
1139
pool = rx_chn->pg_pool;
drivers/net/ethernet/ti/icssg/icssg_common.c
1147
page_pool_recycle_direct(pool, page);
drivers/net/ethernet/ti/icssg/icssg_common.c
122
if (!xsk_tx_peek_desc(pool, &xdp_desc))
drivers/net/ethernet/ti/icssg/icssg_common.c
125
dma_buf = xsk_buff_raw_get_dma(pool, xdp_desc.addr);
drivers/net/ethernet/ti/icssg/icssg_common.c
127
xsk_buff_raw_dma_sync_for_device(pool, dma_buf, pkt_len);
drivers/net/ethernet/ti/icssg/icssg_common.c
473
struct page_pool *pool;
drivers/net/ethernet/ti/icssg/icssg_common.c
484
pool = page_pool_create(&pp_params);
drivers/net/ethernet/ti/icssg/icssg_common.c
485
if (IS_ERR(pool))
drivers/net/ethernet/ti/icssg/icssg_common.c
488
return pool;
drivers/net/ethernet/ti/icssg/icssg_common.c
500
struct page_pool *pool;
drivers/net/ethernet/ti/icssg/icssg_common.c
543
pool = prueth_create_page_pool(emac, rx_chn->dma_dev, rx_chn->descs_num);
drivers/net/ethernet/ti/icssg/icssg_common.c
544
if (IS_ERR(pool)) {
drivers/net/ethernet/ti/icssg/icssg_common.c
545
ret = PTR_ERR(pool);
drivers/net/ethernet/ti/icssg/icssg_common.c
549
rx_chn->pg_pool = pool;
drivers/net/ethernet/ti/icssg/icssg_prueth.c
1245
struct xsk_buff_pool *pool, u16 queue_id)
drivers/net/ethernet/ti/icssg/icssg_prueth.c
1257
frame_size = xsk_pool_get_rx_frame_size(pool);
drivers/net/ethernet/ti/icssg/icssg_prueth.c
1261
ret = xsk_pool_dma_map(pool, rx_chn->dma_dev, PRUETH_RX_DMA_ATTR);
drivers/net/ethernet/ti/icssg/icssg_prueth.c
1299
struct xsk_buff_pool *pool;
drivers/net/ethernet/ti/icssg/icssg_prueth.c
1313
pool = xsk_get_pool_from_qid(emac->ndev, queue_id);
drivers/net/ethernet/ti/icssg/icssg_prueth.c
1314
if (!pool) {
drivers/net/ethernet/ti/icssg/icssg_prueth.c
1327
xsk_pool_dma_unmap(pool, PRUETH_RX_DMA_ATTR);
drivers/net/ethernet/ti/icssg/icssg_prueth.c
1362
return bpf->xsk.pool ?
drivers/net/ethernet/ti/icssg/icssg_prueth.c
1363
prueth_xsk_pool_enable(emac, bpf->xsk.pool, bpf->xsk.queue_id) :
drivers/net/ethernet/ti/icssg/icssg_prueth.c
595
struct page_pool *pool = emac->rx_chns.pg_pool;
drivers/net/ethernet/ti/icssg/icssg_prueth.c
609
ret = xdp_rxq_info_reg_mem_model(rxq, MEM_TYPE_PAGE_POOL, pool);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
101
dma_free_coherent(pool->dev, pool->mem_size, pool->cpumem,
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
102
pool->dma_addr);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
104
kfree(pool->desc_infos);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
106
gen_pool_destroy(pool->gen_pool); /* frees pool->name */
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
108
kfree(pool);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
113
dma_addr_t k3_cppi_desc_pool_virt2dma(struct k3_cppi_desc_pool *pool,
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
116
return addr ? pool->dma_addr + (addr - pool->cpumem) : 0;
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
120
void *k3_cppi_desc_pool_dma2virt(struct k3_cppi_desc_pool *pool, dma_addr_t dma)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
122
return dma ? pool->cpumem + (dma - pool->dma_addr) : NULL;
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
126
void *k3_cppi_desc_pool_alloc(struct k3_cppi_desc_pool *pool)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
128
return (void *)gen_pool_alloc(pool->gen_pool, pool->desc_size);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
132
void k3_cppi_desc_pool_free(struct k3_cppi_desc_pool *pool, void *addr)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
134
gen_pool_free(pool->gen_pool, (unsigned long)addr, pool->desc_size);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
138
size_t k3_cppi_desc_pool_avail(struct k3_cppi_desc_pool *pool)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
140
return gen_pool_avail(pool->gen_pool) / pool->desc_size;
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
144
size_t k3_cppi_desc_pool_desc_size(const struct k3_cppi_desc_pool *pool)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
146
return pool->desc_size;
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
150
void *k3_cppi_desc_pool_cpuaddr(const struct k3_cppi_desc_pool *pool)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
152
return pool->cpumem;
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
156
void k3_cppi_desc_pool_desc_info_set(struct k3_cppi_desc_pool *pool,
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
159
pool->desc_infos[desc_idx] = info;
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
163
void *k3_cppi_desc_pool_desc_info(const struct k3_cppi_desc_pool *pool,
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
166
return pool->desc_infos[desc_idx];
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
28
void k3_cppi_desc_pool_destroy(struct k3_cppi_desc_pool *pool)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
30
if (!pool)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
33
WARN(gen_pool_size(pool->gen_pool) != gen_pool_avail(pool->gen_pool),
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
35
gen_pool_size(pool->gen_pool),
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
36
gen_pool_avail(pool->gen_pool));
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
37
if (pool->cpumem)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
38
dma_free_coherent(pool->dev, pool->mem_size, pool->cpumem,
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
39
pool->dma_addr);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
41
kfree(pool->desc_infos);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
43
gen_pool_destroy(pool->gen_pool); /* frees pool->name */
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
45
kfree(pool);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
54
struct k3_cppi_desc_pool *pool;
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
58
pool = kzalloc_obj(*pool);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
59
if (!pool)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
62
pool->dev = dev;
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
63
pool->desc_size = roundup_pow_of_two(desc_size);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
64
pool->num_desc = size;
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
65
pool->mem_size = pool->num_desc * pool->desc_size;
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
67
pool_name = kstrdup_const(name ? name : dev_name(pool->dev),
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
72
pool->gen_pool = gen_pool_create(ilog2(pool->desc_size), -1);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
73
if (!pool->gen_pool) {
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
78
pool->gen_pool->name = pool_name;
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
80
pool->desc_infos = kzalloc_objs(*pool->desc_infos, pool->num_desc);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
81
if (!pool->desc_infos)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
84
pool->cpumem = dma_alloc_coherent(pool->dev, pool->mem_size,
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
85
&pool->dma_addr, GFP_KERNEL);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
87
if (!pool->cpumem)
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
90
ret = gen_pool_add_virt(pool->gen_pool, (unsigned long)pool->cpumem,
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
91
(phys_addr_t)pool->dma_addr, pool->mem_size,
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
94
dev_err(pool->dev, "pool add failed %d\n", ret);
drivers/net/ethernet/ti/k3-cppi-desc-pool.c
98
return pool;
drivers/net/ethernet/ti/k3-cppi-desc-pool.h
15
void k3_cppi_desc_pool_destroy(struct k3_cppi_desc_pool *pool);
drivers/net/ethernet/ti/k3-cppi-desc-pool.h
23
k3_cppi_desc_pool_virt2dma(struct k3_cppi_desc_pool *pool, void *addr);
drivers/net/ethernet/ti/k3-cppi-desc-pool.h
25
k3_cppi_desc_pool_dma2virt(struct k3_cppi_desc_pool *pool, dma_addr_t dma);
drivers/net/ethernet/ti/k3-cppi-desc-pool.h
26
void *k3_cppi_desc_pool_alloc(struct k3_cppi_desc_pool *pool);
drivers/net/ethernet/ti/k3-cppi-desc-pool.h
27
void k3_cppi_desc_pool_free(struct k3_cppi_desc_pool *pool, void *addr);
drivers/net/ethernet/ti/k3-cppi-desc-pool.h
28
size_t k3_cppi_desc_pool_avail(struct k3_cppi_desc_pool *pool);
drivers/net/ethernet/ti/k3-cppi-desc-pool.h
29
size_t k3_cppi_desc_pool_desc_size(const struct k3_cppi_desc_pool *pool);
drivers/net/ethernet/ti/k3-cppi-desc-pool.h
30
void *k3_cppi_desc_pool_cpuaddr(const struct k3_cppi_desc_pool *pool);
drivers/net/ethernet/ti/k3-cppi-desc-pool.h
31
void k3_cppi_desc_pool_desc_info_set(struct k3_cppi_desc_pool *pool,
drivers/net/ethernet/ti/k3-cppi-desc-pool.h
33
void *k3_cppi_desc_pool_desc_info(const struct k3_cppi_desc_pool *pool,
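The k3_cppi_desc_pool_virt2dma()/dma2virt() pair works because the whole pool is one dma_alloc_coherent() block, so the CPU and device addresses differ by a constant. A minimal restatement with illustrative names:

#include <linux/types.h>

/* One coherent block: the CPU pointer and the DMA address name the same
 * bytes, so translation is a constant offset in both directions. */
struct my_pool {
        void            *cpumem;        /* CPU-side base */
        dma_addr_t      dma_addr;       /* device-side base */
};

static dma_addr_t my_virt2dma(struct my_pool *pool, void *addr)
{
        return addr ? pool->dma_addr + (addr - pool->cpumem) : 0;
}

static void *my_dma2virt(struct my_pool *pool, dma_addr_t dma)
{
        return dma ? pool->cpumem + (dma - pool->dma_addr) : NULL;
}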
drivers/net/ethernet/via/via-velocity.c
1484
void *pool;
drivers/net/ethernet/via/via-velocity.c
1493
pool = dma_alloc_coherent(vptr->dev, tx_ring_size * vptr->tx.numq +
drivers/net/ethernet/via/via-velocity.c
1495
if (!pool) {
drivers/net/ethernet/via/via-velocity.c
1501
vptr->rx.ring = pool;
drivers/net/ethernet/via/via-velocity.c
1504
pool += rx_ring_size;
drivers/net/ethernet/via/via-velocity.c
1508
vptr->tx.rings[i] = pool;
drivers/net/ethernet/via/via-velocity.c
1510
pool += tx_ring_size;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1006
int wx_add_mac_filter(struct wx *wx, u8 *addr, u16 pool)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1016
if (wx->mac_table[i].pools != (1ULL << pool)) {
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1018
wx->mac_table[i].pools |= (1ULL << pool);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1030
wx->mac_table[i].pools |= (1ULL << pool);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1037
int wx_del_mac_filter(struct wx *wx, u8 *addr, u16 pool)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1050
wx->mac_table[i].pools &= ~(1ULL << pool);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1083
static int wx_write_uc_addr_list(struct net_device *netdev, int pool)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1096
wx_del_mac_filter(wx, ha->addr, pool);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1097
wx_add_mac_filter(wx, ha->addr, pool);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1486
u16 pool = wx->num_rx_pools;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1497
while (pool--)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
1498
wr32m(wx, WX_PSR_VM_L2CTL(pool),
drivers/net/ethernet/wangxun/libwx/wx_hw.c
2174
int pool;
drivers/net/ethernet/wangxun/libwx/wx_hw.c
2183
for_each_set_bit(pool, &wx->fwd_bitmask, 8)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
2184
wr32(wx, WX_RDB_PL_CFG(VMDQ_P(pool)), psrtype);
drivers/net/ethernet/wangxun/libwx/wx_hw.c
2191
for_each_set_bit(pool, &wx->fwd_bitmask, 32)
drivers/net/ethernet/wangxun/libwx/wx_hw.c
2192
wr32(wx, WX_RDB_PL_CFG(VMDQ_P(pool)), psrtype);
drivers/net/ethernet/wangxun/libwx/wx_hw.h
29
int wx_add_mac_filter(struct wx *wx, u8 *addr, u16 pool);
drivers/net/ethernet/wangxun/libwx/wx_hw.h
30
int wx_del_mac_filter(struct wx *wx, u8 *addr, u16 pool);
drivers/net/ipa/gsi_trans.c
113
pool->base = virt;
drivers/net/ipa/gsi_trans.c
115
pool->count = alloc_size / size;
drivers/net/ipa/gsi_trans.c
116
pool->free = 0;
drivers/net/ipa/gsi_trans.c
117
pool->max_alloc = max_alloc;
drivers/net/ipa/gsi_trans.c
118
pool->size = size;
drivers/net/ipa/gsi_trans.c
119
pool->addr = 0; /* Only used for DMA pools */
drivers/net/ipa/gsi_trans.c
124
void gsi_trans_pool_exit(struct gsi_trans_pool *pool)
drivers/net/ipa/gsi_trans.c
126
kfree(pool->base);
drivers/net/ipa/gsi_trans.c
127
memset(pool, 0, sizeof(*pool));
drivers/net/ipa/gsi_trans.c
135
int gsi_trans_pool_init_dma(struct device *dev, struct gsi_trans_pool *pool,
drivers/net/ipa/gsi_trans.c
165
pool->base = virt;
drivers/net/ipa/gsi_trans.c
166
pool->count = total_size / size;
drivers/net/ipa/gsi_trans.c
167
pool->free = 0;
drivers/net/ipa/gsi_trans.c
168
pool->size = size;
drivers/net/ipa/gsi_trans.c
169
pool->max_alloc = max_alloc;
drivers/net/ipa/gsi_trans.c
170
pool->addr = addr;
drivers/net/ipa/gsi_trans.c
175
void gsi_trans_pool_exit_dma(struct device *dev, struct gsi_trans_pool *pool)
drivers/net/ipa/gsi_trans.c
177
size_t total_size = pool->count * pool->size;
drivers/net/ipa/gsi_trans.c
179
dma_free_coherent(dev, total_size, pool->base, pool->addr);
drivers/net/ipa/gsi_trans.c
180
memset(pool, 0, sizeof(*pool));
drivers/net/ipa/gsi_trans.c
184
static u32 gsi_trans_pool_alloc_common(struct gsi_trans_pool *pool, u32 count)
drivers/net/ipa/gsi_trans.c
189
WARN_ON(count > pool->max_alloc);
drivers/net/ipa/gsi_trans.c
192
if (count > pool->count - pool->free)
drivers/net/ipa/gsi_trans.c
193
pool->free = 0;
drivers/net/ipa/gsi_trans.c
195
offset = pool->free * pool->size;
drivers/net/ipa/gsi_trans.c
196
pool->free += count;
drivers/net/ipa/gsi_trans.c
197
memset(pool->base + offset, 0, count * pool->size);
drivers/net/ipa/gsi_trans.c
203
void *gsi_trans_pool_alloc(struct gsi_trans_pool *pool, u32 count)
drivers/net/ipa/gsi_trans.c
205
return pool->base + gsi_trans_pool_alloc_common(pool, count);
drivers/net/ipa/gsi_trans.c
209
void *gsi_trans_pool_alloc_dma(struct gsi_trans_pool *pool, dma_addr_t *addr)
drivers/net/ipa/gsi_trans.c
211
u32 offset = gsi_trans_pool_alloc_common(pool, 1);
drivers/net/ipa/gsi_trans.c
213
*addr = pool->addr + offset;
drivers/net/ipa/gsi_trans.c
215
return pool->base + offset;
drivers/net/ipa/gsi_trans.c
87
int gsi_trans_pool_init(struct gsi_trans_pool *pool, size_t size, u32 count,
drivers/net/ipa/gsi_trans.h
113
int gsi_trans_pool_init_dma(struct device *dev, struct gsi_trans_pool *pool,
drivers/net/ipa/gsi_trans.h
125
void *gsi_trans_pool_alloc_dma(struct gsi_trans_pool *pool, dma_addr_t *addr);
drivers/net/ipa/gsi_trans.h
132
void gsi_trans_pool_exit_dma(struct device *dev, struct gsi_trans_pool *pool);
drivers/net/ipa/gsi_trans.h
83
int gsi_trans_pool_init(struct gsi_trans_pool *pool, size_t size, u32 count,
drivers/net/ipa/gsi_trans.h
93
void *gsi_trans_pool_alloc(struct gsi_trans_pool *pool, u32 count);
drivers/net/ipa/gsi_trans.h
99
void gsi_trans_pool_exit(struct gsi_trans_pool *pool);
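The gsi_trans pool allocates by simply advancing an index and wraps back to zero when the remaining slots cannot satisfy a request; this is safe only because IPA transactions are allocated and completed in FIFO order, so the wrapped region has always drained. A sketch of the allocator core mirroring gsi_trans_pool_alloc_common() above (struct and field names are illustrative):

#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/types.h>

struct my_trans_pool {
        void    *base;                  /* backing memory */
        u32     count;                  /* total elements */
        u32     free;                   /* index of next free element */
        u32     size;                   /* element size in bytes */
        u32     max_alloc;              /* largest allowed request */
};

static void *my_trans_pool_alloc(struct my_trans_pool *pool, u32 count)
{
        u32 offset;

        WARN_ON(count > pool->max_alloc);

        /* not enough room at the end: wrap; FIFO completion guarantees the
         * start of the buffer has already been consumed */
        if (count > pool->count - pool->free)
                pool->free = 0;

        offset = pool->free * pool->size;
        pool->free += count;
        memset(pool->base + offset, 0, count * pool->size);

        return pool->base + offset;
}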
drivers/net/netdevsim/netdev.c
476
struct page_pool *pool;
drivers/net/netdevsim/netdev.c
478
pool = page_pool_create(&params);
drivers/net/netdevsim/netdev.c
479
if (IS_ERR(pool))
drivers/net/netdevsim/netdev.c
480
return PTR_ERR(pool);
drivers/net/netdevsim/netdev.c
482
*p = pool;
drivers/net/virtio_net.c
1444
struct xsk_buff_pool *pool, gfp_t gfp)
drivers/net/virtio_net.c
1454
num = xsk_buff_alloc_batch(pool, xsk_buffs, rq->vq->num_free);
drivers/net/virtio_net.c
1458
len = xsk_pool_get_rx_frame_size(pool) + vi->hdr_len;
drivers/net/virtio_net.c
1495
struct xsk_buff_pool *pool,
drivers/net/virtio_net.c
1503
addr = xsk_buff_raw_get_dma(pool, desc->addr);
drivers/net/virtio_net.c
1504
xsk_buff_raw_dma_sync_for_device(pool, addr, desc->len);
drivers/net/virtio_net.c
1516
struct xsk_buff_pool *pool,
drivers/net/virtio_net.c
1520
struct xdp_desc *descs = pool->tx_descs;
drivers/net/virtio_net.c
1527
nb_pkts = xsk_tx_peek_release_desc_batch(pool, budget);
drivers/net/virtio_net.c
1532
err = virtnet_xsk_xmit_one(sq, pool, &descs[i]);
drivers/net/virtio_net.c
1547
static bool virtnet_xsk_xmit(struct send_queue *sq, struct xsk_buff_pool *pool,
drivers/net/virtio_net.c
1564
sent = virtnet_xsk_xmit_batch(sq, pool, budget, &kicks);
drivers/net/virtio_net.c
1583
if (xsk_uses_need_wakeup(pool))
drivers/net/virtio_net.c
1584
xsk_set_tx_need_wakeup(pool);
drivers/net/virtio_net.c
5782
struct xsk_buff_pool *pool)
drivers/net/virtio_net.c
5788
if (pool) {
drivers/net/virtio_net.c
5798
xsk_pool_set_rxq_info(pool, &rq->xsk_rxq_info);
drivers/net/virtio_net.c
5807
pool = NULL;
drivers/net/virtio_net.c
5810
rq->xsk_pool = pool;
drivers/net/virtio_net.c
5814
if (pool)
drivers/net/virtio_net.c
5824
struct xsk_buff_pool *pool)
drivers/net/virtio_net.c
5836
pool = NULL;
drivers/net/virtio_net.c
5839
sq->xsk_pool = pool;
drivers/net/virtio_net.c
5847
struct xsk_buff_pool *pool,
drivers/net/virtio_net.c
5857
if (vi->hdr_len > xsk_pool_get_headroom(pool))
drivers/net/virtio_net.c
5899
err = xsk_pool_dma_map(pool, dma_dev, 0);
drivers/net/virtio_net.c
5903
err = virtnet_rq_bind_xsk_pool(vi, rq, pool);
drivers/net/virtio_net.c
5907
err = virtnet_sq_bind_xsk_pool(vi, sq, pool);
drivers/net/virtio_net.c
5921
xsk_pool_dma_unmap(pool, 0);
drivers/net/virtio_net.c
5933
struct xsk_buff_pool *pool;
drivers/net/virtio_net.c
5944
pool = rq->xsk_pool;
drivers/net/virtio_net.c
5949
xsk_pool_dma_unmap(pool, 0);
drivers/net/virtio_net.c
5960
if (xdp->xsk.pool)
drivers/net/virtio_net.c
5961
return virtnet_xsk_pool_enable(dev, xdp->xsk.pool,
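The virtio_net XSK TX path above is the usual zero-copy loop: peek a descriptor from the AF_XDP ring, translate its address to DMA with xsk_buff_raw_get_dma(), sync for the device, enqueue, and release the peeked slots in one batch. A hedged sketch; my_hw_xmit() is a stand-in for the driver's ring enqueue, not a real API:

#include <net/xdp_sock_drv.h>

/* Stand-in for the driver-specific enqueue (virtnet_xsk_xmit_one() above). */
static void my_hw_xmit(dma_addr_t dma, u32 len)
{
        /* post (dma, len) to the device TX queue */
}

static int my_xsk_xmit(struct xsk_buff_pool *pool, int budget)
{
        struct xdp_desc desc;
        int sent = 0;

        while (sent < budget && xsk_tx_peek_desc(pool, &desc)) {
                dma_addr_t dma = xsk_buff_raw_get_dma(pool, desc.addr);

                xsk_buff_raw_dma_sync_for_device(pool, dma, desc.len);
                my_hw_xmit(dma, desc.len);
                sent++;
        }
        if (sent)
                xsk_tx_release(pool);   /* publish consumed TX descriptors */
        return sent;
}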
drivers/net/wireless/ath/wcn36xx/dxe.c
237
struct wcn36xx_dxe_mem_pool *pool)
drivers/net/wireless/ath/wcn36xx/dxe.c
239
int i, chunk_size = pool->chunk_size;
drivers/net/wireless/ath/wcn36xx/dxe.c
240
dma_addr_t bd_phy_addr = pool->phy_addr;
drivers/net/wireless/ath/wcn36xx/dxe.c
241
void *bd_cpu_addr = pool->virt_addr;
drivers/net/wireless/intel/ipw2x00/ipw2200.c
3145
struct dma_pool *pool;
drivers/net/wireless/intel/ipw2x00/ipw2200.c
3161
pool = dma_pool_create("ipw2200", &priv->pci_dev->dev, CB_MAX_LENGTH, 0,
drivers/net/wireless/intel/ipw2x00/ipw2200.c
3163
if (!pool) {
drivers/net/wireless/intel/ipw2x00/ipw2200.c
3189
virts[total_nr] = dma_pool_alloc(pool, GFP_KERNEL,
drivers/net/wireless/intel/ipw2x00/ipw2200.c
3233
dma_pool_free(pool, virts[i], phys[i]);
drivers/net/wireless/intel/ipw2x00/ipw2200.c
3235
dma_pool_destroy(pool);
drivers/net/wireless/intel/ipw2x00/ipw2200.c
3396
if (rxq->pool[i].skb != NULL) {
drivers/net/wireless/intel/ipw2x00/ipw2200.c
3398
rxq->pool[i].dma_addr,
drivers/net/wireless/intel/ipw2x00/ipw2200.c
3400
dev_kfree_skb_irq(rxq->pool[i].skb);
drivers/net/wireless/intel/ipw2x00/ipw2200.c
3401
rxq->pool[i].skb = NULL;
drivers/net/wireless/intel/ipw2x00/ipw2200.c
3403
list_add_tail(&rxq->pool[i].list, &rxq->rx_used);
drivers/net/wireless/intel/ipw2x00/ipw2200.c
5187
if (rxq->pool[i].skb != NULL) {
drivers/net/wireless/intel/ipw2x00/ipw2200.c
5189
rxq->pool[i].dma_addr,
drivers/net/wireless/intel/ipw2x00/ipw2200.c
5191
dev_kfree_skb(rxq->pool[i].skb);
drivers/net/wireless/intel/ipw2x00/ipw2200.c
5214
list_add_tail(&rxq->pool[i].list, &rxq->rx_used);
drivers/net/wireless/intel/ipw2x00/ipw2200.h
705
struct ipw_rx_mem_buffer pool[RX_QUEUE_SIZE + RX_FREE_BUFFERS];
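The ipw2200 firmware loader above draws many same-size coherent command blocks from a dma_pool. The lifecycle is strict: every dma_pool_alloc() must be paired with a dma_pool_free() before dma_pool_destroy(). A minimal sketch (pool name and block size are illustrative):

#include <linux/dmapool.h>

static int my_dma_pool_demo(struct device *dev)
{
        struct dma_pool *pool;
        dma_addr_t phys;
        void *virt;

        /* name, device, block size, alignment, boundary */
        pool = dma_pool_create("demo", dev, 512, 0, 0);
        if (!pool)
                return -ENOMEM;

        virt = dma_pool_alloc(pool, GFP_KERNEL, &phys);
        if (virt)
                dma_pool_free(pool, virt, phys);        /* must pair with alloc */

        dma_pool_destroy(pool); /* all blocks must have been freed first */
        return 0;
}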
drivers/net/wireless/intel/iwlegacy/3945-mac.c
1062
if (rxq->pool[i].page != NULL) {
drivers/net/wireless/intel/iwlegacy/3945-mac.c
1064
rxq->pool[i].page_dma,
drivers/net/wireless/intel/iwlegacy/3945-mac.c
1067
__il_free_pages(il, rxq->pool[i].page);
drivers/net/wireless/intel/iwlegacy/3945-mac.c
1068
rxq->pool[i].page = NULL;
drivers/net/wireless/intel/iwlegacy/3945-mac.c
1070
list_add_tail(&rxq->pool[i].list, &rxq->rx_used);
drivers/net/wireless/intel/iwlegacy/3945-mac.c
1112
if (rxq->pool[i].page != NULL) {
drivers/net/wireless/intel/iwlegacy/3945-mac.c
1114
rxq->pool[i].page_dma,
drivers/net/wireless/intel/iwlegacy/3945-mac.c
1117
__il_free_pages(il, rxq->pool[i].page);
drivers/net/wireless/intel/iwlegacy/3945-mac.c
1118
rxq->pool[i].page = NULL;
drivers/net/wireless/intel/iwlegacy/4965-mac.c
101
__il_free_pages(il, rxq->pool[i].page);
drivers/net/wireless/intel/iwlegacy/4965-mac.c
102
rxq->pool[i].page = NULL;
drivers/net/wireless/intel/iwlegacy/4965-mac.c
104
list_add_tail(&rxq->pool[i].list, &rxq->rx_used);
drivers/net/wireless/intel/iwlegacy/4965-mac.c
411
if (rxq->pool[i].page != NULL) {
drivers/net/wireless/intel/iwlegacy/4965-mac.c
413
rxq->pool[i].page_dma,
drivers/net/wireless/intel/iwlegacy/4965-mac.c
416
__il_free_pages(il, rxq->pool[i].page);
drivers/net/wireless/intel/iwlegacy/4965-mac.c
417
rxq->pool[i].page = NULL;
drivers/net/wireless/intel/iwlegacy/4965-mac.c
96
if (rxq->pool[i].page != NULL) {
drivers/net/wireless/intel/iwlegacy/4965-mac.c
98
rxq->pool[i].page_dma,
drivers/net/wireless/intel/iwlegacy/common.c
2618
list_add_tail(&rxq->pool[i].list, &rxq->rx_used);
drivers/net/wireless/intel/iwlegacy/common.h
607
struct il_rx_buf pool[RX_QUEUE_SIZE + RX_FREE_BUFFERS];
drivers/net/wireless/st/cw1200/queue.c
182
queue->pool = kzalloc_objs(struct cw1200_queue_item, capacity);
drivers/net/wireless/st/cw1200/queue.c
183
if (!queue->pool)
drivers/net/wireless/st/cw1200/queue.c
188
kfree(queue->pool);
drivers/net/wireless/st/cw1200/queue.c
189
queue->pool = NULL;
drivers/net/wireless/st/cw1200/queue.c
194
list_add_tail(&queue->pool[i].head, &queue->free_pool);
drivers/net/wireless/st/cw1200/queue.c
246
kfree(queue->pool);
drivers/net/wireless/st/cw1200/queue.c
248
queue->pool = NULL;
drivers/net/wireless/st/cw1200/queue.c
300
item - queue->pool);
drivers/net/wireless/st/cw1200/queue.c
379
item = &queue->pool[item_id];
drivers/net/wireless/st/cw1200/queue.c
423
item = &queue->pool[item_id];
drivers/net/wireless/st/cw1200/queue.c
472
item = &queue->pool[item_id];
drivers/net/wireless/st/cw1200/queue.h
32
struct cw1200_queue_item *pool;
drivers/nvme/host/apple.c
429
struct dma_pool *pool;
drivers/nvme/host/apple.c
462
pool = anv->prp_small_pool;
drivers/nvme/host/apple.c
465
pool = anv->prp_page_pool;
drivers/nvme/host/apple.c
469
prp_list = dma_pool_alloc(pool, GFP_ATOMIC, &prp_dma);
drivers/nvme/host/apple.c
482
prp_list = dma_pool_alloc(pool, GFP_ATOMIC, &prp_dma);
drivers/nvme/host/rdma.c
1237
struct list_head *pool = &queue->qp->rdma_mrs;
drivers/nvme/host/rdma.c
1243
pool = &queue->qp->sig_mrs;
drivers/nvme/host/rdma.c
1246
ib_mr_pool_put(queue->qp, pool, req->mr);
drivers/pci/p2pdma.c
125
kaddr = (void *)gen_pool_alloc_owner(p2pdma->pool, len, (void **)&ref);
drivers/pci/p2pdma.c
154
gen_pool_free(p2pdma->pool, (uintptr_t)kaddr, len);
drivers/pci/p2pdma.c
175
gen_pool_free(p2pdma->pool, (uintptr_t)kaddr, len);
drivers/pci/p2pdma.c
220
gen_pool_free_owner(p2pdma->pool, (uintptr_t)page_to_virt(page),
drivers/pci/p2pdma.c
240
if (p2pdma->pool)
drivers/pci/p2pdma.c
244
if (!p2pdma->pool)
drivers/pci/p2pdma.c
247
gen_pool_destroy(p2pdma->pool);
drivers/pci/p2pdma.c
25
struct gen_pool *pool;
drivers/pci/p2pdma.c
339
if (p2pdma->pool)
drivers/pci/p2pdma.c
343
p2pdma->pool = gen_pool_create(PAGE_SHIFT, dev_to_node(&pdev->dev));
drivers/pci/p2pdma.c
344
if (!p2pdma->pool)
drivers/pci/p2pdma.c
354
gen_pool_destroy(p2pdma->pool);
drivers/pci/p2pdma.c
355
p2pdma->pool = NULL;
drivers/pci/p2pdma.c
445
error = gen_pool_add_owner(p2pdma->pool, (unsigned long)addr,
drivers/pci/p2pdma.c
50
if (p2pdma && p2pdma->pool)
drivers/pci/p2pdma.c
51
size = gen_pool_size(p2pdma->pool);
drivers/pci/p2pdma.c
67
if (p2pdma && p2pdma->pool)
drivers/pci/p2pdma.c
68
avail = gen_pool_avail(p2pdma->pool);
drivers/pci/p2pdma.c
938
ret = (void *)gen_pool_alloc_owner(p2pdma->pool, size, (void **) &ref);
drivers/pci/p2pdma.c
943
gen_pool_free(p2pdma->pool, (unsigned long) ret, size);
drivers/pci/p2pdma.c
963
gen_pool_free_owner(p2pdma->pool, (uintptr_t)addr, size,
drivers/pci/p2pdma.c
991
return gen_pool_virt_to_phys(p2pdma->pool, (unsigned long)addr);
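p2pdma uses the *_owner variants of the gen_pool API so each chunk carries an opaque cookie (in p2pdma, a reference on the publishing device) that the free path can retrieve and drop. A short sketch of the pairing, with illustrative wrapper names:

#include <linux/genalloc.h>

/* Allocation hands back the chunk's owner cookie alongside the address... */
static void *my_p2p_alloc(struct gen_pool *pool, size_t size, void **owner)
{
        return (void *)gen_pool_alloc_owner(pool, size, owner);
}

/* ...and free returns it, so the caller can drop the reference it pinned. */
static void my_p2p_free(struct gen_pool *pool, void *addr, size_t size,
                        void **owner)
{
        gen_pool_free_owner(pool, (unsigned long)addr, size, owner);
}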
drivers/ptp/ptp_ines.c
157
struct list_head pool;
drivers/ptp/ptp_ines.c
215
INIT_LIST_HEAD(&port->pool);
drivers/ptp/ptp_ines.c
217
list_add(&port->pool_data[j].list, &port->pool);
drivers/ptp/ptp_ines.c
270
list_add(&ts->list, &port->pool);
drivers/ptp/ptp_ines.c
276
list_add(&ts->list, &port->pool);
drivers/ptp/ptp_ines.c
525
if (list_empty(&port->pool)) {
drivers/ptp/ptp_ines.c
542
ts = list_first_entry(&port->pool, struct ines_timestamp, list);
drivers/s390/cio/css.c
1087
static void __gp_dma_free_dma(struct gen_pool *pool,
drivers/s390/net/qeth_core_main.c
267
struct qeth_qdio_buffer_pool *pool = &card->qdio.init_pool;
drivers/s390/net/qeth_core_main.c
269
int delta = count - pool->buf_count;
drivers/s390/net/qeth_core_main.c
275
if (list_empty(&pool->entry_list))
drivers/s390/net/qeth_core_main.c
280
entry = list_first_entry(&pool->entry_list,
drivers/s390/net/qeth_core_main.c
307
list_splice(&entries, &pool->entry_list);
drivers/s390/net/qeth_core_main.c
311
pool->buf_count = count;
drivers/s390/scsi/zfcp_aux.c
208
adapter->pool.erp_req =
drivers/s390/scsi/zfcp_aux.c
210
if (!adapter->pool.erp_req)
drivers/s390/scsi/zfcp_aux.c
213
adapter->pool.gid_pn_req =
drivers/s390/scsi/zfcp_aux.c
215
if (!adapter->pool.gid_pn_req)
drivers/s390/scsi/zfcp_aux.c
218
adapter->pool.scsi_req =
drivers/s390/scsi/zfcp_aux.c
220
if (!adapter->pool.scsi_req)
drivers/s390/scsi/zfcp_aux.c
223
adapter->pool.scsi_abort =
drivers/s390/scsi/zfcp_aux.c
225
if (!adapter->pool.scsi_abort)
drivers/s390/scsi/zfcp_aux.c
228
adapter->pool.status_read_req =
drivers/s390/scsi/zfcp_aux.c
231
if (!adapter->pool.status_read_req)
drivers/s390/scsi/zfcp_aux.c
234
adapter->pool.qtcb_pool =
drivers/s390/scsi/zfcp_aux.c
236
if (!adapter->pool.qtcb_pool)
drivers/s390/scsi/zfcp_aux.c
240
adapter->pool.sr_data =
drivers/s390/scsi/zfcp_aux.c
242
if (!adapter->pool.sr_data)
drivers/s390/scsi/zfcp_aux.c
245
adapter->pool.gid_pn =
drivers/s390/scsi/zfcp_aux.c
247
if (!adapter->pool.gid_pn)
drivers/s390/scsi/zfcp_aux.c
255
mempool_destroy(adapter->pool.erp_req);
drivers/s390/scsi/zfcp_aux.c
256
mempool_destroy(adapter->pool.scsi_req);
drivers/s390/scsi/zfcp_aux.c
257
mempool_destroy(adapter->pool.scsi_abort);
drivers/s390/scsi/zfcp_aux.c
258
mempool_destroy(adapter->pool.qtcb_pool);
drivers/s390/scsi/zfcp_aux.c
259
mempool_destroy(adapter->pool.status_read_req);
drivers/s390/scsi/zfcp_aux.c
260
mempool_destroy(adapter->pool.sr_data);
drivers/s390/scsi/zfcp_aux.c
261
mempool_destroy(adapter->pool.gid_pn);
drivers/s390/scsi/zfcp_def.h
191
struct zfcp_adapter_mempool pool; /* Adapter memory pools */
drivers/s390/scsi/zfcp_def.h
337
mempool_t *pool;
drivers/s390/scsi/zfcp_erp.c
884
if (mempool_resize(act->adapter->pool.sr_data,
drivers/s390/scsi/zfcp_erp.c
888
if (mempool_resize(act->adapter->pool.status_read_req,
drivers/s390/scsi/zfcp_fc.c
406
adapter->pool.gid_pn_req,
drivers/s390/scsi/zfcp_fc.c
426
fc_req = mempool_alloc(adapter->pool.gid_pn, GFP_ATOMIC);
drivers/s390/scsi/zfcp_fc.c
440
mempool_free(fc_req, adapter->pool.gid_pn);
drivers/s390/scsi/zfcp_fsf.c
1054
qdio->adapter->pool.scsi_abort);
drivers/s390/scsi/zfcp_fsf.c
1225
struct zfcp_fsf_ct_els *ct, mempool_t *pool,
drivers/s390/scsi/zfcp_fsf.c
1237
SBAL_SFLAGS0_TYPE_WRITE_READ, pool);
drivers/s390/scsi/zfcp_fsf.c
1383
qdio->adapter->pool.erp_req);
drivers/s390/scsi/zfcp_fsf.c
1500
qdio->adapter->pool.erp_req);
drivers/s390/scsi/zfcp_fsf.c
1791
qdio->adapter->pool.erp_req);
drivers/s390/scsi/zfcp_fsf.c
1858
qdio->adapter->pool.erp_req);
drivers/s390/scsi/zfcp_fsf.c
1933
qdio->adapter->pool.erp_req);
drivers/s390/scsi/zfcp_fsf.c
1992
qdio->adapter->pool.erp_req);
drivers/s390/scsi/zfcp_fsf.c
2085
qdio->adapter->pool.erp_req);
drivers/s390/scsi/zfcp_fsf.c
2205
adapter->pool.erp_req);
drivers/s390/scsi/zfcp_fsf.c
2297
qdio->adapter->pool.erp_req);
drivers/s390/scsi/zfcp_fsf.c
2585
sbtype, adapter->pool.scsi_req);
drivers/s390/scsi/zfcp_fsf.c
264
mempool_free(virt_to_page(sr_buf), adapter->pool.sr_data);
drivers/s390/scsi/zfcp_fsf.c
2693
qdio->adapter->pool.scsi_req);
drivers/s390/scsi/zfcp_fsf.c
323
mempool_free(virt_to_page(sr_buf), adapter->pool.sr_data);
drivers/s390/scsi/zfcp_fsf.c
802
static struct zfcp_fsf_req *zfcp_fsf_alloc(mempool_t *pool)
drivers/s390/scsi/zfcp_fsf.c
806
if (likely(pool))
drivers/s390/scsi/zfcp_fsf.c
807
req = mempool_alloc(pool, GFP_ATOMIC);
drivers/s390/scsi/zfcp_fsf.c
815
req->pool = pool;
drivers/s390/scsi/zfcp_fsf.c
819
static struct fsf_qtcb *zfcp_fsf_qtcb_alloc(mempool_t *pool)
drivers/s390/scsi/zfcp_fsf.c
823
if (likely(pool))
drivers/s390/scsi/zfcp_fsf.c
824
qtcb = mempool_alloc(pool, GFP_ATOMIC);
drivers/s390/scsi/zfcp_fsf.c
837
mempool_t *pool)
drivers/s390/scsi/zfcp_fsf.c
840
struct zfcp_fsf_req *req = zfcp_fsf_alloc(pool);
drivers/s390/scsi/zfcp_fsf.c
855
if (likely(pool))
drivers/s390/scsi/zfcp_fsf.c
857
adapter->pool.qtcb_pool);
drivers/s390/scsi/zfcp_fsf.c
93
if (likely(req->pool)) {
drivers/s390/scsi/zfcp_fsf.c
942
adapter->pool.status_read_req);
drivers/s390/scsi/zfcp_fsf.c
948
page = mempool_alloc(adapter->pool.sr_data, GFP_ATOMIC);
drivers/s390/scsi/zfcp_fsf.c
95
mempool_free(req->qtcb, req->adapter->pool.qtcb_pool);
drivers/s390/scsi/zfcp_fsf.c
96
mempool_free(req, req->pool);
drivers/s390/scsi/zfcp_fsf.c
969
mempool_free(virt_to_page(sr_buf), adapter->pool.sr_data);
drivers/scsi/fnic/fnic_main.c
706
mempool_t *pool;
drivers/scsi/fnic/fnic_main.c
902
pool = mempool_create_slab_pool(2, fnic_sgl_cache[FNIC_SGL_CACHE_DFLT]);
drivers/scsi/fnic/fnic_main.c
903
if (!pool) {
drivers/scsi/fnic/fnic_main.c
907
fnic->io_sgl_pool[FNIC_SGL_CACHE_DFLT] = pool;
drivers/scsi/fnic/fnic_main.c
909
pool = mempool_create_slab_pool(2, fnic_sgl_cache[FNIC_SGL_CACHE_MAX]);
drivers/scsi/fnic/fnic_main.c
910
if (!pool) {
drivers/scsi/fnic/fnic_main.c
914
fnic->io_sgl_pool[FNIC_SGL_CACHE_MAX] = pool;
drivers/scsi/fnic/fnic_main.c
916
pool = mempool_create_slab_pool(FDLS_MIN_FRAMES, fdls_frame_cache);
drivers/scsi/fnic/fnic_main.c
917
if (!pool) {
drivers/scsi/fnic/fnic_main.c
921
fnic->frame_pool = pool;
drivers/scsi/fnic/fnic_main.c
923
pool = mempool_create_slab_pool(FDLS_MIN_FRAME_ELEM,
drivers/scsi/fnic/fnic_main.c
925
if (!pool) {
drivers/scsi/fnic/fnic_main.c
929
fnic->frame_elem_pool = pool;
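The fnic pools above are mempools layered over slab caches: mempool_create_slab_pool(min_nr, cache) keeps min_nr objects reserved so allocation can make forward progress under memory pressure. A sketch of the construction (cache name, object size, and reserve count are illustrative):

#include <linux/mempool.h>
#include <linux/slab.h>

static mempool_t *my_make_pool(void)
{
        struct kmem_cache *cache;
        mempool_t *pool;

        cache = kmem_cache_create("my_sgl", 4096, 0, SLAB_HWCACHE_ALIGN, NULL);
        if (!cache)
                return NULL;

        /* keep 2 objects reserved so allocation cannot fail indefinitely */
        pool = mempool_create_slab_pool(2, cache);
        if (!pool)
                kmem_cache_destroy(cache);
        return pool;
}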
drivers/scsi/ibmvscsi/ibmvfc.c
1016
static int ibmvfc_valid_event(struct ibmvfc_event_pool *pool,
drivers/scsi/ibmvscsi/ibmvfc.c
1019
int index = evt - pool->events;
drivers/scsi/ibmvscsi/ibmvfc.c
1020
if (index < 0 || index >= pool->size) /* outside of bounds */
drivers/scsi/ibmvscsi/ibmvfc.c
1022
if (evt != pool->events + index) /* unaligned */
drivers/scsi/ibmvscsi/ibmvfc.c
1034
struct ibmvfc_event_pool *pool = &evt->queue->evt_pool;
drivers/scsi/ibmvscsi/ibmvfc.c
1037
BUG_ON(!ibmvfc_valid_event(pool, evt));
drivers/scsi/ibmvscsi/ibmvfc.c
793
struct ibmvfc_event_pool *pool = &queue->evt_pool;
drivers/scsi/ibmvscsi/ibmvfc.c
799
pool->size = queue->total_depth;
drivers/scsi/ibmvscsi/ibmvfc.c
800
pool->events = kzalloc_objs(*pool->events, pool->size);
drivers/scsi/ibmvscsi/ibmvfc.c
801
if (!pool->events)
drivers/scsi/ibmvscsi/ibmvfc.c
804
pool->iu_storage = dma_alloc_coherent(vhost->dev,
drivers/scsi/ibmvscsi/ibmvfc.c
805
pool->size * sizeof(*pool->iu_storage),
drivers/scsi/ibmvscsi/ibmvfc.c
806
&pool->iu_token, 0);
drivers/scsi/ibmvscsi/ibmvfc.c
808
if (!pool->iu_storage) {
drivers/scsi/ibmvscsi/ibmvfc.c
809
kfree(pool->events);
drivers/scsi/ibmvscsi/ibmvfc.c
819
for (i = 0; i < pool->size; ++i) {
drivers/scsi/ibmvscsi/ibmvfc.c
820
struct ibmvfc_event *evt = &pool->events[i];
drivers/scsi/ibmvscsi/ibmvfc.c
831
evt->crq.ioba = cpu_to_be64(pool->iu_token + (sizeof(*evt->xfer_iu) * i));
drivers/scsi/ibmvscsi/ibmvfc.c
832
evt->xfer_iu = pool->iu_storage + i;
drivers/scsi/ibmvscsi/ibmvfc.c
853
struct ibmvfc_event_pool *pool = &queue->evt_pool;
drivers/scsi/ibmvscsi/ibmvfc.c
856
for (i = 0; i < pool->size; ++i) {
drivers/scsi/ibmvscsi/ibmvfc.c
857
list_del(&pool->events[i].queue_list);
drivers/scsi/ibmvscsi/ibmvfc.c
858
BUG_ON(atomic_read(&pool->events[i].free) != 1);
drivers/scsi/ibmvscsi/ibmvfc.c
859
if (pool->events[i].ext_list)
drivers/scsi/ibmvscsi/ibmvfc.c
861
pool->events[i].ext_list,
drivers/scsi/ibmvscsi/ibmvfc.c
862
pool->events[i].ext_list_token);
drivers/scsi/ibmvscsi/ibmvfc.c
865
kfree(pool->events);
drivers/scsi/ibmvscsi/ibmvfc.c
867
pool->size * sizeof(*pool->iu_storage),
drivers/scsi/ibmvscsi/ibmvfc.c
868
pool->iu_storage, pool->iu_token);
drivers/scsi/ibmvscsi/ibmvscsi.c
1055
evt_struct = get_event_struct(&hostdata->pool);
drivers/scsi/ibmvscsi/ibmvscsi.c
1071
free_event_struct(&hostdata->pool, evt_struct);
drivers/scsi/ibmvscsi/ibmvscsi.c
1205
struct srp_event_struct *evt_struct = get_event_struct(&hostdata->pool);
drivers/scsi/ibmvscsi/ibmvscsi.c
1274
evt_struct = get_event_struct(&hostdata->pool);
drivers/scsi/ibmvscsi/ibmvscsi.c
1366
evt_struct = get_event_struct(&hostdata->pool);
drivers/scsi/ibmvscsi/ibmvscsi.c
1442
evt_struct = get_event_struct(&hostdata->pool);
drivers/scsi/ibmvscsi/ibmvscsi.c
1521
evt = get_event_struct(&hostdata->pool);
drivers/scsi/ibmvscsi/ibmvscsi.c
1618
free_event_struct(&found_evt->hostdata->pool, found_evt);
drivers/scsi/ibmvscsi/ibmvscsi.c
1644
evt = get_event_struct(&hostdata->pool);
drivers/scsi/ibmvscsi/ibmvscsi.c
1723
free_event_struct(&tmp_evt->hostdata->pool,
drivers/scsi/ibmvscsi/ibmvscsi.c
1829
if (!valid_event_struct(&hostdata->pool, evt_struct)) {
drivers/scsi/ibmvscsi/ibmvscsi.c
1860
free_event_struct(&evt_struct->hostdata->pool, evt_struct);
drivers/scsi/ibmvscsi/ibmvscsi.c
2273
if (initialize_event_pool(&hostdata->pool, max_events, hostdata) != 0) {
drivers/scsi/ibmvscsi/ibmvscsi.c
2331
release_event_pool(&hostdata->pool, hostdata);
drivers/scsi/ibmvscsi/ibmvscsi.c
2352
release_event_pool(&hostdata->pool, hostdata);
drivers/scsi/ibmvscsi/ibmvscsi.c
443
static int initialize_event_pool(struct event_pool *pool,
drivers/scsi/ibmvscsi/ibmvscsi.c
448
pool->size = size;
drivers/scsi/ibmvscsi/ibmvscsi.c
449
pool->next = 0;
drivers/scsi/ibmvscsi/ibmvscsi.c
450
pool->events = kzalloc_objs(*pool->events, pool->size);
drivers/scsi/ibmvscsi/ibmvscsi.c
451
if (!pool->events)
drivers/scsi/ibmvscsi/ibmvscsi.c
454
pool->iu_storage =
drivers/scsi/ibmvscsi/ibmvscsi.c
456
pool->size * sizeof(*pool->iu_storage),
drivers/scsi/ibmvscsi/ibmvscsi.c
457
&pool->iu_token, GFP_KERNEL);
drivers/scsi/ibmvscsi/ibmvscsi.c
458
if (!pool->iu_storage) {
drivers/scsi/ibmvscsi/ibmvscsi.c
459
kfree(pool->events);
drivers/scsi/ibmvscsi/ibmvscsi.c
463
for (i = 0; i < pool->size; ++i) {
drivers/scsi/ibmvscsi/ibmvscsi.c
464
struct srp_event_struct *evt = &pool->events[i];
drivers/scsi/ibmvscsi/ibmvscsi.c
469
evt->crq.IU_data_ptr = cpu_to_be64(pool->iu_token +
drivers/scsi/ibmvscsi/ibmvscsi.c
471
evt->xfer_iu = pool->iu_storage + i;
drivers/scsi/ibmvscsi/ibmvscsi.c
487
static void release_event_pool(struct event_pool *pool,
drivers/scsi/ibmvscsi/ibmvscsi.c
491
for (i = 0; i < pool->size; ++i) {
drivers/scsi/ibmvscsi/ibmvscsi.c
492
if (atomic_read(&pool->events[i].free) != 1)
drivers/scsi/ibmvscsi/ibmvscsi.c
494
if (pool->events[i].ext_list) {
drivers/scsi/ibmvscsi/ibmvscsi.c
497
pool->events[i].ext_list,
drivers/scsi/ibmvscsi/ibmvscsi.c
498
pool->events[i].ext_list_token);
drivers/scsi/ibmvscsi/ibmvscsi.c
504
kfree(pool->events);
drivers/scsi/ibmvscsi/ibmvscsi.c
506
pool->size * sizeof(*pool->iu_storage),
drivers/scsi/ibmvscsi/ibmvscsi.c
507
pool->iu_storage, pool->iu_token);
drivers/scsi/ibmvscsi/ibmvscsi.c
517
static int valid_event_struct(struct event_pool *pool,
drivers/scsi/ibmvscsi/ibmvscsi.c
520
int index = evt - pool->events;
drivers/scsi/ibmvscsi/ibmvscsi.c
521
if (index < 0 || index >= pool->size) /* outside of bounds */
drivers/scsi/ibmvscsi/ibmvscsi.c
523
if (evt != pool->events + index) /* unaligned */
drivers/scsi/ibmvscsi/ibmvscsi.c
533
static void free_event_struct(struct event_pool *pool,
drivers/scsi/ibmvscsi/ibmvscsi.c
536
if (!valid_event_struct(pool, evt)) {
drivers/scsi/ibmvscsi/ibmvscsi.c
538
"(not in pool %p)\n", evt, pool->events);
drivers/scsi/ibmvscsi/ibmvscsi.c
556
static struct srp_event_struct *get_event_struct(struct event_pool *pool)
drivers/scsi/ibmvscsi/ibmvscsi.c
559
int poolsize = pool->size;
drivers/scsi/ibmvscsi/ibmvscsi.c
560
int offset = pool->next;
drivers/scsi/ibmvscsi/ibmvscsi.c
564
if (!atomic_dec_if_positive(&pool->events[offset].free)) {
drivers/scsi/ibmvscsi/ibmvscsi.c
565
pool->next = offset;
drivers/scsi/ibmvscsi/ibmvscsi.c
566
return &pool->events[offset];
drivers/scsi/ibmvscsi/ibmvscsi.c
804
free_event_struct(&evt->hostdata->pool, evt);
drivers/scsi/ibmvscsi/ibmvscsi.c
972
free_event_struct(&hostdata->pool, evt_struct);
drivers/scsi/ibmvscsi/ibmvscsi.c
986
free_event_struct(&hostdata->pool, evt_struct);
drivers/scsi/ibmvscsi/ibmvscsi.h
91
struct event_pool pool;
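The ibmvscsi event pool claims slots without a lock: each event carries an atomic 'free' flag, and atomic_dec_if_positive() returns zero exactly when the caller won the 1 -> 0 transition. A sketch of the claim loop (structure and field names are illustrative):

#include <linux/atomic.h>

struct my_evt {
        atomic_t free;          /* 1 = available, 0 = in use */
};

static struct my_evt *my_get_event(struct my_evt *events, int size, int *next)
{
        int i, offset = *next;

        for (i = 0; i < size; i++) {
                offset = (offset + 1) % size;
                /* returns the new value (0) only if we did the 1 -> 0 step */
                if (!atomic_dec_if_positive(&events[offset].free)) {
                        *next = offset;
                        return &events[offset];
                }
        }
        return NULL;            /* pool exhausted */
}

/* release: atomic_set(&evt->free, 1) after validating the pointer, as
 * valid_event_struct() does above */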
drivers/scsi/ibmvscsi_tgt/libsrp.c
30
q->pool = kzalloc_objs(struct iu_entry *, max);
drivers/scsi/ibmvscsi_tgt/libsrp.c
31
if (!q->pool)
drivers/scsi/ibmvscsi_tgt/libsrp.c
38
kfifo_init(&q->queue, (void *)q->pool, max * sizeof(void *));
drivers/scsi/ibmvscsi_tgt/libsrp.c
48
kfree(q->pool);
drivers/scsi/ibmvscsi_tgt/libsrp.c
55
kfree(q->pool);
drivers/scsi/ibmvscsi_tgt/libsrp.h
77
void *pool;
drivers/scsi/libfc/fc_exch.c
1935
struct fc_exch_pool *pool,
drivers/scsi/libfc/fc_exch.c
1941
spin_lock_bh(&pool->lock);
drivers/scsi/libfc/fc_exch.c
1943
list_for_each_entry_safe(ep, next, &pool->ex_list, ex_list) {
drivers/scsi/libfc/fc_exch.c
1948
spin_unlock_bh(&pool->lock);
drivers/scsi/libfc/fc_exch.c
1953
spin_lock_bh(&pool->lock);
drivers/scsi/libfc/fc_exch.c
1962
pool->next_index = 0;
drivers/scsi/libfc/fc_exch.c
1963
pool->left = FC_XID_UNKNOWN;
drivers/scsi/libfc/fc_exch.c
1964
pool->right = FC_XID_UNKNOWN;
drivers/scsi/libfc/fc_exch.c
1965
spin_unlock_bh(&pool->lock);
drivers/scsi/libfc/fc_exch.c
1987
per_cpu_ptr(ema->mp->pool, cpu),
drivers/scsi/libfc/fc_exch.c
2416
free_percpu(mp->pool);
drivers/scsi/libfc/fc_exch.c
2471
struct fc_exch_pool *pool;
drivers/scsi/libfc/fc_exch.c
2493
pool_exch_range = (PCPU_MIN_UNIT_SIZE - sizeof(*pool)) /
drivers/scsi/libfc/fc_exch.c
2518
pool_size = sizeof(*pool) + pool_exch_range * sizeof(struct fc_exch *);
drivers/scsi/libfc/fc_exch.c
2519
mp->pool = __alloc_percpu(pool_size, __alignof__(struct fc_exch_pool));
drivers/scsi/libfc/fc_exch.c
2520
if (!mp->pool)
drivers/scsi/libfc/fc_exch.c
2523
pool = per_cpu_ptr(mp->pool, cpu);
drivers/scsi/libfc/fc_exch.c
2524
pool->next_index = 0;
drivers/scsi/libfc/fc_exch.c
2525
pool->left = FC_XID_UNKNOWN;
drivers/scsi/libfc/fc_exch.c
2526
pool->right = FC_XID_UNKNOWN;
drivers/scsi/libfc/fc_exch.c
2527
spin_lock_init(&pool->lock);
drivers/scsi/libfc/fc_exch.c
2528
INIT_LIST_HEAD(&pool->ex_list);
drivers/scsi/libfc/fc_exch.c
2533
free_percpu(mp->pool);
drivers/scsi/libfc/fc_exch.c
412
static inline struct fc_exch *fc_exch_ptr_get(struct fc_exch_pool *pool,
drivers/scsi/libfc/fc_exch.c
415
struct fc_exch **exches = (struct fc_exch **)(pool + 1);
drivers/scsi/libfc/fc_exch.c
425
static inline void fc_exch_ptr_set(struct fc_exch_pool *pool, u16 index,
drivers/scsi/libfc/fc_exch.c
428
((struct fc_exch **)(pool + 1))[index] = ep;
drivers/scsi/libfc/fc_exch.c
437
struct fc_exch_pool *pool;
drivers/scsi/libfc/fc_exch.c
440
pool = ep->pool;
drivers/scsi/libfc/fc_exch.c
441
spin_lock_bh(&pool->lock);
drivers/scsi/libfc/fc_exch.c
442
WARN_ON(pool->total_exches <= 0);
drivers/scsi/libfc/fc_exch.c
443
pool->total_exches--;
drivers/scsi/libfc/fc_exch.c
448
if (pool->left == FC_XID_UNKNOWN)
drivers/scsi/libfc/fc_exch.c
449
pool->left = index;
drivers/scsi/libfc/fc_exch.c
450
else if (pool->right == FC_XID_UNKNOWN)
drivers/scsi/libfc/fc_exch.c
451
pool->right = index;
drivers/scsi/libfc/fc_exch.c
453
pool->next_index = index;
drivers/scsi/libfc/fc_exch.c
454
fc_exch_ptr_set(pool, index, NULL);
drivers/scsi/libfc/fc_exch.c
456
fc_exch_ptr_set(pool, index, &fc_quarantine_exch);
drivers/scsi/libfc/fc_exch.c
459
spin_unlock_bh(&pool->lock);
drivers/scsi/libfc/fc_exch.c
818
struct fc_exch_pool *pool;
drivers/scsi/libfc/fc_exch.c
829
pool = per_cpu_ptr(mp->pool, cpu);
drivers/scsi/libfc/fc_exch.c
830
spin_lock_bh(&pool->lock);
drivers/scsi/libfc/fc_exch.c
833
if (pool->left != FC_XID_UNKNOWN) {
drivers/scsi/libfc/fc_exch.c
834
if (!WARN_ON(fc_exch_ptr_get(pool, pool->left))) {
drivers/scsi/libfc/fc_exch.c
835
index = pool->left;
drivers/scsi/libfc/fc_exch.c
836
pool->left = FC_XID_UNKNOWN;
drivers/scsi/libfc/fc_exch.c
84
struct fc_exch_pool __percpu *pool;
drivers/scsi/libfc/fc_exch.c
840
if (pool->right != FC_XID_UNKNOWN) {
drivers/scsi/libfc/fc_exch.c
841
if (!WARN_ON(fc_exch_ptr_get(pool, pool->right))) {
drivers/scsi/libfc/fc_exch.c
842
index = pool->right;
drivers/scsi/libfc/fc_exch.c
843
pool->right = FC_XID_UNKNOWN;
drivers/scsi/libfc/fc_exch.c
848
index = pool->next_index;
drivers/scsi/libfc/fc_exch.c
850
while (fc_exch_ptr_get(pool, index)) {
drivers/scsi/libfc/fc_exch.c
852
if (index == pool->next_index)
drivers/scsi/libfc/fc_exch.c
855
pool->next_index = index == mp->pool_max_index ? 0 : index + 1;
drivers/scsi/libfc/fc_exch.c
866
fc_exch_ptr_set(pool, index, ep);
drivers/scsi/libfc/fc_exch.c
867
list_add_tail(&ep->ex_list, &pool->ex_list);
drivers/scsi/libfc/fc_exch.c
869
pool->total_exches++;
drivers/scsi/libfc/fc_exch.c
870
spin_unlock_bh(&pool->lock);
drivers/scsi/libfc/fc_exch.c
877
ep->pool = pool;
drivers/scsi/libfc/fc_exch.c
888
spin_unlock_bh(&pool->lock);
drivers/scsi/libfc/fc_exch.c
929
struct fc_exch_pool *pool;
drivers/scsi/libfc/fc_exch.c
943
pool = per_cpu_ptr(mp->pool, cpu);
drivers/scsi/libfc/fc_exch.c
944
spin_lock_bh(&pool->lock);
drivers/scsi/libfc/fc_exch.c
945
ep = fc_exch_ptr_get(pool, (xid - mp->min_xid) >> fc_cpu_order);
drivers/scsi/libfc/fc_exch.c
954
spin_unlock_bh(&pool->lock);
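fc_exch sizes each per-cpu pool as a header immediately followed by an array of exchange pointers in the same per-cpu chunk, which is why the code above indexes (pool + 1). A hedged sketch of that layout (my_* names are illustrative):

#include <linux/percpu.h>
#include <linux/spinlock.h>

struct my_exch;                         /* opaque exchange type */

struct my_exch_pool {
        spinlock_t      lock;
        u16             next_index;
        /* struct my_exch *exches[] follows in the same per-cpu chunk */
};

static struct my_exch **my_pool_slots(struct my_exch_pool *pool)
{
        return (struct my_exch **)(pool + 1);   /* array starts past the header */
}

static struct my_exch_pool __percpu *my_pool_alloc(size_t nslots)
{
        size_t size = sizeof(struct my_exch_pool) +
                      nslots * sizeof(struct my_exch *);

        /* one header-plus-array chunk per CPU */
        return __alloc_percpu(size, __alignof__(struct my_exch_pool));
}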
drivers/scsi/libiscsi.c
2783
q->pool = kvcalloc(num_arrays * max, sizeof(void *), GFP_KERNEL);
drivers/scsi/libiscsi.c
2784
if (q->pool == NULL)
drivers/scsi/libiscsi.c
2787
kfifo_init(&q->queue, (void*)q->pool, max * sizeof(void*));
drivers/scsi/libiscsi.c
2790
q->pool[i] = kzalloc(item_size, GFP_KERNEL);
drivers/scsi/libiscsi.c
2791
if (q->pool[i] == NULL) {
drivers/scsi/libiscsi.c
2795
kfifo_in(&q->queue, (void*)&q->pool[i], sizeof(void*));
drivers/scsi/libiscsi.c
2799
*items = q->pool + max;
drivers/scsi/libiscsi.c
2800
memcpy(*items, q->pool, max * sizeof(void *));
drivers/scsi/libiscsi.c
2816
kfree(q->pool[i]);
drivers/scsi/libiscsi.c
2817
kvfree(q->pool);
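libiscsi's task pool stores pre-allocated item pointers in a kfifo whose backing buffer is the pointer array itself: popping the fifo "allocates" an item, pushing it back frees it. A condensed sketch of the init path above; error unwinding is trimmed, and note that kfifo_init() rounds the slot count down to a power of two:

#include <linux/kfifo.h>
#include <linux/slab.h>

struct my_queue {
        struct kfifo    queue;
        void            **pool;         /* pointer array doubling as fifo storage */
};

static int my_queue_init(struct my_queue *q, int max, size_t item_size)
{
        int i;

        q->pool = kvcalloc(max, sizeof(void *), GFP_KERNEL);
        if (!q->pool)
                return -ENOMEM;
        kfifo_init(&q->queue, (void *)q->pool, max * sizeof(void *));

        for (i = 0; i < max; i++) {
                q->pool[i] = kzalloc(item_size, GFP_KERNEL);
                if (!q->pool[i])
                        return -ENOMEM; /* real code unwinds the partial pool */
                kfifo_in(&q->queue, (void *)&q->pool[i], sizeof(void *));
        }
        return 0;
}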
drivers/scsi/lpfc/lpfc_mem.c
111
struct lpfc_dma_pool *pool = &phba->lpfc_mbuf_safety_pool;
drivers/scsi/lpfc/lpfc_mem.c
121
pool->elements = kmalloc_objs(struct lpfc_dmabuf, LPFC_MBUF_POOL_SIZE);
drivers/scsi/lpfc/lpfc_mem.c
122
if (!pool->elements)
drivers/scsi/lpfc/lpfc_mem.c
125
pool->max_count = 0;
drivers/scsi/lpfc/lpfc_mem.c
126
pool->current_count = 0;
drivers/scsi/lpfc/lpfc_mem.c
128
pool->elements[i].virt = dma_pool_alloc(phba->lpfc_mbuf_pool,
drivers/scsi/lpfc/lpfc_mem.c
129
GFP_KERNEL, &pool->elements[i].phys);
drivers/scsi/lpfc/lpfc_mem.c
130
if (!pool->elements[i].virt)
drivers/scsi/lpfc/lpfc_mem.c
132
pool->max_count++;
drivers/scsi/lpfc/lpfc_mem.c
133
pool->current_count++;
drivers/scsi/lpfc/lpfc_mem.c
201
dma_pool_free(phba->lpfc_mbuf_pool, pool->elements[i].virt,
drivers/scsi/lpfc/lpfc_mem.c
202
pool->elements[i].phys);
drivers/scsi/lpfc/lpfc_mem.c
203
kfree(pool->elements);
drivers/scsi/lpfc/lpfc_mem.c
239
struct lpfc_dma_pool *pool = &phba->lpfc_mbuf_safety_pool;
drivers/scsi/lpfc/lpfc_mem.c
272
for (i = 0; i < pool->current_count; i++)
drivers/scsi/lpfc/lpfc_mem.c
273
dma_pool_free(phba->lpfc_mbuf_pool, pool->elements[i].virt,
drivers/scsi/lpfc/lpfc_mem.c
274
pool->elements[i].phys);
drivers/scsi/lpfc/lpfc_mem.c
275
kfree(pool->elements);
drivers/scsi/lpfc/lpfc_mem.c
387
struct lpfc_dma_pool *pool = &phba->lpfc_mbuf_safety_pool;
drivers/scsi/lpfc/lpfc_mem.c
394
if (!ret && (mem_flags & MEM_PRI) && pool->current_count) {
drivers/scsi/lpfc/lpfc_mem.c
395
pool->current_count--;
drivers/scsi/lpfc/lpfc_mem.c
396
ret = pool->elements[pool->current_count].virt;
drivers/scsi/lpfc/lpfc_mem.c
397
*handle = pool->elements[pool->current_count].phys;
drivers/scsi/lpfc/lpfc_mem.c
420
struct lpfc_dma_pool *pool = &phba->lpfc_mbuf_safety_pool;
drivers/scsi/lpfc/lpfc_mem.c
422
if (pool->current_count < pool->max_count) {
drivers/scsi/lpfc/lpfc_mem.c
423
pool->elements[pool->current_count].virt = virt;
drivers/scsi/lpfc/lpfc_mem.c
424
pool->elements[pool->current_count].phys = dma;
drivers/scsi/lpfc/lpfc_mem.c
425
pool->current_count++;
drivers/scsi/megaraid/megaraid_mm.c
1066
mm_dmapool_t *pool;
drivers/scsi/megaraid/megaraid_mm.c
1077
pool = &adp->dma_pool_list[i];
drivers/scsi/megaraid/megaraid_mm.c
1079
pool->buf_size = bufsize;
drivers/scsi/megaraid/megaraid_mm.c
1080
spin_lock_init(&pool->lock);
drivers/scsi/megaraid/megaraid_mm.c
1082
pool->handle = dma_pool_create("megaraid mm data buffer",
drivers/scsi/megaraid/megaraid_mm.c
1086
if (!pool->handle) {
drivers/scsi/megaraid/megaraid_mm.c
1090
pool->vaddr = dma_pool_alloc(pool->handle, GFP_KERNEL,
drivers/scsi/megaraid/megaraid_mm.c
1091
&pool->paddr);
drivers/scsi/megaraid/megaraid_mm.c
1093
if (!pool->vaddr)
drivers/scsi/megaraid/megaraid_mm.c
1182
mm_dmapool_t *pool;
drivers/scsi/megaraid/megaraid_mm.c
1186
pool = &adp->dma_pool_list[i];
drivers/scsi/megaraid/megaraid_mm.c
1188
if (pool->handle) {
drivers/scsi/megaraid/megaraid_mm.c
1190
if (pool->vaddr)
drivers/scsi/megaraid/megaraid_mm.c
1191
dma_pool_free(pool->handle, pool->vaddr,
drivers/scsi/megaraid/megaraid_mm.c
1192
pool->paddr);
drivers/scsi/megaraid/megaraid_mm.c
1194
dma_pool_destroy(pool->handle);
drivers/scsi/megaraid/megaraid_mm.c
1195
pool->handle = NULL;
drivers/scsi/megaraid/megaraid_mm.c
510
mm_dmapool_t *pool;
drivers/scsi/megaraid/megaraid_mm.c
527
pool = &adp->dma_pool_list[i];
drivers/scsi/megaraid/megaraid_mm.c
529
if (xferlen > pool->buf_size)
drivers/scsi/megaraid/megaraid_mm.c
535
spin_lock_irqsave(&pool->lock, flags);
drivers/scsi/megaraid/megaraid_mm.c
537
if (!pool->in_use) {
drivers/scsi/megaraid/megaraid_mm.c
539
pool->in_use = 1;
drivers/scsi/megaraid/megaraid_mm.c
541
kioc->buf_vaddr = pool->vaddr;
drivers/scsi/megaraid/megaraid_mm.c
542
kioc->buf_paddr = pool->paddr;
drivers/scsi/megaraid/megaraid_mm.c
544
spin_unlock_irqrestore(&pool->lock, flags);
drivers/scsi/megaraid/megaraid_mm.c
548
spin_unlock_irqrestore(&pool->lock, flags);
drivers/scsi/megaraid/megaraid_mm.c
563
pool = &adp->dma_pool_list[right_pool];
drivers/scsi/megaraid/megaraid_mm.c
565
spin_lock_irqsave(&pool->lock, flags);
drivers/scsi/megaraid/megaraid_mm.c
569
kioc->buf_vaddr = dma_pool_alloc(pool->handle, GFP_ATOMIC,
drivers/scsi/megaraid/megaraid_mm.c
571
spin_unlock_irqrestore(&pool->lock, flags);
drivers/scsi/megaraid/megaraid_mm.c
636
mm_dmapool_t *pool;
drivers/scsi/megaraid/megaraid_mm.c
640
pool = &adp->dma_pool_list[kioc->pool_index];
drivers/scsi/megaraid/megaraid_mm.c
643
spin_lock_irqsave(&pool->lock, flags);
drivers/scsi/megaraid/megaraid_mm.c
653
dma_pool_free(pool->handle, kioc->buf_vaddr,
drivers/scsi/megaraid/megaraid_mm.c
656
pool->in_use = 0;
drivers/scsi/megaraid/megaraid_mm.c
658
spin_unlock_irqrestore(&pool->lock, flags);
drivers/scsi/mvumi.c
1767
struct mvumi_ob_data *pool;
drivers/scsi/mvumi.c
1770
pool = list_first_entry(&mhba->free_ob_list,
drivers/scsi/mvumi.c
1772
list_del_init(&pool->list);
drivers/scsi/mvumi.c
1773
list_add_tail(&pool->list, &mhba->ob_data_list);
drivers/scsi/mvumi.c
1775
ob_frame = (struct mvumi_rsp_frame *) &pool->data[0];
drivers/scsi/qla2xxx/qla_attr.c
2359
ha->dif_bundle_dma_allocs, ha->pool.unusable.count);
drivers/scsi/qla2xxx/qla_def.h
4840
} pool;
drivers/scsi/qla2xxx/qla_os.c
4244
INIT_LIST_HEAD(&ha->pool.good.head);
drivers/scsi/qla2xxx/qla_os.c
4245
INIT_LIST_HEAD(&ha->pool.unusable.head);
drivers/scsi/qla2xxx/qla_os.c
4246
ha->pool.good.count = 0;
drivers/scsi/qla2xxx/qla_os.c
4247
ha->pool.unusable.count = 0;
drivers/scsi/qla2xxx/qla_os.c
4279
&ha->pool.unusable.head);
drivers/scsi/qla2xxx/qla_os.c
4280
ha->pool.unusable.count++;
drivers/scsi/qla2xxx/qla_os.c
4283
&ha->pool.good.head);
drivers/scsi/qla2xxx/qla_os.c
4284
ha->pool.good.count++;
drivers/scsi/qla2xxx/qla_os.c
4290
&ha->pool.good.head, list) {
drivers/scsi/qla2xxx/qla_os.c
4301
__func__, ha->pool.good.count,
drivers/scsi/qla2xxx/qla_os.c
4302
ha->pool.unusable.count);
drivers/scsi/qla2xxx/qla_os.c
4539
list_for_each_entry_safe(dsd, nxt, &ha->pool.unusable.head,
drivers/scsi/qla2xxx/qla_os.c
4547
ha->pool.unusable.count--;
drivers/scsi/qla2xxx/qla_os.c
4978
list_for_each_entry_safe(dsd, nxt, &ha->pool.unusable.head,
drivers/scsi/qla2xxx/qla_os.c
4986
ha->pool.unusable.count--;
drivers/scsi/qla2xxx/qla_os.c
4988
list_for_each_entry_safe(dsd, nxt, &ha->pool.good.head, list) {
drivers/scsi/snic/snic_main.c
349
mempool_t *pool;
drivers/scsi/snic/snic_main.c
548
pool = mempool_create_slab_pool(2,
drivers/scsi/snic/snic_main.c
550
if (!pool) {
drivers/scsi/snic/snic_main.c
557
snic->req_pool[SNIC_REQ_CACHE_DFLT_SGL] = pool;
drivers/scsi/snic/snic_main.c
559
pool = mempool_create_slab_pool(2,
drivers/scsi/snic/snic_main.c
561
if (!pool) {
drivers/scsi/snic/snic_main.c
568
snic->req_pool[SNIC_REQ_CACHE_MAX_SGL] = pool;
drivers/scsi/snic/snic_main.c
570
pool = mempool_create_slab_pool(2,
drivers/scsi/snic/snic_main.c
572
if (!pool) {
drivers/scsi/snic/snic_main.c
579
snic->req_pool[SNIC_REQ_TM_CACHE] = pool;
drivers/soc/fsl/qbman/bman.c
697
struct bman_pool *pool = NULL;
drivers/soc/fsl/qbman/bman.c
703
pool = kmalloc_obj(*pool);
drivers/soc/fsl/qbman/bman.c
704
if (!pool)
drivers/soc/fsl/qbman/bman.c
707
pool->bpid = bpid;
drivers/soc/fsl/qbman/bman.c
709
return pool;
drivers/soc/fsl/qbman/bman.c
716
void bman_free_pool(struct bman_pool *pool)
drivers/soc/fsl/qbman/bman.c
718
bm_release_bpid(pool->bpid);
drivers/soc/fsl/qbman/bman.c
720
kfree(pool);
drivers/soc/fsl/qbman/bman.c
724
int bman_get_bpid(const struct bman_pool *pool)
drivers/soc/fsl/qbman/bman.c
726
return pool->bpid;
drivers/soc/fsl/qbman/bman.c
738
int bman_release(struct bman_pool *pool, const struct bm_buffer *bufs, u8 num)
drivers/soc/fsl/qbman/bman.c
773
bm_buffer_set_bpid(r->bufs, pool->bpid);
drivers/soc/fsl/qbman/bman.c
786
int bman_acquire(struct bman_pool *pool, struct bm_buffer *bufs, u8 num)
drivers/soc/fsl/qbman/bman.c
796
mcc->bpid = pool->bpid;
drivers/soc/fsl/qbman/bman_test_api.c
105
pool = bman_new_pool();
drivers/soc/fsl/qbman/bman_test_api.c
106
if (!pool) {
drivers/soc/fsl/qbman/bman_test_api.c
119
if (bman_release(pool, bufs_in + i, num)) {
drivers/soc/fsl/qbman/bman_test_api.c
132
tmp = bman_acquire(pool, bufs_out + i - num, num);
drivers/soc/fsl/qbman/bman_test_api.c
136
i = bman_acquire(pool, NULL, 1);
drivers/soc/fsl/qbman/bman_test_api.c
145
bman_free_pool(pool);
drivers/soc/fsl/qbman/bman_test_api.c
37
static struct bman_pool *pool;
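The bman_test_api sequence above is essentially the whole BMan contract: bman_release() hands buffers to the hardware-managed pool and bman_acquire() takes them back, keyed only by the pool's BPID. A hedged round-trip sketch (the buffer address is illustrative):

#include <linux/types.h>
#include <soc/fsl/bman.h>

static int my_bman_roundtrip(struct bman_pool *pool, u64 addr)
{
        struct bm_buffer buf;
        int ret;

        bm_buffer_set64(&buf, addr);            /* point the buffer at 'addr' */
        ret = bman_release(pool, &buf, 1);      /* hand it to the hardware pool */
        if (ret)
                return ret;

        ret = bman_acquire(pool, &buf, 1);      /* take one buffer back out */
        return ret < 0 ? ret : 0;
}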
drivers/soc/ti/knav_qmss.h
362
#define for_each_pool(kdev, pool) \
drivers/soc/ti/knav_qmss.h
363
list_for_each_entry(pool, &kdev->pools, list)
drivers/soc/ti/knav_qmss_queue.c
1027
pool = devm_kzalloc(kdev->dev, sizeof(*pool), GFP_KERNEL);
drivers/soc/ti/knav_qmss_queue.c
1028
if (!pool) {
drivers/soc/ti/knav_qmss_queue.c
1032
pool->num_desc = 0;
drivers/soc/ti/knav_qmss_queue.c
1033
pool->region_offset = region->num_desc;
drivers/soc/ti/knav_qmss_queue.c
1034
list_add(&pool->region_inst, &region->pools);
drivers/soc/ti/knav_qmss_queue.c
1350
struct knav_pool *pool, *tmp;
drivers/soc/ti/knav_qmss_queue.c
1357
list_for_each_entry_safe(pool, tmp, ®ion->pools, region_inst)
drivers/soc/ti/knav_qmss_queue.c
1358
knav_pool_destroy(pool);
drivers/soc/ti/knav_qmss_queue.c
686
static void kdesc_fill_pool(struct knav_pool *pool)
drivers/soc/ti/knav_qmss_queue.c
691
region = pool->region;
drivers/soc/ti/knav_qmss_queue.c
692
pool->desc_size = region->desc_size;
drivers/soc/ti/knav_qmss_queue.c
693
for (i = 0; i < pool->num_desc; i++) {
drivers/soc/ti/knav_qmss_queue.c
694
int index = pool->region_offset + i;
drivers/soc/ti/knav_qmss_queue.c
698
dma_size = ALIGN(pool->desc_size, SMP_CACHE_BYTES);
drivers/soc/ti/knav_qmss_queue.c
699
dma_sync_single_for_device(pool->dev, dma_addr, dma_size,
drivers/soc/ti/knav_qmss_queue.c
701
knav_queue_push(pool->queue, dma_addr, dma_size, 0);
drivers/soc/ti/knav_qmss_queue.c
706
static void kdesc_empty_pool(struct knav_pool *pool)
drivers/soc/ti/knav_qmss_queue.c
713
if (!pool->queue)
drivers/soc/ti/knav_qmss_queue.c
717
dma = knav_queue_pop(pool->queue, &size);
drivers/soc/ti/knav_qmss_queue.c
720
desc = knav_pool_desc_dma_to_virt(pool, dma);
drivers/soc/ti/knav_qmss_queue.c
722
dev_dbg(pool->kdev->dev,
drivers/soc/ti/knav_qmss_queue.c
726
WARN_ON(i != pool->num_desc);
drivers/soc/ti/knav_qmss_queue.c
727
knav_queue_close(pool->queue);
drivers/soc/ti/knav_qmss_queue.c
734
struct knav_pool *pool = ph;
drivers/soc/ti/knav_qmss_queue.c
735
return pool->region->dma_start + (virt - pool->region->virt_start);
drivers/soc/ti/knav_qmss_queue.c
741
struct knav_pool *pool = ph;
drivers/soc/ti/knav_qmss_queue.c
742
return pool->region->virt_start + (dma - pool->region->dma_start);
drivers/soc/ti/knav_qmss_queue.c
760
struct knav_pool *pool, *pi = NULL, *iter;
drivers/soc/ti/knav_qmss_queue.c
771
pool = devm_kzalloc(kdev->dev, sizeof(*pool), GFP_KERNEL);
drivers/soc/ti/knav_qmss_queue.c
772
if (!pool) {
drivers/soc/ti/knav_qmss_queue.c
790
pool->queue = knav_queue_open(name, KNAV_QUEUE_GP, 0);
drivers/soc/ti/knav_qmss_queue.c
791
if (IS_ERR(pool->queue)) {
drivers/soc/ti/knav_qmss_queue.c
794
name, PTR_ERR(pool->queue));
drivers/soc/ti/knav_qmss_queue.c
795
ret = PTR_ERR(pool->queue);
drivers/soc/ti/knav_qmss_queue.c
799
pool->name = kstrndup(name, KNAV_NAME_SIZE - 1, GFP_KERNEL);
drivers/soc/ti/knav_qmss_queue.c
800
pool->kdev = kdev;
drivers/soc/ti/knav_qmss_queue.c
801
pool->dev = kdev->dev;
drivers/soc/ti/knav_qmss_queue.c
828
pool->region = region;
drivers/soc/ti/knav_qmss_queue.c
829
pool->num_desc = num_desc;
drivers/soc/ti/knav_qmss_queue.c
830
pool->region_offset = last_offset;
drivers/soc/ti/knav_qmss_queue.c
832
list_add_tail(&pool->list, &kdev->pools);
drivers/soc/ti/knav_qmss_queue.c
833
list_add_tail(&pool->region_inst, node);
drivers/soc/ti/knav_qmss_queue.c
842
kdesc_fill_pool(pool);
drivers/soc/ti/knav_qmss_queue.c
843
return pool;
drivers/soc/ti/knav_qmss_queue.c
848
kfree(pool->name);
drivers/soc/ti/knav_qmss_queue.c
849
devm_kfree(kdev->dev, pool);
drivers/soc/ti/knav_qmss_queue.c
860
struct knav_pool *pool = ph;
drivers/soc/ti/knav_qmss_queue.c
862
if (!pool)
drivers/soc/ti/knav_qmss_queue.c
865
if (!pool->region)
drivers/soc/ti/knav_qmss_queue.c
868
kdesc_empty_pool(pool);
drivers/soc/ti/knav_qmss_queue.c
871
pool->region->used_desc -= pool->num_desc;
drivers/soc/ti/knav_qmss_queue.c
872
list_del(&pool->region_inst);
drivers/soc/ti/knav_qmss_queue.c
873
list_del(&pool->list);
drivers/soc/ti/knav_qmss_queue.c
876
kfree(pool->name);
drivers/soc/ti/knav_qmss_queue.c
877
devm_kfree(kdev->dev, pool);
drivers/soc/ti/knav_qmss_queue.c
890
struct knav_pool *pool = ph;
drivers/soc/ti/knav_qmss_queue.c
895
dma = knav_queue_pop(pool->queue, &size);
drivers/soc/ti/knav_qmss_queue.c
898
data = knav_pool_desc_dma_to_virt(pool, dma);
drivers/soc/ti/knav_qmss_queue.c
910
struct knav_pool *pool = ph;
drivers/soc/ti/knav_qmss_queue.c
912
dma = knav_pool_desc_virt_to_dma(pool, desc);
drivers/soc/ti/knav_qmss_queue.c
913
knav_queue_push(pool->queue, dma, pool->region->desc_size, 0);
drivers/soc/ti/knav_qmss_queue.c
930
struct knav_pool *pool = ph;
drivers/soc/ti/knav_qmss_queue.c
931
*dma = knav_pool_desc_virt_to_dma(pool, desc);
drivers/soc/ti/knav_qmss_queue.c
932
size = min(size, pool->region->desc_size);
drivers/soc/ti/knav_qmss_queue.c
935
dma_sync_single_for_device(pool->dev, *dma, size, DMA_TO_DEVICE);
drivers/soc/ti/knav_qmss_queue.c
955
struct knav_pool *pool = ph;
drivers/soc/ti/knav_qmss_queue.c
959
desc_sz = min(dma_sz, pool->region->desc_size);
drivers/soc/ti/knav_qmss_queue.c
960
desc = knav_pool_desc_dma_to_virt(pool, dma);
drivers/soc/ti/knav_qmss_queue.c
961
dma_sync_single_for_cpu(pool->dev, dma, desc_sz, DMA_FROM_DEVICE);
drivers/soc/ti/knav_qmss_queue.c
974
struct knav_pool *pool = ph;
drivers/soc/ti/knav_qmss_queue.c
975
return knav_queue_get_count(pool->queue);
drivers/soc/ti/knav_qmss_queue.c
985
struct knav_pool *pool;
drivers/staging/media/atomisp/pci/runtime/rmgr/interface/ia_css_rmgr_vbuf.h
48
struct ia_css_rmgr_vbuf_pool *pool);
drivers/staging/media/atomisp/pci/runtime/rmgr/interface/ia_css_rmgr_vbuf.h
56
struct ia_css_rmgr_vbuf_pool *pool);
drivers/staging/media/atomisp/pci/runtime/rmgr/interface/ia_css_rmgr_vbuf.h
65
struct ia_css_rmgr_vbuf_pool *pool,
drivers/staging/media/atomisp/pci/runtime/rmgr/interface/ia_css_rmgr_vbuf.h
75
struct ia_css_rmgr_vbuf_pool *pool,
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
119
int ia_css_rmgr_init_vbuf(struct ia_css_rmgr_vbuf_pool *pool)
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
125
assert(pool);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
126
if (!pool)
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
129
if (pool->recycle && pool->size) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
133
pool->size;
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
134
pool->handles = kvmalloc(bytes_needed, GFP_KERNEL);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
135
if (pool->handles)
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
136
memset(pool->handles, 0, bytes_needed);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
141
pool->size = 0;
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
142
pool->handles = NULL;
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
152
void ia_css_rmgr_uninit_vbuf(struct ia_css_rmgr_vbuf_pool *pool)
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
157
if (!pool) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
161
if (pool->handles) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
163
for (i = 0; i < pool->size; i++) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
164
if (pool->handles[i]) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
167
pool->handles[i]->vptr,
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
168
pool->handles[i]->count);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
170
hmm_free(pool->handles[i]->vptr);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
172
ia_css_rmgr_refcount_release_vbuf(&pool->handles[i]);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
176
kvfree(pool->handles);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
177
pool->handles = NULL;
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
188
void rmgr_push_handle(struct ia_css_rmgr_vbuf_pool *pool,
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
194
assert(pool);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
195
assert(pool->recycle);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
196
assert(pool->handles);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
198
for (i = 0; i < pool->size; i++) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
199
if (!pool->handles[i]) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
201
pool->handles[i] = *handle;
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
216
void rmgr_pop_handle(struct ia_css_rmgr_vbuf_pool *pool,
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
221
assert(pool);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
222
assert(pool->recycle);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
223
assert(pool->handles);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
226
for (i = 0; i < pool->size; i++) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
227
if ((pool->handles[i]) &&
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
228
(pool->handles[i]->size == (*handle)->size)) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
229
*handle = pool->handles[i];
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
230
pool->handles[i] = NULL;
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
245
void ia_css_rmgr_acq_vbuf(struct ia_css_rmgr_vbuf_pool *pool,
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
248
if ((!pool) || (!handle) || (!*handle)) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
253
if (pool->copy_on_write) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
273
if (pool->recycle) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
275
rmgr_pop_handle(pool, &new_handle);
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
301
void ia_css_rmgr_rel_vbuf(struct ia_css_rmgr_vbuf_pool *pool,
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
304
if ((!pool) || (!handle) || (!*handle)) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
310
if (!pool->recycle) {
drivers/staging/media/atomisp/pci/runtime/rmgr/src/rmgr_vbuf.c
315
rmgr_push_handle(pool, handle);
drivers/staging/media/ipu3/ipu3-css-pool.c
25
void imgu_css_pool_cleanup(struct imgu_device *imgu, struct imgu_css_pool *pool)
drivers/staging/media/ipu3/ipu3-css-pool.c
30
imgu_dmamap_free(imgu, &pool->entry[i].param);
drivers/staging/media/ipu3/ipu3-css-pool.c
33
int imgu_css_pool_init(struct imgu_device *imgu, struct imgu_css_pool *pool,
drivers/staging/media/ipu3/ipu3-css-pool.c
39
pool->entry[i].valid = false;
drivers/staging/media/ipu3/ipu3-css-pool.c
41
pool->entry[i].param.vaddr = NULL;
drivers/staging/media/ipu3/ipu3-css-pool.c
45
if (!imgu_dmamap_alloc(imgu, &pool->entry[i].param, size))
drivers/staging/media/ipu3/ipu3-css-pool.c
49
pool->last = IPU3_CSS_POOL_SIZE;
drivers/staging/media/ipu3/ipu3-css-pool.c
54
imgu_css_pool_cleanup(imgu, pool);
drivers/staging/media/ipu3/ipu3-css-pool.c
61
void imgu_css_pool_get(struct imgu_css_pool *pool)
drivers/staging/media/ipu3/ipu3-css-pool.c
64
u32 n = (pool->last + 1) % IPU3_CSS_POOL_SIZE;
drivers/staging/media/ipu3/ipu3-css-pool.c
66
pool->entry[n].valid = true;
drivers/staging/media/ipu3/ipu3-css-pool.c
67
pool->last = n;
drivers/staging/media/ipu3/ipu3-css-pool.c
73
void imgu_css_pool_put(struct imgu_css_pool *pool)
drivers/staging/media/ipu3/ipu3-css-pool.c
75
pool->entry[pool->last].valid = false;
drivers/staging/media/ipu3/ipu3-css-pool.c
76
pool->last = (pool->last + IPU3_CSS_POOL_SIZE - 1) % IPU3_CSS_POOL_SIZE;
drivers/staging/media/ipu3/ipu3-css-pool.c
89
imgu_css_pool_last(struct imgu_css_pool *pool, unsigned int n)
drivers/staging/media/ipu3/ipu3-css-pool.c
92
int i = (pool->last + IPU3_CSS_POOL_SIZE - n) % IPU3_CSS_POOL_SIZE;
drivers/staging/media/ipu3/ipu3-css-pool.c
96
if (!pool->entry[i].valid)
drivers/staging/media/ipu3/ipu3-css-pool.c
99
return &pool->entry[i].param;
drivers/staging/media/ipu3/ipu3-css-pool.h
46
struct imgu_css_pool *pool);
drivers/staging/media/ipu3/ipu3-css-pool.h
47
int imgu_css_pool_init(struct imgu_device *imgu, struct imgu_css_pool *pool,
drivers/staging/media/ipu3/ipu3-css-pool.h
49
void imgu_css_pool_get(struct imgu_css_pool *pool);
drivers/staging/media/ipu3/ipu3-css-pool.h
50
void imgu_css_pool_put(struct imgu_css_pool *pool);
drivers/staging/media/ipu3/ipu3-css-pool.h
51
const struct imgu_css_map *imgu_css_pool_last(struct imgu_css_pool *pool,
drivers/staging/media/ipu3/ipu3-css.c
1027
if (imgu_css_pool_init(imgu, &css_pipe->pool.parameter_set_info,
drivers/staging/media/ipu3/ipu3-css.c
1029
imgu_css_pool_init(imgu, &css_pipe->pool.acc,
drivers/staging/media/ipu3/ipu3-css.c
1031
imgu_css_pool_init(imgu, &css_pipe->pool.gdc,
drivers/staging/media/ipu3/ipu3-css.c
1035
imgu_css_pool_init(imgu, &css_pipe->pool.obgrid,
drivers/staging/media/ipu3/ipu3-css.c
1042
&css_pipe->pool.binary_params_p[i],
drivers/staging/media/ipu3/ipu3-css.c
2133
imgu_css_pool_get(&css_pipe->pool.parameter_set_info);
drivers/staging/media/ipu3/ipu3-css.c
2134
param_set = imgu_css_pool_last(&css_pipe->pool.parameter_set_info,
drivers/staging/media/ipu3/ipu3-css.c
2138
map = imgu_css_pool_last(&css_pipe->pool.acc, 0);
drivers/staging/media/ipu3/ipu3-css.c
2140
imgu_css_pool_get(&css_pipe->pool.acc);
drivers/staging/media/ipu3/ipu3-css.c
2141
map = imgu_css_pool_last(&css_pipe->pool.acc, 0);
drivers/staging/media/ipu3/ipu3-css.c
2147
map = imgu_css_pool_last(&css_pipe->pool.binary_params_p[m], 0);
drivers/staging/media/ipu3/ipu3-css.c
2151
imgu_css_pool_get(&css_pipe->pool.binary_params_p[m]);
drivers/staging/media/ipu3/ipu3-css.c
2152
map = imgu_css_pool_last(&css_pipe->pool.binary_params_p[m], 0);
drivers/staging/media/ipu3/ipu3-css.c
2158
map = imgu_css_pool_last(&css_pipe->pool.binary_params_p[m], 0);
drivers/staging/media/ipu3/ipu3-css.c
2161
imgu_css_pool_get(&css_pipe->pool.binary_params_p[m]);
drivers/staging/media/ipu3/ipu3-css.c
2162
map = imgu_css_pool_last(&css_pipe->pool.binary_params_p[m], 0);
drivers/staging/media/ipu3/ipu3-css.c
2169
map = imgu_css_pool_last(&css_pipe->pool.acc, 1);
drivers/staging/media/ipu3/ipu3-css.c
2180
map = imgu_css_pool_last(&css_pipe->pool.binary_params_p[m], 1);
drivers/staging/media/ipu3/ipu3-css.c
2189
map = imgu_css_pool_last(&css_pipe->pool.binary_params_p[m], 1);
drivers/staging/media/ipu3/ipu3-css.c
2202
map = imgu_css_pool_last(&css_pipe->pool.gdc, 0);
drivers/staging/media/ipu3/ipu3-css.c
2204
imgu_css_pool_get(&css_pipe->pool.gdc);
drivers/staging/media/ipu3/ipu3-css.c
2205
map = imgu_css_pool_last(&css_pipe->pool.gdc, 0);
drivers/staging/media/ipu3/ipu3-css.c
2219
map = imgu_css_pool_last(&css_pipe->pool.obgrid, 0);
drivers/staging/media/ipu3/ipu3-css.c
2221
imgu_css_pool_get(&css_pipe->pool.obgrid);
drivers/staging/media/ipu3/ipu3-css.c
2222
map = imgu_css_pool_last(&css_pipe->pool.obgrid, 0);
drivers/staging/media/ipu3/ipu3-css.c
2236
map = imgu_css_pool_last(&css_pipe->pool.acc, 0);
drivers/staging/media/ipu3/ipu3-css.c
2239
map = imgu_css_pool_last(&css_pipe->pool.gdc, 0);
drivers/staging/media/ipu3/ipu3-css.c
2243
map = imgu_css_pool_last(&css_pipe->pool.obgrid, 0);
drivers/staging/media/ipu3/ipu3-css.c
2249
map = imgu_css_pool_last(&css_pipe->pool.binary_params_p[m], 0);
drivers/staging/media/ipu3/ipu3-css.c
2254
map = imgu_css_pool_last(&css_pipe->pool.parameter_set_info, 0);
drivers/staging/media/ipu3/ipu3-css.c
2293
imgu_css_pool_put(&css_pipe->pool.parameter_set_info);
drivers/staging/media/ipu3/ipu3-css.c
2295
imgu_css_pool_put(&css_pipe->pool.acc);
drivers/staging/media/ipu3/ipu3-css.c
2297
imgu_css_pool_put(&css_pipe->pool.gdc);
drivers/staging/media/ipu3/ipu3-css.c
2299
imgu_css_pool_put(&css_pipe->pool.obgrid);
drivers/staging/media/ipu3/ipu3-css.c
2302
&css_pipe->pool.binary_params_p
drivers/staging/media/ipu3/ipu3-css.c
2306
&css_pipe->pool.binary_params_p
drivers/staging/media/ipu3/ipu3-css.c
656
imgu_css_pool_cleanup(imgu, &css_pipe->pool.parameter_set_info);
drivers/staging/media/ipu3/ipu3-css.c
657
imgu_css_pool_cleanup(imgu, &css_pipe->pool.acc);
drivers/staging/media/ipu3/ipu3-css.c
658
imgu_css_pool_cleanup(imgu, &css_pipe->pool.gdc);
drivers/staging/media/ipu3/ipu3-css.c
659
imgu_css_pool_cleanup(imgu, &css_pipe->pool.obgrid);
drivers/staging/media/ipu3/ipu3-css.c
662
imgu_css_pool_cleanup(imgu, &css_pipe->pool.binary_params_p[i]);
drivers/staging/media/ipu3/ipu3-css.h
142
} pool;
drivers/staging/octeon/ethernet-mem.c
102
cvmx_fpa_free(fpa, pool, 0);
drivers/staging/octeon/ethernet-mem.c
114
static void cvm_oct_free_hw_memory(int pool, int size, int elements)
drivers/staging/octeon/ethernet-mem.c
120
fpa = cvmx_fpa_alloc(pool);
drivers/staging/octeon/ethernet-mem.c
131
pool, elements);
drivers/staging/octeon/ethernet-mem.c
134
pool, elements);
drivers/staging/octeon/ethernet-mem.c
137
int cvm_oct_mem_fill_fpa(int pool, int size, int elements)
drivers/staging/octeon/ethernet-mem.c
141
if (pool == CVMX_FPA_PACKET_POOL)
drivers/staging/octeon/ethernet-mem.c
142
freed = cvm_oct_fill_hw_skbuff(pool, size, elements);
drivers/staging/octeon/ethernet-mem.c
144
freed = cvm_oct_fill_hw_memory(pool, size, elements);
drivers/staging/octeon/ethernet-mem.c
148
void cvm_oct_mem_empty_fpa(int pool, int size, int elements)
drivers/staging/octeon/ethernet-mem.c
150
if (pool == CVMX_FPA_PACKET_POOL)
drivers/staging/octeon/ethernet-mem.c
151
cvm_oct_free_hw_skbuff(pool, size, elements);
drivers/staging/octeon/ethernet-mem.c
153
cvm_oct_free_hw_memory(pool, size, elements);
drivers/staging/octeon/ethernet-mem.c
24
static int cvm_oct_fill_hw_skbuff(int pool, int size, int elements)
drivers/staging/octeon/ethernet-mem.c
35
cvmx_fpa_free(skb->data, pool, size / 128);
drivers/staging/octeon/ethernet-mem.c
47
static void cvm_oct_free_hw_skbuff(int pool, int size, int elements)
drivers/staging/octeon/ethernet-mem.c
52
memory = cvmx_fpa_alloc(pool);
drivers/staging/octeon/ethernet-mem.c
63
pool, elements);
drivers/staging/octeon/ethernet-mem.c
66
pool, elements);
drivers/staging/octeon/ethernet-mem.c
77
static int cvm_oct_fill_hw_memory(int pool, int size, int elements)
drivers/staging/octeon/ethernet-mem.c
97
elements * size, pool);
drivers/staging/octeon/ethernet-mem.h
8
int cvm_oct_mem_fill_fpa(int pool, int size, int elements);
drivers/staging/octeon/ethernet-mem.h
9
void cvm_oct_mem_empty_fpa(int pool, int size, int elements);
drivers/staging/octeon/ethernet-tx.c
263
hw_buffer.s.pool = 0;
drivers/staging/octeon/ethernet-tx.c
267
hw_buffer.s.pool = 0;
drivers/staging/octeon/ethernet-tx.c
570
work->packet_ptr.s.pool = CVMX_FPA_PACKET_POOL;
drivers/staging/octeon/ethernet.c
186
segment_ptr.s.pool,
drivers/staging/octeon/octeon-stubs.h
1248
static inline void *cvmx_fpa_alloc(uint64_t pool)
drivers/staging/octeon/octeon-stubs.h
1253
static inline void cvmx_fpa_free(void *ptr, uint64_t pool,
drivers/staging/octeon/octeon-stubs.h
183
uint64_t pool:3;
drivers/tee/amdtee/amdtee_private.h
42
struct tee_shm_pool *pool;
drivers/tee/amdtee/core.c
458
struct tee_shm_pool *pool;
drivers/tee/amdtee/core.c
478
pool = amdtee_config_shm();
drivers/tee/amdtee/core.c
479
if (IS_ERR(pool)) {
drivers/tee/amdtee/core.c
481
rc = PTR_ERR(pool);
drivers/tee/amdtee/core.c
485
teedev = tee_device_alloc(&amdtee_desc, NULL, pool, amdtee);
drivers/tee/amdtee/core.c
496
amdtee->pool = pool;
drivers/tee/amdtee/core.c
506
tee_shm_pool_free(pool);
drivers/tee/amdtee/core.c
530
tee_shm_pool_free(amdtee->pool);
drivers/tee/amdtee/shm_pool.c
11
static int pool_op_alloc(struct tee_shm_pool *pool, struct tee_shm *shm,
drivers/tee/amdtee/shm_pool.c
41
static void pool_op_free(struct tee_shm_pool *pool, struct tee_shm *shm)
drivers/tee/amdtee/shm_pool.c
49
static void pool_op_destroy_pool(struct tee_shm_pool *pool)
drivers/tee/amdtee/shm_pool.c
51
kfree(pool);
drivers/tee/amdtee/shm_pool.c
62
struct tee_shm_pool *pool = kzalloc_obj(*pool);
drivers/tee/amdtee/shm_pool.c
64
if (!pool)
drivers/tee/amdtee/shm_pool.c
67
pool->ops = &pool_ops;
drivers/tee/amdtee/shm_pool.c
69
return pool;
drivers/tee/optee/core.c
214
tee_shm_pool_free(optee->pool);
drivers/tee/optee/ffa_abi.c
1038
struct tee_protmem_pool *pool;
drivers/tee/optee/ffa_abi.c
1042
pool = optee_protmem_alloc_dyn_pool(optee, id);
drivers/tee/optee/ffa_abi.c
1043
if (IS_ERR(pool))
drivers/tee/optee/ffa_abi.c
1044
return PTR_ERR(pool);
drivers/tee/optee/ffa_abi.c
1046
rc = tee_device_register_dma_heap(optee->teedev, id, pool);
drivers/tee/optee/ffa_abi.c
1048
pool->ops->destroy_pool(pool);
drivers/tee/optee/ffa_abi.c
1060
struct tee_shm_pool *pool;
drivers/tee/optee/ffa_abi.c
1089
pool = optee_ffa_shm_pool_alloc_pages();
drivers/tee/optee/ffa_abi.c
1090
if (IS_ERR(pool)) {
drivers/tee/optee/ffa_abi.c
1091
rc = PTR_ERR(pool);
drivers/tee/optee/ffa_abi.c
1094
optee->pool = pool;
drivers/tee/optee/ffa_abi.c
1105
teedev = tee_device_alloc(&optee_ffa_clnt_desc, NULL, optee->pool,
drivers/tee/optee/ffa_abi.c
1113
teedev = tee_device_alloc(&optee_ffa_supp_desc, NULL, optee->pool,
drivers/tee/optee/ffa_abi.c
1190
tee_shm_pool_free(pool);
drivers/tee/optee/ffa_abi.c
375
static int pool_ffa_op_alloc(struct tee_shm_pool *pool,
drivers/tee/optee/ffa_abi.c
382
static void pool_ffa_op_free(struct tee_shm_pool *pool,
drivers/tee/optee/ffa_abi.c
388
static void pool_ffa_op_destroy_pool(struct tee_shm_pool *pool)
drivers/tee/optee/ffa_abi.c
390
kfree(pool);
drivers/tee/optee/ffa_abi.c
407
struct tee_shm_pool *pool = kzalloc_obj(*pool);
drivers/tee/optee/ffa_abi.c
409
if (!pool)
drivers/tee/optee/ffa_abi.c
412
pool->ops = &pool_ffa_ops;
drivers/tee/optee/ffa_abi.c
414
return pool;
drivers/tee/optee/optee_private.h
259
struct tee_shm_pool *pool;
drivers/tee/optee/protmem.c
132
static int protmem_pool_op_dyn_alloc(struct tee_protmem_pool *pool,
drivers/tee/optee/protmem.c
136
struct optee_protmem_dyn_pool *rp = to_protmem_dyn_pool(pool);
drivers/tee/optee/protmem.c
16
struct tee_protmem_pool pool;
drivers/tee/optee/protmem.c
167
static void protmem_pool_op_dyn_free(struct tee_protmem_pool *pool,
drivers/tee/optee/protmem.c
170
struct optee_protmem_dyn_pool *rp = to_protmem_dyn_pool(pool);
drivers/tee/optee/protmem.c
180
static int protmem_pool_op_dyn_update_shm(struct tee_protmem_pool *pool,
drivers/tee/optee/protmem.c
185
struct optee_protmem_dyn_pool *rp = to_protmem_dyn_pool(pool);
drivers/tee/optee/protmem.c
192
static void pool_op_dyn_destroy_pool(struct tee_protmem_pool *pool)
drivers/tee/optee/protmem.c
194
struct optee_protmem_dyn_pool *rp = to_protmem_dyn_pool(pool);
drivers/tee/optee/protmem.c
30
to_protmem_dyn_pool(struct tee_protmem_pool *pool)
drivers/tee/optee/protmem.c
32
return container_of(pool, struct optee_protmem_dyn_pool, pool);
drivers/tee/optee/protmem.c
323
rp->pool.ops = &protmem_pool_ops_dyn;
drivers/tee/optee/protmem.c
328
return &rp->pool;
drivers/tee/optee/smc_abi.c
1668
struct tee_protmem_pool *pool;
drivers/tee/optee/smc_abi.c
1693
pool = tee_protmem_static_pool_alloc(res.result.start, res.result.size);
drivers/tee/optee/smc_abi.c
1694
if (IS_ERR(pool))
drivers/tee/optee/smc_abi.c
1697
return pool;
drivers/tee/optee/smc_abi.c
1709
struct tee_protmem_pool *pool = ERR_PTR(-EINVAL);
drivers/tee/optee/smc_abi.c
1716
pool = static_protmem_pool_init(optee);
drivers/tee/optee/smc_abi.c
1717
if (dyn_protm && IS_ERR(pool))
drivers/tee/optee/smc_abi.c
1718
pool = optee_protmem_alloc_dyn_pool(optee, heap_id);
drivers/tee/optee/smc_abi.c
1719
if (IS_ERR(pool))
drivers/tee/optee/smc_abi.c
1720
return PTR_ERR(pool);
drivers/tee/optee/smc_abi.c
1722
rc = tee_device_register_dma_heap(optee->teedev, heap_id, pool);
drivers/tee/optee/smc_abi.c
1724
pool->ops->destroy_pool(pool);
drivers/tee/optee/smc_abi.c
1732
struct tee_shm_pool *pool = ERR_PTR(-EINVAL);
drivers/tee/optee/smc_abi.c
1792
pool = optee_shm_pool_alloc_pages();
drivers/tee/optee/smc_abi.c
1798
if (IS_ERR(pool) && (sec_caps & OPTEE_SMC_SEC_CAP_HAVE_RESERVED_SHM)) {
drivers/tee/optee/smc_abi.c
1813
pool = optee_config_shm_memremap(invoke_fn, &memremaped_shm);
drivers/tee/optee/smc_abi.c
1816
if (IS_ERR(pool))
drivers/tee/optee/smc_abi.c
1817
return PTR_ERR(pool);
drivers/tee/optee/smc_abi.c
1836
teedev = tee_device_alloc(&optee_clnt_desc, NULL, pool, optee);
drivers/tee/optee/smc_abi.c
1843
teedev = tee_device_alloc(&optee_supp_desc, NULL, pool, optee);
drivers/tee/optee/smc_abi.c
1863
optee->pool = pool;
drivers/tee/optee/smc_abi.c
1952
tee_shm_pool_free(pool);
drivers/tee/optee/smc_abi.c
588
static int pool_op_alloc(struct tee_shm_pool *pool,
drivers/tee/optee/smc_abi.c
601
static void pool_op_free(struct tee_shm_pool *pool,
drivers/tee/optee/smc_abi.c
610
static void pool_op_destroy_pool(struct tee_shm_pool *pool)
drivers/tee/optee/smc_abi.c
612
kfree(pool);
drivers/tee/optee/smc_abi.c
629
struct tee_shm_pool *pool = kzalloc_obj(*pool);
drivers/tee/optee/smc_abi.c
631
if (!pool)
drivers/tee/optee/smc_abi.c
634
pool->ops = &pool_ops;
drivers/tee/optee/smc_abi.c
636
return pool;
drivers/tee/qcomtee/call.c
703
struct tee_shm_pool *pool;
drivers/tee/qcomtee/call.c
712
pool = qcomtee_shm_pool_alloc();
drivers/tee/qcomtee/call.c
713
if (IS_ERR(pool)) {
drivers/tee/qcomtee/call.c
714
err = PTR_ERR(pool);
drivers/tee/qcomtee/call.c
719
teedev = tee_device_alloc(&qcomtee_desc, NULL, pool, qcomtee);
drivers/tee/qcomtee/call.c
727
qcomtee->pool = pool;
drivers/tee/qcomtee/call.c
771
tee_shm_pool_free(pool);
drivers/tee/qcomtee/call.c
797
tee_shm_pool_free(qcomtee->pool);
drivers/tee/qcomtee/qcomtee.h
33
struct tee_shm_pool *pool;
drivers/tee/qcomtee/shm.c
117
static int pool_op_alloc(struct tee_shm_pool *pool, struct tee_shm *shm,
drivers/tee/qcomtee/shm.c
123
static void pool_op_free(struct tee_shm_pool *pool, struct tee_shm *shm)
drivers/tee/qcomtee/shm.c
128
static void pool_op_destroy_pool(struct tee_shm_pool *pool)
drivers/tee/qcomtee/shm.c
130
kfree(pool);
drivers/tee/qcomtee/shm.c
141
struct tee_shm_pool *pool;
drivers/tee/qcomtee/shm.c
143
pool = kzalloc_obj(*pool);
drivers/tee/qcomtee/shm.c
144
if (!pool)
drivers/tee/qcomtee/shm.c
147
pool->ops = &pool_ops;
drivers/tee/qcomtee/shm.c
149
return pool;
drivers/tee/tee_core.c
1039
struct tee_shm_pool *pool,
drivers/tee/tee_core.c
1105
teedev->pool = pool;
drivers/tee/tee_core.c
1288
teedev->pool = NULL;
drivers/tee/tee_heap.c
153
buf->heap->pool->ops->free(buf->heap->pool, &buf->table);
drivers/tee/tee_heap.c
176
struct tee_protmem_pool *pool;
drivers/tee/tee_heap.c
183
pool = h->pool;
drivers/tee/tee_heap.c
199
rc = pool->ops->alloc(pool, &buf->table, len, &buf->offs);
drivers/tee/tee_heap.c
21
struct tee_protmem_pool *pool;
drivers/tee/tee_heap.c
216
pool->ops->free(pool, &buf->table);
drivers/tee/tee_heap.c
245
struct tee_protmem_pool *pool)
drivers/tee/tee_heap.c
269
h->pool = pool;
drivers/tee/tee_heap.c
287
struct tee_protmem_pool *pool)
drivers/tee/tee_heap.c
304
h->pool = pool;
drivers/tee/tee_heap.c
309
rc = alloc_dma_heap(teedev, id, pool);
drivers/tee/tee_heap.c
359
rc = buf->heap->pool->ops->update_shm(buf->heap->pool, &buf->table,
drivers/tee/tee_heap.c
369
struct tee_protmem_pool *pool __always_unused)
drivers/tee/tee_heap.c
392
to_protmem_static_pool(struct tee_protmem_pool *pool)
drivers/tee/tee_heap.c
394
return container_of(pool, struct tee_protmem_static_pool, pool);
drivers/tee/tee_heap.c
397
static int protmem_pool_op_static_alloc(struct tee_protmem_pool *pool,
drivers/tee/tee_heap.c
401
struct tee_protmem_static_pool *stp = to_protmem_static_pool(pool);
drivers/tee/tee_heap.c
41
struct tee_protmem_pool pool;
drivers/tee/tee_heap.c
421
static void protmem_pool_op_static_free(struct tee_protmem_pool *pool,
drivers/tee/tee_heap.c
424
struct tee_protmem_static_pool *stp = to_protmem_static_pool(pool);
drivers/tee/tee_heap.c
433
static int protmem_pool_op_static_update_shm(struct tee_protmem_pool *pool,
drivers/tee/tee_heap.c
438
struct tee_protmem_static_pool *stp = to_protmem_static_pool(pool);
drivers/tee/tee_heap.c
446
static void protmem_pool_op_static_destroy_pool(struct tee_protmem_pool *pool)
drivers/tee/tee_heap.c
448
struct tee_protmem_static_pool *stp = to_protmem_static_pool(pool);
drivers/tee/tee_heap.c
489
stp->pool.ops = &protmem_pool_ops_static;
drivers/tee/tee_heap.c
491
return &stp->pool;
drivers/tee/tee_heap.c
53
h->pool->ops->destroy_pool(h->pool);
drivers/tee/tee_heap.c
55
h->pool = NULL;
drivers/tee/tee_shm.c
109
rc = teedev->pool->ops->alloc(teedev->pool, shm, size, align);
drivers/tee/tee_shm.c
56
teedev->pool->ops->free(teedev->pool, shm);
drivers/tee/tee_shm.c
85
if (!teedev->pool) {
drivers/tee/tee_shm_pool.c
12
static int pool_op_gen_alloc(struct tee_shm_pool *pool, struct tee_shm *shm,
drivers/tee/tee_shm_pool.c
16
struct gen_pool *genpool = pool->private_data;
drivers/tee/tee_shm_pool.c
37
static void pool_op_gen_free(struct tee_shm_pool *pool, struct tee_shm *shm)
drivers/tee/tee_shm_pool.c
39
gen_pool_free(pool->private_data, (unsigned long)shm->kaddr,
drivers/tee/tee_shm_pool.c
44
static void pool_op_gen_destroy_pool(struct tee_shm_pool *pool)
drivers/tee/tee_shm_pool.c
46
gen_pool_destroy(pool->private_data);
drivers/tee/tee_shm_pool.c
47
kfree(pool);
drivers/tee/tee_shm_pool.c
61
struct tee_shm_pool *pool;
drivers/tee/tee_shm_pool.c
68
pool = kzalloc_obj(*pool);
drivers/tee/tee_shm_pool.c
69
if (!pool)
drivers/tee/tee_shm_pool.c
72
pool->private_data = gen_pool_create(min_alloc_order, -1);
drivers/tee/tee_shm_pool.c
73
if (!pool->private_data) {
drivers/tee/tee_shm_pool.c
78
rc = gen_pool_add_virt(pool->private_data, vaddr, paddr, size, -1);
drivers/tee/tee_shm_pool.c
80
gen_pool_destroy(pool->private_data);
drivers/tee/tee_shm_pool.c
84
pool->ops = &pool_ops_generic;
drivers/tee/tee_shm_pool.c
86
return pool;
drivers/tee/tee_shm_pool.c
88
kfree(pool);
drivers/tee/tstee/core.c
355
static int pool_op_alloc(struct tee_shm_pool *pool, struct tee_shm *shm,
drivers/tee/tstee/core.c
361
static void pool_op_free(struct tee_shm_pool *pool, struct tee_shm *shm)
drivers/tee/tstee/core.c
366
static void pool_op_destroy_pool(struct tee_shm_pool *pool)
drivers/tee/tstee/core.c
368
kfree(pool);
drivers/tee/tstee/core.c
379
struct tee_shm_pool *pool = kzalloc_obj(*pool);
drivers/tee/tstee/core.c
381
if (!pool)
drivers/tee/tstee/core.c
384
pool->ops = &pool_ops;
drivers/tee/tstee/core.c
386
return pool;
drivers/tee/tstee/core.c
423
tstee->pool = tstee_create_shm_pool();
drivers/tee/tstee/core.c
424
if (IS_ERR(tstee->pool)) {
drivers/tee/tstee/core.c
425
rc = PTR_ERR(tstee->pool);
drivers/tee/tstee/core.c
426
tstee->pool = NULL;
drivers/tee/tstee/core.c
430
tstee->teedev = tee_device_alloc(&tstee_desc, NULL, tstee->pool, tstee);
drivers/tee/tstee/core.c
448
tee_shm_pool_free(tstee->pool);
drivers/tee/tstee/core.c
459
tee_shm_pool_free(tstee->pool);
drivers/tee/tstee/tstee_private.h
81
struct tee_shm_pool *pool;
drivers/usb/core/buffer.c
105
dma_pool_destroy(hcd->pool[i]);
drivers/usb/core/buffer.c
106
hcd->pool[i] = NULL;
drivers/usb/core/buffer.c
139
return dma_pool_alloc(hcd->pool[i], mem_flags, dma);
drivers/usb/core/buffer.c
169
dma_pool_free(hcd->pool[i], addr, dma);
drivers/usb/core/buffer.c
78
hcd->pool[i] = dma_pool_create(name, hcd->self.sysdev,
drivers/usb/core/buffer.c
80
if (!hcd->pool[i]) {
drivers/usb/gadget/function/u_serial.c
239
struct list_head *pool = &port->write_pool;
drivers/usb/gadget/function/u_serial.c
249
while (!port->write_busy && !list_empty(pool)) {
drivers/usb/gadget/function/u_serial.c
256
req = list_entry(pool->next, struct usb_request, list);
drivers/usb/gadget/function/u_serial.c
287
list_add(&req->list, pool);
drivers/usb/gadget/function/u_serial.c
312
struct list_head *pool = &port->read_pool;
drivers/usb/gadget/function/u_serial.c
315
while (!list_empty(pool)) {
drivers/usb/gadget/function/u_serial.c
328
req = list_entry(pool->next, struct usb_request, list);
drivers/usb/gadget/function/u_serial.c
342
list_add(&req->list, pool);
drivers/usb/host/xhci-dbgtty.c
107
struct list_head *pool = &port->read_pool;
drivers/usb/host/xhci-dbgtty.c
109
while (!list_empty(pool)) {
drivers/usb/host/xhci-dbgtty.c
113
req = list_entry(pool->next, struct dbc_request, list_pool);
drivers/usb/host/xhci-dbgtty.c
122
list_add(&req->list_pool, pool);
drivers/usb/host/xhci-dbgtty.c
58
struct list_head *pool = &port->write_pool;
drivers/usb/host/xhci-dbgtty.c
62
while (!list_empty(pool)) {
drivers/usb/host/xhci-dbgtty.c
63
req = list_entry(pool->next, struct dbc_request, list_pool);
drivers/usb/host/xhci-dbgtty.c
77
list_add(&req->list_pool, pool);
drivers/usb/musb/cppi_dma.h
119
struct dma_pool *pool;
drivers/xen/swiotlb-xen.c
285
struct io_tlb_pool *pool;
drivers/xen/swiotlb-xen.c
297
pool = xen_swiotlb_find_pool(hwdev, dev_addr);
drivers/xen/swiotlb-xen.c
298
if (pool)
drivers/xen/swiotlb-xen.c
300
attrs, pool);
drivers/xen/swiotlb-xen.c
308
struct io_tlb_pool *pool;
drivers/xen/swiotlb-xen.c
317
pool = xen_swiotlb_find_pool(dev, dma_addr);
drivers/xen/swiotlb-xen.c
318
if (pool)
drivers/xen/swiotlb-xen.c
319
__swiotlb_sync_single_for_cpu(dev, paddr, size, dir, pool);
drivers/xen/swiotlb-xen.c
327
struct io_tlb_pool *pool;
drivers/xen/swiotlb-xen.c
329
pool = xen_swiotlb_find_pool(dev, dma_addr);
drivers/xen/swiotlb-xen.c
330
if (pool)
drivers/xen/swiotlb-xen.c
331
__swiotlb_sync_single_for_device(dev, paddr, size, dir, pool);
fs/ceph/addr.c
2364
s64 pool, struct ceph_string *pool_ns)
fs/ceph/addr.c
2380
if (pool < perm->pool)
fs/ceph/addr.c
2382
else if (pool > perm->pool)
fs/ceph/addr.c
2403
doutc(cl, "pool %lld ns %.*s no perm cached\n", pool,
fs/ceph/addr.c
2406
doutc(cl, "pool %lld no perm cached\n", pool);
fs/ceph/addr.c
2414
if (pool < perm->pool)
fs/ceph/addr.c
2416
else if (pool > perm->pool)
fs/ceph/addr.c
2446
rd_req->r_base_oloc.pool = pool;
fs/ceph/addr.c
2512
perm->pool = pool;
fs/ceph/addr.c
2531
doutc(cl, "pool %lld ns %.*s result = %d\n", pool,
fs/ceph/addr.c
2534
doutc(cl, "pool %lld result = %d\n", pool, err);
fs/ceph/addr.c
2543
s64 pool;
fs/ceph/addr.c
2565
pool = ci->i_layout.pool_id;
fs/ceph/addr.c
2570
doutc(cl, "pool %lld no read perm\n", pool);
fs/ceph/addr.c
2574
doutc(cl, "pool %lld no write perm\n", pool);
fs/ceph/addr.c
2581
ret = __ceph_pool_perm_get(ci, pool, pool_ns);
fs/ceph/addr.c
2593
if (pool == ci->i_layout.pool_id &&
fs/ceph/addr.c
2597
pool = ci->i_layout.pool_id;
fs/ceph/file.c
2903
src_oloc.pool = src_ci->i_layout.pool_id;
fs/ceph/file.c
2905
dst_oloc.pool = dst_ci->i_layout.pool_id;
fs/ceph/ioctl.c
212
oloc.pool = ci->i_layout.pool_id;
fs/ceph/mds_client.h
395
s64 pool;
fs/ceph/xattr.c
167
s64 pool = ci->i_layout.pool_id;
fs/ceph/xattr.c
171
pool_name = ceph_pg_pool_name_by_id(osdc->osdmap, pool);
fs/ceph/xattr.c
177
ret = ceph_fmt_xattr(val, size, "%lld", pool);
fs/ceph/xattr.c
422
XATTR_LAYOUT_FIELD(dir, layout, pool),
fs/ceph/xattr.c
479
XATTR_LAYOUT_FIELD(file, layout, pool),
fs/ceph/xattr.c
64
s64 pool = ci->i_layout.pool_id;
fs/ceph/xattr.c
75
pool_name = ceph_pg_pool_name_by_id(osdc->osdmap, pool);
fs/ceph/xattr.c
86
ci->i_layout.object_size, pool);
fs/crypto/crypto.c
327
mempool_t *pool;
fs/crypto/crypto.c
342
pool = mempool_create_page_pool(num_prealloc_crypto_pages, 0);
fs/crypto/crypto.c
343
if (!pool)
fs/crypto/crypto.c
346
smp_store_release(&fscrypt_bounce_page_pool, pool);
fs/nfsd/nfssvc.c
886
struct svc_pool *pool = rqstp->rq_pool;
fs/nfsd/nfssvc.c
912
if (pool->sp_nrthreads > pool->sp_nrthrmin) {
fs/nfsd/nfssvc.c
913
trace_nfsd_dynthread_kill(net, pool);
fs/nfsd/nfssvc.c
920
trace_nfsd_dynthread_trylock_fail(net, pool);
fs/nfsd/nfssvc.c
925
if (pool->sp_nrthreads < pool->sp_nrthrmax) {
fs/nfsd/nfssvc.c
927
if (pool->sp_nrthreads < pool->sp_nrthrmax) {
fs/nfsd/nfssvc.c
930
trace_nfsd_dynthread_start(net, pool);
fs/nfsd/nfssvc.c
931
ret = svc_new_thread(rqstp->rq_server, pool);
fs/nfsd/nfssvc.c
938
trace_nfsd_dynthread_trylock_fail(net, pool);
fs/nfsd/nfssvc.c
941
clear_bit(SP_TASK_STARTING, &pool->sp_flags);
fs/nfsd/trace.h
109
__entry->pool_id = pool->sp_id;
fs/nfsd/trace.h
110
__entry->nrthreads = pool->sp_nrthreads;
fs/nfsd/trace.h
111
__entry->nrthrmin = pool->sp_nrthrmin;
fs/nfsd/trace.h
112
__entry->nrthrmax = pool->sp_nrthrmax;
fs/nfsd/trace.h
122
TP_PROTO(const struct net *net, const struct svc_pool *pool), \
fs/nfsd/trace.h
123
TP_ARGS(net, pool))
fs/nfsd/trace.h
2069
int pool,
fs/nfsd/trace.h
2072
TP_ARGS(net, pool, nrthreads),
fs/nfsd/trace.h
2075
__field(int, pool)
fs/nfsd/trace.h
2080
__entry->pool = pool;
fs/nfsd/trace.h
2084
__entry->pool, __entry->nrthreads
fs/nfsd/trace.h
97
const struct svc_pool *pool
fs/nfsd/trace.h
99
TP_ARGS(net, pool),
fs/smb/client/smbdirect.c
1807
response = mempool_alloc(sc->recv_io.mem.pool, GFP_KERNEL);
fs/smb/client/smbdirect.c
1825
mempool_free(response, sc->recv_io.mem.pool);
fs/smb/client/smbdirect.c
1835
mempool_free(response, sc->recv_io.mem.pool);
fs/smb/client/smbdirect.c
1959
mempool_destroy(sc->send_io.mem.pool);
fs/smb/client/smbdirect.c
1962
mempool_destroy(sc->recv_io.mem.pool);
fs/smb/client/smbdirect.c
2012
mempool_destroy(sc->recv_io.mem.pool);
fs/smb/client/smbdirect.c
2014
mempool_destroy(sc->send_io.mem.pool);
fs/smb/client/smbdirect.c
2038
sc->send_io.mem.pool =
fs/smb/client/smbdirect.c
2041
if (!sc->send_io.mem.pool)
fs/smb/client/smbdirect.c
2059
sc->recv_io.mem.pool =
fs/smb/client/smbdirect.c
2062
if (!sc->recv_io.mem.pool)
fs/smb/client/smbdirect.c
2074
mempool_destroy(sc->recv_io.mem.pool);
fs/smb/client/smbdirect.c
2078
mempool_destroy(sc->send_io.mem.pool);
fs/smb/client/smbdirect.c
519
msg = mempool_alloc(sc->send_io.mem.pool, GFP_KERNEL);
fs/smb/client/smbdirect.c
557
mempool_free(msg, sc->send_io.mem.pool);
fs/smb/common/smbdirect/smbdirect_socket.h
162
mempool_t *pool;
fs/smb/common/smbdirect/smbdirect_socket.h
227
mempool_t *pool;
fs/smb/server/transport_rdma.c
2238
mempool_free(recvmsg, sc->recv_io.mem.pool);
fs/smb/server/transport_rdma.c
2240
mempool_destroy(sc->recv_io.mem.pool);
fs/smb/server/transport_rdma.c
2241
sc->recv_io.mem.pool = NULL;
fs/smb/server/transport_rdma.c
2246
mempool_destroy(sc->send_io.mem.pool);
fs/smb/server/transport_rdma.c
2247
sc->send_io.mem.pool = NULL;
fs/smb/server/transport_rdma.c
2268
sc->send_io.mem.pool = mempool_create(sp->send_credit_target,
fs/smb/server/transport_rdma.c
2271
if (!sc->send_io.mem.pool)
fs/smb/server/transport_rdma.c
2282
sc->recv_io.mem.pool =
fs/smb/server/transport_rdma.c
2285
if (!sc->recv_io.mem.pool)
fs/smb/server/transport_rdma.c
2289
recvmsg = mempool_alloc(sc->recv_io.mem.pool, KSMBD_DEFAULT_GFP);
fs/smb/server/transport_rdma.c
536
msg = mempool_alloc(sc->send_io.mem.pool, KSMBD_DEFAULT_GFP);
fs/smb/server/transport_rdma.c
565
mempool_free(msg, sc->send_io.mem.pool);
include/drm/ttm/ttm_device.h
253
struct ttm_pool pool;
include/drm/ttm/ttm_pool.h
52
struct ttm_pool *pool;
include/drm/ttm/ttm_pool.h
81
int ttm_pool_alloc(struct ttm_pool *pool, struct ttm_tt *tt,
include/drm/ttm/ttm_pool.h
83
void ttm_pool_free(struct ttm_pool *pool, struct ttm_tt *tt);
include/drm/ttm/ttm_pool.h
85
void ttm_pool_init(struct ttm_pool *pool, struct device *dev,
include/drm/ttm/ttm_pool.h
87
void ttm_pool_fini(struct ttm_pool *pool);
include/drm/ttm/ttm_pool.h
89
int ttm_pool_debugfs(struct ttm_pool *pool, struct seq_file *m);
include/drm/ttm/ttm_pool.h
93
long ttm_pool_backup(struct ttm_pool *pool, struct ttm_tt *ttm,
include/drm/ttm/ttm_pool.h
95
int ttm_pool_restore_and_alloc(struct ttm_pool *pool, struct ttm_tt *tt,
include/linux/agpgart.h
102
struct agp_memory *pool;
include/linux/bio.h
350
extern int biovec_init_pool(mempool_t *pool, int pool_entries);
include/linux/ceph/ceph_fs.h
453
__le32 pool;
include/linux/ceph/messenger.h
290
struct ceph_msgpool *pool;
include/linux/ceph/msgpool.h
13
mempool_t *pool;
include/linux/ceph/msgpool.h
19
int ceph_msgpool_init(struct ceph_msgpool *pool, int type,
include/linux/ceph/msgpool.h
22
extern void ceph_msgpool_destroy(struct ceph_msgpool *pool);
include/linux/ceph/msgpool.h
23
struct ceph_msg *ceph_msgpool_get(struct ceph_msgpool *pool, int front_len,
include/linux/ceph/osd_client.h
378
s64 pool;
include/linux/ceph/osdmap.h
23
uint64_t pool;
include/linux/ceph/osdmap.h
246
pgid->pool = ceph_decode_64(p);
include/linux/ceph/osdmap.h
63
static inline bool ceph_can_shift_osds(struct ceph_pg_pool_info *pool)
include/linux/ceph/osdmap.h
65
switch (pool->type) {
include/linux/ceph/osdmap.h
76
s64 pool;
include/linux/ceph/osdmap.h
82
oloc->pool = -1;
include/linux/ceph/osdmap.h
88
return oloc->pool == -1;
include/linux/ceph/rados.h
63
__le32 pool; /* object pool */
include/linux/cgroup_dmem.h
23
void dmem_cgroup_uncharge(struct dmem_cgroup_pool_state *pool, u64 size);
include/linux/cgroup_dmem.h
28
void dmem_cgroup_pool_state_put(struct dmem_cgroup_pool_state *pool);
include/linux/cgroup_dmem.h
51
static inline void dmem_cgroup_uncharge(struct dmem_cgroup_pool_state *pool, u64 size)
include/linux/cgroup_dmem.h
62
static inline void dmem_cgroup_pool_state_put(struct dmem_cgroup_pool_state *pool)
include/linux/dmapool.h
25
void dma_pool_destroy(struct dma_pool *pool);
include/linux/dmapool.h
27
void *dma_pool_alloc(struct dma_pool *pool, gfp_t mem_flags,
include/linux/dmapool.h
29
void dma_pool_free(struct dma_pool *pool, void *vaddr, dma_addr_t addr);
include/linux/dmapool.h
36
void dmam_pool_destroy(struct dma_pool *pool);
include/linux/dmapool.h
45
static inline void dma_pool_destroy(struct dma_pool *pool) { }
include/linux/dmapool.h
46
static inline void *dma_pool_alloc(struct dma_pool *pool, gfp_t mem_flags,
include/linux/dmapool.h
48
static inline void dma_pool_free(struct dma_pool *pool, void *vaddr,
include/linux/dmapool.h
53
static inline void dmam_pool_destroy(struct dma_pool *pool) { }
include/linux/dmapool.h
71
static inline void *dma_pool_zalloc(struct dma_pool *pool, gfp_t mem_flags,
include/linux/dmapool.h
74
return dma_pool_alloc(pool, mem_flags | __GFP_ZERO, handle);
include/linux/firmware/qcom/qcom_tzmem.h
53
void qcom_tzmem_pool_free(struct qcom_tzmem_pool *pool);
include/linux/firmware/qcom/qcom_tzmem.h
58
void *qcom_tzmem_alloc(struct qcom_tzmem_pool *pool, size_t size, gfp_t gfp);
include/linux/genalloc.h
102
static inline int gen_pool_add_virt(struct gen_pool *pool, unsigned long addr,
include/linux/genalloc.h
105
return gen_pool_add_owner(pool, addr, phys, size, nid, NULL);
include/linux/genalloc.h
120
static inline int gen_pool_add(struct gen_pool *pool, unsigned long addr,
include/linux/genalloc.h
123
return gen_pool_add_virt(pool, addr, -1, size, nid);
include/linux/genalloc.h
126
unsigned long gen_pool_alloc_algo_owner(struct gen_pool *pool, size_t size,
include/linux/genalloc.h
129
static inline unsigned long gen_pool_alloc_owner(struct gen_pool *pool,
include/linux/genalloc.h
132
return gen_pool_alloc_algo_owner(pool, size, pool->algo, pool->data,
include/linux/genalloc.h
136
static inline unsigned long gen_pool_alloc_algo(struct gen_pool *pool,
include/linux/genalloc.h
139
return gen_pool_alloc_algo_owner(pool, size, algo, data, NULL);
include/linux/genalloc.h
152
static inline unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
include/linux/genalloc.h
154
return gen_pool_alloc_algo(pool, size, pool->algo, pool->data);
include/linux/genalloc.h
157
extern void *gen_pool_dma_alloc(struct gen_pool *pool, size_t size,
include/linux/genalloc.h
159
extern void *gen_pool_dma_alloc_algo(struct gen_pool *pool, size_t size,
include/linux/genalloc.h
161
extern void *gen_pool_dma_alloc_align(struct gen_pool *pool, size_t size,
include/linux/genalloc.h
163
extern void *gen_pool_dma_zalloc(struct gen_pool *pool, size_t size, dma_addr_t *dma);
include/linux/genalloc.h
164
extern void *gen_pool_dma_zalloc_algo(struct gen_pool *pool, size_t size,
include/linux/genalloc.h
166
extern void *gen_pool_dma_zalloc_align(struct gen_pool *pool, size_t size,
include/linux/genalloc.h
168
extern void gen_pool_free_owner(struct gen_pool *pool, unsigned long addr,
include/linux/genalloc.h
170
static inline void gen_pool_free(struct gen_pool *pool, unsigned long addr,
include/linux/genalloc.h
173
gen_pool_free_owner(pool, addr, size, NULL);
include/linux/genalloc.h
181
extern void gen_pool_set_algo(struct gen_pool *pool, genpool_algo_t algo,
include/linux/genalloc.h
186
struct gen_pool *pool, unsigned long start_addr);
include/linux/genalloc.h
190
void *data, struct gen_pool *pool, unsigned long start_addr);
include/linux/genalloc.h
194
void *data, struct gen_pool *pool, unsigned long start_addr);
include/linux/genalloc.h
199
void *data, struct gen_pool *pool, unsigned long start_addr);
include/linux/genalloc.h
203
struct gen_pool *pool, unsigned long start_addr);
include/linux/genalloc.h
210
extern bool gen_pool_has_addr(struct gen_pool *pool, unsigned long start,
include/linux/genalloc.h
53
void *data, struct gen_pool *pool,
include/linux/genalloc.h
98
extern phys_addr_t gen_pool_virt_to_phys(struct gen_pool *pool, unsigned long);
include/linux/hisi_acc_qm.h
578
struct scatterlist *sgl, struct hisi_acc_sgl_pool *pool,
include/linux/hisi_acc_qm.h
585
struct hisi_acc_sgl_pool *pool);
include/linux/i3c/master.h
724
void i3c_generic_ibi_free_pool(struct i3c_generic_ibi_pool *pool);
include/linux/i3c/master.h
727
i3c_generic_ibi_get_free_slot(struct i3c_generic_ibi_pool *pool);
include/linux/i3c/master.h
728
void i3c_generic_ibi_recycle_slot(struct i3c_generic_ibi_pool *pool,
include/linux/iio/trigger.h
75
unsigned long pool[BITS_TO_LONGS(CONFIG_IIO_CONSUMERS_PER_TRIGGER)];
include/linux/kprobes.h
143
struct objpool_head pool;
include/linux/mempool.h
30
static inline bool mempool_initialized(struct mempool *pool)
include/linux/mempool.h
32
return pool->elements != NULL;
include/linux/mempool.h
35
static inline bool mempool_is_saturated(struct mempool *pool)
include/linux/mempool.h
37
return READ_ONCE(pool->curr_nr) >= pool->min_nr;
include/linux/mempool.h
40
void mempool_exit(struct mempool *pool);
include/linux/mempool.h
41
int mempool_init_node(struct mempool *pool, int min_nr,
include/linux/mempool.h
44
int mempool_init_noprof(struct mempool *pool, int min_nr,
include/linux/mempool.h
62
int mempool_resize(struct mempool *pool, int new_min_nr);
include/linux/mempool.h
63
void mempool_destroy(struct mempool *pool);
include/linux/mempool.h
65
void *mempool_alloc_noprof(struct mempool *pool, gfp_t gfp_mask) __malloc;
include/linux/mempool.h
68
int mempool_alloc_bulk_noprof(struct mempool *pool, void **elem,
include/linux/mempool.h
73
void *mempool_alloc_preallocated(struct mempool *pool) __malloc;
include/linux/mempool.h
74
void mempool_free(void *element, struct mempool *pool);
include/linux/mempool.h
75
unsigned int mempool_free_bulk(struct mempool *pool, void **elem,
include/linux/mlx5/driver.h
333
struct dma_pool *pool;
include/linux/mlx5/mlx5_ifc.h
12082
u8 pool[0x4];
include/linux/mlx5/mlx5_ifc.h
12134
u8 pool[0x4];
include/linux/netdevice.h
1025
struct xsk_buff_pool *pool;
include/linux/netdevice.h
3575
struct page_pool *pool;
include/linux/netdevice.h
700
struct xsk_buff_pool *pool;
include/linux/objpool.h
121
int objpool_init(struct objpool_head *pool, int nr_objs, int object_size,
include/linux/objpool.h
126
static inline void *__objpool_try_get_slot(struct objpool_head *pool, int cpu)
include/linux/objpool.h
128
struct objpool_slot *slot = pool->cpu_slots[cpu];
include/linux/objpool.h
147
if (READ_ONCE(slot->last) - head - 1 >= pool->nr_objs) {
include/linux/objpool.h
169
static inline void *objpool_pop(struct objpool_head *pool)
include/linux/objpool.h
180
obj = __objpool_try_get_slot(pool, cpu);
include/linux/objpool.h
191
__objpool_try_add_slot(void *obj, struct objpool_head *pool, int cpu)
include/linux/objpool.h
193
struct objpool_slot *slot = pool->cpu_slots[cpu];
include/linux/objpool.h
202
WARN_ON_ONCE(tail - head > pool->nr_objs);
include/linux/objpool.h
221
static inline int objpool_push(void *obj, struct objpool_head *pool)
include/linux/objpool.h
228
rc = __objpool_try_add_slot(obj, pool, raw_smp_processor_id());
include/linux/objpool.h
251
int objpool_drop(void *obj, struct objpool_head *pool);
include/linux/objpool.h
257
void objpool_free(struct objpool_head *pool);
include/linux/objpool.h
275
void objpool_fini(struct objpool_head *pool);
include/linux/rethook.h
37
struct objpool_head pool;
include/linux/skbuff.h
3710
int skb_pp_cow_data(struct page_pool *pool, struct sk_buff **pskb,
include/linux/skbuff.h
3712
int skb_cow_data_for_xdp(struct page_pool *pool, struct sk_buff **pskb,
include/linux/sram.h
11
static inline void *sram_exec_copy(struct gen_pool *pool, void *dst, void *src,
include/linux/sram.h
9
void *sram_exec_copy(struct gen_pool *pool, void *dst, void *src, size_t size);
include/linux/sunrpc/svc.h
446
int svc_new_thread(struct svc_serv *serv, struct svc_pool *pool);
include/linux/sunrpc/svc.h
453
int svc_set_pool_threads(struct svc_serv *serv, struct svc_pool *pool,
include/linux/sunrpc/svc.h
465
void svc_pool_wake_idle_thread(struct svc_pool *pool);
include/linux/swiotlb.h
247
unsigned long attrs, struct io_tlb_pool *pool);
include/linux/swiotlb.h
252
struct io_tlb_pool *pool = swiotlb_find_pool(dev, addr);
include/linux/swiotlb.h
254
if (unlikely(pool))
include/linux/swiotlb.h
255
__swiotlb_tbl_unmap_single(dev, addr, size, dir, attrs, pool);
include/linux/swiotlb.h
260
struct io_tlb_pool *pool);
include/linux/swiotlb.h
264
struct io_tlb_pool *pool = swiotlb_find_pool(dev, addr);
include/linux/swiotlb.h
266
if (unlikely(pool))
include/linux/swiotlb.h
267
__swiotlb_sync_single_for_device(dev, addr, size, dir, pool);
include/linux/swiotlb.h
272
struct io_tlb_pool *pool);
include/linux/swiotlb.h
276
struct io_tlb_pool *pool = swiotlb_find_pool(dev, addr);
include/linux/swiotlb.h
278
if (unlikely(pool))
include/linux/swiotlb.h
279
__swiotlb_sync_single_for_cpu(dev, addr, size, dir, pool);
include/linux/tee_core.h
171
int (*alloc)(struct tee_protmem_pool *pool, struct sg_table *sgt,
include/linux/tee_core.h
173
void (*free)(struct tee_protmem_pool *pool, struct sg_table *sgt);
include/linux/tee_core.h
174
int (*update_shm)(struct tee_protmem_pool *pool, struct sg_table *sgt,
include/linux/tee_core.h
177
void (*destroy_pool)(struct tee_protmem_pool *pool);
include/linux/tee_core.h
194
struct tee_shm_pool *pool,
include/linux/tee_core.h
220
struct tee_protmem_pool *pool);
include/linux/tee_core.h
286
int (*alloc)(struct tee_shm_pool *pool, struct tee_shm *shm,
include/linux/tee_core.h
288
void (*free)(struct tee_shm_pool *pool, struct tee_shm *shm);
include/linux/tee_core.h
289
void (*destroy_pool)(struct tee_shm_pool *pool);
include/linux/tee_core.h
311
static inline void tee_shm_pool_free(struct tee_shm_pool *pool)
include/linux/tee_core.h
313
pool->ops->destroy_pool(pool);
include/linux/tee_core.h
73
struct tee_shm_pool *pool;
include/linux/usb/hcd.h
191
struct dma_pool *pool[HCD_BUFFER_POOLS];
include/linux/zsmalloc.h
28
void zs_destroy_pool(struct zs_pool *pool);
include/linux/zsmalloc.h
30
unsigned long zs_malloc(struct zs_pool *pool, size_t size, gfp_t flags,
include/linux/zsmalloc.h
32
void zs_free(struct zs_pool *pool, unsigned long obj);
include/linux/zsmalloc.h
34
size_t zs_huge_class_size(struct zs_pool *pool);
include/linux/zsmalloc.h
36
unsigned long zs_get_total_pages(struct zs_pool *pool);
include/linux/zsmalloc.h
37
unsigned long zs_compact(struct zs_pool *pool);
include/linux/zsmalloc.h
39
unsigned int zs_lookup_class_index(struct zs_pool *pool, unsigned int size);
include/linux/zsmalloc.h
41
void zs_pool_stats(struct zs_pool *pool, struct zs_pool_stats *stats);
include/linux/zsmalloc.h
43
void *zs_obj_read_begin(struct zs_pool *pool, unsigned long handle,
include/linux/zsmalloc.h
45
void zs_obj_read_end(struct zs_pool *pool, unsigned long handle,
include/linux/zsmalloc.h
47
void zs_obj_read_sg_begin(struct zs_pool *pool, unsigned long handle,
include/linux/zsmalloc.h
49
void zs_obj_read_sg_end(struct zs_pool *pool, unsigned long handle);
include/linux/zsmalloc.h
50
void zs_obj_write(struct zs_pool *pool, unsigned long handle,
include/linux/zstd_lib.h
1873
ZSTDLIB_STATIC_API void ZSTD_freeThreadPool (ZSTD_threadPool* pool); /* accept NULL pointer */
include/linux/zstd_lib.h
1874
ZSTDLIB_STATIC_API size_t ZSTD_CCtx_refThreadPool(ZSTD_CCtx* cctx, ZSTD_threadPool* pool);
include/net/libeth/xdp.h
421
struct xsk_buff_pool *pool;
include/net/libeth/xsk.h
122
xsk_buff_raw_dma_sync_for_device(sq->pool, desc.addr, desc.len);
include/net/libeth/xsk.h
199
ctx = xsk_buff_raw_get_ctx(sq->pool, xdesc->addr);
include/net/libeth/xsk.h
228
.addr = xsk_buff_raw_get_dma(sq->pool, xdesc->addr),
include/net/libeth/xsk.h
259
xsk_buff_raw_dma_sync_for_device(sq->pool, desc.addr, desc.len);
include/net/libeth/xsk.h
285
libeth_xsk_xmit_do_bulk(struct xsk_buff_pool *pool, void *xdpsq, u32 budget,
include/net/libeth/xsk.h
296
wake = xsk_uses_need_wakeup(pool);
include/net/libeth/xsk.h
298
xsk_clear_tx_need_wakeup(pool);
include/net/libeth/xsk.h
300
n = xsk_tx_peek_release_desc_batch(pool, budget);
include/net/libeth/xsk.h
301
bulk = container_of(&pool->tx_descs[0], typeof(*bulk), desc);
include/net/libeth/xsk.h
309
xsk_set_tx_need_wakeup(pool);
include/net/libeth/xsk.h
605
struct xsk_buff_pool *pool;
include/net/libeth/xsk.h
657
ret = xsk_buff_alloc_batch(fq->pool, xskb, this);
include/net/netdev_rx_queue.h
28
struct xsk_buff_pool *pool;
include/net/page_pool/helpers.h
109
static inline struct page *page_pool_dev_alloc_frag(struct page_pool *pool,
include/net/page_pool/helpers.h
115
return page_pool_alloc_frag(pool, offset, size, gfp);
include/net/page_pool/helpers.h
118
static inline netmem_ref page_pool_alloc_netmem(struct page_pool *pool,
include/net/page_pool/helpers.h
122
unsigned int max_size = PAGE_SIZE << pool->p.order;
include/net/page_pool/helpers.h
128
return page_pool_alloc_netmems(pool, gfp);
include/net/page_pool/helpers.h
131
netmem = page_pool_alloc_frag_netmem(pool, offset, *size, gfp);
include/net/page_pool/helpers.h
139
if (pool->frag_offset + *size > max_size) {
include/net/page_pool/helpers.h
141
pool->frag_offset = max_size;
include/net/page_pool/helpers.h
147
static inline netmem_ref page_pool_dev_alloc_netmem(struct page_pool *pool,
include/net/page_pool/helpers.h
153
return page_pool_alloc_netmem(pool, offset, size, gfp);
include/net/page_pool/helpers.h
156
static inline netmem_ref page_pool_dev_alloc_netmems(struct page_pool *pool)
include/net/page_pool/helpers.h
160
return page_pool_alloc_netmems(pool, gfp);
include/net/page_pool/helpers.h
163
static inline struct page *page_pool_alloc(struct page_pool *pool,
include/net/page_pool/helpers.h
167
return netmem_to_page(page_pool_alloc_netmem(pool, offset, size, gfp));
include/net/page_pool/helpers.h
182
static inline struct page *page_pool_dev_alloc(struct page_pool *pool,
include/net/page_pool/helpers.h
188
return page_pool_alloc(pool, offset, size, gfp);
include/net/page_pool/helpers.h
191
static inline void *page_pool_alloc_va(struct page_pool *pool,
include/net/page_pool/helpers.h
198
page = page_pool_alloc(pool, &offset, size, gfp & ~__GFP_HIGHMEM);
include/net/page_pool/helpers.h
216
static inline void *page_pool_dev_alloc_va(struct page_pool *pool,
include/net/page_pool/helpers.h
221
return page_pool_alloc_va(pool, size, gfp);
include/net/page_pool/helpers.h
232
page_pool_get_dma_dir(const struct page_pool *pool)
include/net/page_pool/helpers.h
234
return pool->p.dma_dir;
include/net/page_pool/helpers.h
331
static inline void page_pool_put_netmem(struct page_pool *pool,
include/net/page_pool/helpers.h
343
page_pool_put_unrefed_netmem(pool, netmem, dma_sync_size, allow_direct);
include/net/page_pool/helpers.h
360
static inline void page_pool_put_page(struct page_pool *pool,
include/net/page_pool/helpers.h
365
page_pool_put_netmem(pool, page_to_netmem(page), dma_sync_size,
include/net/page_pool/helpers.h
369
static inline void page_pool_put_full_netmem(struct page_pool *pool,
include/net/page_pool/helpers.h
373
page_pool_put_netmem(pool, netmem, -1, allow_direct);
include/net/page_pool/helpers.h
385
static inline void page_pool_put_full_page(struct page_pool *pool,
include/net/page_pool/helpers.h
388
page_pool_put_netmem(pool, page_to_netmem(page), -1, allow_direct);
include/net/page_pool/helpers.h
399
static inline void page_pool_recycle_direct(struct page_pool *pool,
include/net/page_pool/helpers.h
402
page_pool_put_full_page(pool, page, true);
include/net/page_pool/helpers.h
405
static inline void page_pool_recycle_direct_netmem(struct page_pool *pool,
include/net/page_pool/helpers.h
408
page_pool_put_full_netmem(pool, netmem, true);
include/net/page_pool/helpers.h
422
static inline void page_pool_free_va(struct page_pool *pool, void *va,
include/net/page_pool/helpers.h
425
page_pool_put_page(pool, virt_to_head_page(va), -1, allow_direct);
include/net/page_pool/helpers.h
450
static inline void __page_pool_dma_sync_for_cpu(const struct page_pool *pool,
include/net/page_pool/helpers.h
454
dma_sync_single_range_for_cpu(pool->p.dev, dma_addr,
include/net/page_pool/helpers.h
455
offset + pool->p.offset, dma_sync_size,
include/net/page_pool/helpers.h
456
page_pool_get_dma_dir(pool));
include/net/page_pool/helpers.h
471
static inline void page_pool_dma_sync_for_cpu(const struct page_pool *pool,
include/net/page_pool/helpers.h
475
__page_pool_dma_sync_for_cpu(pool, page_pool_get_dma_addr(page), offset,
include/net/page_pool/helpers.h
480
page_pool_dma_sync_netmem_for_cpu(const struct page_pool *pool,
include/net/page_pool/helpers.h
484
if (!pool->dma_sync_for_cpu)
include/net/page_pool/helpers.h
487
__page_pool_dma_sync_for_cpu(pool,
include/net/page_pool/helpers.h
492
static inline void page_pool_get(struct page_pool *pool)
include/net/page_pool/helpers.h
494
refcount_inc(&pool->user_cnt);
include/net/page_pool/helpers.h
497
static inline bool page_pool_put(struct page_pool *pool)
include/net/page_pool/helpers.h
499
return refcount_dec_and_test(&pool->user_cnt);
include/net/page_pool/helpers.h
502
static inline void page_pool_nid_changed(struct page_pool *pool, int new_nid)
include/net/page_pool/helpers.h
504
if (unlikely(pool->p.nid != new_nid))
include/net/page_pool/helpers.h
505
page_pool_update_nid(pool, new_nid);
include/net/page_pool/helpers.h
520
static inline bool page_pool_is_unreadable(struct page_pool *pool)
include/net/page_pool/helpers.h
522
return !!pool->mp_ops;
include/net/page_pool/helpers.h
67
bool page_pool_get_stats(const struct page_pool *pool,
include/net/page_pool/helpers.h
92
static inline struct page *page_pool_dev_alloc_pages(struct page_pool *pool)
include/net/page_pool/helpers.h
96
return page_pool_alloc_pages(pool, gfp);
include/net/page_pool/memory_provider.h
13
netmem_ref (*alloc_netmems)(struct page_pool *pool, gfp_t gfp);
include/net/page_pool/memory_provider.h
14
bool (*release_netmem)(struct page_pool *pool, netmem_ref netmem);
include/net/page_pool/memory_provider.h
15
int (*init)(struct page_pool *pool);
include/net/page_pool/memory_provider.h
16
void (*destroy)(struct page_pool *pool);
include/net/page_pool/memory_provider.h
23
void net_mp_niov_set_page_pool(struct page_pool *pool, struct net_iov *niov);
include/net/page_pool/memory_provider.h
45
static inline void net_mp_netmem_place_in_cache(struct page_pool *pool,
include/net/page_pool/memory_provider.h
48
pool->alloc.cache[pool->alloc.count++] = netmem;
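A hedged sketch of wiring up the memory-provider callbacks listed above; the callback fields mirror the header, but the my_mp_* bodies are hypothetical stubs, not a working provider.

#include <net/page_pool/memory_provider.h>

static netmem_ref my_mp_alloc(struct page_pool *pool, gfp_t gfp)
{
	return 0;	/* would hand back a netmem_ref from a backing store */
}

static bool my_mp_release(struct page_pool *pool, netmem_ref netmem)
{
	return true;	/* true: the provider reclaimed the buffer */
}

static int my_mp_init(struct page_pool *pool)     { return 0; }
static void my_mp_destroy(struct page_pool *pool) { }

static const struct memory_provider_ops my_mp_ops = {
	.alloc_netmems  = my_mp_alloc,
	.release_netmem = my_mp_release,
	.init           = my_mp_init,
	.destroy        = my_mp_destroy,
};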
include/net/page_pool/types.h
188
void (*disconnect)(void *pool);
include/net/page_pool/types.h
255
struct page *page_pool_alloc_pages(struct page_pool *pool, gfp_t gfp);
include/net/page_pool/types.h
256
netmem_ref page_pool_alloc_netmems(struct page_pool *pool, gfp_t gfp);
include/net/page_pool/types.h
257
struct page *page_pool_alloc_frag(struct page_pool *pool, unsigned int *offset,
include/net/page_pool/types.h
259
netmem_ref page_pool_alloc_frag_netmem(struct page_pool *pool,
include/net/page_pool/types.h
269
void page_pool_enable_direct_recycling(struct page_pool *pool,
include/net/page_pool/types.h
271
void page_pool_disable_direct_recycling(struct page_pool *pool);
include/net/page_pool/types.h
272
void page_pool_destroy(struct page_pool *pool);
include/net/page_pool/types.h
273
void page_pool_use_xdp_mem(struct page_pool *pool, void (*disconnect)(void *),
include/net/page_pool/types.h
277
static inline void page_pool_destroy(struct page_pool *pool)
include/net/page_pool/types.h
281
static inline void page_pool_use_xdp_mem(struct page_pool *pool,
include/net/page_pool/types.h
292
void page_pool_put_unrefed_netmem(struct page_pool *pool, netmem_ref netmem,
include/net/page_pool/types.h
295
void page_pool_put_unrefed_page(struct page_pool *pool, struct page *page,
include/net/page_pool/types.h
309
void page_pool_update_nid(struct page_pool *pool, int new_nid);
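A hedged lifecycle sketch for the creation and fragment APIs above; the parameter values are illustrative, and rxq_pool_setup()/rxq_frag() are hypothetical names.

#include <net/page_pool/types.h>
#include <net/page_pool/helpers.h>

static struct page_pool *rxq_pool_setup(struct device *dev)
{
	struct page_pool_params pp = {
		.flags     = PP_FLAG_DMA_MAP,	/* pool maps pages for us */
		.order     = 0,
		.pool_size = 256,
		.nid       = NUMA_NO_NODE,
		.dev       = dev,
		.dma_dir   = DMA_FROM_DEVICE,
	};

	return page_pool_create(&pp);	/* ERR_PTR() on failure */
}

static struct page *rxq_frag(struct page_pool *pool, unsigned int *off)
{
	/* Sub-page allocation; *off receives the fragment offset. */
	return page_pool_alloc_frag(pool, off, 2048, GFP_ATOMIC);
}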
include/net/xdp.h
500
int xdp_reg_page_pool(struct page_pool *pool);
include/net/xdp.h
501
void xdp_unreg_page_pool(const struct page_pool *pool);
include/net/xdp.h
503
const struct page_pool *pool);
include/net/xdp_sock.h
55
struct xsk_buff_pool *pool;
include/net/xdp_sock_drv.h
101
return xp_alloc(pool);
include/net/xdp_sock_drv.h
110
static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
include/net/xdp_sock_drv.h
112
return xp_alloc_batch(pool, xdp, max);
include/net/xdp_sock_drv.h
115
static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
include/net/xdp_sock_drv.h
117
return xp_can_alloc(pool, count);
include/net/xdp_sock_drv.h
123
struct list_head *xskb_list = &xskb->pool->xskb_list;
include/net/xdp_sock_drv.h
151
list_add_tail(&frag->list_node, &frag->pool->xskb_list);
include/net/xdp_sock_drv.h
162
frag = list_first_entry_or_null(&xskb->pool->xskb_list,
include/net/xdp_sock_drv.h
184
frag = list_first_entry(&xskb->pool->xskb_list, struct xdp_buff_xsk,
include/net/xdp_sock_drv.h
194
frag = list_last_entry(&xskb->pool->xskb_list, struct xdp_buff_xsk,
include/net/xdp_sock_drv.h
207
static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
210
return xp_raw_get_dma(pool, addr);
include/net/xdp_sock_drv.h
213
static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
include/net/xdp_sock_drv.h
215
return xp_raw_get_data(pool, addr);
include/net/xdp_sock_drv.h
230
xsk_buff_raw_get_ctx(const struct xsk_buff_pool *pool, u64 addr)
include/net/xdp_sock_drv.h
232
return xp_raw_get_ctx(pool, addr);
include/net/xdp_sock_drv.h
248
__xsk_buff_get_metadata(const struct xsk_buff_pool *pool, void *data)
include/net/xdp_sock_drv.h
252
if (!pool->tx_metadata_len)
include/net/xdp_sock_drv.h
255
meta = data - pool->tx_metadata_len;
include/net/xdp_sock_drv.h
263
xsk_buff_get_metadata(struct xsk_buff_pool *pool, u64 addr)
include/net/xdp_sock_drv.h
265
return __xsk_buff_get_metadata(pool, xp_raw_get_data(pool, addr));
include/net/xdp_sock_drv.h
27
void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries);
include/net/xdp_sock_drv.h
275
static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
279
xp_dma_sync_for_device(pool, dma, size);
include/net/xdp_sock_drv.h
28
bool xsk_tx_peek_desc(struct xsk_buff_pool *pool, struct xdp_desc *desc);
include/net/xdp_sock_drv.h
284
static inline void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries)
include/net/xdp_sock_drv.h
288
static inline bool xsk_tx_peek_desc(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
29
u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max);
include/net/xdp_sock_drv.h
294
static inline u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max)
include/net/xdp_sock_drv.h
299
static inline void xsk_tx_release(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
30
void xsk_tx_release(struct xsk_buff_pool *pool);
include/net/xdp_sock_drv.h
309
static inline void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
313
static inline void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
317
static inline void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
321
static inline void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
325
static inline bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
33
void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool);
include/net/xdp_sock_drv.h
330
static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
335
static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
34
void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool);
include/net/xdp_sock_drv.h
340
static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
345
static inline u32 xsk_pool_get_rx_frag_step(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
35
void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool);
include/net/xdp_sock_drv.h
350
static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
355
static inline void xsk_pool_fill_cb(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
36
void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool);
include/net/xdp_sock_drv.h
360
static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
365
static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
37
bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool);
include/net/xdp_sock_drv.h
381
static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
39
static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
391
static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
include/net/xdp_sock_drv.h
396
static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
include/net/xdp_sock_drv.h
41
return XDP_PACKET_HEADROOM + pool->headroom;
include/net/xdp_sock_drv.h
434
static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
44
static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
440
static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
include/net/xdp_sock_drv.h
446
xsk_buff_raw_get_ctx(const struct xsk_buff_pool *pool, u64 addr)
include/net/xdp_sock_drv.h
457
__xsk_buff_get_metadata(const struct xsk_buff_pool *pool, void *data)
include/net/xdp_sock_drv.h
46
return pool->chunk_size;
include/net/xdp_sock_drv.h
463
xsk_buff_get_metadata(struct xsk_buff_pool *pool, u64 addr)
include/net/xdp_sock_drv.h
472
static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
49
static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
51
return xsk_pool_get_chunk_size(pool) - xsk_pool_get_headroom(pool);
include/net/xdp_sock_drv.h
54
static inline u32 xsk_pool_get_rx_frag_step(struct xsk_buff_pool *pool)
include/net/xdp_sock_drv.h
56
return pool->unaligned ? 0 : xsk_pool_get_chunk_size(pool);
include/net/xdp_sock_drv.h
59
static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
62
xp_set_rxq_info(pool, rxq);
include/net/xdp_sock_drv.h
65
static inline void xsk_pool_fill_cb(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
68
xp_fill_cb(pool, desc);
include/net/xdp_sock_drv.h
71
static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
74
xp_dma_unmap(pool, attrs);
include/net/xdp_sock_drv.h
77
static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
include/net/xdp_sock_drv.h
80
struct xdp_umem *umem = pool->umem;
include/net/xdp_sock_drv.h
82
return xp_dma_map(pool, dev, attrs, umem->pgs, umem->npgs);
include/net/xdp_sock_drv.h
99
static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
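A hedged sketch of a driver-side AF_XDP TX loop built from the helpers above; the hardware-ring step (hw_post_tx()) is a hypothetical stub.

#include <net/xdp_sock_drv.h>

static u32 my_xsk_tx(struct xsk_buff_pool *pool, u32 budget)
{
	struct xdp_desc desc;
	u32 sent = 0;

	while (sent < budget && xsk_tx_peek_desc(pool, &desc)) {
		dma_addr_t dma = xsk_buff_raw_get_dma(pool, desc.addr);

		xsk_buff_raw_dma_sync_for_device(pool, dma, desc.len);
		/* hw_post_tx(dma, desc.len); -- hypothetical HW hook */
		sent++;
	}
	if (sent)
		xsk_tx_release(pool);	/* flush the peeked descriptors */
	return sent;
}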
include/net/xsk_buff_pool.h
106
int xp_assign_dev(struct xsk_buff_pool *pool, struct net_device *dev,
include/net/xsk_buff_pool.h
108
int xp_assign_dev_shared(struct xsk_buff_pool *pool, struct xdp_sock *umem_xs,
include/net/xsk_buff_pool.h
110
int xp_alloc_tx_descs(struct xsk_buff_pool *pool, struct xdp_sock *xs);
include/net/xsk_buff_pool.h
111
void xp_destroy(struct xsk_buff_pool *pool);
include/net/xsk_buff_pool.h
112
void xp_get_pool(struct xsk_buff_pool *pool);
include/net/xsk_buff_pool.h
113
bool xp_put_pool(struct xsk_buff_pool *pool);
include/net/xsk_buff_pool.h
114
void xp_clear_dev(struct xsk_buff_pool *pool);
include/net/xsk_buff_pool.h
115
void xp_add_xsk(struct xsk_buff_pool *pool, struct xdp_sock *xs);
include/net/xsk_buff_pool.h
116
void xp_del_xsk(struct xsk_buff_pool *pool, struct xdp_sock *xs);
include/net/xsk_buff_pool.h
121
static inline void xp_init_xskb_addr(struct xdp_buff_xsk *xskb, struct xsk_buff_pool *pool,
include/net/xsk_buff_pool.h
124
xskb->xdp.data_hard_start = pool->addrs + addr + pool->headroom;
include/net/xsk_buff_pool.h
127
static inline void xp_init_xskb_dma(struct xdp_buff_xsk *xskb, struct xsk_buff_pool *pool,
include/net/xsk_buff_pool.h
132
xskb->dma = xskb->frame_dma + pool->headroom + XDP_PACKET_HEADROOM;
include/net/xsk_buff_pool.h
136
void xp_set_rxq_info(struct xsk_buff_pool *pool, struct xdp_rxq_info *rxq);
include/net/xsk_buff_pool.h
137
void xp_fill_cb(struct xsk_buff_pool *pool, struct xsk_cb_desc *desc);
include/net/xsk_buff_pool.h
138
int xp_dma_map(struct xsk_buff_pool *pool, struct device *dev,
include/net/xsk_buff_pool.h
140
void xp_dma_unmap(struct xsk_buff_pool *pool, unsigned long attrs);
include/net/xsk_buff_pool.h
141
struct xdp_buff *xp_alloc(struct xsk_buff_pool *pool);
include/net/xsk_buff_pool.h
142
u32 xp_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max);
include/net/xsk_buff_pool.h
143
bool xp_can_alloc(struct xsk_buff_pool *pool, u32 count);
include/net/xsk_buff_pool.h
144
void *xp_raw_get_data(struct xsk_buff_pool *pool, u64 addr);
include/net/xsk_buff_pool.h
145
dma_addr_t xp_raw_get_dma(struct xsk_buff_pool *pool, u64 addr);
include/net/xsk_buff_pool.h
152
struct xdp_desc_ctx xp_raw_get_ctx(const struct xsk_buff_pool *pool, u64 addr);
include/net/xsk_buff_pool.h
166
dma_sync_single_for_cpu(xskb->pool->dev, xskb->dma,
include/net/xsk_buff_pool.h
167
xskb->pool->frame_len,
include/net/xsk_buff_pool.h
171
static inline void xp_dma_sync_for_device(struct xsk_buff_pool *pool,
include/net/xsk_buff_pool.h
174
dma_sync_single_for_device(pool->dev, dma, size, DMA_BIDIRECTIONAL);
include/net/xsk_buff_pool.h
184
static inline bool xp_desc_crosses_non_contig_pg(struct xsk_buff_pool *pool,
include/net/xsk_buff_pool.h
192
return pool->dma_pages &&
include/net/xsk_buff_pool.h
193
!(pool->dma_pages[addr >> PAGE_SHIFT] & XSK_NEXT_PG_CONTIG_MASK);
include/net/xsk_buff_pool.h
201
static inline u64 xp_aligned_extract_addr(struct xsk_buff_pool *pool, u64 addr)
include/net/xsk_buff_pool.h
203
return addr & pool->chunk_mask;
include/net/xsk_buff_pool.h
222
static inline u32 xp_aligned_extract_idx(struct xsk_buff_pool *pool, u64 addr)
include/net/xsk_buff_pool.h
224
return xp_aligned_extract_addr(pool, addr) >> pool->chunk_shift;
include/net/xsk_buff_pool.h
229
if (xskb->pool->unaligned)
include/net/xsk_buff_pool.h
230
xskb->pool->free_heads[xskb->pool->free_heads_cnt++] = xskb;
include/net/xsk_buff_pool.h
234
struct xsk_buff_pool *pool)
include/net/xsk_buff_pool.h
236
u64 orig_addr = xskb->xdp.data - pool->addrs;
include/net/xsk_buff_pool.h
239
if (!pool->unaligned)
include/net/xsk_buff_pool.h
243
offset += pool->headroom;
include/net/xsk_buff_pool.h
248
static inline bool xp_tx_metadata_enabled(const struct xsk_buff_pool *pool)
include/net/xsk_buff_pool.h
250
return pool->tx_metadata_len > 0;
include/net/xsk_buff_pool.h
30
struct xsk_buff_pool *pool;
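A minimal sketch of RX-ring refill with the xp_* batch API above; my_rx_refill() is a hypothetical name and the descriptor-writing step is only hinted at.

#include <net/xsk_buff_pool.h>

static u32 my_rx_refill(struct xsk_buff_pool *pool,
			struct xdp_buff **bufs, u32 want)
{
	u32 got;

	if (!xp_can_alloc(pool, want))
		return 0;	/* fill queue cannot satisfy the batch */

	got = xp_alloc_batch(pool, bufs, want);
	/* for each bufs[i]: program its DMA address into the RX ring */
	return got;
}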
include/scsi/libfc.h
444
struct fc_exch_pool *pool;
include/scsi/libiscsi.h
263
void **pool; /* Pool of elements */
include/soc/fsl/bman.h
103
int bman_get_bpid(const struct bman_pool *pool);
include/soc/fsl/bman.h
114
int bman_release(struct bman_pool *pool, const struct bm_buffer *bufs, u8 num);
include/soc/fsl/bman.h
127
int bman_acquire(struct bman_pool *pool, struct bm_buffer *bufs, u8 num);
include/soc/fsl/bman.h
94
void bman_free_pool(struct bman_pool *pool);
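A hedged sketch pairing bman_release() and bman_acquire(); bm_buffer_set64() is assumed to come from the same header, and my_bman_roundtrip() is a hypothetical helper.

#include <soc/fsl/bman.h>

static int my_bman_roundtrip(struct bman_pool *pool, dma_addr_t addr)
{
	struct bm_buffer buf;
	int ret;

	bm_buffer_set64(&buf, addr);
	ret = bman_release(pool, &buf, 1);	/* hand one buffer to BMan */
	if (ret)
		return ret;

	return bman_acquire(pool, &buf, 1);	/* >= 0: buffers pulled back */
}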
include/trace/events/page_pool.h
101
__field(const struct page_pool *, pool)
include/trace/events/page_pool.h
107
__entry->pool = pool;
include/trace/events/page_pool.h
108
__entry->pool_nid = pool->p.nid;
include/trace/events/page_pool.h
113
__entry->pool, __entry->pool_nid, __entry->new_nid)
include/trace/events/page_pool.h
16
TP_PROTO(const struct page_pool *pool,
include/trace/events/page_pool.h
19
TP_ARGS(pool, inflight, hold, release),
include/trace/events/page_pool.h
22
__field(const struct page_pool *, pool)
include/trace/events/page_pool.h
30
__entry->pool = pool;
include/trace/events/page_pool.h
34
__entry->cnt = pool->destroy_cnt;
include/trace/events/page_pool.h
38
__entry->pool, __entry->inflight, __entry->hold,
include/trace/events/page_pool.h
44
TP_PROTO(const struct page_pool *pool,
include/trace/events/page_pool.h
47
TP_ARGS(pool, netmem, release),
include/trace/events/page_pool.h
50
__field(const struct page_pool *, pool)
include/trace/events/page_pool.h
57
__entry->pool = pool;
include/trace/events/page_pool.h
64
__entry->pool, (void *)__entry->netmem,
include/trace/events/page_pool.h
70
TP_PROTO(const struct page_pool *pool,
include/trace/events/page_pool.h
73
TP_ARGS(pool, netmem, hold),
include/trace/events/page_pool.h
76
__field(const struct page_pool *, pool)
include/trace/events/page_pool.h
83
__entry->pool = pool;
include/trace/events/page_pool.h
90
__entry->pool, (void *)__entry->netmem,
include/trace/events/page_pool.h
96
TP_PROTO(const struct page_pool *pool, int new_nid),
include/trace/events/page_pool.h
98
TP_ARGS(pool, new_nid),
include/trace/events/sunrpc.h
2125
TP_PROTO(const struct svc_pool *pool, pid_t pid),
include/trace/events/sunrpc.h
2127
TP_ARGS(pool, pid),
include/trace/events/sunrpc.h
2135
__entry->pool_id = pool->sp_id;
include/trace/events/sunrpc.h
2145
const struct svc_pool *pool, pid_t pid \
include/trace/events/sunrpc.h
2147
TP_ARGS(pool, pid))
include/trace/events/workqueue.h
43
__entry->cpu = pwq->pool->cpu;
include/uapi/linux/hyperv.h
352
__u8 pool;
include/uapi/linux/hyperv.h
407
__u8 pool;
include/uapi/linux/netfilter_bridge/ebt_among.h
43
struct ebt_mac_wormhash_tuple pool[];
io_uring/zcrx.c
311
static void io_zcrx_sync_for_device(struct page_pool *pool,
io_uring/zcrx.c
319
if (!dma_dev_need_sync(pool->p.dev))
io_uring/zcrx.c
322
niov_size = 1U << io_pp_to_ifq(pool)->niov_shift;
io_uring/zcrx.c
324
__dma_sync_single_for_device(pool->p.dev, dma_addr + pool->p.offset,
io_uring/zcrx.c
325
niov_size, pool->p.dma_dir);
kernel/cgroup/dmem.c
113
static void dmemcg_pool_get(struct dmem_cgroup_pool_state *pool)
kernel/cgroup/dmem.c
115
refcount_inc(&pool->ref);
kernel/cgroup/dmem.c
118
static bool dmemcg_pool_tryget(struct dmem_cgroup_pool_state *pool)
kernel/cgroup/dmem.c
120
return refcount_inc_not_zero(&pool->ref);
kernel/cgroup/dmem.c
123
static void dmemcg_pool_put(struct dmem_cgroup_pool_state *pool)
kernel/cgroup/dmem.c
125
if (!refcount_dec_and_test(&pool->ref))
kernel/cgroup/dmem.c
128
call_rcu(&pool->rcu, dmemcg_pool_free_rcu);
kernel/cgroup/dmem.c
133
struct dmem_cgroup_pool_state *pool = container_of(rcu, typeof(*pool), rcu);
kernel/cgroup/dmem.c
135
if (pool->parent)
kernel/cgroup/dmem.c
136
dmemcg_pool_put(pool->parent);
kernel/cgroup/dmem.c
137
kref_put(&pool->region->ref, dmemcg_free_region);
kernel/cgroup/dmem.c
138
kfree(pool);
kernel/cgroup/dmem.c
141
static void free_cg_pool(struct dmem_cgroup_pool_state *pool)
kernel/cgroup/dmem.c
143
list_del(&pool->region_node);
kernel/cgroup/dmem.c
144
dmemcg_pool_put(pool);
kernel/cgroup/dmem.c
148
set_resource_min(struct dmem_cgroup_pool_state *pool, u64 val)
kernel/cgroup/dmem.c
150
page_counter_set_min(&pool->cnt, val);
kernel/cgroup/dmem.c
154
set_resource_low(struct dmem_cgroup_pool_state *pool, u64 val)
kernel/cgroup/dmem.c
156
page_counter_set_low(&pool->cnt, val);
kernel/cgroup/dmem.c
160
set_resource_max(struct dmem_cgroup_pool_state *pool, u64 val)
kernel/cgroup/dmem.c
162
page_counter_set_max(&pool->cnt, val);
kernel/cgroup/dmem.c
165
static u64 get_resource_low(struct dmem_cgroup_pool_state *pool)
kernel/cgroup/dmem.c
167
return pool ? READ_ONCE(pool->cnt.low) : 0;
kernel/cgroup/dmem.c
170
static u64 get_resource_min(struct dmem_cgroup_pool_state *pool)
kernel/cgroup/dmem.c
172
return pool ? READ_ONCE(pool->cnt.min) : 0;
kernel/cgroup/dmem.c
175
static u64 get_resource_max(struct dmem_cgroup_pool_state *pool)
kernel/cgroup/dmem.c
177
return pool ? READ_ONCE(pool->cnt.max) : PAGE_COUNTER_MAX;
kernel/cgroup/dmem.c
180
static u64 get_resource_current(struct dmem_cgroup_pool_state *pool)
kernel/cgroup/dmem.c
182
return pool ? page_counter_read(&pool->cnt) : 0;
kernel/cgroup/dmem.c
195
struct dmem_cgroup_pool_state *pool;
kernel/cgroup/dmem.c
198
list_for_each_entry_rcu(pool, &dmemcs->pools, css_node)
kernel/cgroup/dmem.c
199
reset_all_resource_limits(pool);
kernel/cgroup/dmem.c
206
struct dmem_cgroup_pool_state *pool, *next;
kernel/cgroup/dmem.c
209
list_for_each_entry_safe(pool, next, &dmemcs->pools, css_node) {
kernel/cgroup/dmem.c
214
list_del(&pool->css_node);
kernel/cgroup/dmem.c
215
free_cg_pool(pool);
kernel/cgroup/dmem.c
236
struct dmem_cgroup_pool_state *pool;
kernel/cgroup/dmem.c
238
list_for_each_entry_rcu(pool, &dmemcs->pools, css_node, spin_is_locked(&dmemcg_lock))
kernel/cgroup/dmem.c
239
if (pool->region == region)
kernel/cgroup/dmem.c
240
return pool;
kernel/cgroup/dmem.c
245
static struct dmem_cgroup_pool_state *pool_parent(struct dmem_cgroup_pool_state *pool)
kernel/cgroup/dmem.c
247
if (!pool->cnt.parent)
kernel/cgroup/dmem.c
250
return container_of(pool->cnt.parent, typeof(*pool), cnt);
kernel/cgroup/dmem.c
260
struct dmem_cgroup_pool_state *pool, *found_pool;
kernel/cgroup/dmem.c
270
list_for_each_entry_rcu(pool, &dmemcg_iter->pools, css_node) {
kernel/cgroup/dmem.c
271
if (pool->region == limit_pool->region) {
kernel/cgroup/dmem.c
272
found_pool = pool;
kernel/cgroup/dmem.c
306
struct dmem_cgroup_pool_state *pool = test_pool;
kernel/cgroup/dmem.c
318
for (pool = test_pool; pool && limit_pool != pool; pool = pool_parent(pool))
kernel/cgroup/dmem.c
321
if (!pool)
kernel/cgroup/dmem.c
359
struct dmem_cgroup_pool_state *pool, *ppool = NULL;
kernel/cgroup/dmem.c
362
pool = kzalloc_obj(*pool, GFP_NOWAIT);
kernel/cgroup/dmem.c
363
if (!pool)
kernel/cgroup/dmem.c
366
pool = *allocpool;
kernel/cgroup/dmem.c
370
pool->region = region;
kernel/cgroup/dmem.c
371
pool->cs = dmemcs;
kernel/cgroup/dmem.c
376
page_counter_init(&pool->cnt,
kernel/cgroup/dmem.c
378
reset_all_resource_limits(pool);
kernel/cgroup/dmem.c
379
refcount_set(&pool->ref, 1);
kernel/cgroup/dmem.c
381
if (ppool && !pool->parent) {
kernel/cgroup/dmem.c
382
pool->parent = ppool;
kernel/cgroup/dmem.c
386
list_add_tail_rcu(&pool->css_node, &dmemcs->pools);
kernel/cgroup/dmem.c
387
list_add_tail(&pool->region_node, &region->pools);
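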
kernel/cgroup/dmem.c
390
pool->inited = true;
kernel/cgroup/dmem.c
392
pool->inited = ppool ? ppool->inited : false;
kernel/cgroup/dmem.c
393
return pool;
kernel/cgroup/dmem.c
400
struct dmem_cgroup_pool_state *pool, *ppool, *retpool;
kernel/cgroup/dmem.c
408
pool = find_cg_pool_locked(p, region);
kernel/cgroup/dmem.c
409
if (!pool)
kernel/cgroup/dmem.c
410
pool = alloc_pool_single(p, region, allocpool);
kernel/cgroup/dmem.c
412
if (IS_ERR(pool))
kernel/cgroup/dmem.c
413
return pool;
kernel/cgroup/dmem.c
415
if (p == dmemcs && pool->inited)
kernel/cgroup/dmem.c
416
return pool;
kernel/cgroup/dmem.c
418
if (pool->inited)
kernel/cgroup/dmem.c
422
retpool = pool = find_cg_pool_locked(dmemcs, region);
kernel/cgroup/dmem.c
424
if (pool->inited)
kernel/cgroup/dmem.c
431
pool->cnt.parent = &ppool->cnt;
kernel/cgroup/dmem.c
432
if (ppool && !pool->parent) {
kernel/cgroup/dmem.c
433
pool->parent = ppool;
kernel/cgroup/dmem.c
436
pool->inited = true;
kernel/cgroup/dmem.c
438
pool = ppool;
kernel/cgroup/dmem.c
447
struct dmem_cgroup_pool_state *pool, *next;
kernel/cgroup/dmem.c
449
list_for_each_entry_safe(pool, next, &region->pools, region_node)
kernel/cgroup/dmem.c
450
free_cg_pool(pool);
kernel/cgroup/dmem.c
470
struct dmem_cgroup_pool_state *pool, *next;
kernel/cgroup/dmem.c
480
list_for_each_entry_safe(pool, next, &region->pools, region_node) {
kernel/cgroup/dmem.c
481
list_del_rcu(&pool->css_node);
kernel/cgroup/dmem.c
482
list_del(&pool->region_node);
kernel/cgroup/dmem.c
483
dmemcg_pool_put(pool);
kernel/cgroup/dmem.c
562
void dmem_cgroup_pool_state_put(struct dmem_cgroup_pool_state *pool)
kernel/cgroup/dmem.c
564
if (pool) {
kernel/cgroup/dmem.c
565
css_put(&pool->cs->css);
kernel/cgroup/dmem.c
566
dmemcg_pool_put(pool);
kernel/cgroup/dmem.c
574
struct dmem_cgroup_pool_state *pool, *allocpool = NULL;
kernel/cgroup/dmem.c
578
pool = find_cg_pool_locked(cg, region);
kernel/cgroup/dmem.c
579
if (pool && !READ_ONCE(pool->inited))
kernel/cgroup/dmem.c
580
pool = NULL;
kernel/cgroup/dmem.c
581
if (pool && !dmemcg_pool_tryget(pool))
kernel/cgroup/dmem.c
582
pool = NULL;
kernel/cgroup/dmem.c
585
while (!pool) {
kernel/cgroup/dmem.c
588
pool = get_cg_pool_locked(cg, region, &allocpool);
kernel/cgroup/dmem.c
590
pool = ERR_PTR(-ENODEV);
kernel/cgroup/dmem.c
591
if (!IS_ERR(pool))
kernel/cgroup/dmem.c
592
dmemcg_pool_get(pool);
kernel/cgroup/dmem.c
595
if (pool == ERR_PTR(-ENOMEM)) {
kernel/cgroup/dmem.c
596
pool = NULL;
kernel/cgroup/dmem.c
602
pool = NULL;
kernel/cgroup/dmem.c
609
return pool;
kernel/cgroup/dmem.c
621
void dmem_cgroup_uncharge(struct dmem_cgroup_pool_state *pool, u64 size)
kernel/cgroup/dmem.c
623
if (!pool)
kernel/cgroup/dmem.c
626
page_counter_uncharge(&pool->cnt, size);
kernel/cgroup/dmem.c
627
css_put(&pool->cs->css);
kernel/cgroup/dmem.c
628
dmemcg_pool_put(pool);
kernel/cgroup/dmem.c
656
struct dmem_cgroup_pool_state *pool;
kernel/cgroup/dmem.c
670
pool = get_cg_pool_unlocked(cg, region);
kernel/cgroup/dmem.c
671
if (IS_ERR(pool)) {
kernel/cgroup/dmem.c
672
ret = PTR_ERR(pool);
kernel/cgroup/dmem.c
676
if (!page_counter_try_charge(&pool->cnt, size, &fail)) {
kernel/cgroup/dmem.c
682
dmemcg_pool_put(pool);
kernel/cgroup/dmem.c
688
*ret_pool = pool;
kernel/cgroup/dmem.c
735
struct dmem_cgroup_pool_state *pool = NULL;
kernel/cgroup/dmem.c
769
pool = get_cg_pool_unlocked(dmemcs, region);
kernel/cgroup/dmem.c
770
if (IS_ERR(pool)) {
kernel/cgroup/dmem.c
771
err = PTR_ERR(pool);
kernel/cgroup/dmem.c
776
apply(pool, new_limit);
kernel/cgroup/dmem.c
777
dmemcg_pool_put(pool);
kernel/cgroup/dmem.c
795
struct dmem_cgroup_pool_state *pool = find_cg_pool_locked(dmemcs, region);
kernel/cgroup/dmem.c
800
val = fn(pool);
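A hedged sketch of the charge/uncharge pairing around the dmem pool-state objects above; dmem_cgroup_uncharge() matches the listing, but treat the exact dmem_cgroup_try_charge() signature as an assumption, and the my_dmem_* names are hypothetical.

#include <linux/cgroup_dmem.h>

static int my_dmem_alloc(struct dmem_cgroup_region *region, u64 size,
			 struct dmem_cgroup_pool_state **pool_out)
{
	struct dmem_cgroup_pool_state *limit_pool;

	/* Charges size against the caller's cgroup for this region. */
	return dmem_cgroup_try_charge(region, size, pool_out, &limit_pool);
}

static void my_dmem_free(struct dmem_cgroup_pool_state *pool, u64 size)
{
	dmem_cgroup_uncharge(pool, size);	/* pairs with the charge above */
}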
kernel/cgroup/rdma.c
117
struct rdmacg_resource_pool *pool;
kernel/cgroup/rdma.c
121
list_for_each_entry(pool, &cg->rpools, cg_node)
kernel/cgroup/rdma.c
122
if (pool->device == device)
kernel/cgroup/rdma.c
123
return pool;
kernel/dma/pool.c
120
ret = gen_pool_add_virt(pool, (unsigned long)addr, page_to_phys(page),
kernel/dma/pool.c
145
static void atomic_pool_resize(struct gen_pool *pool, gfp_t gfp)
kernel/dma/pool.c
147
if (pool && gen_pool_avail(pool) < atomic_pool_size)
kernel/dma/pool.c
148
atomic_pool_expand(pool, gen_pool_size(pool), gfp);
kernel/dma/pool.c
165
struct gen_pool *pool;
kernel/dma/pool.c
168
pool = gen_pool_create(PAGE_SHIFT, NUMA_NO_NODE);
kernel/dma/pool.c
169
if (!pool)
kernel/dma/pool.c
172
gen_pool_set_algo(pool, gen_pool_first_fit_order_align, NULL);
kernel/dma/pool.c
174
ret = atomic_pool_expand(pool, pool_size, gfp);
kernel/dma/pool.c
176
gen_pool_destroy(pool);
kernel/dma/pool.c
183
gen_pool_size(pool) >> 10, &gfp);
kernel/dma/pool.c
184
return pool;
kernel/dma/pool.c
250
struct gen_pool *pool, void **cpu_addr,
kernel/dma/pool.c
256
addr = gen_pool_alloc(pool, size);
kernel/dma/pool.c
260
phys = gen_pool_virt_to_phys(pool, addr);
kernel/dma/pool.c
262
gen_pool_free(pool, addr, size);
kernel/dma/pool.c
266
if (gen_pool_avail(pool) < atomic_pool_size)
kernel/dma/pool.c
278
struct gen_pool *pool = NULL;
kernel/dma/pool.c
282
while ((pool = dma_guess_pool(pool, gfp))) {
kernel/dma/pool.c
284
page = __dma_alloc_from_pool(dev, size, pool, cpu_addr,
kernel/dma/pool.c
299
struct gen_pool *pool = NULL;
kernel/dma/pool.c
301
while ((pool = dma_guess_pool(pool, 0))) {
kernel/dma/pool.c
302
if (!gen_pool_has_addr(pool, (unsigned long)start, size))
kernel/dma/pool.c
304
gen_pool_free(pool, (unsigned long)start, size);
kernel/dma/pool.c
79
static int atomic_pool_expand(struct gen_pool *pool, size_t pool_size,
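A self-contained sketch of the genalloc calls the atomic DMA pools use above: create a pool, seed it with one page of backing memory, then allocate and free; my_pool_setup()/my_pool_demo() are hypothetical names.

#include <linux/genalloc.h>
#include <linux/gfp.h>

static struct gen_pool *my_pool_setup(void *vaddr, phys_addr_t phys)
{
	struct gen_pool *pool;

	pool = gen_pool_create(PAGE_SHIFT, NUMA_NO_NODE);
	if (!pool)
		return NULL;

	/* Hand one page of backing memory to the allocator. */
	if (gen_pool_add_virt(pool, (unsigned long)vaddr, phys,
			      PAGE_SIZE, NUMA_NO_NODE)) {
		gen_pool_destroy(pool);
		return NULL;
	}
	return pool;
}

static void my_pool_demo(struct gen_pool *pool)
{
	unsigned long addr = gen_pool_alloc(pool, 256);

	if (addr)
		gen_pool_free(pool, addr, 256);
}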
kernel/dma/swiotlb.c
1031
static int swiotlb_search_pool_area(struct device *dev, struct io_tlb_pool *pool,
kernel/dma/swiotlb.c
1035
struct io_tlb_area *area = pool->areas + area_index;
kernel/dma/swiotlb.c
1038
phys_to_dma_unencrypted(dev, pool->start) & boundary_mask;
kernel/dma/swiotlb.c
1049
BUG_ON(area_index >= pool->nareas);
kernel/dma/swiotlb.c
1077
if (unlikely(nslots > pool->area_nslabs - area->used))
kernel/dma/swiotlb.c
1080
slot_base = area_index * pool->area_nslabs;
kernel/dma/swiotlb.c
1083
for (slots_checked = 0; slots_checked < pool->area_nslabs; ) {
kernel/dma/swiotlb.c
1092
index = wrap_area_index(pool, index + 1);
kernel/dma/swiotlb.c
1100
if (pool->slots[slot_index].list >= nslots)
kernel/dma/swiotlb.c
1103
index = wrap_area_index(pool, index + stride);
kernel/dma/swiotlb.c
1118
pool->slots[i].list = 0;
kernel/dma/swiotlb.c
1119
pool->slots[i].alloc_size = alloc_size - (offset +
kernel/dma/swiotlb.c
1124
pool->slots[i].list; i--)
kernel/dma/swiotlb.c
1125
pool->slots[i].list = ++count;
kernel/dma/swiotlb.c
1130
area->index = wrap_area_index(pool, index + nslots);
kernel/dma/swiotlb.c
1161
struct io_tlb_pool *pool;
kernel/dma/swiotlb.c
1166
list_for_each_entry_rcu(pool, &mem->pools, node) {
kernel/dma/swiotlb.c
1167
if (cpu_offset >= pool->nareas)
kernel/dma/swiotlb.c
1169
area_index = (start_cpu + cpu_offset) & (pool->nareas - 1);
kernel/dma/swiotlb.c
1170
index = swiotlb_search_pool_area(dev, pool, area_index,
kernel/dma/swiotlb.c
1174
*retpool = pool;
kernel/dma/swiotlb.c
1201
struct io_tlb_pool *pool;
kernel/dma/swiotlb.c
1214
alloc_align_mask, &pool);
kernel/dma/swiotlb.c
1226
pool = swiotlb_alloc_pool(dev, nslabs, nslabs, 1, phys_limit,
kernel/dma/swiotlb.c
1228
if (!pool)
kernel/dma/swiotlb.c
1231
index = swiotlb_search_pool_area(dev, pool, 0, orig_addr,
kernel/dma/swiotlb.c
1234
swiotlb_dyn_free(&pool->rcu);
kernel/dma/swiotlb.c
1238
pool->transient = true;
kernel/dma/swiotlb.c
1240
list_add_rcu(&pool->node, &dev->dma_io_tlb_pools);
kernel/dma/swiotlb.c
1242
inc_transient_used(mem, pool->nslabs);
kernel/dma/swiotlb.c
1266
*retpool = pool;
kernel/dma/swiotlb.c
1276
struct io_tlb_pool *pool;
kernel/dma/swiotlb.c
1280
*retpool = pool = &dev->dma_io_tlb_mem->defpool;
kernel/dma/swiotlb.c
1281
i = start = raw_smp_processor_id() & (pool->nareas - 1);
kernel/dma/swiotlb.c
1283
index = swiotlb_search_pool_area(dev, pool, i, orig_addr,
kernel/dma/swiotlb.c
1287
if (++i >= pool->nareas)
kernel/dma/swiotlb.c
1321
static unsigned long mem_pool_used(struct io_tlb_pool *pool)
kernel/dma/swiotlb.c
1326
for (i = 0; i < pool->nareas; i++)
kernel/dma/swiotlb.c
1327
used += pool->areas[i].used;
kernel/dma/swiotlb.c
1343
struct io_tlb_pool *pool;
kernel/dma/swiotlb.c
1347
list_for_each_entry_rcu(pool, &mem->pools, node)
kernel/dma/swiotlb.c
1348
used += mem_pool_used(pool);
kernel/dma/swiotlb.c
1390
struct io_tlb_pool *pool;
kernel/dma/swiotlb.c
1418
index = swiotlb_find_slots(dev, orig_addr, size, alloc_align_mask, &pool);
kernel/dma/swiotlb.c
1441
pool->slots[index].pad_slots = pad_slots;
kernel/dma/swiotlb.c
1443
pool->slots[index + i].orig_addr = slot_addr(orig_addr, i);
kernel/dma/swiotlb.c
1444
tlb_addr = slot_addr(pool->start, index) + offset;
kernel/dma/swiotlb.c
1454
swiotlb_bounce(dev, tlb_addr, mapping_size, DMA_TO_DEVICE, pool);
kernel/dma/swiotlb.c
1526
struct io_tlb_pool *pool)
kernel/dma/swiotlb.c
1528
if (!pool->transient)
kernel/dma/swiotlb.c
1531
dec_used(dev->dma_io_tlb_mem, pool->nslabs);
kernel/dma/swiotlb.c
1532
swiotlb_del_pool(dev, pool);
kernel/dma/swiotlb.c
1533
dec_transient_used(dev->dma_io_tlb_mem, pool->nslabs);
kernel/dma/swiotlb.c
1540
phys_addr_t tlb_addr, struct io_tlb_pool *pool)
kernel/dma/swiotlb.c
1552
unsigned long attrs, struct io_tlb_pool *pool)
kernel/dma/swiotlb.c
1560
DMA_FROM_DEVICE, pool);
kernel/dma/swiotlb.c
1562
if (swiotlb_del_transient(dev, tlb_addr, pool))
kernel/dma/swiotlb.c
1564
swiotlb_release_slots(dev, tlb_addr, pool);
kernel/dma/swiotlb.c
1569
struct io_tlb_pool *pool)
kernel/dma/swiotlb.c
1572
swiotlb_bounce(dev, tlb_addr, size, DMA_TO_DEVICE, pool);
kernel/dma/swiotlb.c
1579
struct io_tlb_pool *pool)
kernel/dma/swiotlb.c
1582
swiotlb_bounce(dev, tlb_addr, size, DMA_FROM_DEVICE, pool);
kernel/dma/swiotlb.c
1769
struct io_tlb_pool *pool;
kernel/dma/swiotlb.c
1778
index = swiotlb_find_slots(dev, 0, size, align, &pool);
kernel/dma/swiotlb.c
1782
tlb_addr = slot_addr(pool->start, index);
kernel/dma/swiotlb.c
1786
swiotlb_release_slots(dev, tlb_addr, pool);
kernel/dma/swiotlb.c
1796
struct io_tlb_pool *pool;
kernel/dma/swiotlb.c
1798
pool = swiotlb_find_pool(dev, tlb_addr);
kernel/dma/swiotlb.c
1799
if (!pool)
kernel/dma/swiotlb.c
1802
swiotlb_release_slots(dev, tlb_addr, pool);
kernel/dma/swiotlb.c
1827
struct io_tlb_pool *pool;
kernel/dma/swiotlb.c
1832
pool = &mem->defpool;
kernel/dma/swiotlb.c
1834
pool->slots = kzalloc_objs(*pool->slots, nslabs);
kernel/dma/swiotlb.c
1835
if (!pool->slots) {
kernel/dma/swiotlb.c
1840
pool->areas = kzalloc_objs(*pool->areas, nareas);
kernel/dma/swiotlb.c
1841
if (!pool->areas) {
kernel/dma/swiotlb.c
1842
kfree(pool->slots);
kernel/dma/swiotlb.c
1849
swiotlb_init_io_tlb_pool(pool, rmem->base, nslabs,
kernel/dma/swiotlb.c
1857
add_mem_pool(mem, pool);
kernel/dma/swiotlb.c
305
static void add_mem_pool(struct io_tlb_mem *mem, struct io_tlb_pool *pool)
kernel/dma/swiotlb.c
309
list_add_rcu(&pool->node, &mem->pools);
kernel/dma/swiotlb.c
310
mem->nslabs += pool->nslabs;
kernel/dma/swiotlb.c
313
mem->nslabs = pool->nslabs;
kernel/dma/swiotlb.c
686
struct io_tlb_pool *pool;
kernel/dma/swiotlb.c
697
pool_size = sizeof(*pool) + array_size(sizeof(*pool->areas), nareas);
kernel/dma/swiotlb.c
698
pool = kzalloc(pool_size, gfp);
kernel/dma/swiotlb.c
699
if (!pool)
kernel/dma/swiotlb.c
701
pool->areas = (void *)pool + sizeof(*pool);
kernel/dma/swiotlb.c
712
slot_order = get_order(array_size(sizeof(*pool->slots), nslabs));
kernel/dma/swiotlb.c
713
pool->slots = (struct io_tlb_slot *)
kernel/dma/swiotlb.c
715
if (!pool->slots)
kernel/dma/swiotlb.c
718
swiotlb_init_io_tlb_pool(pool, page_to_phys(tlb), nslabs, true, nareas);
kernel/dma/swiotlb.c
719
return pool;
kernel/dma/swiotlb.c
724
kfree(pool);
kernel/dma/swiotlb.c
737
struct io_tlb_pool *pool;
kernel/dma/swiotlb.c
739
pool = swiotlb_alloc_pool(NULL, IO_TLB_MIN_SLABS, default_nslabs,
kernel/dma/swiotlb.c
741
if (!pool) {
kernel/dma/swiotlb.c
746
add_mem_pool(mem, pool);
kernel/dma/swiotlb.c
755
struct io_tlb_pool *pool = container_of(rcu, struct io_tlb_pool, rcu);
kernel/dma/swiotlb.c
756
size_t slots_size = array_size(sizeof(*pool->slots), pool->nslabs);
kernel/dma/swiotlb.c
757
size_t tlb_size = pool->end - pool->start;
kernel/dma/swiotlb.c
759
free_pages((unsigned long)pool->slots, get_order(slots_size));
kernel/dma/swiotlb.c
760
swiotlb_free_tlb(pool->vaddr, tlb_size);
kernel/dma/swiotlb.c
761
kfree(pool);
kernel/dma/swiotlb.c
779
struct io_tlb_pool *pool;
kernel/dma/swiotlb.c
782
list_for_each_entry_rcu(pool, &mem->pools, node) {
kernel/dma/swiotlb.c
783
if (paddr >= pool->start && paddr < pool->end)
kernel/dma/swiotlb.c
787
list_for_each_entry_rcu(pool, &dev->dma_io_tlb_pools, node) {
kernel/dma/swiotlb.c
788
if (paddr >= pool->start && paddr < pool->end)
kernel/dma/swiotlb.c
791
pool = NULL;
kernel/dma/swiotlb.c
794
return pool;
kernel/dma/swiotlb.c
802
static void swiotlb_del_pool(struct device *dev, struct io_tlb_pool *pool)
kernel/dma/swiotlb.c
807
list_del_rcu(&pool->node);
kernel/dma/swiotlb.c
810
call_rcu(&pool->rcu, swiotlb_dyn_free);
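A simplified, hedged model of the area-sliced slot search above: start at an area derived from the current CPU and wrap around, as the masking in the defpool path implies; nareas is assumed to be a power of two, and try_area() stands in for the real per-area slot scan.

static int area_search_order_demo(unsigned int nareas, unsigned int cpu,
				  bool (*try_area)(unsigned int))
{
	unsigned int i, start;

	start = cpu & (nareas - 1);	/* same hash as the defpool path */
	for (i = start;;) {
		if (try_area(i))
			return i;	/* free slots found in this area */
		if (++i >= nareas)
			i = 0;		/* wrap */
		if (i == start)
			return -1;	/* full circle, nothing free */
	}
}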
kernel/kprobes.c
1958
objpool_drop(ri, &rph->pool);
kernel/kprobes.c
1967
objpool_push(ri, &rp->rph->pool);
kernel/kprobes.c
2009
objpool_fini(&rph->pool);
kernel/kprobes.c
2154
ri = objpool_pop(&rph->pool);
kernel/kprobes.c
2161
objpool_push(ri, &rph->pool);
kernel/kprobes.c
2306
if (objpool_init(&rp->rph->pool, rp->maxactive, rp->data_size +
kernel/scftorture.c
155
struct llist_head *pool;
kernel/scftorture.c
161
pool = &per_cpu(scf_free_pool, cpu);
kernel/scftorture.c
162
llist_add(&scfcp->scf_node, pool);
kernel/scftorture.c
167
struct llist_head *pool;
kernel/scftorture.c
171
pool = &per_cpu(scf_free_pool, cpu);
kernel/scftorture.c
172
node = llist_del_all(pool);
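A minimal sketch of the lock-free per-CPU free-list pattern above, using the real llist API on a hypothetical my_free_pool variable.

#include <linux/llist.h>
#include <linux/percpu.h>

static DEFINE_PER_CPU(struct llist_head, my_free_pool);

static void my_free(struct llist_node *node, int cpu)
{
	llist_add(node, &per_cpu(my_free_pool, cpu));	/* lock-free push */
}

static struct llist_node *my_drain(int cpu)
{
	/* Detach the whole list atomically; caller walks it privately. */
	return llist_del_all(&per_cpu(my_free_pool, cpu));
}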
kernel/trace/rethook.c
119
if (objpool_init(&rh->pool, num, size, GFP_KERNEL, rh,
kernel/trace/rethook.c
132
objpool_drop(node, &rh->pool);
kernel/trace/rethook.c
148
objpool_push(node, &node->rethook->pool);
kernel/trace/rethook.c
180
return (struct rethook_node *)objpool_pop(&rh->pool);
kernel/trace/rethook.c
38
objpool_fini(&rh->pool);
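A hedged sketch of the objpool lifecycle that kretprobes and rethook drive above; the NULL init/fini callbacks and the sizes are illustrative only.

#include <linux/objpool.h>

struct my_node { int data; };

static int my_objpool_demo(struct objpool_head *pool)
{
	struct my_node *n;
	int ret;

	ret = objpool_init(pool, 16, sizeof(struct my_node),
			   GFP_KERNEL, NULL, NULL, NULL);
	if (ret)
		return ret;

	n = objpool_pop(pool);		/* NULL when exhausted */
	if (n)
		objpool_push(n, pool);	/* return it for reuse */

	objpool_fini(pool);		/* drop the pool's own reference */
	return 0;
}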
kernel/workqueue.c
1002
lockdep_assert_held(&pool->lock);
kernel/workqueue.c
1013
pool->nr_running++;
kernel/workqueue.c
1017
static struct worker *first_idle_worker(struct worker_pool *pool)
kernel/workqueue.c
1019
if (unlikely(list_empty(&pool->idle_list)))
kernel/workqueue.c
1022
return list_first_entry(&pool->idle_list, struct worker, entry);
kernel/workqueue.c
1037
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
1046
pool->nr_idle++;
kernel/workqueue.c
1050
list_add(&worker->entry, &pool->idle_list);
kernel/workqueue.c
1052
if (too_many_workers(pool) && !timer_pending(&pool->idle_timer))
kernel/workqueue.c
1053
mod_timer(&pool->idle_timer, jiffies + IDLE_WORKER_TIMEOUT);
kernel/workqueue.c
1056
WARN_ON_ONCE(pool->nr_workers == pool->nr_idle && pool->nr_running);
kernel/workqueue.c
1070
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
1075
pool->nr_idle--;
kernel/workqueue.c
1112
static struct worker *find_worker_executing_work(struct worker_pool *pool,
kernel/workqueue.c
1117
hash_for_each_possible(pool->busy_hash, worker, hentry,
kernel/workqueue.c
1189
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
1192
lockdep_assert_held(&pool->lock);
kernel/workqueue.c
1212
collision = find_worker_executing_work(pool, work);
kernel/workqueue.c
1222
static struct irq_work *bh_pool_irq_work(struct worker_pool *pool)
kernel/workqueue.c
1224
int high = pool->attrs->nice == HIGHPRI_NICE_LEVEL ? 1 : 0;
kernel/workqueue.c
1226
return &per_cpu(bh_pool_irq_works, pool->cpu)[high];
kernel/workqueue.c
1229
static void kick_bh_pool(struct worker_pool *pool)
kernel/workqueue.c
1233
if (unlikely(pool->cpu != smp_processor_id() &&
kernel/workqueue.c
1234
!(pool->flags & POOL_BH_DRAINING))) {
kernel/workqueue.c
1235
irq_work_queue_on(bh_pool_irq_work(pool), pool->cpu);
kernel/workqueue.c
1239
if (pool->attrs->nice == HIGHPRI_NICE_LEVEL)
kernel/workqueue.c
1252
static bool kick_pool(struct worker_pool *pool)
kernel/workqueue.c
1254
struct worker *worker = first_idle_worker(pool);
kernel/workqueue.c
1257
lockdep_assert_held(&pool->lock);
kernel/workqueue.c
1259
if (!need_more_worker(pool) || !worker)
kernel/workqueue.c
1262
if (pool->flags & POOL_BH) {
kernel/workqueue.c
1263
kick_bh_pool(pool);
kernel/workqueue.c
1286
if (!pool->attrs->affn_strict &&
kernel/workqueue.c
1287
!cpumask_test_cpu(p->wake_cpu, pool->attrs->__pod_cpumask)) {
kernel/workqueue.c
1288
struct work_struct *work = list_first_entry(&pool->worklist,
kernel/workqueue.c
1290
int wake_cpu = cpumask_any_and_distribute(pool->attrs->__pod_cpumask,
kernel/workqueue.c
1419
worker->pool->nr_running++;
kernel/workqueue.c
1441
struct worker_pool *pool;
kernel/workqueue.c
1451
pool = worker->pool;
kernel/workqueue.c
1458
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
1466
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
1470
pool->nr_running--;
kernel/workqueue.c
1471
if (kick_pool(pool))
kernel/workqueue.c
1474
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
1488
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
1515
raw_spin_lock(&pool->lock);
kernel/workqueue.c
1521
if (kick_pool(pool))
kernel/workqueue.c
1524
raw_spin_unlock(&pool->lock);
kernel/workqueue.c
1644
lockdep_assert_held(&pwq->pool->lock);
kernel/workqueue.c
1658
lockdep_assert_held(&pwq->pool->lock);
kernel/workqueue.c
1681
raw_spin_lock_irq(&pwq->pool->lock);
kernel/workqueue.c
1683
raw_spin_unlock_irq(&pwq->pool->lock);
kernel/workqueue.c
1699
if (list_empty(&pwq->pool->worklist))
kernel/workqueue.c
1700
pwq->pool->last_progress_ts = jiffies;
kernel/workqueue.c
1701
move_linked_works(work, &pwq->pool->worklist, NULL);
kernel/workqueue.c
1729
struct worker_pool *pool = pwq->pool;
kernel/workqueue.c
1730
struct wq_node_nr_active *nna = wq_node_nr_active(wq, pool->node);
kernel/workqueue.c
1733
lockdep_assert_held(&pool->lock);
kernel/workqueue.c
1849
raw_spin_lock_irq(&pwq->pool->lock);
kernel/workqueue.c
1853
kick_pool(pwq->pool);
kernel/workqueue.c
1855
raw_spin_unlock_irq(&pwq->pool->lock);
kernel/workqueue.c
1888
if (pwq->pool != locked_pool) {
kernel/workqueue.c
1890
locked_pool = pwq->pool;
kernel/workqueue.c
1927
if (pwq->pool != caller_pool)
kernel/workqueue.c
1928
kick_pool(pwq->pool);
kernel/workqueue.c
1948
struct worker_pool *pool = pwq->pool;
kernel/workqueue.c
1949
struct wq_node_nr_active *nna = wq_node_nr_active(pwq->wq, pool->node);
kernel/workqueue.c
1951
lockdep_assert_held(&pool->lock);
kernel/workqueue.c
1986
node_activate_pending_pwq(nna, pool);
kernel/workqueue.c
2066
struct worker_pool *pool;
kernel/workqueue.c
2093
pool = get_work_pool(work);
kernel/workqueue.c
2094
if (!pool)
kernel/workqueue.c
2097
raw_spin_lock(&pool->lock);
kernel/workqueue.c
2107
if (pwq && pwq->pool == pool) {
kernel/workqueue.c
2127
move_linked_works(work, &pwq->pool->worklist, NULL);
kernel/workqueue.c
2135
set_work_pool_and_keep_pending(work, pool->id,
kernel/workqueue.c
2136
pool_offq_flags(pool));
kernel/workqueue.c
2141
raw_spin_unlock(&pool->lock);
kernel/workqueue.c
2145
raw_spin_unlock(&pool->lock);
kernel/workqueue.c
2252
struct worker_pool *last_pool, *pool;
kernel/workqueue.c
2285
pool = pwq->pool;
kernel/workqueue.c
2297
if (last_pool && last_pool != pool && !(wq->flags & __WQ_ORDERED)) {
kernel/workqueue.c
2306
pool = pwq->pool;
kernel/workqueue.c
2307
WARN_ON_ONCE(pool != last_pool);
kernel/workqueue.c
2311
raw_spin_lock(&pool->lock);
kernel/workqueue.c
2314
raw_spin_lock(&pool->lock);
kernel/workqueue.c
2326
raw_spin_unlock(&pool->lock);
kernel/workqueue.c
2350
if (list_empty(&pool->worklist))
kernel/workqueue.c
2351
pool->last_progress_ts = jiffies;
kernel/workqueue.c
2354
insert_work(pwq, work, &pool->worklist, work_flags);
kernel/workqueue.c
2355
kick_pool(pool);
kernel/workqueue.c
2362
raw_spin_unlock(&pool->lock);
kernel/workqueue.c
261
struct worker_pool *pool; /* I: the associated pool */
kernel/workqueue.c
2671
static cpumask_t *pool_allowed_cpus(struct worker_pool *pool)
kernel/workqueue.c
2673
if (pool->cpu < 0 && pool->attrs->affn_strict)
kernel/workqueue.c
2674
return pool->attrs->__pod_cpumask;
kernel/workqueue.c
2676
return pool->attrs->cpumask;
kernel/workqueue.c
2689
struct worker_pool *pool)
kernel/workqueue.c
2698
if (pool->flags & POOL_DISASSOCIATED) {
kernel/workqueue.c
2701
WARN_ON_ONCE(pool->flags & POOL_BH);
kernel/workqueue.c
2702
kthread_set_per_cpu(worker->task, pool->cpu);
kernel/workqueue.c
2706
set_cpus_allowed_ptr(worker->task, pool_allowed_cpus(pool));
kernel/workqueue.c
2708
list_add_tail(&worker->node, &pool->workers);
kernel/workqueue.c
2709
worker->pool = pool;
kernel/workqueue.c
2744
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
2747
WARN_ON_ONCE(pool->flags & POOL_BH);
kernel/workqueue.c
2751
worker->pool = NULL;
kernel/workqueue.c
2759
struct worker_pool *pool)
kernel/workqueue.c
2765
if (pool) {
kernel/workqueue.c
2766
if (pool->cpu >= 0)
kernel/workqueue.c
2768
pool->cpu, worker->id,
kernel/workqueue.c
2769
pool->attrs->nice < 0 ? "H" : "");
kernel/workqueue.c
2772
pool->id, worker->id);
kernel/workqueue.c
2790
static struct worker *create_worker(struct worker_pool *pool)
kernel/workqueue.c
2796
id = ida_alloc(&pool->worker_ida, GFP_KERNEL);
kernel/workqueue.c
2803
worker = alloc_worker(pool->node);
kernel/workqueue.c
2811
if (!(pool->flags & POOL_BH)) {
kernel/workqueue.c
2814
format_worker_id(id_buf, sizeof(id_buf), worker, pool);
kernel/workqueue.c
2816
pool->node, "%s", id_buf);
kernel/workqueue.c
2828
set_user_nice(worker->task, pool->attrs->nice);
kernel/workqueue.c
2829
kthread_bind_mask(worker->task, pool_allowed_cpus(pool));
kernel/workqueue.c
2833
worker_attach_to_pool(worker, pool);
kernel/workqueue.c
2836
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
2838
worker->pool->nr_workers++;
kernel/workqueue.c
2849
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
2854
ida_free(&pool->worker_ida, id);
kernel/workqueue.c
2891
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
2893
lockdep_assert_held(&pool->lock);
kernel/workqueue.c
2902
pool->nr_workers--;
kernel/workqueue.c
2903
pool->nr_idle--;
kernel/workqueue.c
2925
struct worker_pool *pool = timer_container_of(pool, t, idle_timer);
kernel/workqueue.c
2928
if (work_pending(&pool->idle_cull_work))
kernel/workqueue.c
2931
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
2933
if (too_many_workers(pool)) {
kernel/workqueue.c
2938
worker = list_last_entry(&pool->idle_list, struct worker, entry);
kernel/workqueue.c
2943
mod_timer(&pool->idle_timer, expires);
kernel/workqueue.c
2945
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
2948
queue_work(system_dfl_wq, &pool->idle_cull_work);
kernel/workqueue.c
2964
struct worker_pool *pool = container_of(work, struct worker_pool, idle_cull_work);
kernel/workqueue.c
2974
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
2976
while (too_many_workers(pool)) {
kernel/workqueue.c
2980
worker = list_last_entry(&pool->idle_list, struct worker, entry);
kernel/workqueue.c
2984
mod_timer(&pool->idle_timer, expires);
kernel/workqueue.c
2991
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
3023
struct worker_pool *pool = timer_container_of(pool, t, mayday_timer);
kernel/workqueue.c
3026
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
3029
if (need_to_create_worker(pool)) {
kernel/workqueue.c
3036
list_for_each_entry(work, &pool->worklist, entry)
kernel/workqueue.c
3041
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
3043
mod_timer(&pool->mayday_timer, jiffies + MAYDAY_INTERVAL);
kernel/workqueue.c
3064
static void maybe_create_worker(struct worker_pool *pool)
kernel/workqueue.c
3065
__releases(&pool->lock)
kernel/workqueue.c
3066
__acquires(&pool->lock)
kernel/workqueue.c
3069
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
3072
mod_timer(&pool->mayday_timer, jiffies + MAYDAY_INITIAL_TIMEOUT);
kernel/workqueue.c
3075
if (create_worker(pool) || !need_to_create_worker(pool))
kernel/workqueue.c
3080
if (!need_to_create_worker(pool))
kernel/workqueue.c
3084
timer_delete_sync(&pool->mayday_timer);
kernel/workqueue.c
3085
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
3091
if (need_to_create_worker(pool))
kernel/workqueue.c
3096
static void worker_lock_callback(struct worker_pool *pool)
kernel/workqueue.c
3098
spin_lock(&pool->cb_lock);
kernel/workqueue.c
3101
static void worker_unlock_callback(struct worker_pool *pool)
kernel/workqueue.c
3103
spin_unlock(&pool->cb_lock);
kernel/workqueue.c
3106
static void workqueue_callback_cancel_wait_running(struct worker_pool *pool)
kernel/workqueue.c
3108
spin_lock(&pool->cb_lock);
kernel/workqueue.c
3109
spin_unlock(&pool->cb_lock);
kernel/workqueue.c
3114
static void worker_lock_callback(struct worker_pool *pool) { }
kernel/workqueue.c
3115
static void worker_unlock_callback(struct worker_pool *pool) { }
kernel/workqueue.c
3116
static void workqueue_callback_cancel_wait_running(struct worker_pool *pool) { }
kernel/workqueue.c
3144
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
3146
if (pool->flags & POOL_MANAGER_ACTIVE)
kernel/workqueue.c
3149
pool->flags |= POOL_MANAGER_ACTIVE;
kernel/workqueue.c
3150
pool->manager = worker;
kernel/workqueue.c
3152
maybe_create_worker(pool);
kernel/workqueue.c
3154
pool->manager = NULL;
kernel/workqueue.c
3155
pool->flags &= ~POOL_MANAGER_ACTIVE;
kernel/workqueue.c
3175
__releases(&pool->lock)
kernel/workqueue.c
3176
__acquires(&pool->lock)
kernel/workqueue.c
3179
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
3182
bool bh_draining = pool->flags & POOL_BH_DRAINING;
kernel/workqueue.c
3196
WARN_ON_ONCE(!(pool->flags & POOL_DISASSOCIATED) &&
kernel/workqueue.c
3197
raw_smp_processor_id() != pool->cpu);
kernel/workqueue.c
3201
hash_add(pool->busy_hash, &worker->hentry, (unsigned long)work);
kernel/workqueue.c
3234
kick_pool(pool);
kernel/workqueue.c
3242
set_work_pool_and_clear_pending(work, pool->id, pool_offq_flags(pool));
kernel/workqueue.c
3245
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
3311
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
3356
worker->pool->last_progress_ts = jiffies;
kernel/workqueue.c
3388
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
3393
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
3397
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
3403
worker->pool = NULL;
kernel/workqueue.c
3404
ida_free(&pool->worker_ida, worker->id);
kernel/workqueue.c
3411
if (!need_more_worker(pool))
kernel/workqueue.c
3415
if (unlikely(!may_start_working(pool)) && manage_workers(worker))
kernel/workqueue.c
3436
list_first_entry(&pool->worklist,
kernel/workqueue.c
3441
} while (keep_working(pool));
kernel/workqueue.c
3454
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
3461
struct worker_pool *pool = pwq->pool;
kernel/workqueue.c
3470
if (!need_to_create_worker(pool)) {
kernel/workqueue.c
3491
if (!(pool->flags & POOL_MANAGER_ACTIVE) ||
kernel/workqueue.c
3498
work = list_first_entry(&pool->worklist, struct work_struct, entry);
kernel/workqueue.c
3503
list_for_each_entry_safe_from(work, n, &pool->worklist, entry) {
kernel/workqueue.c
3568
struct worker_pool *pool = pwq->pool;
kernel/workqueue.c
3576
worker_attach_to_pool(rescuer, pool);
kernel/workqueue.c
3578
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
3591
pwq->nr_active && need_to_create_worker(pool)) {
kernel/workqueue.c
3607
kick_pool(pool);
kernel/workqueue.c
3609
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
3638
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
3642
worker_lock_callback(pool);
kernel/workqueue.c
3643
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
3650
if (!need_more_worker(pool))
kernel/workqueue.c
3658
list_first_entry(&pool->worklist,
kernel/workqueue.c
3663
} while (keep_working(pool) &&
kernel/workqueue.c
3669
kick_pool(pool);
kernel/workqueue.c
3670
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
3671
worker_unlock_callback(pool);
kernel/workqueue.c
3687
struct worker_pool *pool =
kernel/workqueue.c
3689
if (need_more_worker(pool))
kernel/workqueue.c
3690
bh_worker(list_first_entry(&pool->workers, struct worker, node));
kernel/workqueue.c
3695
struct worker_pool *pool;
kernel/workqueue.c
3703
struct worker_pool *pool = dead_work->pool;
kernel/workqueue.c
3713
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
3714
pool->flags |= POOL_BH_DRAINING;
kernel/workqueue.c
3715
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
3717
bh_worker(list_first_entry(&pool->workers, struct worker, node));
kernel/workqueue.c
3719
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
3720
pool->flags &= ~POOL_BH_DRAINING;
kernel/workqueue.c
3721
repeat = need_more_worker(pool);
kernel/workqueue.c
3722
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
3730
if (pool->attrs->nice == HIGHPRI_NICE_LEVEL)
kernel/workqueue.c
3752
struct worker_pool *pool = &per_cpu(bh_worker_pools, cpu)[i];
kernel/workqueue.c
3755
if (!need_more_worker(pool))
kernel/workqueue.c
3759
dead_work.pool = pool;
kernel/workqueue.c
3762
if (pool->attrs->nice == HIGHPRI_NICE_LEVEL)
kernel/workqueue.c
3949
if (current_pool != pwq->pool) {
kernel/workqueue.c
3952
current_pool = pwq->pool;
kernel/workqueue.c
4204
raw_spin_lock_irq(&pwq->pool->lock);
kernel/workqueue.c
4206
raw_spin_unlock_irq(&pwq->pool->lock);
kernel/workqueue.c
4230
struct worker_pool *pool;
kernel/workqueue.c
4235
pool = get_work_pool(work);
kernel/workqueue.c
4236
if (!pool) {
kernel/workqueue.c
4241
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
4245
if (unlikely(pwq->pool != pool))
kernel/workqueue.c
4248
worker = find_worker_executing_work(pool, work);
kernel/workqueue.c
4258
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
4277
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
4314
struct worker_pool *pool;
kernel/workqueue.c
4317
pool = get_work_pool(work);
kernel/workqueue.c
4318
if (pool)
kernel/workqueue.c
4319
workqueue_callback_cancel_wait_running(pool);
kernel/workqueue.c
4847
static int init_worker_pool(struct worker_pool *pool)
kernel/workqueue.c
4849
raw_spin_lock_init(&pool->lock);
kernel/workqueue.c
4850
pool->id = -1;
kernel/workqueue.c
4851
pool->cpu = -1;
kernel/workqueue.c
4852
pool->node = NUMA_NO_NODE;
kernel/workqueue.c
4853
pool->flags |= POOL_DISASSOCIATED;
kernel/workqueue.c
4854
pool->last_progress_ts = jiffies;
kernel/workqueue.c
4855
INIT_LIST_HEAD(&pool->worklist);
kernel/workqueue.c
4856
INIT_LIST_HEAD(&pool->idle_list);
kernel/workqueue.c
4857
hash_init(pool->busy_hash);
kernel/workqueue.c
4859
timer_setup(&pool->idle_timer, idle_worker_timeout, TIMER_DEFERRABLE);
kernel/workqueue.c
4860
INIT_WORK(&pool->idle_cull_work, idle_cull_fn);
kernel/workqueue.c
4862
timer_setup(&pool->mayday_timer, pool_mayday_timeout, 0);
kernel/workqueue.c
4864
INIT_LIST_HEAD(&pool->workers);
kernel/workqueue.c
4866
ida_init(&pool->worker_ida);
kernel/workqueue.c
4867
INIT_HLIST_NODE(&pool->hash_node);
kernel/workqueue.c
4868
pool->refcnt = 1;
kernel/workqueue.c
4870
spin_lock_init(&pool->cb_lock);
kernel/workqueue.c
4874
pool->attrs = alloc_workqueue_attrs();
kernel/workqueue.c
4875
if (!pool->attrs)
kernel/workqueue.c
4878
wqattrs_clear_for_pool(pool->attrs);
kernel/workqueue.c
4996
struct worker_pool *pool = container_of(rcu, struct worker_pool, rcu);
kernel/workqueue.c
4998
ida_destroy(&pool->worker_ida);
kernel/workqueue.c
4999
free_workqueue_attrs(pool->attrs);
kernel/workqueue.c
5000
kfree(pool);
kernel/workqueue.c
5014
static void put_unbound_pool(struct worker_pool *pool)
kernel/workqueue.c
5021
if (--pool->refcnt)
kernel/workqueue.c
5025
if (WARN_ON(!(pool->cpu < 0)) ||
kernel/workqueue.c
5026
WARN_ON(!list_empty(&pool->worklist)))
kernel/workqueue.c
5030
if (pool->id >= 0)
kernel/workqueue.c
5031
idr_remove(&worker_pool_idr, pool->id);
kernel/workqueue.c
5032
hash_del(&pool->hash_node);
kernel/workqueue.c
5049
!(pool->flags & POOL_MANAGER_ACTIVE),
kernel/workqueue.c
5053
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
5054
if (!(pool->flags & POOL_MANAGER_ACTIVE)) {
kernel/workqueue.c
5055
pool->flags |= POOL_MANAGER_ACTIVE;
kernel/workqueue.c
5058
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
5062
while ((worker = first_idle_worker(pool)))
kernel/workqueue.c
5064
WARN_ON(pool->nr_workers || pool->nr_idle);
kernel/workqueue.c
5065
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
5074
timer_delete_sync(&pool->idle_timer);
kernel/workqueue.c
5075
cancel_work_sync(&pool->idle_cull_work);
kernel/workqueue.c
5076
timer_delete_sync(&pool->mayday_timer);
kernel/workqueue.c
5079
call_rcu(&pool->rcu, rcu_free_pool);
kernel/workqueue.c
5100
struct worker_pool *pool;
kernel/workqueue.c
5106
hash_for_each_possible(unbound_pool_hash, pool, hash_node, hash) {
kernel/workqueue.c
5107
if (wqattrs_equal(pool->attrs, attrs)) {
kernel/workqueue.c
5108
pool->refcnt++;
kernel/workqueue.c
5109
return pool;
kernel/workqueue.c
5122
pool = kzalloc_node(sizeof(*pool), GFP_KERNEL, node);
kernel/workqueue.c
5123
if (!pool || init_worker_pool(pool) < 0)
kernel/workqueue.c
5126
pool->node = node;
kernel/workqueue.c
5127
copy_workqueue_attrs(pool->attrs, attrs);
kernel/workqueue.c
5128
wqattrs_clear_for_pool(pool->attrs);
kernel/workqueue.c
5130
if (worker_pool_assign_id(pool) < 0)
kernel/workqueue.c
5134
if (wq_online && !create_worker(pool))
kernel/workqueue.c
5138
hash_add(unbound_pool_hash, &pool->hash_node, hash);
kernel/workqueue.c
5140
return pool;
kernel/workqueue.c
5142
if (pool)
kernel/workqueue.c
5143
put_unbound_pool(pool);
kernel/workqueue.c
5156
struct worker_pool *pool = pwq->pool;
kernel/workqueue.c
5179
put_unbound_pool(pool);
kernel/workqueue.c
5185
wq_node_nr_active(pwq->wq, pwq->pool->node);
kernel/workqueue.c
5206
struct worker_pool *pool)
kernel/workqueue.c
5212
pwq->pool = pool;
kernel/workqueue.c
5258
struct worker_pool *pool;
kernel/workqueue.c
5263
pool = get_unbound_pool(attrs);
kernel/workqueue.c
5264
if (!pool)
kernel/workqueue.c
5267
pwq = kmem_cache_alloc_node(pwq_cache, GFP_KERNEL, pool->node);
kernel/workqueue.c
5269
put_unbound_pool(pool);
kernel/workqueue.c
5273
init_pwq(pwq, wq, pool);
kernel/workqueue.c
537
static void show_one_worker_pool(struct worker_pool *pool);
kernel/workqueue.c
547
#define for_each_bh_worker_pool(pool, cpu) \
kernel/workqueue.c
548
for ((pool) = &per_cpu(bh_worker_pools, cpu)[0]; \
kernel/workqueue.c
549
(pool) < &per_cpu(bh_worker_pools, cpu)[NR_STD_WORKER_POOLS]; \
kernel/workqueue.c
550
(pool)++)
kernel/workqueue.c
552
#define for_each_cpu_worker_pool(pool, cpu) \
kernel/workqueue.c
553
for ((pool) = &per_cpu(cpu_worker_pools, cpu)[0]; \
kernel/workqueue.c
5538
if (wqattrs_equal(target_attrs, unbound_pwq(wq, cpu)->pool->attrs))
kernel/workqueue.c
554
(pool) < &per_cpu(cpu_worker_pools, cpu)[NR_STD_WORKER_POOLS]; \
kernel/workqueue.c
555
(pool)++)
kernel/workqueue.c
5557
raw_spin_lock_irq(&pwq->pool->lock);
kernel/workqueue.c
5559
raw_spin_unlock_irq(&pwq->pool->lock);
kernel/workqueue.c
5587
struct worker_pool *pool;
kernel/workqueue.c
5589
pool = &(per_cpu_ptr(pools, cpu)[highpri]);
kernel/workqueue.c
5593
pool->node);
kernel/workqueue.c
5597
init_pwq(*pwq_p, wq, pool);
kernel/workqueue.c
569
#define for_each_pool(pool, pi) \
kernel/workqueue.c
570
idr_for_each_entry(&worker_pool_idr, pool, pi) \
kernel/workqueue.c
5745
raw_spin_lock_irqsave(&pwq->pool->lock, irq_flags);
kernel/workqueue.c
5748
kick_pool(pwq->pool);
kernel/workqueue.c
5750
raw_spin_unlock_irqrestore(&pwq->pool->lock, irq_flags);
kernel/workqueue.c
584
#define for_each_pool_worker(worker, pool) \
kernel/workqueue.c
585
list_for_each_entry((worker), &(pool)->workers, node) \
kernel/workqueue.c
5984
raw_spin_lock_irq(&pwq->pool->lock);
kernel/workqueue.c
5989
raw_spin_unlock_irq(&pwq->pool->lock);
kernel/workqueue.c
5995
raw_spin_unlock_irq(&pwq->pool->lock);
kernel/workqueue.c
6168
struct worker_pool *pool;
kernel/workqueue.c
6176
pool = get_work_pool(work);
kernel/workqueue.c
6177
if (pool) {
kernel/workqueue.c
6178
raw_spin_lock_irqsave(&pool->lock, irq_flags);
kernel/workqueue.c
6179
if (find_worker_executing_work(pool, work))
kernel/workqueue.c
6181
raw_spin_unlock_irqrestore(&pool->lock, irq_flags);
kernel/workqueue.c
6261
static void pr_cont_pool_info(struct worker_pool *pool)
kernel/workqueue.c
6263
pr_cont(" cpus=%*pbl", nr_cpumask_bits, pool->attrs->cpumask);
kernel/workqueue.c
6264
if (pool->node != NUMA_NO_NODE)
kernel/workqueue.c
6265
pr_cont(" node=%d", pool->node);
kernel/workqueue.c
6266
pr_cont(" flags=0x%x", pool->flags);
kernel/workqueue.c
6267
if (pool->flags & POOL_BH)
kernel/workqueue.c
6269
pool->attrs->nice == HIGHPRI_NICE_LEVEL ? "-hi" : "");
kernel/workqueue.c
6271
pr_cont(" nice=%d", pool->attrs->nice);
kernel/workqueue.c
6276
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
6278
if (pool->flags & POOL_BH)
kernel/workqueue.c
6280
pool->attrs->nice == HIGHPRI_NICE_LEVEL ? "-hi" : "");
kernel/workqueue.c
6333
struct worker_pool *pool = pwq->pool;
kernel/workqueue.c
6339
pr_info(" pwq %d:", pool->id);
kernel/workqueue.c
6340
pr_cont_pool_info(pool);
kernel/workqueue.c
6346
hash_for_each(pool->busy_hash, bkt, worker, hentry) {
kernel/workqueue.c
6356
hash_for_each(pool->busy_hash, bkt, worker, hentry) {
kernel/workqueue.c
6373
list_for_each_entry(work, &pool->worklist, entry) {
kernel/workqueue.c
6383
list_for_each_entry(work, &pool->worklist, entry) {
kernel/workqueue.c
6429
raw_spin_lock_irqsave(&pwq->pool->lock, irq_flags);
kernel/workqueue.c
6440
raw_spin_unlock_irqrestore(&pwq->pool->lock, irq_flags);
kernel/workqueue.c
6455
static void show_one_worker_pool(struct worker_pool *pool)
kernel/workqueue.c
6462
raw_spin_lock_irqsave(&pool->lock, irq_flags);
kernel/workqueue.c
6463
if (pool->nr_workers == pool->nr_idle)
kernel/workqueue.c
6467
if (!list_empty(&pool->worklist))
kernel/workqueue.c
6468
hung = jiffies_to_msecs(jiffies - pool->last_progress_ts) / 1000;
kernel/workqueue.c
6476
pr_info("pool %d:", pool->id);
kernel/workqueue.c
6477
pr_cont_pool_info(pool);
kernel/workqueue.c
6478
pr_cont(" hung=%lus workers=%d", hung, pool->nr_workers);
kernel/workqueue.c
6479
if (pool->manager)
kernel/workqueue.c
6481
task_pid_nr(pool->manager->task));
kernel/workqueue.c
6482
list_for_each_entry(worker, &pool->idle_list, entry) {
kernel/workqueue.c
6490
raw_spin_unlock_irqrestore(&pool->lock, irq_flags);
kernel/workqueue.c
6508
struct worker_pool *pool;
kernel/workqueue.c
6518
for_each_pool(pool, pi)
kernel/workqueue.c
6519
show_one_worker_pool(pool);
kernel/workqueue.c
6555
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
6558
off = format_worker_id(buf, size, worker, pool);
kernel/workqueue.c
6560
if (pool) {
kernel/workqueue.c
6561
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
6575
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
6603
struct worker_pool *pool;
kernel/workqueue.c
6606
for_each_cpu_worker_pool(pool, cpu) {
kernel/workqueue.c
6608
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
6618
for_each_pool_worker(worker, pool)
kernel/workqueue.c
6621
pool->flags |= POOL_DISASSOCIATED;
kernel/workqueue.c
6631
pool->nr_running = 0;
kernel/workqueue.c
6638
kick_pool(pool);
kernel/workqueue.c
6640
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
6642
for_each_pool_worker(worker, pool)
kernel/workqueue.c
6655
static void rebind_workers(struct worker_pool *pool)
kernel/workqueue.c
6668
for_each_pool_worker(worker, pool) {
kernel/workqueue.c
6669
kthread_set_per_cpu(worker->task, pool->cpu);
kernel/workqueue.c
6671
pool_allowed_cpus(pool)) < 0);
kernel/workqueue.c
6674
raw_spin_lock_irq(&pool->lock);
kernel/workqueue.c
6676
pool->flags &= ~POOL_DISASSOCIATED;
kernel/workqueue.c
6678
for_each_pool_worker(worker, pool) {
kernel/workqueue.c
6702
raw_spin_unlock_irq(&pool->lock);
kernel/workqueue.c
6715
static void restore_unbound_workers_cpumask(struct worker_pool *pool, int cpu)
kernel/workqueue.c
6723
if (!cpumask_test_cpu(cpu, pool->attrs->cpumask))
kernel/workqueue.c
6726
cpumask_and(&cpumask, pool->attrs->cpumask, cpu_online_mask);
kernel/workqueue.c
6729
for_each_pool_worker(worker, pool)
kernel/workqueue.c
6735
struct worker_pool *pool;
kernel/workqueue.c
6737
for_each_cpu_worker_pool(pool, cpu) {
kernel/workqueue.c
6738
if (pool->nr_workers)
kernel/workqueue.c
6740
if (!create_worker(pool))
kernel/workqueue.c
6748
struct worker_pool *pool;
kernel/workqueue.c
6756
for_each_pool(pool, pi) {
kernel/workqueue.c
6758
if (pool->flags & POOL_BH)
kernel/workqueue.c
6762
if (pool->cpu == cpu)
kernel/workqueue.c
6763
rebind_workers(pool);
kernel/workqueue.c
6764
else if (pool->cpu < 0)
kernel/workqueue.c
6765
restore_unbound_workers_cpumask(pool, cpu);
kernel/workqueue.c
7003
struct worker_pool *pool;
kernel/workqueue.c
7010
if (wq->rescuer && !wq->rescuer->pool)
kernel/workqueue.c
7015
for_each_cpu_worker_pool(pool, cpu) {
kernel/workqueue.c
7016
if (!(pool->flags & POOL_DISASSOCIATED))
kernel/workqueue.c
7018
for_each_pool_worker(worker, pool)
kernel/workqueue.c
709
static int worker_pool_assign_id(struct worker_pool *pool)
kernel/workqueue.c
715
ret = idr_alloc(&worker_pool_idr, pool, 0, WORK_OFFQ_POOL_NONE,
kernel/workqueue.c
718
pool->id = ret;
kernel/workqueue.c
751
return unbound_pwq(wq, -1)->pool->attrs->__pod_cpumask;
kernel/workqueue.c
7590
static void show_cpu_pool_busy_workers(struct worker_pool *pool)
kernel/workqueue.c
7596
raw_spin_lock_irqsave(&pool->lock, irq_flags);
kernel/workqueue.c
7598
hash_for_each(pool->busy_hash, bkt, worker, hentry) {
kernel/workqueue.c
7606
pr_info("pool %d:\n", pool->id);
kernel/workqueue.c
7612
raw_spin_unlock_irqrestore(&pool->lock, irq_flags);
kernel/workqueue.c
7617
struct worker_pool *pool;
kernel/workqueue.c
7624
for_each_pool(pool, pi) {
kernel/workqueue.c
7625
if (pool->cpu_stall)
kernel/workqueue.c
7626
show_cpu_pool_busy_workers(pool);
kernel/workqueue.c
7670
struct worker_pool *pool;
kernel/workqueue.c
7677
for_each_pool(pool, pi) {
kernel/workqueue.c
7680
pool->cpu_stall = false;
kernel/workqueue.c
7681
if (list_empty(&pool->worklist))
kernel/workqueue.c
7691
if (pool->cpu >= 0)
kernel/workqueue.c
7692
touched = READ_ONCE(per_cpu(wq_watchdog_touched_cpu, pool->cpu));
kernel/workqueue.c
7695
pool_ts = READ_ONCE(pool->last_progress_ts);
kernel/workqueue.c
770
static unsigned long pool_offq_flags(struct worker_pool *pool)
kernel/workqueue.c
7715
scoped_guard(raw_spinlock_irqsave, &pool->lock) {
kernel/workqueue.c
7716
pool_ts = pool->last_progress_ts;
kernel/workqueue.c
772
return (pool->flags & POOL_BH) ? WORK_OFFQ_BH : 0;
kernel/workqueue.c
7728
if (pool->cpu >= 0 && !(pool->flags & POOL_BH)) {
kernel/workqueue.c
7729
pool->cpu_stall = true;
kernel/workqueue.c
7733
pr_cont_pool_info(pool);
kernel/workqueue.c
7838
static void __init init_cpu_worker_pool(struct worker_pool *pool, int cpu, int nice)
kernel/workqueue.c
7840
BUG_ON(init_worker_pool(pool));
kernel/workqueue.c
7841
pool->cpu = cpu;
kernel/workqueue.c
7842
cpumask_copy(pool->attrs->cpumask, cpumask_of(cpu));
kernel/workqueue.c
7843
cpumask_copy(pool->attrs->__pod_cpumask, cpumask_of(cpu));
kernel/workqueue.c
7844
pool->attrs->nice = nice;
kernel/workqueue.c
7845
pool->attrs->affn_strict = true;
kernel/workqueue.c
7846
pool->node = cpu_to_node(cpu);
kernel/workqueue.c
7850
BUG_ON(worker_pool_assign_id(pool));
kernel/workqueue.c
7916
struct worker_pool *pool;
kernel/workqueue.c
7919
for_each_bh_worker_pool(pool, cpu) {
kernel/workqueue.c
7920
init_cpu_worker_pool(pool, cpu, std_nice[i]);
kernel/workqueue.c
7921
pool->flags |= POOL_BH;
kernel/workqueue.c
7922
init_irq_work(bh_pool_irq_work(pool), irq_work_fns[i]);
kernel/workqueue.c
7927
for_each_cpu_worker_pool(pool, cpu)
kernel/workqueue.c
7928
init_cpu_worker_pool(pool, cpu, std_nice[i++]);
kernel/workqueue.c
8023
struct worker_pool *pool;
kernel/workqueue.c
8035
for_each_bh_worker_pool(pool, cpu)
kernel/workqueue.c
8036
pool->node = cpu_to_node(cpu);
kernel/workqueue.c
8037
for_each_cpu_worker_pool(pool, cpu)
kernel/workqueue.c
8038
pool->node = cpu_to_node(cpu);
kernel/workqueue.c
8056
for_each_bh_worker_pool(pool, cpu)
kernel/workqueue.c
8057
BUG_ON(!create_worker(pool));
kernel/workqueue.c
8060
for_each_cpu_worker_pool(pool, cpu) {
kernel/workqueue.c
8061
pool->flags &= ~POOL_DISASSOCIATED;
kernel/workqueue.c
8062
BUG_ON(!create_worker(pool));
kernel/workqueue.c
8066
hash_for_each(unbound_pool_hash, bkt, pool, hash_node)
kernel/workqueue.c
8067
BUG_ON(!create_worker(pool));
kernel/workqueue.c
890
return work_struct_pwq(data)->pool;
kernel/workqueue.c
935
static bool need_more_worker(struct worker_pool *pool)
kernel/workqueue.c
937
return !list_empty(&pool->worklist) && !pool->nr_running;
kernel/workqueue.c
941
static bool may_start_working(struct worker_pool *pool)
kernel/workqueue.c
943
return pool->nr_idle;
kernel/workqueue.c
947
static bool keep_working(struct worker_pool *pool)
kernel/workqueue.c
949
return !list_empty(&pool->worklist) && (pool->nr_running <= 1);
kernel/workqueue.c
953
static bool need_to_create_worker(struct worker_pool *pool)
kernel/workqueue.c
955
return need_more_worker(pool) && !may_start_working(pool);
kernel/workqueue.c
959
static bool too_many_workers(struct worker_pool *pool)
kernel/workqueue.c
961
bool managing = pool->flags & POOL_MANAGER_ACTIVE;
kernel/workqueue.c
962
int nr_idle = pool->nr_idle + managing; /* manager is considered idle */
kernel/workqueue.c
963
int nr_busy = pool->nr_workers - nr_idle;
kernel/workqueue.c
977
struct worker_pool *pool = worker->pool;
kernel/workqueue.c
979
lockdep_assert_held(&pool->lock);
kernel/workqueue.c
984
pool->nr_running--;
kernel/workqueue.c
999
struct worker_pool *pool = worker->pool;
kernel/workqueue_internal.h
46
struct worker_pool *pool; /* A: the associated pool */
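
The worker_pool lines above are internal to the workqueue implementation; drivers never touch a pool directly. Work reaches a per-CPU or unbound pool through the public workqueue API. A minimal sketch, assuming a module context (the queue name and callback are illustrative, not taken from the lines above):

#include <linux/module.h>
#include <linux/workqueue.h>

static struct workqueue_struct *demo_wq;	/* hypothetical queue */
static struct work_struct demo_work;

static void demo_work_fn(struct work_struct *work)
{
	pr_info("ran on a worker from the pool backing this queue\n");
}

static int __init demo_init(void)
{
	/* WQ_UNBOUND queues are served by the unbound pools built in
	 * get_unbound_pool(); per-CPU queues use cpu_worker_pools. */
	demo_wq = alloc_workqueue("demo", WQ_UNBOUND, 0);
	if (!demo_wq)
		return -ENOMEM;
	INIT_WORK(&demo_work, demo_work_fn);
	queue_work(demo_wq, &demo_work);
	return 0;
}

static void __exit demo_exit(void)
{
	destroy_workqueue(demo_wq);	/* completes pending work first */
}

module_init(demo_init);
module_exit(demo_exit);
MODULE_LICENSE("GPL");
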
lib/debugobjects.c
137
static __always_inline unsigned int pool_count(struct obj_pool *pool)
lib/debugobjects.c
139
return READ_ONCE(pool->cnt);
lib/debugobjects.c
142
static __always_inline bool pool_should_refill(struct obj_pool *pool)
lib/debugobjects.c
144
return pool_count(pool) < pool->min_cnt;
lib/debugobjects.c
147
static __always_inline bool pool_must_refill(struct obj_pool *pool)
lib/debugobjects.c
149
return pool_count(pool) < pool->min_cnt / 2;
lib/genalloc.c
155
struct gen_pool *pool;
lib/genalloc.c
157
pool = kmalloc_node(sizeof(struct gen_pool), GFP_KERNEL, nid);
lib/genalloc.c
158
if (pool != NULL) {
lib/genalloc.c
159
spin_lock_init(&pool->lock);
lib/genalloc.c
160
INIT_LIST_HEAD(&pool->chunks);
lib/genalloc.c
161
pool->min_alloc_order = min_alloc_order;
lib/genalloc.c
162
pool->algo = gen_pool_first_fit;
lib/genalloc.c
163
pool->data = NULL;
lib/genalloc.c
164
pool->name = NULL;
lib/genalloc.c
166
return pool;
lib/genalloc.c
184
int gen_pool_add_owner(struct gen_pool *pool, unsigned long virt, phys_addr_t phys,
lib/genalloc.c
188
unsigned long nbits = size >> pool->min_alloc_order;
lib/genalloc.c
202
spin_lock(&pool->lock);
lib/genalloc.c
203
list_add_rcu(&chunk->next_chunk, &pool->chunks);
lib/genalloc.c
204
spin_unlock(&pool->lock);
lib/genalloc.c
217
phys_addr_t gen_pool_virt_to_phys(struct gen_pool *pool, unsigned long addr)
lib/genalloc.c
223
list_for_each_entry_rcu(chunk, &pool->chunks, next_chunk) {
lib/genalloc.c
242
void gen_pool_destroy(struct gen_pool *pool)
lib/genalloc.c
246
int order = pool->min_alloc_order;
lib/genalloc.c
249
list_for_each_safe(_chunk, _next_chunk, &pool->chunks) {
lib/genalloc.c
259
kfree_const(pool->name);
lib/genalloc.c
260
kfree(pool);
lib/genalloc.c
277
unsigned long gen_pool_alloc_algo_owner(struct gen_pool *pool, size_t size,
lib/genalloc.c
282
int order = pool->min_alloc_order;
lib/genalloc.c
297
list_for_each_entry_rcu(chunk, &pool->chunks, next_chunk) {
lib/genalloc.c
305
nbits, data, pool, chunk->start_addr);
lib/genalloc.c
341
void *gen_pool_dma_alloc(struct gen_pool *pool, size_t size, dma_addr_t *dma)
lib/genalloc.c
343
return gen_pool_dma_alloc_algo(pool, size, dma, pool->algo, pool->data);
lib/genalloc.c
362
void *gen_pool_dma_alloc_algo(struct gen_pool *pool, size_t size,
lib/genalloc.c
367
if (!pool)
lib/genalloc.c
370
vaddr = gen_pool_alloc_algo(pool, size, algo, data);
lib/genalloc.c
375
*dma = gen_pool_virt_to_phys(pool, vaddr);
lib/genalloc.c
395
void *gen_pool_dma_alloc_align(struct gen_pool *pool, size_t size,
lib/genalloc.c
400
return gen_pool_dma_alloc_algo(pool, size, dma,
lib/genalloc.c
419
void *gen_pool_dma_zalloc(struct gen_pool *pool, size_t size, dma_addr_t *dma)
lib/genalloc.c
421
return gen_pool_dma_zalloc_algo(pool, size, dma, pool->algo, pool->data);
lib/genalloc.c
440
void *gen_pool_dma_zalloc_algo(struct gen_pool *pool, size_t size,
lib/genalloc.c
443
void *vaddr = gen_pool_dma_alloc_algo(pool, size, dma, algo, data);
lib/genalloc.c
466
void *gen_pool_dma_zalloc_align(struct gen_pool *pool, size_t size,
lib/genalloc.c
471
return gen_pool_dma_zalloc_algo(pool, size, dma,
lib/genalloc.c
487
void gen_pool_free_owner(struct gen_pool *pool, unsigned long addr, size_t size,
lib/genalloc.c
491
int order = pool->min_alloc_order;
lib/genalloc.c
503
list_for_each_entry_rcu(chunk, &pool->chunks, next_chunk) {
lib/genalloc.c
531
void gen_pool_for_each_chunk(struct gen_pool *pool,
lib/genalloc.c
532
void (*func)(struct gen_pool *pool, struct gen_pool_chunk *chunk, void *data),
lib/genalloc.c
538
list_for_each_entry_rcu(chunk, &(pool)->chunks, next_chunk)
lib/genalloc.c
539
func(pool, chunk, data);
lib/genalloc.c
553
bool gen_pool_has_addr(struct gen_pool *pool, unsigned long start,
lib/genalloc.c
561
list_for_each_entry_rcu(chunk, &(pool)->chunks, next_chunk) {
lib/genalloc.c
580
size_t gen_pool_avail(struct gen_pool *pool)
lib/genalloc.c
586
list_for_each_entry_rcu(chunk, &pool->chunks, next_chunk)
lib/genalloc.c
599
size_t gen_pool_size(struct gen_pool *pool)
lib/genalloc.c
605
list_for_each_entry_rcu(chunk, &pool->chunks, next_chunk)
lib/genalloc.c
622
void gen_pool_set_algo(struct gen_pool *pool, genpool_algo_t algo, void *data)
lib/genalloc.c
626
pool->algo = algo;
lib/genalloc.c
627
if (!pool->algo)
lib/genalloc.c
628
pool->algo = gen_pool_first_fit;
lib/genalloc.c
630
pool->data = data;
lib/genalloc.c
649
struct gen_pool *pool, unsigned long start_addr)
lib/genalloc.c
668
struct gen_pool *pool, unsigned long start_addr)
lib/genalloc.c
675
order = pool->min_alloc_order;
lib/genalloc.c
696
struct gen_pool *pool, unsigned long start_addr)
lib/genalloc.c
704
order = pool->min_alloc_order;
lib/genalloc.c
731
unsigned int nr, void *data, struct gen_pool *pool,
lib/genalloc.c
756
struct gen_pool *pool, unsigned long start_addr)
lib/genalloc.c
832
struct gen_pool **ptr, *pool;
lib/genalloc.c
849
pool = gen_pool_create(min_alloc_order, nid);
lib/genalloc.c
850
if (!pool)
lib/genalloc.c
853
*ptr = pool;
lib/genalloc.c
854
pool->name = pool_name;
lib/genalloc.c
857
return pool;
lib/genalloc.c
885
struct gen_pool *pool = NULL;
lib/genalloc.c
903
pool = gen_pool_get(&pdev->dev, name);
lib/genalloc.c
909
return pool;
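
Taken together, these entries trace the usual gen_pool lifecycle: create, add a backing region, allocate and free, destroy. A minimal sketch, assuming a caller that owns a contiguous buffer (demo_genpool, buf, and the sizes are illustrative):

#include <linux/genalloc.h>

/* Manage a caller-owned buffer in 32-byte granules (order 5). */
static int demo_genpool(void *buf, size_t size)
{
	struct gen_pool *pool;
	unsigned long chunk;
	int ret;

	pool = gen_pool_create(5, -1);	/* min_alloc_order = 5, any node */
	if (!pool)
		return -ENOMEM;

	ret = gen_pool_add(pool, (unsigned long)buf, size, -1);
	if (ret) {
		gen_pool_destroy(pool);
		return ret;
	}

	chunk = gen_pool_alloc(pool, 128);	/* rounds up to 4 granules */
	if (!chunk) {
		gen_pool_destroy(pool);
		return -ENOMEM;
	}

	gen_pool_free(pool, chunk, 128);
	gen_pool_destroy(pool);	/* BUGs if allocations are outstanding */
	return 0;
}
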
lib/objpool.c
105
static void objpool_fini_percpu_slots(struct objpool_head *pool)
lib/objpool.c
109
if (!pool->cpu_slots)
lib/objpool.c
113
kvfree(pool->cpu_slots[i]);
lib/objpool.c
114
kfree(pool->cpu_slots);
lib/objpool.c
118
int objpool_init(struct objpool_head *pool, int nr_objs, int object_size,
lib/objpool.c
138
memset(pool, 0, sizeof(struct objpool_head));
lib/objpool.c
139
pool->nr_possible_cpus = num_possible_cpus();
lib/objpool.c
140
pool->obj_size = object_size;
lib/objpool.c
141
pool->capacity = capacity;
lib/objpool.c
142
pool->gfp = gfp & ~__GFP_ZERO;
lib/objpool.c
143
pool->context = context;
lib/objpool.c
144
pool->release = release;
lib/objpool.c
146
pool->cpu_slots = kzalloc(slot_size, pool->gfp);
lib/objpool.c
147
if (!pool->cpu_slots)
lib/objpool.c
151
rc = objpool_init_percpu_slots(pool, nr_objs, context, objinit);
lib/objpool.c
153
objpool_fini_percpu_slots(pool);
lib/objpool.c
155
refcount_set(&pool->ref, pool->nr_objs + 1);
lib/objpool.c
162
void objpool_free(struct objpool_head *pool)
lib/objpool.c
164
if (!pool->cpu_slots)
lib/objpool.c
168
objpool_fini_percpu_slots(pool);
lib/objpool.c
171
if (pool->release)
lib/objpool.c
172
pool->release(pool, pool->context);
lib/objpool.c
177
int objpool_drop(void *obj, struct objpool_head *pool)
lib/objpool.c
179
if (!obj || !pool)
lib/objpool.c
182
if (refcount_dec_and_test(&pool->ref)) {
lib/objpool.c
183
objpool_free(pool);
lib/objpool.c
19
objpool_init_percpu_slot(struct objpool_head *pool,
lib/objpool.c
192
void objpool_fini(struct objpool_head *pool)
lib/objpool.c
197
while (objpool_pop(pool))
lib/objpool.c
200
if (refcount_sub_and_test(count, &pool->ref))
lib/objpool.c
201
objpool_free(pool);
lib/objpool.c
24
void *obj = (void *)&slot->entries[pool->capacity];
lib/objpool.c
28
slot->mask = pool->capacity - 1;
lib/objpool.c
37
obj = obj + pool->obj_size;
lib/objpool.c
40
pool->nr_objs++;
lib/objpool.c
48
objpool_init_percpu_slots(struct objpool_head *pool, int nr_objs,
lib/objpool.c
63
nodes = nr_objs / pool->nr_possible_cpus;
lib/objpool.c
64
if (cpu_count < (nr_objs % pool->nr_possible_cpus))
lib/objpool.c
68
size = struct_size(slot, entries, pool->capacity) +
lib/objpool.c
69
pool->obj_size * nodes;
lib/objpool.c
83
if ((pool->gfp & (GFP_ATOMIC | GFP_KERNEL)) != GFP_ATOMIC)
lib/objpool.c
84
slot = __vmalloc_node(size, sizeof(void *), pool->gfp,
lib/objpool.c
88
slot = kmalloc_node(size, pool->gfp, cpu_to_node(i));
lib/objpool.c
93
pool->cpu_slots[i] = slot;
lib/objpool.c
96
rc = objpool_init_percpu_slot(pool, slot, nodes, context, objinit);
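
The slot math above pre-allocates every object at init time; afterwards pop and push are lockless per-CPU ring operations. A minimal sketch of the round trip, mirroring the calls exercised by lib/test_objpool.c further down (the demo_node type and the counts are illustrative):

#include <linux/objpool.h>

struct demo_node {		/* illustrative payload */
	int id;
};

static struct objpool_head demo_pool;

static int demo_objpool(void)
{
	struct demo_node *node;
	int ret;

	/* 64 pre-allocated objects, no per-object init, no release cb */
	ret = objpool_init(&demo_pool, 64, sizeof(struct demo_node),
			   GFP_KERNEL, NULL, NULL, NULL);
	if (ret)
		return ret;

	node = objpool_pop(&demo_pool);	/* NULL when the rings are empty */
	if (node)
		objpool_push(node, &demo_pool);

	objpool_fini(&demo_pool);	/* drains and drops the last ref */
	return 0;
}
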
lib/sg_pool.c
14
mempool_t *pool;
lib/sg_pool.c
158
sgp->pool = mempool_create_slab_pool(SG_MEMPOOL_SIZE,
lib/sg_pool.c
160
if (!sgp->pool) {
lib/sg_pool.c
173
mempool_destroy(sgp->pool);
lib/sg_pool.c
59
mempool_free(sgl, sgp->pool);
lib/sg_pool.c
67
return mempool_alloc(sgp->pool, gfp_mask);
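
These sg_pool entries are the mempool backend behind chained scatterlist allocation; the public entry points are sg_alloc_table_chained() and sg_free_table_chained(). A minimal sketch under that assumption, with no caller-provided first chunk (nents and the function name are illustrative):

#include <linux/scatterlist.h>

static int demo_chained_sg(int nents)
{
	struct sg_table table;
	int ret;

	/* No first chunk: every segment comes from the power-of-two
	 * sized, SG_MEMPOOL_SIZE-deep pools set up in sg_pool_init(). */
	ret = sg_alloc_table_chained(&table, nents, NULL, 0);
	if (ret)
		return ret;

	/* ... fill and map table.sgl here ... */

	sg_free_table_chained(&table, 0);
	return 0;
}
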
lib/stackdepot.c
498
void *pool;
lib/stackdepot.c
512
pool = context_unsafe(stack_pools[pool_index]);
lib/stackdepot.c
513
if (WARN_ON(!pool))
lib/stackdepot.c
516
stack = pool + offset;
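
The lines above resolve a depot handle back into one of the stack_pools slabs; the saving side is stack_depot_save(). A minimal sketch of the save/fetch round trip (the buffer size and function name are illustrative):

#include <linux/kernel.h>
#include <linux/stackdepot.h>
#include <linux/stacktrace.h>

static void demo_stackdepot(void)
{
	unsigned long entries[16];
	unsigned long *saved;
	unsigned int nr, nr_saved;
	depot_stack_handle_t handle;

	nr = stack_trace_save(entries, ARRAY_SIZE(entries), 0);

	/* Deduplicated into a pool slot; 0 means the save failed. */
	handle = stack_depot_save(entries, nr, GFP_KERNEL);
	if (!handle)
		return;

	nr_saved = stack_depot_fetch(handle, &saved);
	stack_depot_print(handle);	/* pretty-prints via printk */
	(void)nr_saved;
}
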
lib/test_objpool.c
127
struct objpool_head pool; /* objpool head */
lib/test_objpool.c
152
on->owner = &sop->pool;
lib/test_objpool.c
199
struct objpool_head *pool,
lib/test_objpool.c
203
item->pool = pool;
lib/test_objpool.c
297
if (objpool_init(&sop->pool, max, test->objsz,
lib/test_objpool.c
302
WARN_ON(max != sop->pool.nr_objs);
lib/test_objpool.c
309
objpool_fini(&sop->pool);
lib/test_objpool.c
330
nods[i] = objpool_pop(item->pool);
lib/test_objpool.c
339
objpool_push(on, item->pool);
lib/test_objpool.c
367
ot_init_cpu_item(item, test, &sop->pool, ot_bulk_sync);
lib/test_objpool.c
427
objpool_fini(&sop->pool);
lib/test_objpool.c
441
WARN_ON(!head || !sop || head != &sop->pool);
lib/test_objpool.c
463
if (objpool_init(&sop->pool, max, test->objsz, gfp, sop,
lib/test_objpool.c
468
WARN_ON(max != sop->pool.nr_objs);
lib/test_objpool.c
480
static void ot_nod_recycle(struct ot_node *on, struct objpool_head *pool,
lib/test_objpool.c
489
objpool_push(on, pool);
lib/test_objpool.c
493
sop = container_of(pool, struct ot_context, pool);
lib/test_objpool.c
494
WARN_ON(sop != pool->context);
lib/test_objpool.c
497
objpool_drop(on, pool);
lib/test_objpool.c
507
nods[i] = objpool_pop(item->pool);
lib/test_objpool.c
523
ot_nod_recycle(on, item->pool, stop);
lib/test_objpool.c
554
ot_init_cpu_item(item, test, &sop->pool, ot_bulk_async);
lib/test_objpool.c
63
struct objpool_head *pool; /* pool head */
lib/zstd/compress/zstd_compress.c
1274
size_t ZSTD_CCtx_refThreadPool(ZSTD_CCtx* cctx, ZSTD_threadPool* pool)
lib/zstd/compress/zstd_compress.c
1278
cctx->pool = pool;
lib/zstd/compress/zstd_compress_internal.h
487
ZSTD_threadPool* pool;
mm/dmapool.c
102
for (i = sizeof(struct dma_block); i < pool->size; i++) {
mm/dmapool.c
105
dev_err(pool->dev, "%s %s, %p (corrupted)\n", __func__,
mm/dmapool.c
106
pool->name, block);
mm/dmapool.c
113
data, pool->size, 1);
mm/dmapool.c
118
memset(block, POOL_POISON_ALLOCATED, pool->size);
mm/dmapool.c
121
static struct dma_page *pool_find_page(struct dma_pool *pool, dma_addr_t dma)
mm/dmapool.c
125
list_for_each_entry(page, &pool->page_list, page_list) {
mm/dmapool.c
128
if ((dma - page->dma) < pool->allocation)
mm/dmapool.c
134
static bool pool_block_err(struct dma_pool *pool, void *vaddr, dma_addr_t dma)
mm/dmapool.c
136
struct dma_block *block = pool->next_block;
mm/dmapool.c
139
page = pool_find_page(pool, dma);
mm/dmapool.c
141
dev_err(pool->dev, "%s %s, %p/%pad (bad dma)\n",
mm/dmapool.c
142
__func__, pool->name, vaddr, &dma);
mm/dmapool.c
151
dev_err(pool->dev, "%s %s, dma %pad already free\n",
mm/dmapool.c
152
__func__, pool->name, &dma);
mm/dmapool.c
156
memset(vaddr, POOL_POISON_FREED, pool->size);
mm/dmapool.c
160
static void pool_init_page(struct dma_pool *pool, struct dma_page *page)
mm/dmapool.c
162
memset(page->vaddr, POOL_POISON_FREED, pool->allocation);
mm/dmapool.c
165
static void pool_check_block(struct dma_pool *pool, struct dma_block *block,
mm/dmapool.c
170
static bool pool_block_err(struct dma_pool *pool, void *vaddr, dma_addr_t dma)
mm/dmapool.c
173
memset(vaddr, 0, pool->size);
mm/dmapool.c
177
static void pool_init_page(struct dma_pool *pool, struct dma_page *page)
mm/dmapool.c
182
static struct dma_block *pool_block_pop(struct dma_pool *pool)
mm/dmapool.c
184
struct dma_block *block = pool->next_block;
mm/dmapool.c
187
pool->next_block = block->next_block;
mm/dmapool.c
188
pool->nr_active++;
mm/dmapool.c
193
static void pool_block_push(struct dma_pool *pool, struct dma_block *block,
mm/dmapool.c
197
block->next_block = pool->next_block;
mm/dmapool.c
198
pool->next_block = block;
mm/dmapool.c
303
static void pool_initialise_page(struct dma_pool *pool, struct dma_page *page)
mm/dmapool.c
305
unsigned int next_boundary = pool->boundary, offset = 0;
mm/dmapool.c
308
pool_init_page(pool, page);
mm/dmapool.c
309
while (offset + pool->size <= pool->allocation) {
mm/dmapool.c
310
if (offset + pool->size > next_boundary) {
mm/dmapool.c
312
next_boundary += pool->boundary;
mm/dmapool.c
326
offset += pool->size;
mm/dmapool.c
327
pool->nr_blocks++;
mm/dmapool.c
330
last->next_block = pool->next_block;
mm/dmapool.c
331
pool->next_block = first;
mm/dmapool.c
333
list_add(&page->page_list, &pool->page_list);
mm/dmapool.c
334
pool->nr_pages++;
mm/dmapool.c
337
static struct dma_page *pool_alloc_page(struct dma_pool *pool, gfp_t mem_flags)
mm/dmapool.c
341
page = kmalloc_node(sizeof(*page), mem_flags, pool->node);
mm/dmapool.c
345
page->vaddr = dma_alloc_coherent(pool->dev, pool->allocation,
mm/dmapool.c
363
void dma_pool_destroy(struct dma_pool *pool)
mm/dmapool.c
368
if (unlikely(!pool))
mm/dmapool.c
373
list_del(&pool->pools);
mm/dmapool.c
374
empty = list_empty(&pool->dev->dma_pools);
mm/dmapool.c
377
device_remove_file(pool->dev, &dev_attr_pools);
mm/dmapool.c
380
if (pool->nr_active) {
mm/dmapool.c
381
dev_err(pool->dev, "%s %s busy\n", __func__, pool->name);
mm/dmapool.c
385
list_for_each_entry_safe(page, tmp, &pool->page_list, page_list) {
mm/dmapool.c
387
dma_free_coherent(pool->dev, pool->allocation,
mm/dmapool.c
393
kfree(pool);
mm/dmapool.c
407
void *dma_pool_alloc(struct dma_pool *pool, gfp_t mem_flags,
mm/dmapool.c
416
spin_lock_irqsave(&pool->lock, flags);
mm/dmapool.c
417
block = pool_block_pop(pool);
mm/dmapool.c
423
spin_unlock_irqrestore(&pool->lock, flags);
mm/dmapool.c
425
page = pool_alloc_page(pool, mem_flags & (~__GFP_ZERO));
mm/dmapool.c
429
spin_lock_irqsave(&pool->lock, flags);
mm/dmapool.c
430
pool_initialise_page(pool, page);
mm/dmapool.c
431
block = pool_block_pop(pool);
mm/dmapool.c
433
spin_unlock_irqrestore(&pool->lock, flags);
mm/dmapool.c
436
pool_check_block(pool, block, mem_flags);
mm/dmapool.c
438
memset(block, 0, pool->size);
mm/dmapool.c
453
void dma_pool_free(struct dma_pool *pool, void *vaddr, dma_addr_t dma)
mm/dmapool.c
458
spin_lock_irqsave(&pool->lock, flags);
mm/dmapool.c
459
if (!pool_block_err(pool, vaddr, dma)) {
mm/dmapool.c
460
pool_block_push(pool, block, dma);
mm/dmapool.c
461
pool->nr_active--;
mm/dmapool.c
463
spin_unlock_irqrestore(&pool->lock, flags);
mm/dmapool.c
472
struct dma_pool *pool = *(struct dma_pool **)res;
mm/dmapool.c
474
dma_pool_destroy(pool);
mm/dmapool.c
499
struct dma_pool **ptr, *pool;
mm/dmapool.c
505
pool = *ptr = dma_pool_create(name, dev, size, align, allocation);
mm/dmapool.c
506
if (pool)
mm/dmapool.c
511
return pool;
mm/dmapool.c
521
void dmam_pool_destroy(struct dma_pool *pool)
mm/dmapool.c
523
struct device *dev = pool->dev;
mm/dmapool.c
525
WARN_ON(devres_release(dev, dmam_pool_release, dmam_pool_match, pool));
mm/dmapool.c
75
struct dma_pool *pool;
mm/dmapool.c
81
list_for_each_entry(pool, &dev->dma_pools, pools) {
mm/dmapool.c
84
pool->name, pool->nr_active,
mm/dmapool.c
85
pool->nr_blocks, pool->size,
mm/dmapool.c
86
pool->nr_pages);
mm/dmapool.c
96
static void pool_check_block(struct dma_pool *pool, struct dma_block *block,
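
The block push/pop and poisoning machinery above sits behind a four-call driver API. A minimal sketch, assuming a device pointer is in scope (the pool name and block geometry are illustrative):

#include <linux/dmapool.h>

static int demo_dmapool(struct device *dev)
{
	struct dma_pool *pool;
	dma_addr_t dma;
	void *vaddr;

	/* 64-byte blocks, 64-byte aligned, no boundary-crossing rule */
	pool = dma_pool_create("demo", dev, 64, 64, 0);
	if (!pool)
		return -ENOMEM;

	vaddr = dma_pool_alloc(pool, GFP_KERNEL, &dma);
	if (!vaddr) {
		dma_pool_destroy(pool);
		return -ENOMEM;
	}

	/* ... hand dma to the hardware, touch vaddr from the CPU ... */

	dma_pool_free(pool, vaddr, dma);
	dma_pool_destroy(pool);	/* warns if blocks are still active */
	return 0;
}
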
mm/dmapool_test.c
32
static struct dma_pool *pool;
mm/dmapool_test.c
46
p[i].v = dma_pool_alloc(pool, GFP_KERNEL,
mm/dmapool_test.c
53
dma_pool_free(pool, p[i].v, p[i].dma);
mm/dmapool_test.c
59
dma_pool_free(pool, p[i].v, p[i].dma);
mm/dmapool_test.c
74
pool = dma_pool_create("test pool", &test_dev, parms->size,
mm/dmapool_test.c
76
if (!pool) {
mm/dmapool_test.c
96
dma_pool_destroy(pool);
mm/kasan/kasan_test_c.c
1238
static void *mempool_prepare_kmalloc(struct kunit *test, mempool_t *pool, size_t size)
mm/kasan/kasan_test_c.c
1244
memset(pool, 0, sizeof(*pool));
mm/kasan/kasan_test_c.c
1245
ret = mempool_init_kmalloc_pool(pool, pool_size, size);
mm/kasan/kasan_test_c.c
1254
elem = mempool_alloc_preallocated(pool);
mm/kasan/kasan_test_c.c
1260
static struct kmem_cache *mempool_prepare_slab(struct kunit *test, mempool_t *pool, size_t size)
mm/kasan/kasan_test_c.c
1269
memset(pool, 0, sizeof(*pool));
mm/kasan/kasan_test_c.c
1270
ret = mempool_init_slab_pool(pool, pool_size, cache);
mm/kasan/kasan_test_c.c
1281
static void *mempool_prepare_page(struct kunit *test, mempool_t *pool, int order)
mm/kasan/kasan_test_c.c
1287
memset(pool, 0, sizeof(*pool));
mm/kasan/kasan_test_c.c
1288
ret = mempool_init_page_pool(pool, pool_size, order);
mm/kasan/kasan_test_c.c
1291
elem = mempool_alloc_preallocated(pool);
mm/kasan/kasan_test_c.c
1297
static void mempool_oob_right_helper(struct kunit *test, mempool_t *pool, size_t size)
mm/kasan/kasan_test_c.c
1301
elem = mempool_alloc_preallocated(pool);
mm/kasan/kasan_test_c.c
1313
mempool_free(elem, pool);
mm/kasan/kasan_test_c.c
1318
mempool_t pool;
mm/kasan/kasan_test_c.c
1322
extra_elem = mempool_prepare_kmalloc(test, &pool, size);
mm/kasan/kasan_test_c.c
1324
mempool_oob_right_helper(test, &pool, size);
mm/kasan/kasan_test_c.c
1326
mempool_free(extra_elem, &pool);
mm/kasan/kasan_test_c.c
1327
mempool_exit(&pool);
mm/kasan/kasan_test_c.c
1332
mempool_t pool;
mm/kasan/kasan_test_c.c
1336
extra_elem = mempool_prepare_kmalloc(test, &pool, size);
mm/kasan/kasan_test_c.c
1338
mempool_oob_right_helper(test, &pool, size);
mm/kasan/kasan_test_c.c
1340
mempool_free(extra_elem, &pool);
mm/kasan/kasan_test_c.c
1341
mempool_exit(&pool);
mm/kasan/kasan_test_c.c
1346
mempool_t pool;
mm/kasan/kasan_test_c.c
1350
cache = mempool_prepare_slab(test, &pool, size);
mm/kasan/kasan_test_c.c
1352
mempool_oob_right_helper(test, &pool, size);
mm/kasan/kasan_test_c.c
1354
mempool_exit(&pool);
mm/kasan/kasan_test_c.c
1366
static void mempool_uaf_helper(struct kunit *test, mempool_t *pool, bool page)
mm/kasan/kasan_test_c.c
1370
elem = mempool_alloc_preallocated(pool);
mm/kasan/kasan_test_c.c
1373
mempool_free(elem, pool);
mm/kasan/kasan_test_c.c
1381
mempool_t pool;
mm/kasan/kasan_test_c.c
1385
extra_elem = mempool_prepare_kmalloc(test, &pool, size);
mm/kasan/kasan_test_c.c
1387
mempool_uaf_helper(test, &pool, false);
mm/kasan/kasan_test_c.c
1389
mempool_free(extra_elem, &pool);
mm/kasan/kasan_test_c.c
1390
mempool_exit(&pool);
mm/kasan/kasan_test_c.c
1395
mempool_t pool;
mm/kasan/kasan_test_c.c
1399
extra_elem = mempool_prepare_kmalloc(test, &pool, size);
mm/kasan/kasan_test_c.c
1401
mempool_uaf_helper(test, &pool, false);
mm/kasan/kasan_test_c.c
1403
mempool_free(extra_elem, &pool);
mm/kasan/kasan_test_c.c
1404
mempool_exit(&pool);
mm/kasan/kasan_test_c.c
1409
mempool_t pool;
mm/kasan/kasan_test_c.c
1413
cache = mempool_prepare_slab(test, &pool, size);
mm/kasan/kasan_test_c.c
1415
mempool_uaf_helper(test, &pool, false);
mm/kasan/kasan_test_c.c
1417
mempool_exit(&pool);
mm/kasan/kasan_test_c.c
1423
mempool_t pool;
mm/kasan/kasan_test_c.c
1427
extra_elem = mempool_prepare_page(test, &pool, order);
mm/kasan/kasan_test_c.c
1429
mempool_uaf_helper(test, &pool, true);
mm/kasan/kasan_test_c.c
1431
mempool_free(extra_elem, &pool);
mm/kasan/kasan_test_c.c
1432
mempool_exit(&pool);
mm/kasan/kasan_test_c.c
1435
static void mempool_double_free_helper(struct kunit *test, mempool_t *pool)
mm/kasan/kasan_test_c.c
1439
elem = mempool_alloc_preallocated(pool);
mm/kasan/kasan_test_c.c
1442
mempool_free(elem, pool);
mm/kasan/kasan_test_c.c
1444
KUNIT_EXPECT_KASAN_FAIL(test, mempool_free(elem, pool));
mm/kasan/kasan_test_c.c
1449
mempool_t pool;
mm/kasan/kasan_test_c.c
1453
extra_elem = mempool_prepare_kmalloc(test, &pool, size);
mm/kasan/kasan_test_c.c
1455
mempool_double_free_helper(test, &pool);
mm/kasan/kasan_test_c.c
1457
mempool_free(extra_elem, &pool);
mm/kasan/kasan_test_c.c
1458
mempool_exit(&pool);
mm/kasan/kasan_test_c.c
1463
mempool_t pool;
mm/kasan/kasan_test_c.c
1467
extra_elem = mempool_prepare_kmalloc(test, &pool, size);
mm/kasan/kasan_test_c.c
1469
mempool_double_free_helper(test, &pool);
mm/kasan/kasan_test_c.c
1471
mempool_free(extra_elem, &pool);
mm/kasan/kasan_test_c.c
1472
mempool_exit(&pool);
mm/kasan/kasan_test_c.c
1477
mempool_t pool;
mm/kasan/kasan_test_c.c
1481
extra_elem = mempool_prepare_page(test, &pool, order);
mm/kasan/kasan_test_c.c
1483
mempool_double_free_helper(test, &pool);
mm/kasan/kasan_test_c.c
1485
mempool_free(extra_elem, &pool);
mm/kasan/kasan_test_c.c
1486
mempool_exit(&pool);
mm/kasan/kasan_test_c.c
1489
static void mempool_kmalloc_invalid_free_helper(struct kunit *test, mempool_t *pool)
mm/kasan/kasan_test_c.c
1493
elem = mempool_alloc_preallocated(pool);
mm/kasan/kasan_test_c.c
1496
KUNIT_EXPECT_KASAN_FAIL(test, mempool_free(elem + 1, pool));
mm/kasan/kasan_test_c.c
1498
mempool_free(elem, pool);
mm/kasan/kasan_test_c.c
1503
mempool_t pool;
mm/kasan/kasan_test_c.c
1507
extra_elem = mempool_prepare_kmalloc(test, &pool, size);
mm/kasan/kasan_test_c.c
1509
mempool_kmalloc_invalid_free_helper(test, &pool);
mm/kasan/kasan_test_c.c
1511
mempool_free(extra_elem, &pool);
mm/kasan/kasan_test_c.c
1512
mempool_exit(&pool);
mm/kasan/kasan_test_c.c
1517
mempool_t pool;
mm/kasan/kasan_test_c.c
1521
extra_elem = mempool_prepare_kmalloc(test, &pool, size);
mm/kasan/kasan_test_c.c
1523
mempool_kmalloc_invalid_free_helper(test, &pool);
mm/kasan/kasan_test_c.c
1525
mempool_free(extra_elem, &pool);
mm/kasan/kasan_test_c.c
1526
mempool_exit(&pool);
mm/mempool.c
100
__check_element(pool, addr, PAGE_SIZE << order);
mm/mempool.c
113
static void poison_element(struct mempool *pool, void *element)
mm/mempool.c
120
if (pool->alloc == mempool_kmalloc) {
mm/mempool.c
121
__poison_element(element, (size_t)pool->pool_data);
mm/mempool.c
122
} else if (pool->alloc == mempool_alloc_slab) {
mm/mempool.c
123
__poison_element(element, kmem_cache_size(pool->pool_data));
mm/mempool.c
124
} else if (pool->alloc == mempool_alloc_pages) {
mm/mempool.c
126
int order = (int)(long)pool->pool_data;
mm/mempool.c
144
static inline void check_element(struct mempool *pool, void *element)
mm/mempool.c
147
static inline void poison_element(struct mempool *pool, void *element)
mm/mempool.c
152
static __always_inline bool kasan_poison_element(struct mempool *pool,
mm/mempool.c
155
if (pool->alloc == mempool_alloc_slab || pool->alloc == mempool_kmalloc)
mm/mempool.c
157
else if (pool->alloc == mempool_alloc_pages)
mm/mempool.c
159
(unsigned long)pool->pool_data);
mm/mempool.c
163
static void kasan_unpoison_element(struct mempool *pool, void *element)
mm/mempool.c
165
if (pool->alloc == mempool_kmalloc)
mm/mempool.c
166
kasan_mempool_unpoison_object(element, (size_t)pool->pool_data);
mm/mempool.c
167
else if (pool->alloc == mempool_alloc_slab)
mm/mempool.c
169
kmem_cache_size(pool->pool_data));
mm/mempool.c
170
else if (pool->alloc == mempool_alloc_pages)
mm/mempool.c
172
(unsigned long)pool->pool_data);
mm/mempool.c
175
static __always_inline void add_element(struct mempool *pool, void *element)
mm/mempool.c
177
BUG_ON(pool->min_nr != 0 && pool->curr_nr >= pool->min_nr);
mm/mempool.c
178
poison_element(pool, element);
mm/mempool.c
179
if (kasan_poison_element(pool, element))
mm/mempool.c
180
pool->elements[pool->curr_nr++] = element;
mm/mempool.c
183
static void *remove_element(struct mempool *pool)
mm/mempool.c
185
void *element = pool->elements[--pool->curr_nr];
mm/mempool.c
187
BUG_ON(pool->curr_nr < 0);
mm/mempool.c
188
kasan_unpoison_element(pool, element);
mm/mempool.c
189
check_element(pool, element);
mm/mempool.c
204
void mempool_exit(struct mempool *pool)
mm/mempool.c
206
while (pool->curr_nr) {
mm/mempool.c
207
void *element = remove_element(pool);
mm/mempool.c
208
pool->free(element, pool->pool_data);
mm/mempool.c
210
kfree(pool->elements);
mm/mempool.c
211
pool->elements = NULL;
mm/mempool.c
223
void mempool_destroy(struct mempool *pool)
mm/mempool.c
225
if (unlikely(!pool))
mm/mempool.c
228
mempool_exit(pool);
mm/mempool.c
229
kfree(pool);
mm/mempool.c
233
int mempool_init_node(struct mempool *pool, int min_nr,
mm/mempool.c
237
spin_lock_init(&pool->lock);
mm/mempool.c
238
pool->min_nr = min_nr;
mm/mempool.c
239
pool->pool_data = pool_data;
mm/mempool.c
240
pool->alloc = alloc_fn;
mm/mempool.c
241
pool->free = free_fn;
mm/mempool.c
242
init_waitqueue_head(&pool->wait);
mm/mempool.c
247
pool->elements = kmalloc_array_node(max(1, min_nr), sizeof(void *),
mm/mempool.c
249
if (!pool->elements)
mm/mempool.c
256
while (pool->curr_nr < max(1, pool->min_nr)) {
mm/mempool.c
259
element = pool->alloc(gfp_mask, pool->pool_data);
mm/mempool.c
261
mempool_exit(pool);
mm/mempool.c
264
add_element(pool, element);
mm/mempool.c
285
int mempool_init_noprof(struct mempool *pool, int min_nr,
mm/mempool.c
289
return mempool_init_node(pool, min_nr, alloc_fn, free_fn,
mm/mempool.c
317
struct mempool *pool;
mm/mempool.c
319
pool = kmalloc_node_noprof(sizeof(*pool), gfp_mask | __GFP_ZERO, node_id);
mm/mempool.c
320
if (!pool)
mm/mempool.c
323
if (mempool_init_node(pool, min_nr, alloc_fn, free_fn, pool_data,
mm/mempool.c
325
kfree(pool);
mm/mempool.c
329
return pool;
mm/mempool.c
351
int mempool_resize(struct mempool *pool, int new_min_nr)
mm/mempool.c
360
spin_lock_irqsave(&pool->lock, flags);
mm/mempool.c
361
if (new_min_nr <= pool->min_nr) {
mm/mempool.c
362
while (new_min_nr < pool->curr_nr) {
mm/mempool.c
363
element = remove_element(pool);
mm/mempool.c
364
spin_unlock_irqrestore(&pool->lock, flags);
mm/mempool.c
365
pool->free(element, pool->pool_data);
mm/mempool.c
366
spin_lock_irqsave(&pool->lock, flags);
mm/mempool.c
368
pool->min_nr = new_min_nr;
mm/mempool.c
371
spin_unlock_irqrestore(&pool->lock, flags);
mm/mempool.c
378
spin_lock_irqsave(&pool->lock, flags);
mm/mempool.c
379
if (unlikely(new_min_nr <= pool->min_nr)) {
mm/mempool.c
381
spin_unlock_irqrestore(&pool->lock, flags);
mm/mempool.c
385
memcpy(new_elements, pool->elements,
mm/mempool.c
386
pool->curr_nr * sizeof(*new_elements));
mm/mempool.c
387
kfree(pool->elements);
mm/mempool.c
388
pool->elements = new_elements;
mm/mempool.c
389
pool->min_nr = new_min_nr;
mm/mempool.c
391
while (pool->curr_nr < pool->min_nr) {
mm/mempool.c
392
spin_unlock_irqrestore(&pool->lock, flags);
mm/mempool.c
393
element = pool->alloc(GFP_KERNEL, pool->pool_data);
mm/mempool.c
396
spin_lock_irqsave(&pool->lock, flags);
mm/mempool.c
397
if (pool->curr_nr < pool->min_nr) {
mm/mempool.c
398
add_element(pool, element);
mm/mempool.c
400
spin_unlock_irqrestore(&pool->lock, flags);
mm/mempool.c
401
pool->free(element, pool->pool_data); /* Raced */
mm/mempool.c
406
spin_unlock_irqrestore(&pool->lock, flags);
mm/mempool.c
41
static void poison_error(struct mempool *pool, void *element, size_t size,
mm/mempool.c
412
static unsigned int mempool_alloc_from_pool(struct mempool *pool, void **elems,
mm/mempool.c
419
spin_lock_irqsave(&pool->lock, flags);
mm/mempool.c
420
if (unlikely(pool->curr_nr < count - allocated))
mm/mempool.c
424
elems[i] = remove_element(pool);
mm/mempool.c
428
spin_unlock_irqrestore(&pool->lock, flags);
mm/mempool.c
44
const int nr = pool->curr_nr;
mm/mempool.c
445
prepare_to_wait(&pool->wait, &wait, TASK_UNINTERRUPTIBLE);
mm/mempool.c
446
spin_unlock_irqrestore(&pool->lock, flags);
mm/mempool.c
455
finish_wait(&pool->wait, &wait);
mm/mempool.c
458
spin_unlock_irqrestore(&pool->lock, flags);
mm/mempool.c
492
int mempool_alloc_bulk_noprof(struct mempool *pool, void **elems,
mm/mempool.c
499
VM_WARN_ON_ONCE(count > pool->min_nr);
mm/mempool.c
50
pr_err("Mempool %p size %zu\n", pool, size);
mm/mempool.c
520
elems[i] = pool->alloc(gfp_temp, pool->pool_data);
mm/mempool.c
529
allocated = mempool_alloc_from_pool(pool, elems, count, allocated,
mm/mempool.c
552
void *mempool_alloc_noprof(struct mempool *pool, gfp_t gfp_mask)
mm/mempool.c
566
element = pool->alloc(gfp_temp, pool->pool_data);
mm/mempool.c
577
if (!mempool_alloc_from_pool(pool, &element, 1, 0, gfp_temp)) {
mm/mempool.c
58
static void __check_element(struct mempool *pool, void *element, size_t size)
mm/mempool.c
604
void *mempool_alloc_preallocated(struct mempool *pool)
mm/mempool.c
608
mempool_alloc_from_pool(pool, &element, 1, 0, GFP_NOWAIT);
mm/mempool.c
627
unsigned int mempool_free_bulk(struct mempool *pool, void **elems,
mm/mempool.c
67
poison_error(pool, element, size, i);
mm/mempool.c
673
if (unlikely(READ_ONCE(pool->curr_nr) < pool->min_nr)) {
mm/mempool.c
674
spin_lock_irqsave(&pool->lock, flags);
mm/mempool.c
675
while (pool->curr_nr < pool->min_nr && freed < count) {
mm/mempool.c
676
add_element(pool, elems[freed++]);
mm/mempool.c
679
spin_unlock_irqrestore(&pool->lock, flags);
mm/mempool.c
680
} else if (unlikely(pool->min_nr == 0 &&
mm/mempool.c
681
READ_ONCE(pool->curr_nr) == 0)) {
mm/mempool.c
683
spin_lock_irqsave(&pool->lock, flags);
mm/mempool.c
684
if (likely(pool->curr_nr == 0)) {
mm/mempool.c
685
add_element(pool, elems[freed++]);
mm/mempool.c
688
spin_unlock_irqrestore(&pool->lock, flags);
mm/mempool.c
691
if (unlikely(added) && wq_has_sleeper(&pool->wait))
mm/mempool.c
692
wake_up(&pool->wait);
mm/mempool.c
708
void mempool_free(void *element, struct mempool *pool)
mm/mempool.c
710
if (likely(element) && !mempool_free_bulk(pool, &element, 1))
mm/mempool.c
711
pool->free(element, pool->pool_data);
mm/mempool.c
74
static void check_element(struct mempool *pool, void *element)
mm/mempool.c
81
if (pool->free == mempool_kfree) {
mm/mempool.c
82
__check_element(pool, element, (size_t)pool->pool_data);
mm/mempool.c
83
} else if (pool->free == mempool_free_slab) {
mm/mempool.c
84
__check_element(pool, element, kmem_cache_size(pool->pool_data));
mm/mempool.c
85
} else if (pool->free == mempool_free_pages) {
mm/mempool.c
87
int order = (int)(long)pool->pool_data;
mm/mempool.c
94
__check_element(pool, addr, PAGE_SIZE);
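
The add/remove element machinery above backs a small public surface. A minimal sketch of an on-stack kmalloc-backed pool, mirroring the mempool_init_*/mempool_exit pairing used by the KASAN tests earlier (the element count and size are illustrative):

#include <linux/mempool.h>

static int demo_mempool(void)
{
	mempool_t pool;
	void *elem;
	int ret;

	/* Guarantee at least 4 preallocated 128-byte elements. */
	ret = mempool_init_kmalloc_pool(&pool, 4, 128);
	if (ret)
		return ret;

	/* With a sleeping gfp mask this cannot fail: the allocator
	 * falls back to the reserve and waits for an element if needed. */
	elem = mempool_alloc(&pool, GFP_KERNEL);

	mempool_free(elem, &pool);	/* refills the reserve first */
	mempool_exit(&pool);
	return 0;
}
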
mm/page_io.c
334
mempool_t *pool = mempool_create_kmalloc_pool(
mm/page_io.c
336
if (cmpxchg(&sio_pool, NULL, pool))
mm/page_io.c
337
mempool_destroy(pool);
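
The sio_pool lines show the lock-free once-only construction idiom: build the pool, cmpxchg() it into the global slot, and destroy the loser on a race. A condensed sketch of the same pattern (the globals and sizes are illustrative, not from page_io.c):

#include <linux/mempool.h>
#include <linux/atomic.h>

static mempool_t *demo_pool;	/* hypothetical global, NULL until first use */

static int demo_pool_get_or_create(void)
{
	mempool_t *pool;

	if (likely(READ_ONCE(demo_pool)))
		return 0;

	pool = mempool_create_kmalloc_pool(8, 256);
	if (!pool)
		return -ENOMEM;

	/* Publish unless someone beat us to it; then drop our copy. */
	if (cmpxchg(&demo_pool, NULL, pool))
		mempool_destroy(pool);
	return 0;
}
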
mm/vmalloc.c
1923
return &vn->pool[idx];
mm/vmalloc.c
2235
if (list_empty(&vn->pool[i].head))
mm/vmalloc.c
2240
list_replace_init(&vn->pool[i].head, &tmp_list);
mm/vmalloc.c
2243
pool_len = n_decay = vn->pool[i].len;
mm/vmalloc.c
2244
WRITE_ONCE(vn->pool[i].len, 0);
mm/vmalloc.c
2267
list_replace_init(&tmp_list, &vn->pool[i].head);
mm/vmalloc.c
2268
WRITE_ONCE(vn->pool[i].len, pool_len);
mm/vmalloc.c
5391
INIT_LIST_HEAD(&vn->pool[i].head);
mm/vmalloc.c
5392
WRITE_ONCE(vn->pool[i].len, 0);
mm/vmalloc.c
5408
count += READ_ONCE(vn->pool[i].len);
mm/vmalloc.c
938
struct vmap_pool pool[MAX_VA_SIZE_PAGES];
mm/zsmalloc.c
1021
unsigned int zs_lookup_class_index(struct zs_pool *pool, unsigned int size)
mm/zsmalloc.c
1025
class = pool->size_class[get_size_class_index(size)];
mm/zsmalloc.c
1031
unsigned long zs_get_total_pages(struct zs_pool *pool)
mm/zsmalloc.c
1033
return atomic_long_read(&pool->pages_allocated);
mm/zsmalloc.c
1037
void *zs_obj_read_begin(struct zs_pool *pool, unsigned long handle,
mm/zsmalloc.c
1048
read_lock(&pool->lock);
mm/zsmalloc.c
1055
read_unlock(&pool->lock);
mm/zsmalloc.c
1057
class = zspage_class(pool, zspage);
mm/zsmalloc.c
1087
void zs_obj_read_end(struct zs_pool *pool, unsigned long handle,
mm/zsmalloc.c
1099
class = zspage_class(pool, zspage);
mm/zsmalloc.c
1114
void zs_obj_read_sg_begin(struct zs_pool *pool, unsigned long handle,
mm/zsmalloc.c
1124
read_lock(&pool->lock);
mm/zsmalloc.c
1131
read_unlock(&pool->lock);
mm/zsmalloc.c
1133
class = zspage_class(pool, zspage);
mm/zsmalloc.c
1161
void zs_obj_read_sg_end(struct zs_pool *pool, unsigned long handle)
mm/zsmalloc.c
1176
void zs_obj_write(struct zs_pool *pool, unsigned long handle,
mm/zsmalloc.c
1186
read_lock(&pool->lock);
mm/zsmalloc.c
1193
read_unlock(&pool->lock);
mm/zsmalloc.c
1195
class = zspage_class(pool, zspage);
mm/zsmalloc.c
1238
size_t zs_huge_class_size(struct zs_pool *pool)
mm/zsmalloc.c
1244
static unsigned long obj_malloc(struct zs_pool *pool,
mm/zsmalloc.c
1256
class = pool->size_class[zspage->class];
mm/zsmalloc.c
1297
unsigned long zs_malloc(struct zs_pool *pool, size_t size, gfp_t gfp,
mm/zsmalloc.c
1317
class = pool->size_class[get_size_class_index(size)];
mm/zsmalloc.c
1323
obj_malloc(pool, zspage, handle);
mm/zsmalloc.c
1333
zspage = alloc_zspage(pool, class, gfp, nid);
mm/zsmalloc.c
1340
obj_malloc(pool, zspage, handle);
mm/zsmalloc.c
1343
atomic_long_add(class->pages_per_zspage, &pool->pages_allocated);
mm/zsmalloc.c
1348
SetZsPageMovable(pool, zspage);
mm/zsmalloc.c
1384
void zs_free(struct zs_pool *pool, unsigned long handle)
mm/zsmalloc.c
1399
read_lock(&pool->lock);
mm/zsmalloc.c
1403
class = zspage_class(pool, zspage);
mm/zsmalloc.c
1405
read_unlock(&pool->lock);
mm/zsmalloc.c
1412
free_zspage(pool, class, zspage);
mm/zsmalloc.c
1519
static void migrate_zspage(struct zs_pool *pool, struct zspage *src_zspage,
mm/zsmalloc.c
1526
struct size_class *class = pool->size_class[src_zspage->class];
mm/zsmalloc.c
1539
free_obj = obj_malloc(pool, dst_zspage, handle);
mm/zsmalloc.c
1689
struct zs_pool *pool;
mm/zsmalloc.c
1713
pool = zspage->pool;
mm/zsmalloc.c
1719
write_lock(&pool->lock);
mm/zsmalloc.c
1720
class = zspage_class(pool, zspage);
mm/zsmalloc.c
1729
write_unlock(&pool->lock);
mm/zsmalloc.c
1763
write_unlock(&pool->lock);
mm/zsmalloc.c
1799
struct zs_pool *pool = container_of(work, struct zs_pool,
mm/zsmalloc.c
1803
class = pool->size_class[i];
mm/zsmalloc.c
1817
class = zspage_class(pool, zspage);
mm/zsmalloc.c
1820
__free_zspage(pool, class, zspage);
mm/zsmalloc.c
1825
static void kick_deferred_free(struct zs_pool *pool)
mm/zsmalloc.c
1827
schedule_work(&pool->free_work);
mm/zsmalloc.c
1830
static void zs_flush_migration(struct zs_pool *pool)
mm/zsmalloc.c
1832
flush_work(&pool->free_work);
mm/zsmalloc.c
1835
static void init_deferred_free(struct zs_pool *pool)
mm/zsmalloc.c
1837
INIT_WORK(&pool->free_work, async_free_zspage);
mm/zsmalloc.c
1840
static void SetZsPageMovable(struct zs_pool *pool, struct zspage *zspage)
mm/zsmalloc.c
1851
static inline void zs_flush_migration(struct zs_pool *pool) { }
mm/zsmalloc.c
1874
static unsigned long __zs_compact(struct zs_pool *pool,
mm/zsmalloc.c
1885
write_lock(&pool->lock);
mm/zsmalloc.c
1903
migrate_zspage(pool, src_zspage, dst_zspage);
mm/zsmalloc.c
1908
free_zspage(pool, class, src_zspage);
mm/zsmalloc.c
1914
|| rwlock_is_contended(&pool->lock)) {
mm/zsmalloc.c
1919
write_unlock(&pool->lock);
mm/zsmalloc.c
1921
write_lock(&pool->lock);
mm/zsmalloc.c
1933
write_unlock(&pool->lock);
mm/zsmalloc.c
1938
unsigned long zs_compact(struct zs_pool *pool)
mm/zsmalloc.c
1950
if (atomic_xchg(&pool->compaction_in_progress, 1))
mm/zsmalloc.c
1954
class = pool->size_class[i];
mm/zsmalloc.c
1957
pages_freed += __zs_compact(pool, class);
mm/zsmalloc.c
1959
atomic_long_add(pages_freed, &pool->stats.pages_compacted);
mm/zsmalloc.c
1960
atomic_set(&pool->compaction_in_progress, 0);
mm/zsmalloc.c
1966
void zs_pool_stats(struct zs_pool *pool, struct zs_pool_stats *stats)
mm/zsmalloc.c
1968
memcpy(stats, &pool->stats, sizeof(struct zs_pool_stats));
mm/zsmalloc.c
1976
struct zs_pool *pool = shrinker->private_data;
mm/zsmalloc.c
1983
pages_freed = zs_compact(pool);
mm/zsmalloc.c
1994
struct zs_pool *pool = shrinker->private_data;
mm/zsmalloc.c
1997
class = pool->size_class[i];
mm/zsmalloc.c
2007
static void zs_unregister_shrinker(struct zs_pool *pool)
mm/zsmalloc.c
2009
shrinker_free(pool->shrinker);
mm/zsmalloc.c
2012
static int zs_register_shrinker(struct zs_pool *pool)
mm/zsmalloc.c
2014
pool->shrinker = shrinker_alloc(0, "mm-zspool:%s", pool->name);
mm/zsmalloc.c
2015
if (!pool->shrinker)
mm/zsmalloc.c
2018
pool->shrinker->scan_objects = zs_shrinker_scan;
mm/zsmalloc.c
2019
pool->shrinker->count_objects = zs_shrinker_count;
mm/zsmalloc.c
2020
pool->shrinker->batch = 0;
mm/zsmalloc.c
2021
pool->shrinker->private_data = pool;
mm/zsmalloc.c
2023
shrinker_register(pool->shrinker);
mm/zsmalloc.c
2062
struct zs_pool *pool;
mm/zsmalloc.c
2065
pool = kzalloc_obj(*pool);
mm/zsmalloc.c
2066
if (!pool)
mm/zsmalloc.c
2069
init_deferred_free(pool);
mm/zsmalloc.c
2070
rwlock_init(&pool->lock);
mm/zsmalloc.c
2071
atomic_set(&pool->compaction_in_progress, 0);
mm/zsmalloc.c
2073
pool->name = kstrdup(name, GFP_KERNEL);
mm/zsmalloc.c
2074
if (!pool->name)
mm/zsmalloc.c
2126
pool->size_class[i] = prev_class;
mm/zsmalloc.c
2140
pool->size_class[i] = class;
mm/zsmalloc.c
2152
zs_pool_stat_create(pool, name);
mm/zsmalloc.c
2160
zs_register_shrinker(pool);
mm/zsmalloc.c
2162
return pool;
mm/zsmalloc.c
2165
zs_destroy_pool(pool);
mm/zsmalloc.c
2170
void zs_destroy_pool(struct zs_pool *pool)
mm/zsmalloc.c
2174
zs_unregister_shrinker(pool);
mm/zsmalloc.c
2175
zs_flush_migration(pool);
mm/zsmalloc.c
2176
zs_pool_stat_destroy(pool);
mm/zsmalloc.c
2180
struct size_class *class = pool->size_class[i];
mm/zsmalloc.c
2198
kfree(pool->name);
mm/zsmalloc.c
2199
kfree(pool);
mm/zsmalloc.c
272
struct zs_pool *pool;
mm/zsmalloc.c
366
static void kick_deferred_free(struct zs_pool *pool);
mm/zsmalloc.c
367
static void init_deferred_free(struct zs_pool *pool);
mm/zsmalloc.c
368
static void SetZsPageMovable(struct zs_pool *pool, struct zspage *zspage);
mm/zsmalloc.c
370
static void kick_deferred_free(struct zs_pool *pool) {}
mm/zsmalloc.c
371
static void init_deferred_free(struct zs_pool *pool) {}
mm/zsmalloc.c
372
static void SetZsPageMovable(struct zs_pool *pool, struct zspage *zspage) {}
mm/zsmalloc.c
457
static struct size_class *zspage_class(struct zs_pool *pool,
mm/zsmalloc.c
460
return pool->size_class[zspage->class];
mm/zsmalloc.c
520
struct zs_pool *pool = s->private;
mm/zsmalloc.c
536
class = pool->size_class[i];
mm/zsmalloc.c
582
static void zs_pool_stat_create(struct zs_pool *pool, const char *name)
mm/zsmalloc.c
589
pool->stat_dentry = debugfs_create_dir(name, zs_stat_root);
mm/zsmalloc.c
591
debugfs_create_file("classes", S_IFREG | 0444, pool->stat_dentry, pool,
mm/zsmalloc.c
595
static void zs_pool_stat_destroy(struct zs_pool *pool)
mm/zsmalloc.c
597
debugfs_remove_recursive(pool->stat_dentry);
mm/zsmalloc.c
609
static inline void zs_pool_stat_create(struct zs_pool *pool, const char *name)
mm/zsmalloc.c
613
static inline void zs_pool_stat_destroy(struct zs_pool *pool)
mm/zsmalloc.c
804
static void __free_zspage(struct zs_pool *pool, struct size_class *class,
mm/zsmalloc.c
828
atomic_long_sub(class->pages_per_zspage, &pool->pages_allocated);
mm/zsmalloc.c
831
static void free_zspage(struct zs_pool *pool, struct size_class *class,
mm/zsmalloc.c
843
kick_deferred_free(pool);
mm/zsmalloc.c
848
__free_zspage(pool, class, zspage);
mm/zsmalloc.c
932
static struct zspage *alloc_zspage(struct zs_pool *pool,
mm/zsmalloc.c
947
zspage->pool = pool;
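
zsmalloc hands back opaque handles rather than pointers so that objects can be migrated and compacted behind the caller's back; in this tree the copies go through zs_obj_write() and the zs_obj_read_begin()/zs_obj_read_end() pair shown above, and zs_malloc() takes a preferred node, as the zswap call sites below illustrate. A minimal sketch under those assumptions (the buffer and its contents are illustrative):

#include <linux/zsmalloc.h>
#include <linux/err.h>
#include <linux/numa.h>

static int demo_zsmalloc(void)
{
	struct zs_pool *pool;
	unsigned long handle;
	char buf[64] = "compressed payload";	/* illustrative */
	char tmp[64];
	void *obj;

	pool = zs_create_pool("demo");
	if (!pool)
		return -ENOMEM;

	handle = zs_malloc(pool, sizeof(buf), GFP_KERNEL, NUMA_NO_NODE);
	if (IS_ERR_VALUE(handle)) {
		zs_destroy_pool(pool);
		return -ENOMEM;
	}

	zs_obj_write(pool, handle, buf, sizeof(buf));

	/* May map the object directly or copy it into tmp when it
	 * spans pages; always pair with zs_obj_read_end(). */
	obj = zs_obj_read_begin(pool, handle, tmp);
	/* ... read from obj ... */
	zs_obj_read_end(pool, handle, obj);

	zs_free(pool, handle);
	zs_destroy_pool(pool);
	return 0;
}
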
mm/zswap.c
1410
struct zswap_pool *pool)
mm/zswap.c
1422
if (!zswap_compress(page, entry, pool))
mm/zswap.c
1451
zswap_pool_get(pool);
mm/zswap.c
1470
entry->pool = pool;
mm/zswap.c
1482
zs_free(pool->zs_pool, entry->handle);
mm/zswap.c
1494
struct zswap_pool *pool;
mm/zswap.c
1517
pool = zswap_pool_current_get();
mm/zswap.c
1518
if (!pool)
mm/zswap.c
1533
if (!zswap_store_page(page, objcg, pool))
mm/zswap.c
1545
zswap_pool_put(pool);
mm/zswap.c
1780
struct zswap_pool *pool;
mm/zswap.c
1810
pool = __zswap_pool_create_fallback();
mm/zswap.c
1811
if (pool) {
mm/zswap.c
1812
pr_info("loaded using pool %s\n", pool->tfm_name);
mm/zswap.c
1813
list_add(&pool->list, &zswap_pools);
mm/zswap.c
194
struct zswap_pool *pool;
mm/zswap.c
247
struct zswap_pool *pool;
mm/zswap.c
254
pool = kzalloc_obj(*pool);
mm/zswap.c
255
if (!pool)
mm/zswap.c
260
pool->zs_pool = zs_create_pool(name);
mm/zswap.c
261
if (!pool->zs_pool)
mm/zswap.c
264
strscpy(pool->tfm_name, compressor, sizeof(pool->tfm_name));
mm/zswap.c
266
pool->acomp_ctx = alloc_percpu(*pool->acomp_ctx);
mm/zswap.c
267
if (!pool->acomp_ctx) {
mm/zswap.c
273
mutex_init(&per_cpu_ptr(pool->acomp_ctx, cpu)->mutex);
mm/zswap.c
276
&pool->node);
mm/zswap.c
283
ret = percpu_ref_init(&pool->ref, __zswap_pool_empty,
mm/zswap.c
287
INIT_LIST_HEAD(&pool->list);
mm/zswap.c
289
zswap_pool_debug("created", pool);
mm/zswap.c
291
return pool;
mm/zswap.c
294
cpuhp_state_remove_instance(CPUHP_MM_ZSWP_POOL_PREPARE, &pool->node);
mm/zswap.c
296
if (pool->acomp_ctx)
mm/zswap.c
297
free_percpu(pool->acomp_ctx);
mm/zswap.c
298
if (pool->zs_pool)
mm/zswap.c
299
zs_destroy_pool(pool->zs_pool);
mm/zswap.c
300
kfree(pool);
mm/zswap.c
323
static void zswap_pool_destroy(struct zswap_pool *pool)
mm/zswap.c
325
zswap_pool_debug("destroying", pool);
mm/zswap.c
327
cpuhp_state_remove_instance(CPUHP_MM_ZSWP_POOL_PREPARE, &pool->node);
mm/zswap.c
328
free_percpu(pool->acomp_ctx);
mm/zswap.c
330
zs_destroy_pool(pool->zs_pool);
mm/zswap.c
331
kfree(pool);
mm/zswap.c
336
struct zswap_pool *pool = container_of(work, typeof(*pool),
mm/zswap.c
342
WARN_ON(!percpu_ref_is_zero(&pool->ref));
mm/zswap.c
343
percpu_ref_exit(&pool->ref);
mm/zswap.c
346
zswap_pool_destroy(pool);
mm/zswap.c
353
struct zswap_pool *pool;
mm/zswap.c
355
pool = container_of(ref, typeof(*pool), ref);
mm/zswap.c
359
WARN_ON(pool == zswap_pool_current());
mm/zswap.c
361
list_del_rcu(&pool->list);
mm/zswap.c
363
INIT_WORK(&pool->release_work, __zswap_pool_release);
mm/zswap.c
364
schedule_work(&pool->release_work);
mm/zswap.c
369
static int __must_check zswap_pool_tryget(struct zswap_pool *pool)
mm/zswap.c
371
if (!pool)
mm/zswap.c
374
return percpu_ref_tryget(&pool->ref);
mm/zswap.c
378
static void zswap_pool_get(struct zswap_pool *pool)
mm/zswap.c
380
percpu_ref_get(&pool->ref);
mm/zswap.c
383
static void zswap_pool_put(struct zswap_pool *pool)
mm/zswap.c
385
percpu_ref_put(&pool->ref);
mm/zswap.c
390
struct zswap_pool *pool;
mm/zswap.c
392
pool = list_first_or_null_rcu(&zswap_pools, typeof(*pool), list);
mm/zswap.c
393
WARN_ONCE(!pool && zswap_has_pool,
mm/zswap.c
396
return pool;
mm/zswap.c
408
struct zswap_pool *pool;
mm/zswap.c
412
pool = __zswap_pool_current();
mm/zswap.c
413
if (!zswap_pool_tryget(pool))
mm/zswap.c
414
pool = NULL;
mm/zswap.c
418
return pool;
mm/zswap.c
424
struct zswap_pool *pool;
mm/zswap.c
428
list_for_each_entry_rcu(pool, &zswap_pools, list) {
mm/zswap.c
429
if (strcmp(pool->tfm_name, compressor))
mm/zswap.c
432
if (!zswap_pool_tryget(pool))
mm/zswap.c
434
return pool;
mm/zswap.c
452
struct zswap_pool *pool;
mm/zswap.c
456
list_for_each_entry_rcu(pool, &zswap_pools, list)
mm/zswap.c
457
total += zs_get_total_pages(pool->zs_pool);
mm/zswap.c
484
struct zswap_pool *pool, *put_pool = NULL;
mm/zswap.c
515
pool = zswap_pool_find_get(s);
mm/zswap.c
516
if (pool) {
mm/zswap.c
517
zswap_pool_debug("using existing", pool);
mm/zswap.c
518
WARN_ON(pool == zswap_pool_current());
mm/zswap.c
519
list_del_rcu(&pool->list);
mm/zswap.c
524
if (!pool)
mm/zswap.c
525
pool = zswap_pool_create(s);
mm/zswap.c
532
percpu_ref_resurrect(&pool->ref);
mm/zswap.c
535
zswap_pool_put(pool);
mm/zswap.c
538
if (pool)
mm/zswap.c
547
list_add_rcu(&pool->list, &zswap_pools);
mm/zswap.c
549
} else if (pool) {
mm/zswap.c
555
list_add_tail_rcu(&pool->list, &zswap_pools);
mm/zswap.c
556
put_pool = pool;
mm/zswap.c
720
zs_free(entry->pool->zs_pool, entry->handle);
mm/zswap.c
721
zswap_pool_put(entry->pool);
mm/zswap.c
737
struct zswap_pool *pool = hlist_entry(node, struct zswap_pool, node);
mm/zswap.c
738
struct crypto_acomp_ctx *acomp_ctx = per_cpu_ptr(pool->acomp_ctx, cpu);
mm/zswap.c
750
acomp = crypto_alloc_acomp_node(pool->tfm_name, 0, 0, cpu_to_node(cpu));
mm/zswap.c
753
pool->tfm_name, acomp);
mm/zswap.c
761
pool->tfm_name);
mm/zswap.c
797
struct zswap_pool *pool = hlist_entry(node, struct zswap_pool, node);
mm/zswap.c
798
struct crypto_acomp_ctx *acomp_ctx = per_cpu_ptr(pool->acomp_ctx, cpu);
mm/zswap.c
828
static struct crypto_acomp_ctx *acomp_ctx_get_cpu_lock(struct zswap_pool *pool)
mm/zswap.c
833
acomp_ctx = raw_cpu_ptr(pool->acomp_ctx);
mm/zswap.c
854
struct zswap_pool *pool)
mm/zswap.c
865
acomp_ctx = acomp_ctx_get_cpu_lock(pool);
mm/zswap.c
908
handle = zs_malloc(pool->zs_pool, dlen, gfp, page_to_nid(page));
mm/zswap.c
914
zs_obj_write(pool->zs_pool, handle, dst, dlen);
mm/zswap.c
934
struct zswap_pool *pool = entry->pool;
mm/zswap.c
940
acomp_ctx = acomp_ctx_get_cpu_lock(pool);
mm/zswap.c
941
zs_obj_read_sg_begin(pool->zs_pool, entry->handle, input, entry->length);
mm/zswap.c
964
zs_obj_read_sg_end(pool->zs_pool, entry->handle);
mm/zswap.c
974
entry->pool->tfm_name,
net/bridge/netfilter/ebt_among.c
37
p = &wh->pool[i];
net/bridge/netfilter/ebt_among.c
44
p = &wh->pool[i];
net/ceph/debugfs.c
101
seq_printf(s, "pg_temp %llu.%x [", pg->pgid.pool,
net/ceph/debugfs.c
112
seq_printf(s, "primary_temp %llu.%x %d\n", pg->pgid.pool,
net/ceph/debugfs.c
119
seq_printf(s, "pg_upmap %llu.%x [", pg->pgid.pool,
net/ceph/debugfs.c
130
seq_printf(s, "pg_upmap_items %llu.%x [", pg->pgid.pool,
net/ceph/debugfs.c
184
seq_printf(s, "%llu.%x", spgid->pgid.pool, spgid->pgid.seed);
net/ceph/debugfs.c
193
seq_printf(s, "osd%d\t%llu.%x\t", t->osd, t->pgid.pool, t->pgid.seed);
net/ceph/debugfs.c
309
hoid->pool == S64_MIN) {
net/ceph/debugfs.c
317
seq_printf(s, "%lld:%08x:", hoid->pool, hoid->hash_reverse_bits);
net/ceph/messenger.c
2179
if (m->pool)
net/ceph/messenger.c
2180
ceph_msgpool_put(m->pool, m);
net/ceph/msgpool.c
14
struct ceph_msgpool *pool = arg;
net/ceph/msgpool.c
17
msg = ceph_msg_new2(pool->type, pool->front_len, pool->max_data_items,
net/ceph/msgpool.c
20
dout("msgpool_alloc %s failed\n", pool->name);
net/ceph/msgpool.c
22
dout("msgpool_alloc %s %p\n", pool->name, msg);
net/ceph/msgpool.c
23
msg->pool = pool;
net/ceph/msgpool.c
30
struct ceph_msgpool *pool = arg;
net/ceph/msgpool.c
33
dout("msgpool_release %s %p\n", pool->name, msg);
net/ceph/msgpool.c
34
msg->pool = NULL;
net/ceph/msgpool.c
38
int ceph_msgpool_init(struct ceph_msgpool *pool, int type,
net/ceph/msgpool.c
43
pool->type = type;
net/ceph/msgpool.c
44
pool->front_len = front_len;
net/ceph/msgpool.c
45
pool->max_data_items = max_data_items;
net/ceph/msgpool.c
46
pool->pool = mempool_create(size, msgpool_alloc, msgpool_free, pool);
net/ceph/msgpool.c
47
if (!pool->pool)
net/ceph/msgpool.c
49
pool->name = name;
net/ceph/msgpool.c
53
void ceph_msgpool_destroy(struct ceph_msgpool *pool)
net/ceph/msgpool.c
55
dout("msgpool %s destroy\n", pool->name);
net/ceph/msgpool.c
56
mempool_destroy(pool->pool);
net/ceph/msgpool.c
59
struct ceph_msg *ceph_msgpool_get(struct ceph_msgpool *pool, int front_len,
net/ceph/msgpool.c
64
if (front_len > pool->front_len ||
net/ceph/msgpool.c
65
max_data_items > pool->max_data_items) {
net/ceph/msgpool.c
67
__func__, front_len, max_data_items, pool->name,
net/ceph/msgpool.c
68
pool->front_len, pool->max_data_items);
net/ceph/msgpool.c
72
return ceph_msg_new2(pool->type, front_len, max_data_items,
net/ceph/msgpool.c
76
msg = mempool_alloc(pool->pool, GFP_NOFS);
net/ceph/msgpool.c
77
dout("msgpool_get %s %p\n", pool->name, msg);
net/ceph/msgpool.c
81
void ceph_msgpool_put(struct ceph_msgpool *pool, struct ceph_msg *msg)
net/ceph/msgpool.c
83
dout("msgpool_put %s %p\n", pool->name, msg);
net/ceph/msgpool.c
86
msg->front.iov_len = pool->front_len;
net/ceph/msgpool.c
87
msg->hdr.front_len = cpu_to_le32(pool->front_len);
net/ceph/msgpool.c
93
mempool_free(msg, pool->pool);
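
ceph_msgpool is a thin typed wrapper over mempool_create(): init sizes the reserve, get falls back to a plain ceph_msg_new2() when a request exceeds the pool's preallocated dimensions, and put recycles the message. A minimal sketch of that lifecycle, with the argument order inferred from the field assignments above (the sizes and name string are illustrative):

#include <linux/ceph/msgpool.h>
#include <linux/ceph/messenger.h>

static int demo_msgpool(void)
{
	struct ceph_msgpool pool;
	struct ceph_msg *msg;
	int ret;

	ret = ceph_msgpool_init(&pool, CEPH_MSG_OSD_OP, 4096, 8, 10, "demo");
	if (ret)
		return ret;

	/* Within the preallocated dimensions: served from the mempool. */
	msg = ceph_msgpool_get(&pool, 4096, 8);
	if (msg)
		ceph_msgpool_put(&pool, msg);

	ceph_msgpool_destroy(&pool);
	return 0;
}
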
net/ceph/osd_client.c
1107
req->r_base_oloc.pool = layout->pool_id;
net/ceph/osd_client.c
1530
WARN_ON(pi->id != t->target_oloc.pool);
net/ceph/osd_client.c
1601
pi = ceph_pg_pool_by_id(osdc->osdmap, t->base_oloc.pool);
net/ceph/osd_client.c
1622
t->target_oloc.pool = pi->read_tier;
net/ceph/osd_client.c
1624
t->target_oloc.pool = pi->write_tier;
net/ceph/osd_client.c
1626
pi = ceph_pg_pool_by_id(osdc->osdmap, t->target_oloc.pool);
net/ceph/osd_client.c
1635
last_pgid.pool = pgid.pool;
net/ceph/osd_client.c
1790
if (lhs->pool < rhs->pool)
net/ceph/osd_client.c
1792
if (lhs->pool > rhs->pool)
net/ceph/osd_client.c
1874
ceph_decode_64_safe(p, end, hoid->pool, e_inval);
net/ceph/osd_client.c
1898
ceph_encode_64(p, hoid->pool);
net/ceph/osd_client.c
2012
hoid->pool = t->target_oloc.pool;
net/ceph/osd_client.c
2033
__func__, req, req->r_tid, osd->o_osd, backoff->spgid.pgid.pool,
net/ceph/osd_client.c
2118
ceph_encode_64(p, pgid->pool);
net/ceph/osd_client.c
2134
ceph_encode_64(p, oloc->pool);
net/ceph/osd_client.c
2260
pgid.pool = ceph_decode_64(&p);
net/ceph/osd_client.c
2343
__func__, req, req->r_tid, req->r_t.pgid.pool, req->r_t.pgid.seed,
net/ceph/osd_client.c
2344
req->r_t.spgid.pgid.pool, req->r_t.spgid.pgid.seed,
net/ceph/osd_client.c
2426
pool_full(osdc, req->r_t.base_oloc.pool))) {
net/ceph/osd_client.c
2435
req->r_t.base_oloc.pool);
net/ceph/osd_client.c
2654
pool_full(osdc, req->r_t.base_oloc.pool))) {
net/ceph/osd_client.c
3567
oloc->pool = ceph_decode_64(p);
net/ceph/osd_client.c
3785
__func__, req, req->r_tid, m.flags, m.pgid.pool, m.pgid.seed,
net/ceph/osd_client.c
3802
m.redirect.oloc.pool);
net/ceph/osd_client.c
3810
req->r_t.target_oloc.pool = m.redirect.oloc.pool;
net/ceph/osd_client.c
3829
req->r_t.pgid.pool = 0;
net/ceph/osd_client.c
3952
pool_cleared_full(osdc, lreq->t.base_oloc.pool));
net/ceph/osd_client.c
3987
pool_cleared_full(osdc, req->r_t.base_oloc.pool));
net/ceph/osd_client.c
4405
m->spgid.pgid.pool, m->spgid.pgid.seed, m->spgid.shard, m->id);
net/ceph/osd_client.c
4465
m->spgid.pgid.pool, m->spgid.pgid.seed, m->spgid.shard, m->id);
net/ceph/osd_client.c
4470
__func__, osd->o_osd, m->spgid.pgid.pool,
net/ceph/osd_client.c
4478
__func__, osd->o_osd, m->spgid.pgid.pool,
net/ceph/osdmap.c
1338
u64 pool;
net/ceph/osdmap.c
1341
ceph_decode_64_safe(p, end, pool, e_inval);
net/ceph/osdmap.c
1343
pi = lookup_pg_pool(&map->pg_pools, pool);
net/ceph/osdmap.c
1350
pi->id = pool;
net/ceph/osdmap.c
1967
u64 pool;
net/ceph/osdmap.c
2042
ceph_decode_64_safe(p, end, pool, e_inval);
net/ceph/osdmap.c
2043
pi = lookup_pg_pool(&map->pg_pools, pool);
net/ceph/osdmap.c
2120
dest->pool = src->pool;
net/ceph/osdmap.c
2395
WARN_ON(pi->id != oloc->pool);
net/ceph/osdmap.c
2398
raw_pgid->pool = oloc->pool;
net/ceph/osdmap.c
2402
raw_pgid->pool, raw_pgid->seed);
net/ceph/osdmap.c
2414
raw_pgid->pool = oloc->pool;
net/ceph/osdmap.c
2420
raw_pgid->pool, raw_pgid->seed);
net/ceph/osdmap.c
2431
pi = ceph_pg_pool_by_id(osdmap, oloc->pool);
net/ceph/osdmap.c
2447
pgid->pool = raw_pgid->pool;
net/ceph/osdmap.c
2466
raw_pgid->pool);
net/ceph/osdmap.c
2476
(unsigned)raw_pgid->pool;
net/ceph/osdmap.c
2822
WARN_ON(pi->id != raw_pgid->pool);
net/ceph/osdmap.c
2848
WARN_ON(pi->id != raw_pgid->pool);
net/ceph/osdmap.c
2878
pi = ceph_pg_pool_by_id(osdmap, raw_pgid->pool);
net/ceph/osdmap.c
682
if (lhs->pool < rhs->pool)
net/ceph/osdmap.c
684
if (lhs->pool > rhs->pool)
net/ceph/osdmap.c
947
u64 pool;
net/ceph/osdmap.c
952
ceph_decode_64_safe(p, end, pool, bad);
net/ceph/osdmap.c
954
dout(" pool %llu len %d\n", pool, len);
net/ceph/osdmap.c
956
pi = lookup_pg_pool(&map->pg_pools, pool);
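The paired less-than/greater-than tests in osd_client.c and osdmap.c above are the usual field-wise three-way comparator: compare the most significant field first and fall through to the next field only on equality, yielding a total order suitable for sorting and tree lookups. A hedged sketch of the idiom on a pgid-like pair (the demo struct and function names are illustrative):

#include <linux/types.h>

struct demo_pgid {
	u64 pool;	/* most significant key */
	u32 seed;	/* tie-breaker */
};

/* Returns <0, 0, or >0, like memcmp(). */
static int demo_pgid_compare(const struct demo_pgid *lhs,
			     const struct demo_pgid *rhs)
{
	if (lhs->pool < rhs->pool)
		return -1;
	if (lhs->pool > rhs->pool)
		return 1;
	if (lhs->seed < rhs->seed)
		return -1;
	if (lhs->seed > rhs->seed)
		return 1;
	return 0;
}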
net/core/dev.c
13180
per_cpu(system_page_pool.pool, cpuid) = pp_ptr;
net/core/dev.c
13314
pp_ptr = per_cpu(system_page_pool.pool, i);
net/core/dev.c
13320
per_cpu(system_page_pool.pool, i) = NULL;
net/core/dev.c
5529
err = skb_cow_data_for_xdp(this_cpu_read(system_page_pool.pool), pskb, prog);
net/core/devmem.c
432
int mp_dmabuf_devmem_init(struct page_pool *pool)
net/core/devmem.c
434
struct net_devmem_dmabuf_binding *binding = pool->mp_priv;
net/core/devmem.c
442
pool->dma_sync = false;
net/core/devmem.c
443
pool->dma_sync_for_cpu = false;
net/core/devmem.c
445
if (pool->p.order != 0)
net/core/devmem.c
452
netmem_ref mp_dmabuf_devmem_alloc_netmems(struct page_pool *pool, gfp_t gfp)
net/core/devmem.c
454
struct net_devmem_dmabuf_binding *binding = pool->mp_priv;
net/core/devmem.c
464
page_pool_set_pp_info(pool, netmem);
net/core/devmem.c
466
pool->pages_state_hold_cnt++;
net/core/devmem.c
467
trace_page_pool_state_hold(pool, netmem, pool->pages_state_hold_cnt);
net/core/devmem.c
471
void mp_dmabuf_devmem_destroy(struct page_pool *pool)
net/core/devmem.c
473
struct net_devmem_dmabuf_binding *binding = pool->mp_priv;
net/core/devmem.c
478
bool mp_dmabuf_devmem_release_page(struct page_pool *pool, netmem_ref netmem)
net/core/mp_dmabuf_devmem.h
14
int mp_dmabuf_devmem_init(struct page_pool *pool);
net/core/mp_dmabuf_devmem.h
16
netmem_ref mp_dmabuf_devmem_alloc_netmems(struct page_pool *pool, gfp_t gfp);
net/core/mp_dmabuf_devmem.h
18
void mp_dmabuf_devmem_destroy(struct page_pool *pool);
net/core/mp_dmabuf_devmem.h
20
bool mp_dmabuf_devmem_release_page(struct page_pool *pool, netmem_ref netmem);
net/core/mp_dmabuf_devmem.h
22
static inline int mp_dmabuf_devmem_init(struct page_pool *pool)
net/core/mp_dmabuf_devmem.h
28
mp_dmabuf_devmem_alloc_netmems(struct page_pool *pool, gfp_t gfp)
net/core/mp_dmabuf_devmem.h
33
static inline void mp_dmabuf_devmem_destroy(struct page_pool *pool)
net/core/mp_dmabuf_devmem.h
38
mp_dmabuf_devmem_release_page(struct page_pool *pool, netmem_ref netmem)
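net/core/mp_dmabuf_devmem.h above declares the provider entry points twice: real prototypes when devmem is built in, and empty inline stubs otherwise, so net/core/page_pool.c can call through pool->mp_ops unconditionally. A hedged sketch of how such an ops table plugs together, using only the callback slots visible in the listing (init, destroy, alloc_netmems, release_netmem, nl_fill); the struct tag memory_provider_ops and the nl_fill helper name are assumptions:

/* page_pool_init() copies rxq->mp_params.mp_ops into pool->mp_ops and
 * calls pool->mp_ops->init(pool); the alloc/release slots are hit from
 * page_pool_alloc_netmems() and page_pool_return_netmem(). */
static const struct memory_provider_ops dmabuf_devmem_ops = {
	.init		= mp_dmabuf_devmem_init,
	.destroy	= mp_dmabuf_devmem_destroy,
	.alloc_netmems	= mp_dmabuf_devmem_alloc_netmems,
	.release_netmem	= mp_dmabuf_devmem_release_page,
	.nl_fill	= mp_dmabuf_devmem_nl_fill,	/* assumed name */
};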
net/core/netdev-genl.c
418
if (rxq->pool)
net/core/netdev-genl.c
429
if (txq->pool)
net/core/netdev_rx_queue.c
152
if (rxq->pool) {
net/core/netmem_priv.h
28
static inline void netmem_set_pp(netmem_ref netmem, struct page_pool *pool)
net/core/netmem_priv.h
30
netmem_to_nmdesc(netmem)->pp = pool;
net/core/page_pool.c
1007
netmem = __page_pool_put_page(pool, netmem, -1,
net/core/page_pool.c
1015
page_pool_recycle_ring_bulk(pool, bulk, bulk_len);
net/core/page_pool.c
1022
static netmem_ref page_pool_drain_frag(struct page_pool *pool,
net/core/page_pool.c
1025
long drain_count = BIAS_MAX - pool->frag_users;
net/core/page_pool.c
103
per_cpu_ptr(pool->recycle_stats, cpu);
net/core/page_pool.c
1032
page_pool_dma_sync_for_device(pool, netmem, -1);
net/core/page_pool.c
1036
page_pool_return_netmem(pool, netmem);
net/core/page_pool.c
1040
static void page_pool_free_frag(struct page_pool *pool)
net/core/page_pool.c
1042
long drain_count = BIAS_MAX - pool->frag_users;
net/core/page_pool.c
1043
netmem_ref netmem = pool->frag_page;
net/core/page_pool.c
1045
pool->frag_page = 0;
net/core/page_pool.c
1050
page_pool_return_netmem(pool, netmem);
net/core/page_pool.c
1053
netmem_ref page_pool_alloc_frag_netmem(struct page_pool *pool,
net/core/page_pool.c
1057
unsigned int max_size = PAGE_SIZE << pool->p.order;
net/core/page_pool.c
1058
netmem_ref netmem = pool->frag_page;
net/core/page_pool.c
1064
*offset = pool->frag_offset;
net/core/page_pool.c
1067
netmem = page_pool_drain_frag(pool, netmem);
net/core/page_pool.c
1069
recycle_stat_inc(pool, cached);
net/core/page_pool.c
1070
alloc_stat_inc(pool, fast);
net/core/page_pool.c
1076
netmem = page_pool_alloc_netmems(pool, gfp);
net/core/page_pool.c
1078
pool->frag_page = 0;
net/core/page_pool.c
1082
pool->frag_page = netmem;
net/core/page_pool.c
1085
pool->frag_users = 1;
net/core/page_pool.c
1087
pool->frag_offset = size;
net/core/page_pool.c
1092
pool->frag_users++;
net/core/page_pool.c
1093
pool->frag_offset = *offset + size;
net/core/page_pool.c
1098
struct page *page_pool_alloc_frag(struct page_pool *pool, unsigned int *offset,
net/core/page_pool.c
1101
return netmem_to_page(page_pool_alloc_frag_netmem(pool, offset, size,
net/core/page_pool.c
1106
static void page_pool_empty_ring(struct page_pool *pool)
net/core/page_pool.c
1111
while ((netmem = (__force netmem_ref)ptr_ring_consume_bh(&pool->ring))) {
net/core/page_pool.c
1117
page_pool_return_netmem(pool, netmem);
net/core/page_pool.c
1121
static void __page_pool_destroy(struct page_pool *pool)
net/core/page_pool.c
1123
if (pool->disconnect)
net/core/page_pool.c
1124
pool->disconnect(pool);
net/core/page_pool.c
1126
page_pool_unlist(pool);
net/core/page_pool.c
1127
page_pool_uninit(pool);
net/core/page_pool.c
1129
if (pool->mp_ops) {
net/core/page_pool.c
1130
pool->mp_ops->destroy(pool);
net/core/page_pool.c
1134
kfree(pool);
net/core/page_pool.c
1137
static void page_pool_empty_alloc_cache_once(struct page_pool *pool)
net/core/page_pool.c
1141
if (pool->destroy_cnt)
net/core/page_pool.c
1148
while (pool->alloc.count) {
net/core/page_pool.c
1149
netmem = pool->alloc.cache[--pool->alloc.count];
net/core/page_pool.c
1150
page_pool_return_netmem(pool, netmem);
net/core/page_pool.c
1154
static void page_pool_scrub(struct page_pool *pool)
net/core/page_pool.c
1159
page_pool_empty_alloc_cache_once(pool);
net/core/page_pool.c
1160
if (!pool->destroy_cnt++ && pool->dma_map) {
net/core/page_pool.c
1161
if (pool->dma_sync) {
net/core/page_pool.c
1163
pool->dma_sync = false;
net/core/page_pool.c
1171
if (dma_dev_need_sync(pool->p.dev) &&
net/core/page_pool.c
1172
!xa_empty(&pool->dma_mapped))
net/core/page_pool.c
1176
xa_for_each(&pool->dma_mapped, id, ptr)
net/core/page_pool.c
1177
__page_pool_release_netmem_dma(pool, page_to_netmem((struct page *)ptr));
net/core/page_pool.c
1183
page_pool_empty_ring(pool);
net/core/page_pool.c
1186
static int page_pool_release(struct page_pool *pool)
net/core/page_pool.c
1191
page_pool_scrub(pool);
net/core/page_pool.c
1192
inflight = page_pool_inflight(pool, true);
net/core/page_pool.c
1194
in_softirq = page_pool_producer_lock(pool);
net/core/page_pool.c
1195
page_pool_producer_unlock(pool, in_softirq);
net/core/page_pool.c
1197
__page_pool_destroy(pool);
net/core/page_pool.c
1205
struct page_pool *pool = container_of(dwq, typeof(*pool), release_dw);
net/core/page_pool.c
1209
inflight = page_pool_release(pool);
net/core/page_pool.c
1220
netdev = READ_ONCE(pool->slow.netdev);
net/core/page_pool.c
1221
if (time_after_eq(jiffies, pool->defer_warn) &&
net/core/page_pool.c
1223
int sec = (s32)((u32)jiffies - (u32)pool->defer_start) / HZ;
net/core/page_pool.c
1226
__func__, pool->user.id, inflight, sec);
net/core/page_pool.c
1227
pool->defer_warn = jiffies + DEFER_WARN_INTERVAL;
net/core/page_pool.c
1231
schedule_delayed_work(&pool->release_dw, DEFER_TIME);
net/core/page_pool.c
1234
void page_pool_use_xdp_mem(struct page_pool *pool, void (*disconnect)(void *),
net/core/page_pool.c
1237
refcount_inc(&pool->user_cnt);
net/core/page_pool.c
1238
pool->disconnect = disconnect;
net/core/page_pool.c
1239
pool->xdp_mem_id = mem->id;
net/core/page_pool.c
1258
void page_pool_enable_direct_recycling(struct page_pool *pool,
net/core/page_pool.c
1261
if (READ_ONCE(pool->p.napi) == napi)
net/core/page_pool.c
1263
WARN_ON(!napi || pool->p.napi);
net/core/page_pool.c
1266
WRITE_ONCE(pool->p.napi, napi);
net/core/page_pool.c
1271
void page_pool_disable_direct_recycling(struct page_pool *pool)
net/core/page_pool.c
1276
WRITE_ONCE(pool->cpuid, -1);
net/core/page_pool.c
1278
if (!pool->p.napi)
net/core/page_pool.c
1281
napi_assert_will_not_race(pool->p.napi);
net/core/page_pool.c
1284
WRITE_ONCE(pool->p.napi, NULL);
net/core/page_pool.c
1289
void page_pool_destroy(struct page_pool *pool)
net/core/page_pool.c
1291
if (!pool)
net/core/page_pool.c
1294
if (!page_pool_put(pool))
net/core/page_pool.c
1297
page_pool_disable_direct_recycling(pool);
net/core/page_pool.c
1298
page_pool_free_frag(pool);
net/core/page_pool.c
1300
if (!page_pool_release(pool))
net/core/page_pool.c
1303
page_pool_detached(pool);
net/core/page_pool.c
1304
pool->defer_start = jiffies;
net/core/page_pool.c
1305
pool->defer_warn = jiffies + DEFER_WARN_INTERVAL;
net/core/page_pool.c
1307
INIT_DELAYED_WORK(&pool->release_dw, page_pool_release_retry);
net/core/page_pool.c
1308
schedule_delayed_work(&pool->release_dw, DEFER_TIME);
net/core/page_pool.c
1313
void page_pool_update_nid(struct page_pool *pool, int new_nid)
net/core/page_pool.c
1317
trace_page_pool_update_nid(pool, new_nid);
net/core/page_pool.c
1318
pool->p.nid = new_nid;
net/core/page_pool.c
1321
while (pool->alloc.count) {
net/core/page_pool.c
1322
netmem = pool->alloc.cache[--pool->alloc.count];
net/core/page_pool.c
1323
page_pool_return_netmem(pool, netmem);
net/core/page_pool.c
1336
void net_mp_niov_set_page_pool(struct page_pool *pool, struct net_iov *niov)
net/core/page_pool.c
1340
page_pool_set_pp_info(pool, netmem);
net/core/page_pool.c
1342
pool->pages_state_hold_cnt++;
net/core/page_pool.c
1343
trace_page_pool_state_hold(pool, netmem, pool->pages_state_hold_cnt);
net/core/page_pool.c
161
static bool page_pool_producer_lock(struct page_pool *pool)
net/core/page_pool.c
162
__acquires(&pool->ring.producer_lock)
net/core/page_pool.c
167
spin_lock(&pool->ring.producer_lock);
net/core/page_pool.c
169
spin_lock_bh(&pool->ring.producer_lock);
net/core/page_pool.c
174
static void page_pool_producer_unlock(struct page_pool *pool,
net/core/page_pool.c
176
__releases(&pool->ring.producer_lock)
net/core/page_pool.c
179
spin_unlock(&pool->ring.producer_lock);
net/core/page_pool.c
181
spin_unlock_bh(&pool->ring.producer_lock);
net/core/page_pool.c
193
static int page_pool_init(struct page_pool *pool,
net/core/page_pool.c
203
memcpy(&pool->p, &params->fast, sizeof(pool->p));
net/core/page_pool.c
204
memcpy(&pool->slow, &params->slow, sizeof(pool->slow));
net/core/page_pool.c
206
pool->cpuid = cpuid;
net/core/page_pool.c
207
pool->dma_sync_for_cpu = true;
net/core/page_pool.c
210
if (pool->slow.flags & ~PP_FLAG_ALL)
net/core/page_pool.c
213
if (pool->p.pool_size)
net/core/page_pool.c
214
ring_qsize = min(pool->p.pool_size, 16384);
net/core/page_pool.c
220
if (pool->slow.flags & PP_FLAG_DMA_MAP) {
net/core/page_pool.c
221
if ((pool->p.dma_dir != DMA_FROM_DEVICE) &&
net/core/page_pool.c
222
(pool->p.dma_dir != DMA_BIDIRECTIONAL))
net/core/page_pool.c
225
pool->dma_map = true;
net/core/page_pool.c
228
if (pool->slow.flags & PP_FLAG_DMA_SYNC_DEV) {
net/core/page_pool.c
232
if (!(pool->slow.flags & PP_FLAG_DMA_MAP))
net/core/page_pool.c
235
if (!pool->p.max_len)
net/core/page_pool.c
238
pool->dma_sync = true;
net/core/page_pool.c
245
pool->has_init_callback = !!pool->slow.init_callback;
net/core/page_pool.c
248
if (!(pool->slow.flags & PP_FLAG_SYSTEM_POOL)) {
net/core/page_pool.c
249
pool->recycle_stats = alloc_percpu(struct page_pool_recycle_stats);
net/core/page_pool.c
250
if (!pool->recycle_stats)
net/core/page_pool.c
257
pool->recycle_stats = &pp_system_recycle_stats;
net/core/page_pool.c
258
pool->system = true;
net/core/page_pool.c
262
if (ptr_ring_init(&pool->ring, ring_qsize, GFP_KERNEL) < 0) {
net/core/page_pool.c
264
if (!pool->system)
net/core/page_pool.c
265
free_percpu(pool->recycle_stats);
net/core/page_pool.c
270
atomic_set(&pool->pages_state_release_cnt, 0);
net/core/page_pool.c
273
refcount_set(&pool->user_cnt, 1);
net/core/page_pool.c
275
xa_init_flags(&pool->dma_mapped, XA_FLAGS_ALLOC1);
net/core/page_pool.c
277
if (pool->slow.flags & PP_FLAG_ALLOW_UNREADABLE_NETMEM) {
net/core/page_pool.c
278
netdev_assert_locked(pool->slow.netdev);
net/core/page_pool.c
279
rxq = __netif_get_rx_queue(pool->slow.netdev,
net/core/page_pool.c
280
pool->slow.queue_idx);
net/core/page_pool.c
281
pool->mp_priv = rxq->mp_params.mp_priv;
net/core/page_pool.c
282
pool->mp_ops = rxq->mp_params.mp_ops;
net/core/page_pool.c
285
if (pool->mp_ops) {
net/core/page_pool.c
286
if (!pool->dma_map || !pool->dma_sync) {
net/core/page_pool.c
291
if (WARN_ON(!is_kernel_rodata((unsigned long)pool->mp_ops))) {
net/core/page_pool.c
296
err = pool->mp_ops->init(pool);
net/core/page_pool.c
304
} else if (pool->p.order > MAX_PAGE_ORDER) {
net/core/page_pool.c
312
ptr_ring_cleanup(&pool->ring, NULL);
net/core/page_pool.c
313
xa_destroy(&pool->dma_mapped);
net/core/page_pool.c
315
if (!pool->system)
net/core/page_pool.c
316
free_percpu(pool->recycle_stats);
net/core/page_pool.c
321
static void page_pool_uninit(struct page_pool *pool)
net/core/page_pool.c
323
ptr_ring_cleanup(&pool->ring, NULL);
net/core/page_pool.c
324
xa_destroy(&pool->dma_mapped);
net/core/page_pool.c
327
if (!pool->system)
net/core/page_pool.c
328
free_percpu(pool->recycle_stats);
net/core/page_pool.c
340
struct page_pool *pool;
net/core/page_pool.c
343
pool = kzalloc_node(sizeof(*pool), GFP_KERNEL, params->nid);
net/core/page_pool.c
344
if (!pool)
net/core/page_pool.c
347
err = page_pool_init(pool, params, cpuid);
net/core/page_pool.c
351
err = page_pool_list(pool);
net/core/page_pool.c
355
return pool;
net/core/page_pool.c
358
page_pool_uninit(pool);
net/core/page_pool.c
361
kfree(pool);
net/core/page_pool.c
376
static void page_pool_return_netmem(struct page_pool *pool, netmem_ref netmem);
net/core/page_pool.c
378
static noinline netmem_ref page_pool_refill_alloc_cache(struct page_pool *pool)
net/core/page_pool.c
380
struct ptr_ring *r = &pool->ring;
net/core/page_pool.c
386
alloc_stat_inc(pool, empty);
net/core/page_pool.c
394
pref_nid = (pool->p.nid == NUMA_NO_NODE) ? numa_mem_id() : pool->p.nid;
net/core/page_pool.c
407
pool->alloc.cache[pool->alloc.count++] = netmem;
net/core/page_pool.c
414
page_pool_return_netmem(pool, netmem);
net/core/page_pool.c
415
alloc_stat_inc(pool, waive);
net/core/page_pool.c
419
} while (pool->alloc.count < PP_ALLOC_CACHE_REFILL);
net/core/page_pool.c
422
if (likely(pool->alloc.count > 0)) {
net/core/page_pool.c
423
netmem = pool->alloc.cache[--pool->alloc.count];
net/core/page_pool.c
424
alloc_stat_inc(pool, refill);
net/core/page_pool.c
431
static netmem_ref __page_pool_get_cached(struct page_pool *pool)
net/core/page_pool.c
436
if (likely(pool->alloc.count)) {
net/core/page_pool.c
438
netmem = pool->alloc.cache[--pool->alloc.count];
net/core/page_pool.c
439
alloc_stat_inc(pool, fast);
net/core/page_pool.c
441
netmem = page_pool_refill_alloc_cache(pool);
net/core/page_pool.c
447
static void __page_pool_dma_sync_for_device(const struct page_pool *pool,
net/core/page_pool.c
454
dma_sync_size = min(dma_sync_size, pool->p.max_len);
net/core/page_pool.c
455
__dma_sync_single_for_device(pool->p.dev, dma_addr + pool->p.offset,
net/core/page_pool.c
456
dma_sync_size, pool->p.dma_dir);
net/core/page_pool.c
46
#define alloc_stat_inc(pool, __stat) (pool->alloc_stats.__stat++)
net/core/page_pool.c
461
page_pool_dma_sync_for_device(const struct page_pool *pool,
net/core/page_pool.c
465
if (pool->dma_sync && dma_dev_need_sync(pool->p.dev)) {
net/core/page_pool.c
468
if (pool->dma_sync)
net/core/page_pool.c
469
__page_pool_dma_sync_for_device(pool, netmem,
net/core/page_pool.c
475
static int page_pool_register_dma_index(struct page_pool *pool,
net/core/page_pool.c
48
#define recycle_stat_inc(pool, __stat) \
net/core/page_pool.c
485
err = xa_alloc(&pool->dma_mapped, &id, netmem_to_page(netmem),
net/core/page_pool.c
488
err = xa_alloc_bh(&pool->dma_mapped, &id, netmem_to_page(netmem),
net/core/page_pool.c
50
struct page_pool_recycle_stats __percpu *s = pool->recycle_stats; \
net/core/page_pool.c
500
static int page_pool_release_dma_index(struct page_pool *pool,
net/core/page_pool.c
514
old = xa_cmpxchg(&pool->dma_mapped, id, page, NULL, 0);
net/core/page_pool.c
516
old = xa_cmpxchg_bh(&pool->dma_mapped, id, page, NULL, 0);
net/core/page_pool.c
525
static bool page_pool_dma_map(struct page_pool *pool, netmem_ref netmem, gfp_t gfp)
net/core/page_pool.c
535
dma = dma_map_page_attrs(pool->p.dev, netmem_to_page(netmem), 0,
net/core/page_pool.c
536
(PAGE_SIZE << pool->p.order), pool->p.dma_dir,
net/core/page_pool.c
539
if (dma_mapping_error(pool->p.dev, dma))
net/core/page_pool.c
54
#define recycle_stat_add(pool, __stat, val) \
net/core/page_pool.c
547
err = page_pool_register_dma_index(pool, netmem, gfp);
net/core/page_pool.c
551
page_pool_dma_sync_for_device(pool, netmem, pool->p.max_len);
net/core/page_pool.c
558
dma_unmap_page_attrs(pool->p.dev, dma,
net/core/page_pool.c
559
PAGE_SIZE << pool->p.order, pool->p.dma_dir,
net/core/page_pool.c
56
struct page_pool_recycle_stats __percpu *s = pool->recycle_stats; \
net/core/page_pool.c
564
static struct page *__page_pool_alloc_page_order(struct page_pool *pool,
net/core/page_pool.c
570
page = alloc_pages_node(pool->p.nid, gfp, pool->p.order);
net/core/page_pool.c
574
if (pool->dma_map && unlikely(!page_pool_dma_map(pool, page_to_netmem(page), gfp))) {
net/core/page_pool.c
579
alloc_stat_inc(pool, slow_high_order);
net/core/page_pool.c
580
page_pool_set_pp_info(pool, page_to_netmem(page));
net/core/page_pool.c
583
pool->pages_state_hold_cnt++;
net/core/page_pool.c
584
trace_page_pool_state_hold(pool, page_to_netmem(page),
net/core/page_pool.c
585
pool->pages_state_hold_cnt);
net/core/page_pool.c
590
static noinline netmem_ref __page_pool_alloc_netmems_slow(struct page_pool *pool,
net/core/page_pool.c
594
unsigned int pp_order = pool->p.order;
net/core/page_pool.c
595
bool dma_map = pool->dma_map;
net/core/page_pool.c
607
return page_to_netmem(__page_pool_alloc_page_order(pool, gfp));
net/core/page_pool.c
610
if (unlikely(pool->alloc.count > 0))
net/core/page_pool.c
611
return pool->alloc.cache[--pool->alloc.count];
net/core/page_pool.c
614
memset(&pool->alloc.cache, 0, sizeof(void *) * bulk);
net/core/page_pool.c
616
nr_pages = alloc_pages_bulk_node(gfp, pool->p.nid, bulk,
net/core/page_pool.c
617
(struct page **)pool->alloc.cache);
net/core/page_pool.c
625
netmem = pool->alloc.cache[i];
net/core/page_pool.c
626
if (dma_map && unlikely(!page_pool_dma_map(pool, netmem, gfp))) {
net/core/page_pool.c
631
page_pool_set_pp_info(pool, netmem);
net/core/page_pool.c
632
pool->alloc.cache[pool->alloc.count++] = netmem;
net/core/page_pool.c
634
pool->pages_state_hold_cnt++;
net/core/page_pool.c
635
trace_page_pool_state_hold(pool, netmem,
net/core/page_pool.c
636
pool->pages_state_hold_cnt);
net/core/page_pool.c
640
if (likely(pool->alloc.count > 0)) {
net/core/page_pool.c
641
netmem = pool->alloc.cache[--pool->alloc.count];
net/core/page_pool.c
642
alloc_stat_inc(pool, slow);
net/core/page_pool.c
654
netmem_ref page_pool_alloc_netmems(struct page_pool *pool, gfp_t gfp)
net/core/page_pool.c
659
netmem = __page_pool_get_cached(pool);
net/core/page_pool.c
664
if (static_branch_unlikely(&page_pool_mem_providers) && pool->mp_ops)
net/core/page_pool.c
665
netmem = pool->mp_ops->alloc_netmems(pool, gfp);
net/core/page_pool.c
667
netmem = __page_pool_alloc_netmems_slow(pool, gfp);
net/core/page_pool.c
673
struct page *page_pool_alloc_pages(struct page_pool *pool, gfp_t gfp)
net/core/page_pool.c
675
return netmem_to_page(page_pool_alloc_netmems(pool, gfp));
net/core/page_pool.c
684
s32 page_pool_inflight(const struct page_pool *pool, bool strict)
net/core/page_pool.c
686
u32 release_cnt = atomic_read(&pool->pages_state_release_cnt);
net/core/page_pool.c
687
u32 hold_cnt = READ_ONCE(pool->pages_state_hold_cnt);
net/core/page_pool.c
693
trace_page_pool_release(pool, inflight, hold_cnt, release_cnt);
net/core/page_pool.c
703
void page_pool_set_pp_info(struct page_pool *pool, netmem_ref netmem)
net/core/page_pool.c
705
netmem_set_pp(netmem, pool);
net/core/page_pool.c
715
if (pool->has_init_callback)
net/core/page_pool.c
716
pool->slow.init_callback(netmem, pool->slow.init_arg);
net/core/page_pool.c
725
static __always_inline void __page_pool_release_netmem_dma(struct page_pool *pool,
net/core/page_pool.c
730
if (!pool->dma_map)
net/core/page_pool.c
736
if (page_pool_release_dma_index(pool, netmem))
net/core/page_pool.c
742
dma_unmap_page_attrs(pool->p.dev, dma,
net/core/page_pool.c
743
PAGE_SIZE << pool->p.order, pool->p.dma_dir,
net/core/page_pool.c
753
static void page_pool_return_netmem(struct page_pool *pool, netmem_ref netmem)
net/core/page_pool.c
759
if (static_branch_unlikely(&page_pool_mem_providers) && pool->mp_ops)
net/core/page_pool.c
760
put = pool->mp_ops->release_netmem(pool, netmem);
net/core/page_pool.c
762
__page_pool_release_netmem_dma(pool, netmem);
net/core/page_pool.c
767
count = atomic_inc_return_relaxed(&pool->pages_state_release_cnt);
net/core/page_pool.c
768
trace_page_pool_state_release(pool, netmem, count);
net/core/page_pool.c
780
static bool page_pool_recycle_in_ring(struct page_pool *pool, netmem_ref netmem)
net/core/page_pool.c
785
in_softirq = page_pool_producer_lock(pool);
net/core/page_pool.c
786
ret = !__ptr_ring_produce(&pool->ring, (__force void *)netmem);
net/core/page_pool.c
788
recycle_stat_inc(pool, ring);
net/core/page_pool.c
789
page_pool_producer_unlock(pool, in_softirq);
net/core/page_pool.c
800
struct page_pool *pool)
net/core/page_pool.c
802
if (unlikely(pool->alloc.count == PP_ALLOC_CACHE_SIZE)) {
net/core/page_pool.c
803
recycle_stat_inc(pool, cache_full);
net/core/page_pool.c
808
pool->alloc.cache[pool->alloc.count++] = netmem;
net/core/page_pool.c
809
recycle_stat_inc(pool, cached);
net/core/page_pool.c
827
__page_pool_put_page(struct page_pool *pool, netmem_ref netmem,
net/core/page_pool.c
844
page_pool_dma_sync_for_device(pool, netmem, dma_sync_size);
net/core/page_pool.c
846
if (allow_direct && page_pool_recycle_in_cache(netmem, pool))
net/core/page_pool.c
85
bool page_pool_get_stats(const struct page_pool *pool,
net/core/page_pool.c
866
recycle_stat_inc(pool, released_refcnt);
net/core/page_pool.c
867
page_pool_return_netmem(pool, netmem);
net/core/page_pool.c
872
static bool page_pool_napi_local(const struct page_pool *pool)
net/core/page_pool.c
891
if (READ_ONCE(pool->cpuid) == cpuid)
net/core/page_pool.c
894
napi = READ_ONCE(pool->p.napi);
net/core/page_pool.c
899
void page_pool_put_unrefed_netmem(struct page_pool *pool, netmem_ref netmem,
net/core/page_pool.c
903
allow_direct = page_pool_napi_local(pool);
net/core/page_pool.c
905
netmem = __page_pool_put_page(pool, netmem, dma_sync_size,
net/core/page_pool.c
907
if (netmem && !page_pool_recycle_in_ring(pool, netmem)) {
net/core/page_pool.c
909
recycle_stat_inc(pool, ring_full);
net/core/page_pool.c
910
page_pool_return_netmem(pool, netmem);
net/core/page_pool.c
915
void page_pool_put_unrefed_page(struct page_pool *pool, struct page *page,
net/core/page_pool.c
918
page_pool_put_unrefed_netmem(pool, page_to_netmem(page), dma_sync_size,
net/core/page_pool.c
923
static void page_pool_recycle_ring_bulk(struct page_pool *pool,
net/core/page_pool.c
931
in_softirq = page_pool_producer_lock(pool);
net/core/page_pool.c
934
if (__ptr_ring_produce(&pool->ring, (__force void *)bulk[i])) {
net/core/page_pool.c
936
recycle_stat_inc(pool, ring_full);
net/core/page_pool.c
94
stats->alloc_stats.fast += pool->alloc_stats.fast;
net/core/page_pool.c
941
page_pool_producer_unlock(pool, in_softirq);
net/core/page_pool.c
942
recycle_stat_add(pool, ring, i);
net/core/page_pool.c
95
stats->alloc_stats.slow += pool->alloc_stats.slow;
net/core/page_pool.c
953
page_pool_return_netmem(pool, bulk[i]);
net/core/page_pool.c
96
stats->alloc_stats.slow_high_order += pool->alloc_stats.slow_high_order;
net/core/page_pool.c
97
stats->alloc_stats.empty += pool->alloc_stats.empty;
net/core/page_pool.c
98
stats->alloc_stats.refill += pool->alloc_stats.refill;
net/core/page_pool.c
984
struct page_pool *pool = NULL;
net/core/page_pool.c
99
stats->alloc_stats.waive += pool->alloc_stats.waive;
net/core/page_pool.c
995
if (unlikely(!pool)) {
net/core/page_pool.c
996
pool = netmem_pp;
net/core/page_pool.c
997
allow_direct = page_pool_napi_local(pool);
net/core/page_pool.c
998
} else if (netmem_pp != pool) {
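Taken together, the page_pool.c entries above trace one driver-visible lifecycle: page_pool_create() at setup, page_pool_alloc_pages() on the RX fast path, page_pool_put_*() to recycle, and page_pool_destroy() at teardown. A hedged driver-side sketch; the sizes, flags, and device pointer are illustrative:

#include <net/page_pool/helpers.h>

static struct page_pool *demo_pool_setup(struct device *dev)
{
	struct page_pool_params params = {
		/* Map pages for DMA and let the pool sync them for the
		 * device on recycle, as validated in page_pool_init(). */
		.flags		= PP_FLAG_DMA_MAP | PP_FLAG_DMA_SYNC_DEV,
		.order		= 0,
		.pool_size	= 256,		/* ptr_ring capacity */
		.nid		= NUMA_NO_NODE,
		.dev		= dev,
		.dma_dir	= DMA_FROM_DEVICE,
		.max_len	= PAGE_SIZE,	/* sync length for RX */
		.offset		= 0,
	};

	return page_pool_create(&params);	/* ERR_PTR() on failure */
}

static void demo_pool_use(struct page_pool *pool)
{
	/* The fast path tries the per-pool alloc cache, then the
	 * ptr_ring, then falls back to the page allocator slow path. */
	struct page *page = page_pool_alloc_pages(pool, GFP_ATOMIC);

	if (page)
		/* false: we are not running in the pool's NAPI context. */
		page_pool_put_full_page(pool, page, false);

	page_pool_destroy(pool);	/* defers while pages are in flight */
}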
net/core/page_pool_priv.h
12
s32 page_pool_inflight(const struct page_pool *pool, bool strict);
net/core/page_pool_priv.h
14
int page_pool_list(struct page_pool *pool);
net/core/page_pool_priv.h
15
void page_pool_detached(struct page_pool *pool);
net/core/page_pool_priv.h
16
void page_pool_unlist(struct page_pool *pool);
net/core/page_pool_priv.h
41
void page_pool_set_pp_info(struct page_pool *pool, netmem_ref netmem);
net/core/page_pool_priv.h
46
static inline void page_pool_set_pp_info(struct page_pool *pool,
net/core/page_pool_user.c
114
page_pool_nl_stats_fill(struct sk_buff *rsp, const struct page_pool *pool,
net/core/page_pool_user.c
122
if (!page_pool_get_stats(pool, &stats))
net/core/page_pool_user.c
131
if (nla_put_uint(rsp, NETDEV_A_PAGE_POOL_ID, pool->user.id) ||
net/core/page_pool_user.c
132
(pool->slow.netdev->ifindex != LOOPBACK_IFINDEX &&
net/core/page_pool_user.c
134
pool->slow.netdev->ifindex)))
net/core/page_pool_user.c
216
page_pool_nl_fill(struct sk_buff *rsp, const struct page_pool *pool,
net/core/page_pool_user.c
227
if (nla_put_uint(rsp, NETDEV_A_PAGE_POOL_ID, pool->user.id))
net/core/page_pool_user.c
230
if (pool->slow.netdev->ifindex != LOOPBACK_IFINDEX &&
net/core/page_pool_user.c
232
pool->slow.netdev->ifindex))
net/core/page_pool_user.c
235
napi_id = pool->p.napi ? READ_ONCE(pool->p.napi->napi_id) : 0;
net/core/page_pool_user.c
240
inflight = page_pool_inflight(pool, false);
net/core/page_pool_user.c
241
refsz = PAGE_SIZE << pool->p.order;
net/core/page_pool_user.c
246
if (pool->user.detach_time &&
net/core/page_pool_user.c
248
ktime_divns(pool->user.detach_time, NSEC_PER_SEC)))
net/core/page_pool_user.c
251
if (pool->mp_ops && pool->mp_ops->nl_fill(pool->mp_priv, rsp, NULL))
net/core/page_pool_user.c
262
static void netdev_nl_page_pool_event(const struct page_pool *pool, u32 cmd)
net/core/page_pool_user.c
271
if (hlist_unhashed(&pool->user.list))
net/core/page_pool_user.c
273
net = dev_net(pool->slow.netdev);
net/core/page_pool_user.c
284
if (page_pool_nl_fill(ntf, pool, &info)) {
net/core/page_pool_user.c
311
int page_pool_list(struct page_pool *pool)
net/core/page_pool_user.c
317
err = xa_alloc_cyclic(&page_pools, &pool->user.id, pool, xa_limit_32b,
net/core/page_pool_user.c
322
INIT_HLIST_NODE(&pool->user.list);
net/core/page_pool_user.c
323
if (pool->slow.netdev) {
net/core/page_pool_user.c
324
hlist_add_head(&pool->user.list,
net/core/page_pool_user.c
325
&pool->slow.netdev->page_pools);
net/core/page_pool_user.c
326
netdev_nl_page_pool_event(pool, NETDEV_CMD_PAGE_POOL_ADD_NTF);
net/core/page_pool_user.c
337
void page_pool_detached(struct page_pool *pool)
net/core/page_pool_user.c
340
pool->user.detach_time = ktime_get_boottime();
net/core/page_pool_user.c
341
netdev_nl_page_pool_event(pool, NETDEV_CMD_PAGE_POOL_CHANGE_NTF);
net/core/page_pool_user.c
345
void page_pool_unlist(struct page_pool *pool)
net/core/page_pool_user.c
348
netdev_nl_page_pool_event(pool, NETDEV_CMD_PAGE_POOL_DEL_NTF);
net/core/page_pool_user.c
349
xa_erase(&page_pools, pool->user.id);
net/core/page_pool_user.c
350
if (!hlist_unhashed(&pool->user.list))
net/core/page_pool_user.c
351
hlist_del(&pool->user.list);
net/core/page_pool_user.c
359
struct page_pool *pool;
net/core/page_pool_user.c
36
typedef int (*pp_nl_fill_cb)(struct sk_buff *rsp, const struct page_pool *pool,
net/core/page_pool_user.c
366
hlist_for_each_entry_safe(pool, n, &dev->page_pools, user.list) {
net/core/page_pool_user.c
367
if (pool->mp_priv != binding)
net/core/page_pool_user.c
370
if (pool->slow.queue_idx == get_netdev_rx_queue_index(rxq)) {
net/core/page_pool_user.c
381
struct page_pool *pool;
net/core/page_pool_user.c
385
hlist_for_each_entry_safe(pool, n, &netdev->page_pools, user.list) {
net/core/page_pool_user.c
386
hlist_del_init(&pool->user.list);
net/core/page_pool_user.c
387
pool->slow.netdev = NET_PTR_POISON;
net/core/page_pool_user.c
394
struct page_pool *pool, *last;
net/core/page_pool_user.c
401
hlist_for_each_entry(pool, &netdev->page_pools, user.list) {
net/core/page_pool_user.c
402
pool->slow.netdev = lo;
net/core/page_pool_user.c
403
netdev_nl_page_pool_event(pool,
net/core/page_pool_user.c
405
last = pool;
net/core/page_pool_user.c
42
struct page_pool *pool;
net/core/page_pool_user.c
47
pool = xa_load(&page_pools, id);
net/core/page_pool_user.c
48
if (!pool || hlist_unhashed(&pool->user.list) ||
net/core/page_pool_user.c
49
!net_eq(dev_net(pool->slow.netdev), genl_info_net(info))) {
net/core/page_pool_user.c
60
err = fill(rsp, pool, info);
net/core/page_pool_user.c
88
struct page_pool *pool;
net/core/page_pool_user.c
94
hlist_for_each_entry(pool, &netdev->page_pools, user.list) {
net/core/page_pool_user.c
95
if (state->pp_id && state->pp_id < pool->user.id)
net/core/page_pool_user.c
98
state->pp_id = pool->user.id;
net/core/page_pool_user.c
99
err = fill(skb, pool, info);
net/core/skbuff.c
1021
int skb_cow_data_for_xdp(struct page_pool *pool, struct sk_buff **pskb,
net/core/skbuff.c
1027
return skb_pp_cow_data(pool, pskb, XDP_PACKET_HEADROOM);
net/core/skbuff.c
939
int skb_pp_cow_data(struct page_pool *pool, struct sk_buff **pskb,
net/core/skbuff.c
960
data = page_pool_dev_alloc_va(pool, &truesize);
net/core/skbuff.c
966
page_pool_free_va(pool, data, true);
net/core/skbuff.c
993
page = page_pool_dev_alloc(pool, &page_off, &truesize);
net/core/xdp.c
381
int xdp_reg_page_pool(struct page_pool *pool)
net/core/xdp.c
385
return xdp_reg_mem_model(&mem, MEM_TYPE_PAGE_POOL, pool);
net/core/xdp.c
396
void xdp_unreg_page_pool(const struct page_pool *pool)
net/core/xdp.c
400
.id = pool->xdp_mem_id,
net/core/xdp.c
416
const struct page_pool *pool)
net/core/xdp.c
420
.id = pool->xdp_mem_id,
net/core/xdp.c
753
pp = this_cpu_read(system_page_pool.pool);
net/rds/ib_frmr.c
102
atomic_dec(&pool->item_count);
net/rds/ib_frmr.c
108
struct rds_ib_mr_pool *pool = ibmr->pool;
net/rds/ib_frmr.c
111
llist_add(&ibmr->llnode, &pool->drop_list);
net/rds/ib_frmr.c
113
llist_add(&ibmr->llnode, &pool->free_list);
net/rds/ib_frmr.c
114
atomic_add(ibmr->sg_len, &pool->free_pinned);
net/rds/ib_frmr.c
115
atomic_inc(&pool->dirty_count);
net/rds/ib_frmr.c
118
if (atomic_read(&pool->free_pinned) >= pool->max_free_pinned ||
net/rds/ib_frmr.c
119
atomic_read(&pool->dirty_count) >= pool->max_items / 5)
net/rds/ib_frmr.c
120
queue_delayed_work(rds_ib_mr_wq, &pool->flush_worker, 10);
net/rds/ib_frmr.c
192
struct rds_ib_mr_pool *pool,
net/rds/ib_frmr.c
247
if (frmr->dma_npages > ibmr->pool->max_pages) {
net/rds/ib_frmr.c
256
if (ibmr->pool->pool_type == RDS_IB_MR_8K_POOL)
net/rds/ib_frmr.c
391
if (ibmr->pool->pool_type == RDS_IB_MR_8K_POOL)
net/rds/ib_frmr.c
430
ret = rds_ib_map_frmr(rds_ibdev, ibmr->pool, ibmr, sg, nents);
net/rds/ib_frmr.c
443
struct rds_ib_mr_pool *pool = ibmr->pool;
net/rds/ib_frmr.c
447
llist_add(&ibmr->llnode, &pool->drop_list);
net/rds/ib_frmr.c
449
llist_add(&ibmr->llnode, &pool->free_list);
net/rds/ib_frmr.c
56
struct rds_ib_mr_pool *pool;
net/rds/ib_frmr.c
62
pool = rds_ibdev->mr_8k_pool;
net/rds/ib_frmr.c
64
pool = rds_ibdev->mr_1m_pool;
net/rds/ib_frmr.c
66
ibmr = rds_ib_try_reuse_ibmr(pool);
net/rds/ib_frmr.c
79
pool->max_pages);
net/rds/ib_frmr.c
86
ibmr->pool = pool;
net/rds/ib_frmr.c
87
if (pool->pool_type == RDS_IB_MR_8K_POOL)
net/rds/ib_frmr.c
92
if (atomic_read(&pool->item_count) > pool->max_items_soft)
net/rds/ib_frmr.c
93
pool->max_items_soft = pool->max_items;
net/rds/ib_mr.h
68
struct rds_ib_mr_pool *pool;
net/rds/ib_rdma.c
194
struct rds_ib_mr *rds_ib_reuse_mr(struct rds_ib_mr_pool *pool)
net/rds/ib_rdma.c
200
spin_lock_irqsave(&pool->clean_lock, flags);
net/rds/ib_rdma.c
201
ret = llist_del_first(&pool->clean_list);
net/rds/ib_rdma.c
202
spin_unlock_irqrestore(&pool->clean_lock, flags);
net/rds/ib_rdma.c
205
if (pool->pool_type == RDS_IB_MR_8K_POOL)
net/rds/ib_rdma.c
271
struct rds_ib_mr_pool *pool = ibmr->pool;
net/rds/ib_rdma.c
273
atomic_sub(pinned, &pool->free_pinned);
net/rds/ib_rdma.c
277
static inline unsigned int rds_ib_flush_goal(struct rds_ib_mr_pool *pool, int free_all)
net/rds/ib_rdma.c
281
item_count = atomic_read(&pool->item_count);
net/rds/ib_rdma.c
338
int rds_ib_flush_mr_pool(struct rds_ib_mr_pool *pool,
net/rds/ib_rdma.c
348
if (pool->pool_type == RDS_IB_MR_8K_POOL)
net/rds/ib_rdma.c
355
while (!mutex_trylock(&pool->flush_lock)) {
net/rds/ib_rdma.c
356
ibmr = rds_ib_reuse_mr(pool);
net/rds/ib_rdma.c
359
finish_wait(&pool->flush_wait, &wait);
net/rds/ib_rdma.c
363
prepare_to_wait(&pool->flush_wait, &wait,
net/rds/ib_rdma.c
365
if (llist_empty(&pool->clean_list))
net/rds/ib_rdma.c
368
ibmr = rds_ib_reuse_mr(pool);
net/rds/ib_rdma.c
371
finish_wait(&pool->flush_wait, &wait);
net/rds/ib_rdma.c
375
finish_wait(&pool->flush_wait, &wait);
net/rds/ib_rdma.c
377
mutex_lock(&pool->flush_lock);
net/rds/ib_rdma.c
380
ibmr = rds_ib_reuse_mr(pool);
net/rds/ib_rdma.c
390
dirty_to_clean = llist_append_to_list(&pool->drop_list, &unmap_list);
net/rds/ib_rdma.c
391
dirty_to_clean += llist_append_to_list(&pool->free_list, &unmap_list);
net/rds/ib_rdma.c
395
spin_lock_irqsave(&pool->clean_lock, flags);
net/rds/ib_rdma.c
396
llist_append_to_list(&pool->clean_list, &unmap_list);
net/rds/ib_rdma.c
397
spin_unlock_irqrestore(&pool->clean_lock, flags);
net/rds/ib_rdma.c
400
free_goal = rds_ib_flush_goal(pool, free_all);
net/rds/ib_rdma.c
417
spin_lock_irqsave(&pool->clean_lock, flags);
net/rds/ib_rdma.c
419
&pool->clean_list);
net/rds/ib_rdma.c
420
spin_unlock_irqrestore(&pool->clean_lock, flags);
net/rds/ib_rdma.c
424
atomic_sub(unpinned, &pool->free_pinned);
net/rds/ib_rdma.c
425
atomic_sub(dirty_to_clean, &pool->dirty_count);
net/rds/ib_rdma.c
426
atomic_sub(nfreed, &pool->item_count);
net/rds/ib_rdma.c
429
mutex_unlock(&pool->flush_lock);
net/rds/ib_rdma.c
430
if (waitqueue_active(&pool->flush_wait))
net/rds/ib_rdma.c
431
wake_up(&pool->flush_wait);
net/rds/ib_rdma.c
436
struct rds_ib_mr *rds_ib_try_reuse_ibmr(struct rds_ib_mr_pool *pool)
net/rds/ib_rdma.c
442
ibmr = rds_ib_reuse_mr(pool);
net/rds/ib_rdma.c
446
if (atomic_inc_return(&pool->item_count) <= pool->max_items)
net/rds/ib_rdma.c
449
atomic_dec(&pool->item_count);
net/rds/ib_rdma.c
452
if (pool->pool_type == RDS_IB_MR_8K_POOL)
net/rds/ib_rdma.c
460
if (pool->pool_type == RDS_IB_MR_8K_POOL)
net/rds/ib_rdma.c
465
rds_ib_flush_mr_pool(pool, 0, &ibmr);
net/rds/ib_rdma.c
475
struct rds_ib_mr_pool *pool = container_of(work, struct rds_ib_mr_pool, flush_worker.work);
net/rds/ib_rdma.c
477
rds_ib_flush_mr_pool(pool, 0, NULL);
net/rds/ib_rdma.c
483
struct rds_ib_mr_pool *pool = ibmr->pool;
net/rds/ib_rdma.c
501
atomic_add(ibmr->sg_len, &pool->free_pinned);
net/rds/ib_rdma.c
502
atomic_inc(&pool->dirty_count);
net/rds/ib_rdma.c
505
if (atomic_read(&pool->free_pinned) >= pool->max_free_pinned ||
net/rds/ib_rdma.c
506
atomic_read(&pool->dirty_count) >= pool->max_items / 5)
net/rds/ib_rdma.c
507
queue_delayed_work(rds_ib_mr_wq, &pool->flush_worker, 10);
net/rds/ib_rdma.c
511
rds_ib_flush_mr_pool(pool, 0, NULL);
net/rds/ib_rdma.c
517
&pool->flush_worker, 10);
net/rds/ib_rdma.c
635
void rds_ib_destroy_mr_pool(struct rds_ib_mr_pool *pool)
net/rds/ib_rdma.c
637
cancel_delayed_work_sync(&pool->flush_worker);
net/rds/ib_rdma.c
638
rds_ib_flush_mr_pool(pool, 1, NULL);
net/rds/ib_rdma.c
639
WARN_ON(atomic_read(&pool->item_count));
net/rds/ib_rdma.c
640
WARN_ON(atomic_read(&pool->free_pinned));
net/rds/ib_rdma.c
641
kfree(pool);
net/rds/ib_rdma.c
647
struct rds_ib_mr_pool *pool;
net/rds/ib_rdma.c
649
pool = kzalloc_obj(*pool);
net/rds/ib_rdma.c
650
if (!pool)
net/rds/ib_rdma.c
653
pool->pool_type = pool_type;
net/rds/ib_rdma.c
654
init_llist_head(&pool->free_list);
net/rds/ib_rdma.c
655
init_llist_head(&pool->drop_list);
net/rds/ib_rdma.c
656
init_llist_head(&pool->clean_list);
net/rds/ib_rdma.c
657
spin_lock_init(&pool->clean_lock);
net/rds/ib_rdma.c
658
mutex_init(&pool->flush_lock);
net/rds/ib_rdma.c
659
init_waitqueue_head(&pool->flush_wait);
net/rds/ib_rdma.c
660
INIT_DELAYED_WORK(&pool->flush_worker, rds_ib_mr_pool_flush_worker);
net/rds/ib_rdma.c
664
pool->max_pages = RDS_MR_1M_MSG_SIZE + 1;
net/rds/ib_rdma.c
665
pool->max_items = rds_ibdev->max_1m_mrs;
net/rds/ib_rdma.c
668
pool->max_pages = RDS_MR_8K_MSG_SIZE + 1;
net/rds/ib_rdma.c
669
pool->max_items = rds_ibdev->max_8k_mrs;
net/rds/ib_rdma.c
672
pool->max_free_pinned = pool->max_items * pool->max_pages / 4;
net/rds/ib_rdma.c
673
pool->max_items_soft = rds_ibdev->max_mrs * 3 / 4;
net/rds/ib_rdma.c
675
return pool;
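The ib_rdma.c entries above implement a reuse-then-flush discipline: rds_ib_try_reuse_ibmr() first pops a clean MR, otherwise reserves a slot in item_count, and when the pool is over max_items it returns the slot and flushes dirty MRs back onto the clean list before retrying. A hedged condensation of that control flow (statistics, the retry cap, and error handling trimmed):

static struct rds_ib_mr *demo_try_reuse(struct rds_ib_mr_pool *pool)
{
	struct rds_ib_mr *ibmr;

	for (;;) {
		/* Cheapest case: a previously flushed, clean MR. */
		ibmr = rds_ib_reuse_mr(pool);
		if (ibmr)
			return ibmr;

		/* Claim a slot; max_items caps the MRs in the pool. */
		if (atomic_inc_return(&pool->item_count) <= pool->max_items)
			return NULL;	/* caller allocates a fresh MR */
		atomic_dec(&pool->item_count);

		/* Over budget: unmap drop_list/free_list MRs onto
		 * clean_list, then try the clean list again. */
		rds_ib_flush_mr_pool(pool, 0, &ibmr);
		if (ibmr)
			return ibmr;
	}
}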
net/sunrpc/svc.c
1006
struct svc_pool *pool = rqstp->rq_pool;
net/sunrpc/svc.c
1010
pool->sp_nrthreads -= 1;
net/sunrpc/svc.c
1016
clear_and_wake_up_bit(SP_VICTIM_REMAINS, &pool->sp_flags);
net/sunrpc/svc.c
533
struct svc_pool *pool = &serv->sv_pools[i];
net/sunrpc/svc.c
538
pool->sp_id = i;
net/sunrpc/svc.c
539
lwq_init(&pool->sp_xprts);
net/sunrpc/svc.c
540
INIT_LIST_HEAD(&pool->sp_all_threads);
net/sunrpc/svc.c
541
init_llist_head(&pool->sp_idle_threads);
net/sunrpc/svc.c
543
percpu_counter_init(&pool->sp_messages_arrived, 0, GFP_KERNEL);
net/sunrpc/svc.c
544
percpu_counter_init(&pool->sp_sockets_queued, 0, GFP_KERNEL);
net/sunrpc/svc.c
545
percpu_counter_init(&pool->sp_threads_woken, 0, GFP_KERNEL);
net/sunrpc/svc.c
625
struct svc_pool *pool = &serv->sv_pools[i];
net/sunrpc/svc.c
627
percpu_counter_destroy(&pool->sp_messages_arrived);
net/sunrpc/svc.c
628
percpu_counter_destroy(&pool->sp_sockets_queued);
net/sunrpc/svc.c
629
percpu_counter_destroy(&pool->sp_threads_woken);
net/sunrpc/svc.c
680
svc_prepare_thread(struct svc_serv *serv, struct svc_pool *pool, int node)
net/sunrpc/svc.c
691
rqstp->rq_pool = pool;
net/sunrpc/svc.c
717
pool->sp_nrthreads += 1;
net/sunrpc/svc.c
722
list_add_rcu(&rqstp->rq_all, &pool->sp_all_threads);
net/sunrpc/svc.c
740
void svc_pool_wake_idle_thread(struct svc_pool *pool)
net/sunrpc/svc.c
746
ln = READ_ONCE(pool->sp_idle_threads.first);
net/sunrpc/svc.c
752
trace_svc_pool_thread_wake(pool, rqstp->rq_task->pid);
net/sunrpc/svc.c
753
percpu_counter_inc(&pool->sp_threads_woken);
net/sunrpc/svc.c
755
trace_svc_pool_thread_running(pool, rqstp->rq_task->pid);
net/sunrpc/svc.c
761
trace_svc_pool_thread_noidle(pool, 0);
net/sunrpc/svc.c
775
int svc_new_thread(struct svc_serv *serv, struct svc_pool *pool)
net/sunrpc/svc.c
782
node = svc_pool_map_get_node(pool->sp_id);
net/sunrpc/svc.c
784
rqstp = svc_prepare_thread(serv, pool, node);
net/sunrpc/svc.c
796
svc_pool_map_set_cpumask(task, pool->sp_id);
net/sunrpc/svc.c
812
svc_start_kthreads(struct svc_serv *serv, struct svc_pool *pool, int nrservs)
net/sunrpc/svc.c
817
err = svc_new_thread(serv, pool);
net/sunrpc/svc.c
823
svc_stop_kthreads(struct svc_serv *serv, struct svc_pool *pool, int nrservs)
net/sunrpc/svc.c
826
set_bit(SP_VICTIM_REMAINS, &pool->sp_flags);
net/sunrpc/svc.c
827
set_bit(SP_NEED_VICTIM, &pool->sp_flags);
net/sunrpc/svc.c
828
svc_pool_wake_idle_thread(pool);
net/sunrpc/svc.c
829
wait_on_bit(&pool->sp_flags, SP_VICTIM_REMAINS, TASK_IDLE);
net/sunrpc/svc.c
855
svc_set_pool_threads(struct svc_serv *serv, struct svc_pool *pool,
net/sunrpc/svc.c
860
if (!pool)
net/sunrpc/svc.c
867
pool->sp_nrthrmin = min_threads;
net/sunrpc/svc.c
868
pool->sp_nrthrmax = max_threads;
net/sunrpc/svc.c
875
if (pool->sp_nrthreads > max_threads)
net/sunrpc/svc.c
877
else if (pool->sp_nrthreads < min_threads)
net/sunrpc/svc.c
885
delta -= pool->sp_nrthreads;
net/sunrpc/svc.c
887
return svc_start_kthreads(serv, pool, delta);
net/sunrpc/svc.c
889
return svc_stop_kthreads(serv, pool, delta);
net/sunrpc/svc.c
920
struct svc_pool *pool = &serv->sv_pools[i];
net/sunrpc/svc.c
928
err = svc_set_pool_threads(serv, pool, min_threads, threads);
net/sunrpc/svc_xprt.c
1130
struct svc_pool *pool = &serv->sv_pools[i];
net/sunrpc/svc_xprt.c
1133
q = lwq_dequeue_all(&pool->sp_xprts);
net/sunrpc/svc_xprt.c
1143
lwq_enqueue_batch(q, &pool->sp_xprts);
net/sunrpc/svc_xprt.c
1465
struct svc_pool *pool = p;
net/sunrpc/svc_xprt.c
1472
pool = NULL;
net/sunrpc/svc_xprt.c
1474
pool = &serv->sv_pools[0];
net/sunrpc/svc_xprt.c
1476
unsigned int pidx = (pool - &serv->sv_pools[0]);
net/sunrpc/svc_xprt.c
1478
pool = &serv->sv_pools[pidx+1];
net/sunrpc/svc_xprt.c
1480
pool = NULL;
net/sunrpc/svc_xprt.c
1483
return pool;
net/sunrpc/svc_xprt.c
1495
struct svc_pool *pool = p;
net/sunrpc/svc_xprt.c
1503
pool->sp_id,
net/sunrpc/svc_xprt.c
1504
percpu_counter_sum_positive(&pool->sp_messages_arrived),
net/sunrpc/svc_xprt.c
1505
percpu_counter_sum_positive(&pool->sp_sockets_queued),
net/sunrpc/svc_xprt.c
1506
percpu_counter_sum_positive(&pool->sp_threads_woken));
net/sunrpc/svc_xprt.c
475
struct svc_pool *pool;
net/sunrpc/svc_xprt.c
488
pool = svc_pool_for_cpu(xprt->xpt_server);
net/sunrpc/svc_xprt.c
490
percpu_counter_inc(&pool->sp_sockets_queued);
net/sunrpc/svc_xprt.c
492
lwq_enqueue(&xprt->xpt_ready, &pool->sp_xprts);
net/sunrpc/svc_xprt.c
494
svc_pool_wake_idle_thread(pool);
net/sunrpc/svc_xprt.c
501
static struct svc_xprt *svc_xprt_dequeue(struct svc_pool *pool)
net/sunrpc/svc_xprt.c
505
xprt = lwq_dequeue(&pool->sp_xprts, struct svc_xprt, xpt_ready);
net/sunrpc/svc_xprt.c
588
struct svc_pool *pool = &serv->sv_pools[0];
net/sunrpc/svc_xprt.c
590
set_bit(SP_TASK_PENDING, &pool->sp_flags);
net/sunrpc/svc_xprt.c
591
svc_pool_wake_idle_thread(pool);
net/sunrpc/svc_xprt.c
693
struct svc_pool *pool = rqstp->rq_pool;
net/sunrpc/svc_xprt.c
696
if (test_bit(SP_TASK_PENDING, &pool->sp_flags))
net/sunrpc/svc_xprt.c
700
if (!lwq_empty(&pool->sp_xprts))
net/sunrpc/svc_xprt.c
724
struct svc_pool *pool = rqstp->rq_pool;
net/sunrpc/svc_xprt.c
729
llist_add(&rqstp->rq_idle, &pool->sp_idle_threads);
net/sunrpc/svc_xprt.c
733
while (!llist_del_first_this(&pool->sp_idle_threads,
net/sunrpc/svc_xprt.c
859
struct svc_pool *pool = rqstp->rq_pool;
net/sunrpc/svc_xprt.c
869
pool->sp_nrthrmin && pool->sp_nrthreads > pool->sp_nrthrmin)
net/sunrpc/svc_xprt.c
872
clear_bit(SP_TASK_PENDING, &pool->sp_flags);
net/sunrpc/svc_xprt.c
879
rqstp->rq_xprt = svc_xprt_dequeue(pool);
net/sunrpc/svc_xprt.c
888
if (pool->sp_idle_threads.first) {
net/sunrpc/svc_xprt.c
901
&pool->sp_flags))
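svc_pool_wake_idle_thread() above pairs with the llist_add() in svc_xprt.c: an idle server thread parks itself on pool->sp_idle_threads, and the enqueue side peeks the lock-free list head and wakes at most one task, leaving the thread itself to unlink when it runs. A hedged sketch of that peek-and-wake pattern (field names follow the listing; tracepoints and the no-idle fallback are trimmed):

#include <linux/sunrpc/svc.h>

static void demo_wake_one(struct svc_pool *pool)
{
	struct llist_node *ln;
	struct svc_rqst *rqstp;

	rcu_read_lock();
	/* Peek rather than pop: the woken thread deletes itself. */
	ln = READ_ONCE(pool->sp_idle_threads.first);
	if (ln) {
		rqstp = llist_entry(ln, struct svc_rqst, rq_idle);
		wake_up_process(rqstp->rq_task);
		percpu_counter_inc(&pool->sp_threads_woken);
	}
	rcu_read_unlock();
}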
net/xdp/xsk.c
100
bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool)
net/xdp/xsk.c
102
return pool->uses_need_wakeup;
net/xdp/xsk.c
1036
struct xsk_buff_pool *pool;
net/xdp/xsk.c
1053
pool = xs->pool;
net/xdp/xsk.c
1054
if (pool->cached_need_wakeup & XDP_WAKEUP_TX) {
net/xdp/xsk.c
1094
if (xs->pool->cached_need_wakeup & XDP_WAKEUP_RX && xs->zc)
net/xdp/xsk.c
110
return dev->_rx[queue_id].pool;
net/xdp/xsk.c
1116
struct xsk_buff_pool *pool;
net/xdp/xsk.c
112
return dev->_tx[queue_id].pool;
net/xdp/xsk.c
1124
pool = xs->pool;
net/xdp/xsk.c
1126
if (pool->cached_need_wakeup) {
net/xdp/xsk.c
1128
xsk_wakeup(xs, pool->cached_need_wakeup);
net/xdp/xsk.c
1170
xp_del_xsk(xs->pool, xs);
net/xdp/xsk.c
121
dev->_rx[queue_id].pool = NULL;
net/xdp/xsk.c
123
dev->_tx[queue_id].pool = NULL;
net/xdp/xsk.c
130
int xsk_reg_pool_at_qid(struct net_device *dev, struct xsk_buff_pool *pool,
net/xdp/xsk.c
1368
xs->pool = xp_create_and_assign_umem(xs,
net/xdp/xsk.c
1370
if (!xs->pool) {
net/xdp/xsk.c
1376
err = xp_assign_dev_shared(xs->pool, umem_xs, dev,
net/xdp/xsk.c
1379
xp_destroy(xs->pool);
net/xdp/xsk.c
1380
xs->pool = NULL;
net/xdp/xsk.c
139
dev->_rx[queue_id].pool = pool;
net/xdp/xsk.c
1393
xp_get_pool(umem_xs->pool);
net/xdp/xsk.c
1394
xs->pool = umem_xs->pool;
net/xdp/xsk.c
1400
if (xs->tx && !xs->pool->tx_descs) {
net/xdp/xsk.c
1401
err = xp_alloc_tx_descs(xs->pool, xs);
net/xdp/xsk.c
1403
xp_put_pool(xs->pool);
net/xdp/xsk.c
1404
xs->pool = NULL;
net/xdp/xsk.c
141
dev->_tx[queue_id].pool = pool;
net/xdp/xsk.c
1419
xs->pool = xp_create_and_assign_umem(xs, xs->umem);
net/xdp/xsk.c
1420
if (!xs->pool) {
net/xdp/xsk.c
1425
err = xp_assign_dev(xs->pool, dev, qid, flags);
net/xdp/xsk.c
1427
xp_destroy(xs->pool);
net/xdp/xsk.c
1428
xs->pool = NULL;
net/xdp/xsk.c
1441
xp_add_xsk(xs->pool, xs);
net/xdp/xsk.c
152
addr = xp_get_handle(xskb, xskb->pool);
net/xdp/xsk.c
1654
xs->pool ? xskq_nb_queue_empty_descs(xs->pool->fq) : 0;
net/xdp/xsk.c
1770
READ_ONCE(xs->pool->fq);
net/xdp/xsk.c
1773
READ_ONCE(xs->pool->cq);
net/xdp/xsk.c
1809
xp_clear_dev(xs->pool);
net/xdp/xsk.c
1852
if (!xp_put_pool(xs->pool))
net/xdp/xsk.c
1853
xdp_put_umem(xs->umem, !xs->pool);
net/xdp/xsk.c
189
xskb_list = &xskb->pool->xskb_list;
net/xdp/xsk.c
242
u32 frame_size = xsk_pool_get_rx_frame_size(xs->pool);
net/xdp/xsk.c
256
xsk_xdp = xsk_buff_alloc(xs->pool);
net/xdp/xsk.c
274
if (!xsk_buff_can_alloc(xs->pool, num_desc)) {
net/xdp/xsk.c
294
xsk_xdp = xsk_buff_alloc(xs->pool);
net/xdp/xsk.c
341
if (len > xsk_pool_get_rx_frame_size(xs->pool) && !xs->sg) {
net/xdp/xsk.c
352
__xskq_cons_release(xs->pool->fq);
net/xdp/xsk.c
363
spin_lock_bh(&xs->pool->rx_lock);
net/xdp/xsk.c
366
spin_unlock_bh(&xs->pool->rx_lock);
net/xdp/xsk.c
419
void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries)
net/xdp/xsk.c
421
xskq_prod_submit_n(pool->cq, nb_entries);
net/xdp/xsk.c
425
void xsk_tx_release(struct xsk_buff_pool *pool)
net/xdp/xsk.c
430
list_for_each_entry_rcu(xs, &pool->xsk_tx_list, tx_list)
net/xdp/xsk.c
436
bool xsk_tx_peek_desc(struct xsk_buff_pool *pool, struct xdp_desc *desc)
net/xdp/xsk.c
443
list_for_each_entry_rcu(xs, &pool->xsk_tx_list, tx_list) {
net/xdp/xsk.c
449
if (!xskq_cons_peek_desc(xs->tx, desc, pool)) {
net/xdp/xsk.c
46
void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool)
net/xdp/xsk.c
462
if (xskq_prod_reserve_addr(pool->cq, desc->addr))
net/xdp/xsk.c
471
list_for_each_entry_rcu(xs, &pool->xsk_tx_list, tx_list)
net/xdp/xsk.c
48
if (pool->cached_need_wakeup & XDP_WAKEUP_RX)
net/xdp/xsk.c
484
static u32 xsk_tx_peek_release_fallback(struct xsk_buff_pool *pool, u32 max_entries)
net/xdp/xsk.c
486
struct xdp_desc *descs = pool->tx_descs;
net/xdp/xsk.c
489
while (nb_pkts < max_entries && xsk_tx_peek_desc(pool, &descs[nb_pkts]))
net/xdp/xsk.c
492
xsk_tx_release(pool);
net/xdp/xsk.c
496
u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 nb_pkts)
net/xdp/xsk.c
501
if (!list_is_singular(&pool->xsk_tx_list)) {
net/xdp/xsk.c
504
return xsk_tx_peek_release_fallback(pool, nb_pkts);
net/xdp/xsk.c
507
xs = list_first_or_null_rcu(&pool->xsk_tx_list, struct xdp_sock, tx_list);
net/xdp/xsk.c
51
pool->fq->ring->flags |= XDP_RING_NEED_WAKEUP;
net/xdp/xsk.c
52
pool->cached_need_wakeup |= XDP_WAKEUP_RX;
net/xdp/xsk.c
521
nb_pkts = xskq_prod_nb_free(pool->cq, nb_pkts);
net/xdp/xsk.c
525
nb_pkts = xskq_cons_read_desc_batch(xs->tx, pool, nb_pkts);
net/xdp/xsk.c
532
xskq_prod_write_addr_batch(pool->cq, pool->tx_descs, nb_pkts);
net/xdp/xsk.c
548
static int xsk_cq_reserve_locked(struct xsk_buff_pool *pool)
net/xdp/xsk.c
552
spin_lock(&pool->cq->cq_cached_prod_lock);
net/xdp/xsk.c
553
ret = xskq_prod_reserve(pool->cq);
net/xdp/xsk.c
554
spin_unlock(&pool->cq->cq_cached_prod_lock);
net/xdp/xsk.c
56
void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool)
net/xdp/xsk.c
596
static void xsk_cq_submit_addr_locked(struct xsk_buff_pool *pool,
net/xdp/xsk.c
60
if (pool->cached_need_wakeup & XDP_WAKEUP_TX)
net/xdp/xsk.c
605
spin_lock_irqsave(&pool->cq_prod_lock, flags);
net/xdp/xsk.c
606
idx = xskq_get_prod(pool->cq);
net/xdp/xsk.c
612
xskq_prod_write_addr(pool->cq, idx + descs_processed,
net/xdp/xsk.c
618
xskq_prod_write_addr(pool->cq, idx,
net/xdp/xsk.c
622
xskq_prod_submit_n(pool->cq, descs_processed);
net/xdp/xsk.c
623
spin_unlock_irqrestore(&pool->cq_prod_lock, flags);
net/xdp/xsk.c
626
static void xsk_cq_cancel_locked(struct xsk_buff_pool *pool, u32 n)
net/xdp/xsk.c
628
spin_lock(&pool->cq->cq_cached_prod_lock);
net/xdp/xsk.c
629
xskq_prod_cancel_n(pool->cq, n);
net/xdp/xsk.c
630
spin_unlock(&pool->cq->cq_cached_prod_lock);
net/xdp/xsk.c
64
list_for_each_entry_rcu(xs, &pool->xsk_tx_list, tx_list) {
net/xdp/xsk.c
643
xsk_cq_submit_addr_locked(xdp_sk(skb->sk)->pool, skb);
net/xdp/xsk.c
669
xsk_cq_cancel_locked(xs->pool, num_descs);
net/xdp/xsk.c
682
struct xdp_desc *desc, struct xsk_buff_pool *pool,
net/xdp/xsk.c
687
if (unlikely(pool->tx_metadata_len == 0))
net/xdp/xsk.c
69
pool->cached_need_wakeup |= XDP_WAKEUP_TX;
net/xdp/xsk.c
690
meta = buffer - pool->tx_metadata_len;
net/xdp/xsk.c
704
if (unlikely(pool->tx_sw_csum)) {
net/xdp/xsk.c
723
struct xsk_buff_pool *pool = xs->pool;
net/xdp/xsk.c
73
void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool)
net/xdp/xsk.c
732
buffer = xsk_buff_raw_get_data(pool, addr);
net/xdp/xsk.c
745
err = xsk_skb_metadata(skb, buffer, desc, pool, hr);
net/xdp/xsk.c
75
if (!(pool->cached_need_wakeup & XDP_WAKEUP_RX))
net/xdp/xsk.c
773
ts = pool->unaligned ? len : pool->chunk_size;
net/xdp/xsk.c
776
addr = buffer - pool->addrs;
net/xdp/xsk.c
78
pool->fq->ring->flags &= ~XDP_RING_NEED_WAKEUP;
net/xdp/xsk.c
782
page = pool->umem->pgs[addr >> PAGE_SHIFT];
net/xdp/xsk.c
79
pool->cached_need_wakeup &= ~XDP_WAKEUP_RX;
net/xdp/xsk.c
820
buffer = xsk_buff_raw_get_data(xs->pool, desc->addr);
net/xdp/xsk.c
83
void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool)
net/xdp/xsk.c
840
xs->pool, hr);
net/xdp/xsk.c
87
if (!(pool->cached_need_wakeup & XDP_WAKEUP_TX))
net/xdp/xsk.c
902
xsk_cq_cancel_locked(xs->pool, 1);
net/xdp/xsk.c
91
list_for_each_entry_rcu(xs, &pool->xsk_tx_list, tx_list) {
net/xdp/xsk.c
929
while (xskq_cons_peek_desc(xs->tx, &desc, xs->pool)) {
net/xdp/xsk.c
940
err = xsk_cq_reserve_locked(xs->pool);
net/xdp/xsk.c
96
pool->cached_need_wakeup &= ~XDP_WAKEUP_TX;
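The xsk_{set,clear}_{rx,tx}_need_wakeup() entries above implement the AF_XDP need_wakeup protocol: when a driver finds the fill or TX queue empty it raises the flag in the ring so user space knows it must kick the kernel with a syscall, and clears it once it is making progress again. A hedged driver-side sketch of the RX half (the frame-handling step is elided):

#include <net/xdp_sock_drv.h>

static void demo_rx_poll(struct xsk_buff_pool *pool)
{
	struct xdp_buff *xdp = xsk_buff_alloc(pool);

	if (!xdp) {
		/* Fill queue ran dry: ask user space to wake us. */
		if (xsk_uses_need_wakeup(pool))
			xsk_set_rx_need_wakeup(pool);
		return;
	}

	/* ... receive the frame into xdp and pass it up the stack ... */
	xsk_buff_free(xdp);	/* return the buffer on drop */

	if (xsk_uses_need_wakeup(pool))
		xsk_clear_rx_need_wakeup(pool);
}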
net/xdp/xsk.h
45
int xsk_reg_pool_at_qid(struct net_device *dev, struct xsk_buff_pool *pool,
net/xdp/xsk_buff_pool.c
100
xskb = &pool->heads[i];
net/xdp/xsk_buff_pool.c
101
xskb->pool = pool;
net/xdp/xsk_buff_pool.c
104
if (pool->unaligned)
net/xdp/xsk_buff_pool.c
105
pool->free_heads[i] = xskb;
net/xdp/xsk_buff_pool.c
107
xp_init_xskb_addr(xskb, pool, (u64)i * pool->chunk_size);
net/xdp/xsk_buff_pool.c
110
return pool;
net/xdp/xsk_buff_pool.c
113
xp_destroy(pool);
net/xdp/xsk_buff_pool.c
117
void xp_set_rxq_info(struct xsk_buff_pool *pool, struct xdp_rxq_info *rxq)
net/xdp/xsk_buff_pool.c
121
for (i = 0; i < pool->heads_cnt; i++)
net/xdp/xsk_buff_pool.c
122
pool->heads[i].xdp.rxq = rxq;
net/xdp/xsk_buff_pool.c
126
void xp_fill_cb(struct xsk_buff_pool *pool, struct xsk_cb_desc *desc)
net/xdp/xsk_buff_pool.c
13
void xp_add_xsk(struct xsk_buff_pool *pool, struct xdp_sock *xs)
net/xdp/xsk_buff_pool.c
130
for (i = 0; i < pool->heads_cnt; i++) {
net/xdp/xsk_buff_pool.c
131
struct xdp_buff_xsk *xskb = &pool->heads[i];
net/xdp/xsk_buff_pool.c
138
static void xp_disable_drv_zc(struct xsk_buff_pool *pool)
net/xdp/xsk_buff_pool.c
145
if (pool->umem->zc) {
net/xdp/xsk_buff_pool.c
147
bpf.xsk.pool = NULL;
net/xdp/xsk_buff_pool.c
148
bpf.xsk.queue_id = pool->queue_id;
net/xdp/xsk_buff_pool.c
150
err = pool->netdev->netdev_ops->ndo_bpf(pool->netdev, &bpf);
net/xdp/xsk_buff_pool.c
157
int xp_assign_dev(struct xsk_buff_pool *pool,
net/xdp/xsk_buff_pool.c
175
pool->netdev = netdev;
net/xdp/xsk_buff_pool.c
176
pool->queue_id = queue_id;
net/xdp/xsk_buff_pool.c
177
err = xsk_reg_pool_at_qid(netdev, pool, queue_id);
net/xdp/xsk_buff_pool.c
18
spin_lock(&pool->xsk_tx_list_lock);
net/xdp/xsk_buff_pool.c
182
pool->umem->flags |= XDP_UMEM_SG_FLAG;
net/xdp/xsk_buff_pool.c
185
pool->uses_need_wakeup = true;
net/xdp/xsk_buff_pool.c
19
list_add_rcu(&xs->tx_list, &pool->xsk_tx_list);
net/xdp/xsk_buff_pool.c
190
pool->cached_need_wakeup = XDP_WAKEUP_TX;
net/xdp/xsk_buff_pool.c
20
spin_unlock(&pool->xsk_tx_list_lock);
net/xdp/xsk_buff_pool.c
214
bpf.xsk.pool = pool;
net/xdp/xsk_buff_pool.c
222
if (!pool->dma_pages) {
net/xdp/xsk_buff_pool.c
227
pool->umem->zc = true;
net/xdp/xsk_buff_pool.c
228
pool->xdp_zc_max_segs = netdev->xdp_zc_max_segs;
net/xdp/xsk_buff_pool.c
23
void xp_del_xsk(struct xsk_buff_pool *pool, struct xdp_sock *xs)
net/xdp/xsk_buff_pool.c
232
xp_disable_drv_zc(pool);
net/xdp/xsk_buff_pool.c
243
int xp_assign_dev_shared(struct xsk_buff_pool *pool, struct xdp_sock *umem_xs,
net/xdp/xsk_buff_pool.c
250
if (umem_xs->pool->uses_need_wakeup)
net/xdp/xsk_buff_pool.c
253
return xp_assign_dev(pool, dev, queue_id, flags);
net/xdp/xsk_buff_pool.c
256
void xp_clear_dev(struct xsk_buff_pool *pool)
net/xdp/xsk_buff_pool.c
258
struct net_device *netdev = pool->netdev;
net/xdp/xsk_buff_pool.c
260
if (!pool->netdev)
net/xdp/xsk_buff_pool.c
264
xp_disable_drv_zc(pool);
net/xdp/xsk_buff_pool.c
265
xsk_clear_pool_at_qid(pool->netdev, pool->queue_id);
net/xdp/xsk_buff_pool.c
266
pool->netdev = NULL;
net/xdp/xsk_buff_pool.c
273
struct xsk_buff_pool *pool = container_of(work, struct xsk_buff_pool,
net/xdp/xsk_buff_pool.c
277
xp_clear_dev(pool);
net/xdp/xsk_buff_pool.c
28
spin_lock(&pool->xsk_tx_list_lock);
net/xdp/xsk_buff_pool.c
280
if (pool->fq) {
net/xdp/xsk_buff_pool.c
281
xskq_destroy(pool->fq);
net/xdp/xsk_buff_pool.c
282
pool->fq = NULL;
net/xdp/xsk_buff_pool.c
285
if (pool->cq) {
net/xdp/xsk_buff_pool.c
286
xskq_destroy(pool->cq);
net/xdp/xsk_buff_pool.c
287
pool->cq = NULL;
net/xdp/xsk_buff_pool.c
290
xdp_put_umem(pool->umem, false);
net/xdp/xsk_buff_pool.c
291
xp_destroy(pool);
net/xdp/xsk_buff_pool.c
294
void xp_get_pool(struct xsk_buff_pool *pool)
net/xdp/xsk_buff_pool.c
296
refcount_inc(&pool->users);
net/xdp/xsk_buff_pool.c
299
bool xp_put_pool(struct xsk_buff_pool *pool)
net/xdp/xsk_buff_pool.c
30
spin_unlock(&pool->xsk_tx_list_lock);
net/xdp/xsk_buff_pool.c
301
if (!pool)
net/xdp/xsk_buff_pool.c
304
if (refcount_dec_and_test(&pool->users)) {
net/xdp/xsk_buff_pool.c
305
INIT_WORK(&pool->work, xp_release_deferred);
net/xdp/xsk_buff_pool.c
306
schedule_work(&pool->work);
net/xdp/xsk_buff_pool.c
313
static struct xsk_dma_map *xp_find_dma_map(struct xsk_buff_pool *pool)
net/xdp/xsk_buff_pool.c
317
list_for_each_entry(dma_map, &pool->umem->xsk_dma_list, list) {
net/xdp/xsk_buff_pool.c
318
if (dma_map->netdev == pool->netdev)
net/xdp/xsk_buff_pool.c
33
void xp_destroy(struct xsk_buff_pool *pool)
net/xdp/xsk_buff_pool.c
35
if (!pool)
net/xdp/xsk_buff_pool.c
373
void xp_dma_unmap(struct xsk_buff_pool *pool, unsigned long attrs)
net/xdp/xsk_buff_pool.c
377
if (!pool->dma_pages)
net/xdp/xsk_buff_pool.c
38
kvfree(pool->tx_descs);
net/xdp/xsk_buff_pool.c
380
dma_map = xp_find_dma_map(pool);
net/xdp/xsk_buff_pool.c
389
kvfree(pool->dma_pages);
net/xdp/xsk_buff_pool.c
39
kvfree(pool->heads);
net/xdp/xsk_buff_pool.c
390
pool->dma_pages = NULL;
net/xdp/xsk_buff_pool.c
391
pool->dma_pages_cnt = 0;
net/xdp/xsk_buff_pool.c
392
pool->dev = NULL;
net/xdp/xsk_buff_pool.c
40
kvfree(pool);
net/xdp/xsk_buff_pool.c
408
static int xp_init_dma_info(struct xsk_buff_pool *pool, struct xsk_dma_map *dma_map)
net/xdp/xsk_buff_pool.c
410
if (!pool->unaligned) {
net/xdp/xsk_buff_pool.c
413
for (i = 0; i < pool->heads_cnt; i++) {
net/xdp/xsk_buff_pool.c
414
struct xdp_buff_xsk *xskb = &pool->heads[i];
net/xdp/xsk_buff_pool.c
417
orig_addr = xskb->xdp.data_hard_start - pool->addrs - pool->headroom;
net/xdp/xsk_buff_pool.c
418
xp_init_xskb_dma(xskb, pool, dma_map->dma_pages, orig_addr);
net/xdp/xsk_buff_pool.c
422
pool->dma_pages = kvzalloc_objs(*pool->dma_pages,
net/xdp/xsk_buff_pool.c
424
if (!pool->dma_pages)
net/xdp/xsk_buff_pool.c
427
pool->dev = dma_map->dev;
net/xdp/xsk_buff_pool.c
428
pool->dma_pages_cnt = dma_map->dma_pages_cnt;
net/xdp/xsk_buff_pool.c
429
memcpy(pool->dma_pages, dma_map->dma_pages,
net/xdp/xsk_buff_pool.c
43
int xp_alloc_tx_descs(struct xsk_buff_pool *pool, struct xdp_sock *xs)
net/xdp/xsk_buff_pool.c
430
pool->dma_pages_cnt * sizeof(*pool->dma_pages));
net/xdp/xsk_buff_pool.c
435
int xp_dma_map(struct xsk_buff_pool *pool, struct device *dev,
net/xdp/xsk_buff_pool.c
443
dma_map = xp_find_dma_map(pool);
net/xdp/xsk_buff_pool.c
445
err = xp_init_dma_info(pool, dma_map);
net/xdp/xsk_buff_pool.c
45
pool->tx_descs = kvzalloc_objs(*pool->tx_descs, xs->tx->nentries);
net/xdp/xsk_buff_pool.c
453
dma_map = xp_create_dma_map(dev, pool->netdev, nr_pages, pool->umem);
net/xdp/xsk_buff_pool.c
46
if (!pool->tx_descs)
net/xdp/xsk_buff_pool.c
467
if (pool->unaligned)
net/xdp/xsk_buff_pool.c
470
err = xp_init_dma_info(pool, dma_map);
net/xdp/xsk_buff_pool.c
480
static bool xp_addr_crosses_non_contig_pg(struct xsk_buff_pool *pool,
net/xdp/xsk_buff_pool.c
483
return xp_desc_crosses_non_contig_pg(pool, addr, pool->chunk_size);
net/xdp/xsk_buff_pool.c
486
static bool xp_check_unaligned(struct xsk_buff_pool *pool, u64 *addr)
net/xdp/xsk_buff_pool.c
489
if (*addr >= pool->addrs_cnt ||
net/xdp/xsk_buff_pool.c
490
*addr + pool->chunk_size > pool->addrs_cnt ||
net/xdp/xsk_buff_pool.c
491
xp_addr_crosses_non_contig_pg(pool, *addr))
net/xdp/xsk_buff_pool.c
496
static bool xp_check_aligned(struct xsk_buff_pool *pool, u64 *addr)
net/xdp/xsk_buff_pool.c
498
*addr = xp_aligned_extract_addr(pool, *addr);
net/xdp/xsk_buff_pool.c
499
return *addr < pool->addrs_cnt;
net/xdp/xsk_buff_pool.c
502
static struct xdp_buff_xsk *xp_get_xskb(struct xsk_buff_pool *pool, u64 addr)
net/xdp/xsk_buff_pool.c
506
if (pool->unaligned) {
net/xdp/xsk_buff_pool.c
507
xskb = pool->free_heads[--pool->free_heads_cnt];
net/xdp/xsk_buff_pool.c
508
xp_init_xskb_addr(xskb, pool, addr);
net/xdp/xsk_buff_pool.c
509
if (pool->dma_pages)
net/xdp/xsk_buff_pool.c
510
xp_init_xskb_dma(xskb, pool, pool->dma_pages, addr);
net/xdp/xsk_buff_pool.c
512
xskb = &pool->heads[xp_aligned_extract_idx(pool, addr)];
net/xdp/xsk_buff_pool.c
518
static struct xdp_buff_xsk *__xp_alloc(struct xsk_buff_pool *pool)
net/xdp/xsk_buff_pool.c
524
if (pool->free_heads_cnt == 0)
net/xdp/xsk_buff_pool.c
528
if (!xskq_cons_peek_addr_unchecked(pool->fq, &addr)) {
net/xdp/xsk_buff_pool.c
529
pool->fq->queue_empty_descs++;
net/xdp/xsk_buff_pool.c
533
ok = pool->unaligned ? xp_check_unaligned(pool, &addr) :
net/xdp/xsk_buff_pool.c
534
xp_check_aligned(pool, &addr);
net/xdp/xsk_buff_pool.c
536
pool->fq->invalid_descs++;
net/xdp/xsk_buff_pool.c
537
xskq_cons_release(pool->fq);
net/xdp/xsk_buff_pool.c
543
xskb = xp_get_xskb(pool, addr);
net/xdp/xsk_buff_pool.c
545
xskq_cons_release(pool->fq);
net/xdp/xsk_buff_pool.c
549
struct xdp_buff *xp_alloc(struct xsk_buff_pool *pool)
net/xdp/xsk_buff_pool.c
553
if (!pool->free_list_cnt) {
net/xdp/xsk_buff_pool.c
554
xskb = __xp_alloc(pool);
net/xdp/xsk_buff_pool.c
558
pool->free_list_cnt--;
net/xdp/xsk_buff_pool.c
559
xskb = list_first_entry(&pool->free_list, struct xdp_buff_xsk,
net/xdp/xsk_buff_pool.c
56
struct xsk_buff_pool *pool;
net/xdp/xsk_buff_pool.c
568
if (pool->dev)
net/xdp/xsk_buff_pool.c
569
xp_dma_sync_for_device(pool, xskb->dma, pool->frame_len);
net/xdp/xsk_buff_pool.c
575
static u32 xp_alloc_new_from_fq(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
net/xdp/xsk_buff_pool.c
579
if (max > pool->free_heads_cnt)
net/xdp/xsk_buff_pool.c
580
max = pool->free_heads_cnt;
net/xdp/xsk_buff_pool.c
581
max = xskq_cons_nb_entries(pool->fq, max);
net/xdp/xsk_buff_pool.c
583
cached_cons = pool->fq->cached_cons;
net/xdp/xsk_buff_pool.c
591
__xskq_cons_read_addr_unchecked(pool->fq, cached_cons++, &addr);
net/xdp/xsk_buff_pool.c
593
ok = pool->unaligned ? xp_check_unaligned(pool, &addr) :
net/xdp/xsk_buff_pool.c
594
xp_check_aligned(pool, &addr);
net/xdp/xsk_buff_pool.c
596
pool->fq->invalid_descs++;
net/xdp/xsk_buff_pool.c
601
xskb = xp_get_xskb(pool, addr);
net/xdp/xsk_buff_pool.c
607
xskq_cons_release_n(pool->fq, max);
net/xdp/xsk_buff_pool.c
61
pool = kvzalloc_flex(*pool, free_heads, entries);
net/xdp/xsk_buff_pool.c
611
static u32 xp_alloc_reused(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 nb_entries)
net/xdp/xsk_buff_pool.c
616
nb_entries = min_t(u32, nb_entries, pool->free_list_cnt);
net/xdp/xsk_buff_pool.c
62
if (!pool)
net/xdp/xsk_buff_pool.c
620
xskb = list_first_entry(&pool->free_list, struct xdp_buff_xsk, list_node);
net/xdp/xsk_buff_pool.c
626
pool->free_list_cnt -= nb_entries;
net/xdp/xsk_buff_pool.c
631
static u32 xp_alloc_slow(struct xsk_buff_pool *pool, struct xdp_buff **xdp,
net/xdp/xsk_buff_pool.c
639
buff = xp_alloc(pool);
net/xdp/xsk_buff_pool.c
649
u32 xp_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
net/xdp/xsk_buff_pool.c
65
pool->heads = kvzalloc_objs(*pool->heads, umem->chunks);
net/xdp/xsk_buff_pool.c
653
if (unlikely(pool->dev && dma_dev_need_sync(pool->dev)))
net/xdp/xsk_buff_pool.c
654
return xp_alloc_slow(pool, xdp, max);
net/xdp/xsk_buff_pool.c
656
if (unlikely(pool->free_list_cnt)) {
net/xdp/xsk_buff_pool.c
657
nb_entries1 = xp_alloc_reused(pool, xdp, max);
net/xdp/xsk_buff_pool.c
66
if (!pool->heads)
net/xdp/xsk_buff_pool.c
665
nb_entries2 = xp_alloc_new_from_fq(pool, xdp, max);
net/xdp/xsk_buff_pool.c
667
pool->fq->queue_empty_descs++;
net/xdp/xsk_buff_pool.c
673
bool xp_can_alloc(struct xsk_buff_pool *pool, u32 count)
net/xdp/xsk_buff_pool.c
677
if (pool->free_list_cnt >= count)
net/xdp/xsk_buff_pool.c
680
req_count = count - pool->free_list_cnt;
net/xdp/xsk_buff_pool.c
681
avail_count = xskq_cons_nb_entries(pool->fq, req_count);
net/xdp/xsk_buff_pool.c
683
pool->fq->queue_empty_descs++;
net/xdp/xsk_buff_pool.c
694
xskb->pool->free_list_cnt++;
net/xdp/xsk_buff_pool.c
695
list_add(&xskb->list_node, &xskb->pool->free_list);
net/xdp/xsk_buff_pool.c
699
static u64 __xp_raw_get_addr(const struct xsk_buff_pool *pool, u64 addr)
net/xdp/xsk_buff_pool.c
70
if (xp_alloc_tx_descs(pool, xs))
net/xdp/xsk_buff_pool.c
701
return pool->unaligned ? xp_unaligned_add_offset_to_addr(addr) : addr;
net/xdp/xsk_buff_pool.c
704
static void *__xp_raw_get_data(const struct xsk_buff_pool *pool, u64 addr)
net/xdp/xsk_buff_pool.c
706
return pool->addrs + addr;
net/xdp/xsk_buff_pool.c
709
void *xp_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
net/xdp/xsk_buff_pool.c
711
return __xp_raw_get_data(pool, __xp_raw_get_addr(pool, addr));
net/xdp/xsk_buff_pool.c
715
static dma_addr_t __xp_raw_get_dma(const struct xsk_buff_pool *pool, u64 addr)
net/xdp/xsk_buff_pool.c
717
return (pool->dma_pages[addr >> PAGE_SHIFT] &
net/xdp/xsk_buff_pool.c
722
dma_addr_t xp_raw_get_dma(struct xsk_buff_pool *pool, u64 addr)
net/xdp/xsk_buff_pool.c
724
return __xp_raw_get_dma(pool, __xp_raw_get_addr(pool, addr));
net/xdp/xsk_buff_pool.c
73
pool->chunk_mask = ~((u64)umem->chunk_size - 1);
net/xdp/xsk_buff_pool.c
74
pool->addrs_cnt = umem->size;
net/xdp/xsk_buff_pool.c
740
struct xdp_desc_ctx xp_raw_get_ctx(const struct xsk_buff_pool *pool, u64 addr)
net/xdp/xsk_buff_pool.c
744
addr = __xp_raw_get_addr(pool, addr);
net/xdp/xsk_buff_pool.c
746
ret.dma = __xp_raw_get_dma(pool, addr);
net/xdp/xsk_buff_pool.c
747
ret.meta = __xsk_buff_get_metadata(pool, __xp_raw_get_data(pool, addr));
net/xdp/xsk_buff_pool.c
75
pool->heads_cnt = umem->chunks;
net/xdp/xsk_buff_pool.c
76
pool->free_heads_cnt = umem->chunks;
net/xdp/xsk_buff_pool.c
77
pool->headroom = umem->headroom;
net/xdp/xsk_buff_pool.c
78
pool->chunk_size = umem->chunk_size;
net/xdp/xsk_buff_pool.c
79
pool->chunk_shift = ffs(umem->chunk_size) - 1;
net/xdp/xsk_buff_pool.c
80
pool->unaligned = unaligned;
net/xdp/xsk_buff_pool.c
81
pool->frame_len = umem->chunk_size - umem->headroom -
net/xdp/xsk_buff_pool.c
83
pool->umem = umem;
net/xdp/xsk_buff_pool.c
84
pool->addrs = umem->addrs;
net/xdp/xsk_buff_pool.c
85
pool->tx_metadata_len = umem->tx_metadata_len;
net/xdp/xsk_buff_pool.c
86
pool->tx_sw_csum = umem->flags & XDP_UMEM_TX_SW_CSUM;
net/xdp/xsk_buff_pool.c
87
spin_lock_init(&pool->rx_lock);
net/xdp/xsk_buff_pool.c
88
INIT_LIST_HEAD(&pool->free_list);
net/xdp/xsk_buff_pool.c
89
INIT_LIST_HEAD(&pool->xskb_list);
net/xdp/xsk_buff_pool.c
90
INIT_LIST_HEAD(&pool->xsk_tx_list);
net/xdp/xsk_buff_pool.c
91
spin_lock_init(&pool->xsk_tx_list_lock);
net/xdp/xsk_buff_pool.c
92
spin_lock_init(&pool->cq_prod_lock);
net/xdp/xsk_buff_pool.c
94
refcount_set(&pool->users, 1);
net/xdp/xsk_buff_pool.c
96
pool->fq = xs->fq_tmp;
net/xdp/xsk_buff_pool.c
97
pool->cq = xs->cq_tmp;
net/xdp/xsk_buff_pool.c
99
for (i = 0; i < pool->free_heads_cnt; i++) {
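Seen together, the xsk_buff_pool.c lines above show that xp_alloc_batch() first drains the pool's free list (xp_alloc_reused()), then pulls fresh addresses from the fill queue (xp_alloc_new_from_fq()), and falls back to the one-buffer-at-a-time xp_alloc() path when the DMA device needs syncing. A minimal, hypothetical driver-side refill loop built only from the signatures quoted above; my_post_rx_buffer() is an invented placeholder for however a driver hands buffers to its hardware:

	static void my_refill_rx(struct xsk_buff_pool *pool,
				 struct xdp_buff **bufs, u32 budget)
	{
		u32 i, n;

		/* Cheap check against free-list and fill-queue occupancy. */
		if (!xp_can_alloc(pool, budget))
			return;

		n = xp_alloc_batch(pool, bufs, budget);
		for (i = 0; i < n; i++)
			my_post_rx_buffer(bufs[i]);	/* hypothetical */
	}
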
net/xdp/xsk_diag.c
49
struct xsk_buff_pool *pool = xs->pool;
net/xdp/xsk_diag.c
62
du.ifindex = (pool && pool->netdev) ? pool->netdev->ifindex : 0;
net/xdp/xsk_diag.c
63
du.queue_id = pool ? pool->queue_id : 0;
net/xdp/xsk_diag.c
70
if (!err && pool && pool->fq)
net/xdp/xsk_diag.c
71
err = xsk_diag_put_ring(pool->fq,
net/xdp/xsk_diag.c
73
if (!err && pool && pool->cq)
net/xdp/xsk_diag.c
74
err = xsk_diag_put_ring(pool->cq,
net/xdp/xsk_diag.c
86
du.n_fill_ring_empty = xs->pool ? xskq_nb_queue_empty_descs(xs->pool->fq) : 0;
net/xdp/xsk_queue.h
148
static inline bool xp_aligned_validate_desc(struct xsk_buff_pool *pool,
net/xdp/xsk_queue.h
158
if (check_sub_overflow(desc->addr, pool->tx_metadata_len, &addr))
net/xdp/xsk_queue.h
161
offset = addr & (pool->chunk_size - 1);
net/xdp/xsk_queue.h
168
if (offset + len + pool->tx_metadata_len > pool->chunk_size)
net/xdp/xsk_queue.h
171
if (addr >= pool->addrs_cnt)
net/xdp/xsk_queue.h
180
static inline bool xp_unaligned_validate_desc(struct xsk_buff_pool *pool,
net/xdp/xsk_queue.h
190
len += pool->tx_metadata_len;
net/xdp/xsk_queue.h
191
if (len > pool->chunk_size)
net/xdp/xsk_queue.h
196
pool->tx_metadata_len, &addr))
net/xdp/xsk_queue.h
199
if (addr >= pool->addrs_cnt)
net/xdp/xsk_queue.h
203
if (check_add_overflow(addr, len, &end) || end > pool->addrs_cnt)
net/xdp/xsk_queue.h
206
if (xp_desc_crosses_non_contig_pg(pool, addr, len))
net/xdp/xsk_queue.h
215
static inline bool xp_validate_desc(struct xsk_buff_pool *pool,
net/xdp/xsk_queue.h
218
return pool->unaligned ? xp_unaligned_validate_desc(pool, desc) :
net/xdp/xsk_queue.h
219
xp_aligned_validate_desc(pool, desc);
net/xdp/xsk_queue.h
229
struct xsk_buff_pool *pool)
net/xdp/xsk_queue.h
231
if (!xp_validate_desc(pool, d)) {
net/xdp/xsk_queue.h
240
struct xsk_buff_pool *pool)
net/xdp/xsk_queue.h
247
return xskq_cons_is_valid_desc(q, desc, pool);
net/xdp/xsk_queue.h
259
static inline void parse_desc(struct xsk_queue *q, struct xsk_buff_pool *pool,
net/xdp/xsk_queue.h
262
parsed->valid = xskq_cons_is_valid_desc(q, desc, pool);
net/xdp/xsk_queue.h
267
u32 xskq_cons_read_desc_batch(struct xsk_queue *q, struct xsk_buff_pool *pool,
net/xdp/xsk_queue.h
271
struct xdp_desc *descs = pool->tx_descs;
net/xdp/xsk_queue.h
284
parse_desc(q, pool, &descs[nb_entries], &parsed);
net/xdp/xsk_queue.h
293
if (nr_frags == pool->xdp_zc_max_segs) {
net/xdp/xsk_queue.h
348
struct xsk_buff_pool *pool)
net/xdp/xsk_queue.h
352
return xskq_cons_read_desc(q, desc, pool);
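xp_validate_desc() above dispatches on pool->unaligned; in aligned mode the check is pure masking arithmetic against a power-of-two chunk_size. A standalone restatement of the aligned-mode checks quoted above (the tx_metadata_len handling is dropped for brevity):

	/* Sketch: aligned-mode descriptor validity, per the quoted checks. */
	static bool aligned_desc_ok(u64 addr, u32 len,
				    u64 chunk_size, u64 addrs_cnt)
	{
		u64 offset = addr & (chunk_size - 1);

		/* The frame must fit inside its chunk... */
		if (offset + len > chunk_size)
			return false;
		/* ...and the chunk must lie inside the umem. */
		return addr < addrs_cnt;
	}

For example, with chunk_size = 2048 a descriptor at addr = 6400 has offset 256, so any len up to 1792 passes.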
sound/core/memalloc.c
425
struct gen_pool *pool;
sound/core/memalloc.c
429
pool = of_gen_pool_get(dev->of_node, "iram", 0);
sound/core/memalloc.c
431
dmab->private_data = pool;
sound/core/memalloc.c
433
p = gen_pool_dma_alloc_align(pool, size, &dmab->addr, PAGE_SIZE);
sound/core/memalloc.c
447
struct gen_pool *pool = dmab->private_data;
sound/core/memalloc.c
449
if (pool && dmab->area)
sound/core/memalloc.c
450
gen_pool_free(pool, (unsigned long)dmab->area, dmab->bytes);
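The memalloc.c lines above are the standard genalloc pairing: the pool handle comes from the device tree via of_gen_pool_get(), and every gen_pool_dma_alloc_align() must be undone by a gen_pool_free() of the same size. A minimal sketch, assuming a device node that carries an "iram" phandle:

	static int iram_buf_example(struct device *dev, size_t size,
				    dma_addr_t *dma)
	{
		struct gen_pool *pool;
		void *buf;

		pool = of_gen_pool_get(dev->of_node, "iram", 0);
		if (!pool)
			return -ENODEV;

		buf = gen_pool_dma_alloc_align(pool, size, dma, PAGE_SIZE);
		if (!buf)
			return -ENOMEM;

		/* ... hand buf and *dma to the hardware ... */

		gen_pool_free(pool, (unsigned long)buf, size);
		return 0;
	}
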
sound/core/seq/oss/seq_oss_writeq.c
138
struct snd_seq_client_pool pool;
sound/core/seq/oss/seq_oss_writeq.c
139
pool.client = q->dp->cseq;
sound/core/seq/oss/seq_oss_writeq.c
140
snd_seq_oss_control(q->dp, SNDRV_SEQ_IOCTL_GET_CLIENT_POOL, &pool);
sound/core/seq/oss/seq_oss_writeq.c
141
return pool.output_free;
sound/core/seq/oss/seq_oss_writeq.c
151
struct snd_seq_client_pool pool;
sound/core/seq/oss/seq_oss_writeq.c
152
pool.client = q->dp->cseq;
sound/core/seq/oss/seq_oss_writeq.c
153
snd_seq_oss_control(q->dp, SNDRV_SEQ_IOCTL_GET_CLIENT_POOL, &pool);
sound/core/seq/oss/seq_oss_writeq.c
154
pool.output_room = val;
sound/core/seq/oss/seq_oss_writeq.c
155
snd_seq_oss_control(q->dp, SNDRV_SEQ_IOCTL_SET_CLIENT_POOL, &pool);
sound/core/seq/oss/seq_oss_writeq.c
28
struct snd_seq_client_pool pool;
sound/core/seq/oss/seq_oss_writeq.c
40
memset(&pool, 0, sizeof(pool));
sound/core/seq/oss/seq_oss_writeq.c
41
pool.client = dp->cseq;
sound/core/seq/oss/seq_oss_writeq.c
42
pool.output_pool = maxlen;
sound/core/seq/oss/seq_oss_writeq.c
43
pool.output_room = maxlen / 2;
sound/core/seq/oss/seq_oss_writeq.c
45
snd_seq_oss_control(dp, SNDRV_SEQ_IOCTL_SET_CLIENT_POOL, &pool);
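Every seq_oss_writeq.c site above uses the same round trip: zero a struct snd_seq_client_pool, set .client, and pass it through snd_seq_oss_control() with the GET or SET ioctl. Condensed from the quoted lines (the output_room value here is illustrative):

	struct snd_seq_client_pool pool;

	memset(&pool, 0, sizeof(pool));
	pool.client = dp->cseq;

	/* Read back the current pool state... */
	snd_seq_oss_control(dp, SNDRV_SEQ_IOCTL_GET_CLIENT_POOL, &pool);

	/* ...then adjust the writer wakeup threshold and write it back. */
	pool.output_room = pool.output_pool / 2;
	snd_seq_oss_control(dp, SNDRV_SEQ_IOCTL_SET_CLIENT_POOL, &pool);
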
sound/core/seq/seq_clientmgr.c
1104
if (snd_seq_pool_poll_wait(client->pool, file, wait))
sound/core/seq/seq_clientmgr.c
1800
info->output_pool = cptr->pool->size;
sound/core/seq/seq_clientmgr.c
1801
info->output_room = cptr->pool->room;
sound/core/seq/seq_clientmgr.c
1803
info->output_free = snd_seq_unused_cells(cptr->pool);
sound/core/seq/seq_clientmgr.c
1828
info->output_pool != client->pool->size)) {
sound/core/seq/seq_clientmgr.c
1831
if (atomic_read(&client->pool->counter))
sound/core/seq/seq_clientmgr.c
1834
snd_seq_pool_mark_closing(client->pool);
sound/core/seq/seq_clientmgr.c
1835
snd_seq_pool_done(client->pool);
sound/core/seq/seq_clientmgr.c
1837
client->pool->size = info->output_pool;
sound/core/seq/seq_clientmgr.c
1838
rc = snd_seq_pool_init(client->pool);
sound/core/seq/seq_clientmgr.c
1853
info->output_room <= client->pool->size) {
sound/core/seq/seq_clientmgr.c
1854
client->pool->room = info->output_room;
sound/core/seq/seq_clientmgr.c
208
client->pool = snd_seq_pool_new(poolsize);
sound/core/seq/seq_clientmgr.c
209
if (client->pool == NULL) {
sound/core/seq/seq_clientmgr.c
240
snd_seq_pool_delete(&client->pool);
sound/core/seq/seq_clientmgr.c
2473
if (snd_seq_pool_poll_wait(client->pool, file, wait))
sound/core/seq/seq_clientmgr.c
257
if (client->pool)
sound/core/seq/seq_clientmgr.c
258
snd_seq_pool_delete(&client->pool);
sound/core/seq/seq_clientmgr.c
2620
snd_seq_info_pool(buffer, client->pool, " ");
sound/core/seq/seq_clientmgr.c
2623
client->data.user.fifo->pool) {
sound/core/seq/seq_clientmgr.c
2625
snd_seq_info_pool(buffer, client->data.user.fifo->pool, " ");
sound/core/seq/seq_clientmgr.c
911
err = snd_seq_event_dup(client->pool, event, &cell, !blocking || atomic,
sound/core/seq/seq_clientmgr.c
95
return snd_seq_total_cells(client->pool) > 0;
sound/core/seq/seq_clientmgr.c
979
if (!client->accept_output || client->pool == NULL)
sound/core/seq/seq_clientmgr.c
986
if (client->pool->size > 0 && !snd_seq_write_pool_allocated(client)) {
sound/core/seq/seq_clientmgr.c
987
err = snd_seq_pool_init(client->pool);
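The seq_clientmgr.c resize path quoted above only rebuilds an in-use pool once it is idle. Restated from the quoted call sites, with the surrounding permission and size checks trimmed:

	if (atomic_read(&client->pool->counter))
		return -EBUSY;			/* cells still outstanding */

	snd_seq_pool_mark_closing(client->pool);
	snd_seq_pool_done(client->pool);	/* drain and free the old cells */
	client->pool->size = info->output_pool;
	rc = snd_seq_pool_init(client->pool);	/* reallocate at the new size */
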
sound/core/seq/seq_clientmgr.h
56
struct snd_seq_pool *pool; /* memory pool for this client */
sound/core/seq/seq_fifo.c
110
err = snd_seq_event_dup(f->pool, event, &cell, 1, NULL, NULL); /* always non-blocking */
sound/core/seq/seq_fifo.c
223
if (snd_BUG_ON(!f || !f->pool))
sound/core/seq/seq_fifo.c
237
oldpool = f->pool;
sound/core/seq/seq_fifo.c
240
f->pool = newpool;
sound/core/seq/seq_fifo.c
26
f->pool = snd_seq_pool_new(poolsize);
sound/core/seq/seq_fifo.c
269
return snd_seq_unused_cells(f->pool);
sound/core/seq/seq_fifo.c
27
if (f->pool == NULL) {
sound/core/seq/seq_fifo.c
31
if (snd_seq_pool_init(f->pool) < 0) {
sound/core/seq/seq_fifo.c
32
snd_seq_pool_delete(&f->pool);
sound/core/seq/seq_fifo.c
60
if (f->pool)
sound/core/seq/seq_fifo.c
61
snd_seq_pool_mark_closing(f->pool);
sound/core/seq/seq_fifo.c
72
if (f->pool) {
sound/core/seq/seq_fifo.c
73
snd_seq_pool_done(f->pool);
sound/core/seq/seq_fifo.c
74
snd_seq_pool_delete(&f->pool);
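seq_fifo.c takes the other resize strategy: build a fully initialized replacement pool first, swap the f->pool pointer, then delete the old one, so readers never observe a half-built pool. A sketch assembled from the quoted lines (the FIFO locking around the swap, and the draining of cells still queued on the old pool, are elided):

	newpool = snd_seq_pool_new(poolsize);
	if (!newpool)
		return -ENOMEM;
	if (snd_seq_pool_init(newpool) < 0) {
		snd_seq_pool_delete(&newpool);
		return -ENOMEM;
	}

	oldpool = f->pool;
	f->pool = newpool;		/* swapped under the FIFO lock */
	snd_seq_pool_delete(&oldpool);
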
sound/core/seq/seq_fifo.h
16
struct snd_seq_pool *pool; /* FIFO pool */
sound/core/seq/seq_memory.c
22
static inline int snd_seq_pool_available(struct snd_seq_pool *pool)
sound/core/seq/seq_memory.c
225
static inline void free_cell(struct snd_seq_pool *pool,
sound/core/seq/seq_memory.c
228
cell->next = pool->free;
sound/core/seq/seq_memory.c
229
pool->free = cell;
sound/core/seq/seq_memory.c
230
atomic_dec(&pool->counter);
sound/core/seq/seq_memory.c
235
struct snd_seq_pool *pool;
sound/core/seq/seq_memory.c
239
pool = cell->pool;
sound/core/seq/seq_memory.c
24
return pool->total_elements - atomic_read(&pool->counter);
sound/core/seq/seq_memory.c
240
if (snd_BUG_ON(!pool))
sound/core/seq/seq_memory.c
243
guard(spinlock_irqsave)(&pool->lock);
sound/core/seq/seq_memory.c
244
free_cell(pool, cell);
sound/core/seq/seq_memory.c
251
curp->next = pool->free;
sound/core/seq/seq_memory.c
252
free_cell(pool, curp);
sound/core/seq/seq_memory.c
256
if (waitqueue_active(&pool->output_sleep)) {
sound/core/seq/seq_memory.c
258
if (snd_seq_output_ok(pool))
sound/core/seq/seq_memory.c
259
wake_up(&pool->output_sleep);
sound/core/seq/seq_memory.c
267
static int snd_seq_cell_alloc(struct snd_seq_pool *pool,
sound/core/seq/seq_memory.c
27
static inline int snd_seq_output_ok(struct snd_seq_pool *pool)
sound/core/seq/seq_memory.c
277
if (pool == NULL)
sound/core/seq/seq_memory.c
283
spin_lock_irqsave(&pool->lock, flags);
sound/core/seq/seq_memory.c
284
if (pool->ptr == NULL) { /* not initialized */
sound/core/seq/seq_memory.c
289
while (pool->free == NULL && ! nonblock && ! pool->closing) {
sound/core/seq/seq_memory.c
29
return snd_seq_pool_available(pool) >= pool->room;
sound/core/seq/seq_memory.c
292
add_wait_queue(&pool->output_sleep, &wait);
sound/core/seq/seq_memory.c
293
spin_unlock_irqrestore(&pool->lock, flags);
sound/core/seq/seq_memory.c
299
spin_lock_irqsave(&pool->lock, flags);
sound/core/seq/seq_memory.c
300
remove_wait_queue(&pool->output_sleep, &wait);
sound/core/seq/seq_memory.c
307
if (pool->closing) { /* closing.. */
sound/core/seq/seq_memory.c
312
cell = pool->free;
sound/core/seq/seq_memory.c
315
pool->free = cell->next;
sound/core/seq/seq_memory.c
316
atomic_inc(&pool->counter);
sound/core/seq/seq_memory.c
317
used = atomic_read(&pool->counter);
sound/core/seq/seq_memory.c
318
if (pool->max_used < used)
sound/core/seq/seq_memory.c
319
pool->max_used = used;
sound/core/seq/seq_memory.c
320
pool->event_alloc_success++;
sound/core/seq/seq_memory.c
325
pool->event_alloc_failures++;
sound/core/seq/seq_memory.c
329
spin_unlock_irqrestore(&pool->lock, flags);
sound/core/seq/seq_memory.c
339
int snd_seq_event_dup(struct snd_seq_pool *pool, struct snd_seq_event *event,
sound/core/seq/seq_memory.c
356
if (ncells >= pool->total_elements)
sound/core/seq/seq_memory.c
359
err = snd_seq_cell_alloc(pool, &cell, nonblock, file, mutexp);
sound/core/seq/seq_memory.c
390
err = snd_seq_cell_alloc(pool, &tmp, nonblock, file,
sound/core/seq/seq_memory.c
426
int snd_seq_pool_poll_wait(struct snd_seq_pool *pool, struct file *file,
sound/core/seq/seq_memory.c
429
poll_wait(file, &pool->output_sleep, wait);
sound/core/seq/seq_memory.c
430
guard(spinlock_irq)(&pool->lock);
sound/core/seq/seq_memory.c
431
return snd_seq_output_ok(pool);
sound/core/seq/seq_memory.c
436
int snd_seq_pool_init(struct snd_seq_pool *pool)
sound/core/seq/seq_memory.c
441
if (snd_BUG_ON(!pool))
sound/core/seq/seq_memory.c
444
cellptr = kvmalloc_objs(struct snd_seq_event_cell, pool->size);
sound/core/seq/seq_memory.c
449
guard(spinlock_irq)(&pool->lock);
sound/core/seq/seq_memory.c
450
if (pool->ptr) {
sound/core/seq/seq_memory.c
455
pool->ptr = cellptr;
sound/core/seq/seq_memory.c
456
pool->free = NULL;
sound/core/seq/seq_memory.c
458
for (cell = 0; cell < pool->size; cell++) {
sound/core/seq/seq_memory.c
459
cellptr = pool->ptr + cell;
sound/core/seq/seq_memory.c
460
cellptr->pool = pool;
sound/core/seq/seq_memory.c
461
cellptr->next = pool->free;
sound/core/seq/seq_memory.c
462
pool->free = cellptr;
sound/core/seq/seq_memory.c
464
pool->room = (pool->size + 1) / 2;
sound/core/seq/seq_memory.c
467
pool->max_used = 0;
sound/core/seq/seq_memory.c
468
pool->total_elements = pool->size;
sound/core/seq/seq_memory.c
473
void snd_seq_pool_mark_closing(struct snd_seq_pool *pool)
sound/core/seq/seq_memory.c
475
if (snd_BUG_ON(!pool))
sound/core/seq/seq_memory.c
477
guard(spinlock_irqsave)(&pool->lock);
sound/core/seq/seq_memory.c
478
pool->closing = 1;
sound/core/seq/seq_memory.c
482
int snd_seq_pool_done(struct snd_seq_pool *pool)
sound/core/seq/seq_memory.c
486
if (snd_BUG_ON(!pool))
sound/core/seq/seq_memory.c
490
if (waitqueue_active(&pool->output_sleep))
sound/core/seq/seq_memory.c
491
wake_up(&pool->output_sleep);
sound/core/seq/seq_memory.c
493
while (atomic_read(&pool->counter) > 0)
sound/core/seq/seq_memory.c
497
scoped_guard(spinlock_irq, &pool->lock) {
sound/core/seq/seq_memory.c
498
ptr = pool->ptr;
sound/core/seq/seq_memory.c
499
pool->ptr = NULL;
sound/core/seq/seq_memory.c
500
pool->free = NULL;
sound/core/seq/seq_memory.c
501
pool->total_elements = 0;
sound/core/seq/seq_memory.c
506
guard(spinlock_irq)(&pool->lock);
sound/core/seq/seq_memory.c
507
pool->closing = 0;
sound/core/seq/seq_memory.c
516
struct snd_seq_pool *pool;
sound/core/seq/seq_memory.c
519
pool = kzalloc_obj(*pool);
sound/core/seq/seq_memory.c
520
if (!pool)
sound/core/seq/seq_memory.c
522
spin_lock_init(&pool->lock);
sound/core/seq/seq_memory.c
523
pool->ptr = NULL;
sound/core/seq/seq_memory.c
524
pool->free = NULL;
sound/core/seq/seq_memory.c
525
pool->total_elements = 0;
sound/core/seq/seq_memory.c
526
atomic_set(&pool->counter, 0);
sound/core/seq/seq_memory.c
527
pool->closing = 0;
sound/core/seq/seq_memory.c
528
init_waitqueue_head(&pool->output_sleep);
sound/core/seq/seq_memory.c
530
pool->size = poolsize;
sound/core/seq/seq_memory.c
533
pool->max_used = 0;
sound/core/seq/seq_memory.c
534
return pool;
sound/core/seq/seq_memory.c
540
struct snd_seq_pool *pool = *ppool;
sound/core/seq/seq_memory.c
543
if (pool == NULL)
sound/core/seq/seq_memory.c
545
snd_seq_pool_mark_closing(pool);
sound/core/seq/seq_memory.c
546
snd_seq_pool_done(pool);
sound/core/seq/seq_memory.c
547
kfree(pool);
sound/core/seq/seq_memory.c
553
struct snd_seq_pool *pool, char *space)
sound/core/seq/seq_memory.c
555
if (pool == NULL)
sound/core/seq/seq_memory.c
557
snd_iprintf(buffer, "%sPool size : %d\n", space, pool->total_elements);
sound/core/seq/seq_memory.c
558
snd_iprintf(buffer, "%sCells in use : %d\n", space, atomic_read(&pool->counter));
sound/core/seq/seq_memory.c
559
snd_iprintf(buffer, "%sPeak cells in use : %d\n", space, pool->max_used);
sound/core/seq/seq_memory.c
560
snd_iprintf(buffer, "%sAlloc success : %d\n", space, pool->event_alloc_success);
sound/core/seq/seq_memory.c
561
snd_iprintf(buffer, "%sAlloc failures : %d\n", space, pool->event_alloc_failures);
sound/core/seq/seq_memory.h
100
int snd_seq_pool_poll_wait(struct snd_seq_pool *pool, struct file *file, poll_table *wait);
sound/core/seq/seq_memory.h
103
struct snd_seq_pool *pool, char *space);
sound/core/seq/seq_memory.h
34
struct snd_seq_pool *pool; /* used pool */
sound/core/seq/seq_memory.h
70
int snd_seq_event_dup(struct snd_seq_pool *pool, struct snd_seq_event *event,
sound/core/seq/seq_memory.h
75
static inline int snd_seq_unused_cells(struct snd_seq_pool *pool)
sound/core/seq/seq_memory.h
77
return pool ? pool->total_elements - atomic_read(&pool->counter) : 0;
sound/core/seq/seq_memory.h
81
static inline int snd_seq_total_cells(struct snd_seq_pool *pool)
sound/core/seq/seq_memory.h
83
return pool ? pool->total_elements : 0;
sound/core/seq/seq_memory.h
87
int snd_seq_pool_init(struct snd_seq_pool *pool);
sound/core/seq/seq_memory.h
90
void snd_seq_pool_mark_closing(struct snd_seq_pool *pool);
sound/core/seq/seq_memory.h
91
int snd_seq_pool_done(struct snd_seq_pool *pool);
sound/core/seq/seq_memory.h
97
int snd_seq_pool_delete(struct snd_seq_pool **pool);
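Read together, the seq_memory.h prototypes give the whole pool lifecycle, and the seq_memory.c lines above show that snd_seq_pool_delete() itself calls snd_seq_pool_mark_closing() and snd_seq_pool_done() before kfree(), so a caller only needs the explicit pair when it wants to drain without freeing. A minimal sketch (the pool size is illustrative):

	struct snd_seq_pool *pool;

	pool = snd_seq_pool_new(500);
	if (!pool)
		return -ENOMEM;
	if (snd_seq_pool_init(pool) < 0) {	/* allocates pool->size cells */
		snd_seq_pool_delete(&pool);
		return -ENOMEM;
	}

	/* ... snd_seq_event_dup() draws cells; freeing them refills pool->free ... */

	snd_seq_pool_delete(&pool);		/* marks closing, drains, frees */
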
tools/hv/hv_kvp_daemon.c
134
static void kvp_acquire_lock(int pool)
tools/hv/hv_kvp_daemon.c
139
if (fcntl(kvp_file_info[pool].fd, F_SETLKW, &fl) == -1) {
tools/hv/hv_kvp_daemon.c
140
syslog(LOG_ERR, "Failed to acquire the lock pool: %d; error: %d %s", pool,
tools/hv/hv_kvp_daemon.c
146
static void kvp_release_lock(int pool)
tools/hv/hv_kvp_daemon.c
151
if (fcntl(kvp_file_info[pool].fd, F_SETLK, &fl) == -1) {
tools/hv/hv_kvp_daemon.c
152
syslog(LOG_ERR, "Failed to release the lock pool: %d; error: %d %s", pool,
tools/hv/hv_kvp_daemon.c
158
static void kvp_update_file(int pool)
tools/hv/hv_kvp_daemon.c
166
kvp_acquire_lock(pool);
tools/hv/hv_kvp_daemon.c
168
filep = fopen(kvp_file_info[pool].fname, "we");
tools/hv/hv_kvp_daemon.c
170
syslog(LOG_ERR, "Failed to open file, pool: %d; error: %d %s", pool,
tools/hv/hv_kvp_daemon.c
172
kvp_release_lock(pool);
tools/hv/hv_kvp_daemon.c
176
fwrite(kvp_file_info[pool].records, sizeof(struct kvp_record),
tools/hv/hv_kvp_daemon.c
177
kvp_file_info[pool].num_records, filep);
tools/hv/hv_kvp_daemon.c
1785
int pool;
tools/hv/hv_kvp_daemon.c
180
kvp_release_lock(pool);
tools/hv/hv_kvp_daemon.c
181
syslog(LOG_ERR, "Failed to write file, pool: %d", pool);
tools/hv/hv_kvp_daemon.c
185
kvp_release_lock(pool);
tools/hv/hv_kvp_daemon.c
188
static void kvp_dump_initial_pools(int pool)
tools/hv/hv_kvp_daemon.c
1893
pool = hv_msg->kvp_hdr.pool;
tools/hv/hv_kvp_daemon.c
193
pool);
tools/hv/hv_kvp_daemon.c
1945
if (kvp_key_add_or_modify(pool,
tools/hv/hv_kvp_daemon.c
195
for (i = 0; i < kvp_file_info[pool].num_records; i++)
tools/hv/hv_kvp_daemon.c
1954
if (kvp_get_value(pool,
tools/hv/hv_kvp_daemon.c
1963
if (kvp_key_delete(pool,
tools/hv/hv_kvp_daemon.c
197
pool, i + 1, kvp_file_info[pool].num_records,
tools/hv/hv_kvp_daemon.c
198
kvp_file_info[pool].records[i].key,
tools/hv/hv_kvp_daemon.c
1981
if (pool != KVP_POOL_AUTO) {
tools/hv/hv_kvp_daemon.c
1982
if (kvp_pool_enumerate(pool,
tools/hv/hv_kvp_daemon.c
199
kvp_file_info[pool].records[i].value);
tools/hv/hv_kvp_daemon.c
202
static void kvp_update_mem_state(int pool)
tools/hv/hv_kvp_daemon.c
206
struct kvp_record *record = kvp_file_info[pool].records;
tools/hv/hv_kvp_daemon.c
208
int num_blocks = kvp_file_info[pool].num_blocks;
tools/hv/hv_kvp_daemon.c
211
kvp_acquire_lock(pool);
tools/hv/hv_kvp_daemon.c
213
filep = fopen(kvp_file_info[pool].fname, "re");
tools/hv/hv_kvp_daemon.c
215
syslog(LOG_ERR, "Failed to open file, pool: %d; error: %d %s", pool,
tools/hv/hv_kvp_daemon.c
217
kvp_release_lock(pool);
tools/hv/hv_kvp_daemon.c
229
pool, errno, strerror(errno));
tools/hv/hv_kvp_daemon.c
230
kvp_release_lock(pool);
tools/hv/hv_kvp_daemon.c
243
kvp_release_lock(pool);
tools/hv/hv_kvp_daemon.c
251
kvp_file_info[pool].num_blocks = num_blocks;
tools/hv/hv_kvp_daemon.c
252
kvp_file_info[pool].records = record;
tools/hv/hv_kvp_daemon.c
253
kvp_file_info[pool].num_records = records_read;
tools/hv/hv_kvp_daemon.c
256
kvp_release_lock(pool);
tools/hv/hv_kvp_daemon.c
296
static int kvp_key_delete(int pool, const __u8 *key, int key_size)
tools/hv/hv_kvp_daemon.c
306
kvp_update_mem_state(pool);
tools/hv/hv_kvp_daemon.c
308
num_records = kvp_file_info[pool].num_records;
tools/hv/hv_kvp_daemon.c
309
record = kvp_file_info[pool].records;
tools/hv/hv_kvp_daemon.c
320
__func__, pool, record[i].key, record[i].value);
tools/hv/hv_kvp_daemon.c
322
kvp_file_info[pool].num_records--;
tools/hv/hv_kvp_daemon.c
323
kvp_update_file(pool);
tools/hv/hv_kvp_daemon.c
335
kvp_file_info[pool].num_records--;
tools/hv/hv_kvp_daemon.c
336
kvp_update_file(pool);
tools/hv/hv_kvp_daemon.c
342
__func__, pool, key);
tools/hv/hv_kvp_daemon.c
347
static int kvp_key_add_or_modify(int pool, const __u8 *key, int key_size,
tools/hv/hv_kvp_daemon.c
357
__func__, pool, key, value);
tools/hv/hv_kvp_daemon.c
366
__func__, pool, key, value);
tools/hv/hv_kvp_daemon.c
373
kvp_update_mem_state(pool);
tools/hv/hv_kvp_daemon.c
375
num_records = kvp_file_info[pool].num_records;
tools/hv/hv_kvp_daemon.c
376
record = kvp_file_info[pool].records;
tools/hv/hv_kvp_daemon.c
377
num_blocks = kvp_file_info[pool].num_blocks;
tools/hv/hv_kvp_daemon.c
387
kvp_update_file(pool);
tools/hv/hv_kvp_daemon.c
390
__func__, pool, key, value);
tools/hv/hv_kvp_daemon.c
406
kvp_file_info[pool].num_blocks++;
tools/hv/hv_kvp_daemon.c
411
kvp_file_info[pool].records = record;
tools/hv/hv_kvp_daemon.c
412
kvp_file_info[pool].num_records++;
tools/hv/hv_kvp_daemon.c
416
__func__, pool, key, value);
tools/hv/hv_kvp_daemon.c
418
kvp_update_file(pool);
tools/hv/hv_kvp_daemon.c
422
static int kvp_get_value(int pool, const __u8 *key, int key_size, __u8 *value,
tools/hv/hv_kvp_daemon.c
436
kvp_update_mem_state(pool);
tools/hv/hv_kvp_daemon.c
438
num_records = kvp_file_info[pool].num_records;
tools/hv/hv_kvp_daemon.c
439
record = kvp_file_info[pool].records;
tools/hv/hv_kvp_daemon.c
454
static int kvp_pool_enumerate(int pool, int index, __u8 *key, int key_size,
tools/hv/hv_kvp_daemon.c
462
kvp_update_mem_state(pool);
tools/hv/hv_kvp_daemon.c
463
record = kvp_file_info[pool].records;
tools/hv/hv_kvp_daemon.c
465
if (index >= kvp_file_info[pool].num_records) {
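kvp_acquire_lock()/kvp_release_lock() above are thin wrappers around POSIX record locks on the per-pool file descriptor: F_SETLKW blocks until the whole-file lock is granted, while F_SETLK with F_UNLCK drops it immediately. The same pattern standalone (the struct flock field values are assumed, not copied from the daemon; needs <fcntl.h>, <syslog.h>, <errno.h>, <string.h>):

	static void lock_pool_file(int fd, short type)	/* F_WRLCK or F_UNLCK */
	{
		struct flock fl = {
			.l_type   = type,
			.l_whence = SEEK_SET,
			.l_start  = 0,
			.l_len    = 0,			/* 0 covers the whole file */
		};

		if (fcntl(fd, type == F_UNLCK ? F_SETLK : F_SETLKW, &fl) == -1)
			syslog(LOG_ERR, "pool file lock op failed: %s",
			       strerror(errno));
	}
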
tools/sched_ext/scx_sdt.bpf.c
101
void __arena *scx_alloc_from_pool(struct sdt_pool *pool)
tools/sched_ext/scx_sdt.bpf.c
107
elem_size = pool->elem_size;
tools/sched_ext/scx_sdt.bpf.c
108
max_elems = pool->max_elems;
tools/sched_ext/scx_sdt.bpf.c
111
if (pool->idx >= max_elems) {
tools/sched_ext/scx_sdt.bpf.c
117
pool->slab = slab;
tools/sched_ext/scx_sdt.bpf.c
118
pool->idx = 0;
tools/sched_ext/scx_sdt.bpf.c
121
ptr = (void __arena *)((__u64) pool->slab + elem_size * pool->idx);
tools/sched_ext/scx_sdt.bpf.c
122
pool->idx += 1;
tools/sched_ext/scx_sdt.bpf.c
159
static int pool_set_size(struct sdt_pool *pool, __u64 data_size, __u64 nr_pages)
tools/sched_ext/scx_sdt.bpf.c
167
pool->elem_size = data_size;
tools/sched_ext/scx_sdt.bpf.c
168
pool->max_elems = (PAGE_SIZE * nr_pages) / pool->elem_size;
tools/sched_ext/scx_sdt.bpf.c
170
pool->idx = pool->max_elems;
tools/sched_ext/scx_sdt.bpf.c
202
ret = pool_set_size(&alloc->pool, data_size, min_chunk_size);
tools/sched_ext/scx_sdt.bpf.c
320
for (i = zero; i < alloc->pool.elem_size / 8 && can_loop; i++) {
tools/sched_ext/scx_sdt.bpf.c
491
data = scx_alloc_from_pool(&alloc->pool);
tools/sched_ext/scx_sdt.h
87
struct sdt_pool pool;
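scx_alloc_from_pool() above is a slab-backed bump allocator: when idx reaches max_elems a fresh slab is attached and the index resets, and pool_set_size() primes idx to max_elems so the first allocation faults in a slab. The same shape in plain C (the struct layout is inferred from the quoted field names, and new_slab() is an invented stand-in for the arena page allocation the BPF code performs):

	struct sdt_pool {
		void *slab;
		__u64 elem_size;
		__u64 max_elems;
		__u64 idx;
	};

	static void *pool_bump_alloc(struct sdt_pool *pool)
	{
		if (pool->idx >= pool->max_elems) {
			void *slab = new_slab(pool->elem_size * pool->max_elems);

			if (!slab)
				return NULL;
			pool->slab = slab;	/* start a fresh slab */
			pool->idx = 0;
		}
		return (char *)pool->slab + pool->elem_size * pool->idx++;
	}
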
tools/usb/usbip/libsrc/names.c
143
struct pool *next;
tools/usb/usbip/libsrc/names.c
147
static struct pool *pool_head;
tools/usb/usbip/libsrc/names.c
151
struct pool *p;
tools/usb/usbip/libsrc/names.c
153
p = calloc(1, sizeof(struct pool));
tools/usb/usbip/libsrc/names.c
171
struct pool *pool;
tools/usb/usbip/libsrc/names.c
176
for (pool = pool_head; pool != NULL; ) {
tools/usb/usbip/libsrc/names.c
177
struct pool *tmp;
tools/usb/usbip/libsrc/names.c
179
if (pool->mem)
tools/usb/usbip/libsrc/names.c
180
free(pool->mem);
tools/usb/usbip/libsrc/names.c
182
tmp = pool;
tools/usb/usbip/libsrc/names.c
183
pool = pool->next;