arch/arc/kernel/mcip.c
378
struct irq_domain *domain;
arch/arc/kernel/mcip.c
394
domain = irq_domain_create_linear(of_fwnode_handle(intc), nr_irqs,
arch/arc/kernel/mcip.c
411
irq_set_chained_handler_and_data(virq, idu_cascade_isr, domain);
arch/arm/common/sa1111.c
450
struct irq_domain *domain = sachip->irqdomain;
arch/arm/common/sa1111.c
462
irq_dispose_mapping(irq_find_mapping(domain, i));
arch/arm/common/sa1111.c
463
irq_domain_remove(domain);
arch/arm/include/asm/dma-iommu.h
13
struct iommu_domain *domain;
arch/arm/include/asm/domain.h
87
unsigned int domain;
arch/arm/include/asm/domain.h
91
: "=r" (domain)
arch/arm/include/asm/domain.h
94
return domain;
arch/arm/include/asm/uaccess-asm.h
50
mcr p15, 0, \tmp, c3, c0, 0 @ Set domain register
arch/arm/kernel/process.c
117
unsigned int domain;
arch/arm/kernel/process.c
125
domain = DACR_UACCESS_ENABLE;
arch/arm/kernel/process.c
127
domain = to_svc_pt_regs(regs)->dacr;
arch/arm/kernel/process.c
130
domain = get_domain();
arch/arm/kernel/process.c
163
if ((domain & domain_mask(DOMAIN_USER)) ==
arch/arm/kernel/process.c
190
transbase, domain);
arch/arm/mach-exynos/suspend.c
157
static int exynos_pmu_domain_alloc(struct irq_domain *domain,
arch/arm/mach-exynos/suspend.c
174
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
arch/arm/mach-exynos/suspend.c
178
parent_fwspec.fwnode = domain->parent->fwnode;
arch/arm/mach-exynos/suspend.c
179
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs,
arch/arm/mach-exynos/suspend.c
192
struct irq_domain *parent_domain, *domain;
arch/arm/mach-exynos/suspend.c
212
domain = irq_domain_create_hierarchy(parent_domain, 0, 0, of_fwnode_handle(node),
arch/arm/mach-exynos/suspend.c
214
if (!domain) {
arch/arm/mach-imx/avic.c
157
generic_handle_domain_irq(domain, nivector);
arch/arm/mach-imx/avic.c
204
domain = irq_domain_create_legacy(of_fwnode_handle(np), AVIC_NUM_IRQS, irq_base, 0,
arch/arm/mach-imx/avic.c
206
WARN_ON(!domain);
arch/arm/mach-imx/avic.c
49
static struct irq_domain *domain;
arch/arm/mach-imx/gpc.c
193
static int imx_gpc_domain_alloc(struct irq_domain *domain,
arch/arm/mach-imx/gpc.c
212
irq_domain_set_hwirq_and_chip(domain, irq + i, hwirq + i,
arch/arm/mach-imx/gpc.c
216
parent_fwspec.fwnode = domain->parent->fwnode;
arch/arm/mach-imx/gpc.c
217
return irq_domain_alloc_irqs_parent(domain, irq, nr_irqs,
arch/arm/mach-imx/gpc.c
230
struct irq_domain *parent_domain, *domain;
arch/arm/mach-imx/gpc.c
248
domain = irq_domain_create_hierarchy(parent_domain, 0, GPC_MAX_IRQS, of_fwnode_handle(node),
arch/arm/mach-imx/gpc.c
250
if (!domain) {
arch/arm/mach-imx/tzic.c
137
generic_handle_domain_irq(domain, irqofs + i * 32);
arch/arm/mach-imx/tzic.c
178
domain = irq_domain_create_legacy(of_fwnode_handle(np), TZIC_NUM_IRQS, irq_base, 0,
arch/arm/mach-imx/tzic.c
180
WARN_ON(!domain);
arch/arm/mach-imx/tzic.c
47
static struct irq_domain *domain;
arch/arm/mach-omap1/irq.c
159
generic_handle_domain_irq(domain, irqnr);
arch/arm/mach-omap1/irq.c
223
domain = irq_domain_create_legacy(NULL, nr_irqs, irq_base, 0, &irq_domain_simple_ops, NULL);
arch/arm/mach-omap1/irq.c
251
d = irq_get_irq_data(irq_find_mapping(domain, omap_l2_irq));
arch/arm/mach-omap1/irq.c
67
static struct irq_domain *domain;
arch/arm/mach-omap2/cm2xxx_3xxx.h
75
static inline u32 omap2_cm_read_mod_bits_shift(s16 domain, s16 idx, u32 mask)
arch/arm/mach-omap2/cm2xxx_3xxx.h
79
v = omap2_cm_read_mod_reg(domain, idx);
arch/arm/mach-omap2/omap-wakeupgen.c
509
static int wakeupgen_domain_alloc(struct irq_domain *domain,
arch/arm/mach-omap2/omap-wakeupgen.c
528
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
arch/arm/mach-omap2/omap-wakeupgen.c
532
parent_fwspec.fwnode = domain->parent->fwnode;
arch/arm/mach-omap2/omap-wakeupgen.c
533
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs,
arch/arm/mach-omap2/omap-wakeupgen.c
549
struct irq_domain *parent_domain, *domain;
arch/arm/mach-omap2/omap-wakeupgen.c
588
domain = irq_domain_create_hierarchy(parent_domain, 0, max_irqs, of_fwnode_handle(node),
arch/arm/mach-omap2/omap-wakeupgen.c
590
if (!domain) {
arch/arm/mach-omap2/prm2xxx_3xxx.h
78
static inline u32 omap2_prm_read_mod_bits_shift(s16 domain, s16 idx, u32 mask)
arch/arm/mach-omap2/prm2xxx_3xxx.h
82
v = omap2_prm_read_mod_reg(domain, idx);
arch/arm/mach-s3c/pm-s3c64xx.c
41
static int s3c64xx_pd_off(struct generic_pm_domain *domain)
arch/arm/mach-s3c/pm-s3c64xx.c
46
pd = container_of(domain, struct s3c64xx_pm_domain, pd);
arch/arm/mach-s3c/pm-s3c64xx.c
55
static int s3c64xx_pd_on(struct generic_pm_domain *domain)
arch/arm/mach-s3c/pm-s3c64xx.c
61
pd = container_of(domain, struct s3c64xx_pm_domain, pd);
arch/arm/mm/dma-mapping.c
1011
iommu_unmap(mapping->domain, iova, size);
arch/arm/mm/dma-mapping.c
1210
ret = iommu_map(mapping->domain, iova, phys, len, prot,
arch/arm/mm/dma-mapping.c
1221
iommu_unmap(mapping->domain, iova_base, count * PAGE_SIZE);
arch/arm/mm/dma-mapping.c
1384
ret = iommu_map(mapping->domain, dma_addr, addr, len, prot, GFP_KERNEL);
arch/arm/mm/dma-mapping.c
1417
phys_addr_t phys = iommu_iova_to_phys(mapping->domain, iova);
arch/arm/mm/dma-mapping.c
1422
iommu_unmap(mapping->domain, iova, len);
arch/arm/mm/dma-mapping.c
1437
phys = iommu_iova_to_phys(mapping->domain, iova);
arch/arm/mm/dma-mapping.c
1452
phys = iommu_iova_to_phys(mapping->domain, iova);
arch/arm/mm/dma-mapping.c
1528
mapping->domain = iommu_paging_domain_alloc(dev);
arch/arm/mm/dma-mapping.c
1529
if (IS_ERR(mapping->domain)) {
arch/arm/mm/dma-mapping.c
1530
err = PTR_ERR(mapping->domain);
arch/arm/mm/dma-mapping.c
1553
iommu_domain_free(mapping->domain);
arch/arm/mm/dma-mapping.c
1590
err = iommu_attach_device(mapping->domain, dev);
arch/arm/mm/dma-mapping.c
1645
iommu_detach_device(mapping->domain, dev);
arch/arm/mm/dma-mapping.c
985
ret = iommu_map(mapping->domain, iova, phys, len,
arch/arm/mm/dma-mapping.c
995
iommu_unmap(mapping->domain, dma_addr, iova-dma_addr);
arch/arm/mm/dump.c
262
unsigned int level, u64 val, const char *domain)
arch/arm/mm/dump.c
270
st->current_domain = domain;
arch/arm/mm/dump.c
273
domain != st->current_domain ||
arch/arm/mm/dump.c
305
st->current_domain = domain;
arch/arm/mm/dump.c
311
const char *domain)
arch/arm/mm/dump.c
319
note_page(st, addr, 5, pte_val(*pte), domain);
arch/arm/mm/dump.c
347
const char *domain;
arch/arm/mm/dump.c
351
domain = get_domain_name(pmd);
arch/arm/mm/dump.c
353
note_page(st, addr, 4, pmd_val(*pmd), domain);
arch/arm/mm/dump.c
355
walk_pte(st, pmd, addr, domain);
arch/arm/mm/dump.c
360
domain = get_domain_name(pmd);
arch/arm/mm/dump.c
361
note_page(st, addr, 4, pmd_val(*pmd), domain);
arch/arm/mm/mm.h
43
unsigned int domain;
arch/arm/mm/mmu.c
238
.domain = DOMAIN_IO,
arch/arm/mm/mmu.c
244
.domain = DOMAIN_IO,
arch/arm/mm/mmu.c
250
.domain = DOMAIN_IO,
arch/arm/mm/mmu.c
256
.domain = DOMAIN_IO,
arch/arm/mm/mmu.c
262
.domain = DOMAIN_IO,
arch/arm/mm/mmu.c
266
.domain = DOMAIN_KERNEL,
arch/arm/mm/mmu.c
271
.domain = DOMAIN_KERNEL,
arch/arm/mm/mmu.c
278
.domain = DOMAIN_VECTORS,
arch/arm/mm/mmu.c
284
.domain = DOMAIN_VECTORS,
arch/arm/mm/mmu.c
290
.domain = DOMAIN_KERNEL,
arch/arm/mm/mmu.c
297
.domain = DOMAIN_KERNEL,
arch/arm/mm/mmu.c
308
.domain = DOMAIN_KERNEL,
arch/arm/mm/mmu.c
312
.domain = DOMAIN_KERNEL,
arch/arm/mm/mmu.c
319
.domain = DOMAIN_KERNEL,
arch/arm/mm/mmu.c
326
.domain = DOMAIN_KERNEL,
arch/arm/mm/mmu.c
331
.domain = DOMAIN_KERNEL,
arch/arm/mm/mmu.c
339
.domain = DOMAIN_KERNEL,
arch/arm/mm/mmu.c
345
.domain = DOMAIN_KERNEL,
arch/arm/mm/mmu.c
706
t->prot_l1 |= PMD_DOMAIN(t->domain);
arch/arm/mm/mmu.c
708
t->prot_sect |= PMD_DOMAIN(t->domain);
arch/arm/mm/mmu.c
890
if (type->domain) {
arch/arm/plat-orion/gpio.c
230
return irq_create_mapping(ochip->domain,
arch/arm/plat-orion/gpio.c
49
struct irq_domain *domain;
arch/arm/plat-orion/gpio.c
605
ochip->domain = irq_domain_create_legacy(NULL,
arch/arm/plat-orion/gpio.c
611
if (!ochip->domain)
arch/arm64/include/asm/assembler.h
381
.macro dcache_by_myline_op op, domain, start, end, linesz, tmp, fixup
arch/arm64/include/asm/assembler.h
431
.macro dcache_by_line_op op, domain, start, end, tmp1, tmp2, fixup
arch/arm64/kernel/pci.c
14
int raw_pci_read(unsigned int domain, unsigned int bus,
arch/arm64/kernel/pci.c
17
struct pci_bus *b = pci_find_bus(domain, bus);
arch/arm64/kernel/pci.c
24
int raw_pci_write(unsigned int domain, unsigned int bus,
arch/arm64/kernel/pci.c
27
struct pci_bus *b = pci_find_bus(domain, bus);
arch/arm64/kvm/arch_timer.c
1294
static int timer_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/arm64/kvm/arch_timer.c
1299
return irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
arch/arm64/kvm/arch_timer.c
1303
static void timer_irq_domain_free(struct irq_domain *domain, unsigned int virq,
arch/arm64/kvm/arch_timer.c
1325
struct irq_domain *domain = NULL;
arch/arm64/kvm/arch_timer.c
1346
domain = irq_domain_create_hierarchy(data->domain, 0,
arch/arm64/kvm/arch_timer.c
1349
if (!domain) {
arch/arm64/kvm/arch_timer.c
1355
WARN_ON(irq_domain_push_irq(domain, host_vtimer_irq,
arch/arm64/kvm/arch_timer.c
1363
if (domain)
arch/arm64/kvm/arch_timer.c
1364
WARN_ON(irq_domain_push_irq(domain, host_ptimer_irq,
arch/loongarch/pci/acpi.c
199
int domain = root->segment;
arch/loongarch/pci/acpi.c
204
pr_warn("pci_bus %04x:%02x: ignored (out of memory)\n", domain, busnum);
arch/loongarch/pci/acpi.c
225
bus = pci_find_bus(domain, busnum);
arch/loongarch/pci/pci.c
24
int raw_pci_read(unsigned int domain, unsigned int bus, unsigned int devfn,
arch/loongarch/pci/pci.c
27
struct pci_bus *bus_tmp = pci_find_bus(domain, bus);
arch/loongarch/pci/pci.c
34
int raw_pci_write(unsigned int domain, unsigned int bus, unsigned int devfn,
arch/loongarch/pci/pci.c
37
struct pci_bus *bus_tmp = pci_find_bus(domain, bus);
arch/mips/ath25/ar2315.c
147
struct irq_domain *domain;
arch/mips/ath25/ar2315.c
152
domain = irq_domain_create_linear(NULL, AR2315_MISC_IRQ_COUNT,
arch/mips/ath25/ar2315.c
154
if (!domain)
arch/mips/ath25/ar2315.c
157
irq = irq_create_mapping(domain, AR2315_MISC_IRQ_AHB);
arch/mips/ath25/ar2315.c
163
ar2315_misc_irq_handler, domain);
arch/mips/ath25/ar2315.c
165
ar2315_misc_irq_domain = domain;
arch/mips/ath25/ar2315.c
76
struct irq_domain *domain = irq_desc_get_handler_data(desc);
arch/mips/ath25/ar2315.c
85
ret = generic_handle_domain_irq(domain, nr);
arch/mips/ath25/ar5312.c
141
struct irq_domain *domain;
arch/mips/ath25/ar5312.c
146
domain = irq_domain_create_linear(NULL, AR5312_MISC_IRQ_COUNT,
arch/mips/ath25/ar5312.c
148
if (!domain)
arch/mips/ath25/ar5312.c
151
irq = irq_create_mapping(domain, AR5312_MISC_IRQ_AHB_PROC);
arch/mips/ath25/ar5312.c
157
ar5312_misc_irq_handler, domain);
arch/mips/ath25/ar5312.c
159
ar5312_misc_irq_domain = domain;
arch/mips/ath25/ar5312.c
80
struct irq_domain *domain = irq_desc_get_handler_data(desc);
arch/mips/ath25/ar5312.c
84
ret = generic_handle_domain_irq(domain, nr);
arch/mips/cavium-octeon/octeon-irq.c
127
static int octeon_irq_force_ciu_mapping(struct irq_domain *domain,
arch/mips/cavium-octeon/octeon-irq.c
133
of_node = irq_domain_get_of_node(domain);
arch/mips/cavium-octeon/octeon-irq.c
140
return irq_domain_associate(domain, irq, line << 6 | bit);
arch/mips/cavium-octeon/octeon-irq.c
2610
struct irq_domain *domain;
arch/mips/cavium-octeon/octeon-irq.c
2615
domain = ciu3_info->domain[block];
arch/mips/cavium-octeon/octeon-irq.c
2617
hw = ciu3_info->intsn2hw[block](domain, intsn);
arch/mips/cavium-octeon/octeon-irq.c
2622
ret = generic_handle_domain_irq(domain, hw);
arch/mips/cavium-octeon/octeon-irq.c
2878
struct irq_domain *domain;
arch/mips/cavium-octeon/octeon-irq.c
2922
domain = irq_domain_create_tree(of_fwnode_handle(ciu_node), &octeon_dflt_domain_ciu3_ops,
arch/mips/cavium-octeon/octeon-irq.c
2925
ciu3_info->domain[i] = domain;
arch/mips/cavium-octeon/octeon-irq.c
2933
irq_set_default_domain(domain);
arch/mips/cavium-octeon/octeon-irq.c
3001
return ciu3_info->domain[block];
arch/mips/cavium-octeon/octeon-irq.c
46
struct irq_domain *domain[MAX_CIU3_DOMAINS];
arch/mips/include/asm/irq.h
60
extern void do_domain_IRQ(struct irq_domain *domain, unsigned int irq);
arch/mips/include/asm/pci/bridge.h
808
struct irq_domain *domain;
arch/mips/kernel/irq.c
112
void __irq_entry do_domain_IRQ(struct irq_domain *domain, unsigned int hwirq)
arch/mips/kernel/irq.c
116
generic_handle_domain_irq(domain, hwirq);
arch/mips/pci/pci-ar2315.c
161
struct irq_domain *domain;
arch/mips/pci/pci-ar2315.c
343
ret = generic_handle_domain_irq(apc->domain, __ffs(pending));
arch/mips/pci/pci-ar2315.c
397
apc->irq_ext = irq_create_mapping(apc->domain, AR2315_PCI_IRQ_EXT);
arch/mips/pci/pci-ar2315.c
472
apc->domain = irq_domain_create_linear(NULL, AR2315_PCI_IRQ_COUNT,
arch/mips/pci/pci-ar2315.c
474
if (!apc->domain) {
arch/mips/pci/pci-xtalk-bridge.c
334
static int bridge_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/mips/pci/pci-xtalk-bridge.c
348
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, arg);
arch/mips/pci/pci-xtalk-bridge.c
352
irq_domain_set_info(domain, virq, info->pin, &bridge_irq_chip,
arch/mips/pci/pci-xtalk-bridge.c
361
static void bridge_domain_free(struct irq_domain *domain, unsigned int virq,
arch/mips/pci/pci-xtalk-bridge.c
364
struct irq_data *irqd = irq_domain_get_irq_data(domain, virq);
arch/mips/pci/pci-xtalk-bridge.c
370
irq_domain_free_irqs_top(domain, virq, nr_irqs);
arch/mips/pci/pci-xtalk-bridge.c
373
static int bridge_domain_activate(struct irq_domain *domain,
arch/mips/pci/pci-xtalk-bridge.c
410
static void bridge_domain_deactivate(struct irq_domain *domain,
arch/mips/pci/pci-xtalk-bridge.c
458
irq = irq_domain_alloc_irqs(bc->domain, 1, bc->nasid, &info);
arch/mips/pci/pci-xtalk-bridge.c
613
struct irq_domain *domain, *parent;
arch/mips/pci/pci-xtalk-bridge.c
629
domain = irq_domain_create_hierarchy(parent, 0, 8, fn,
arch/mips/pci/pci-xtalk-bridge.c
631
if (!domain) {
arch/mips/pci/pci-xtalk-bridge.c
651
bc->domain = domain;
arch/mips/pci/pci-xtalk-bridge.c
731
irq_domain_remove(domain);
arch/mips/pci/pci-xtalk-bridge.c
740
struct fwnode_handle *fn = bc->domain->fwnode;
arch/mips/pci/pci-xtalk-bridge.c
742
irq_domain_remove(bc->domain);
arch/mips/ralink/irq.c
103
struct irq_domain *domain = irq_desc_get_handler_data(desc);
arch/mips/ralink/irq.c
104
generic_handle_domain_irq(domain, __ffs(pending));
arch/mips/ralink/irq.c
151
struct irq_domain *domain;
arch/mips/ralink/irq.c
180
domain = irq_domain_create_legacy(of_fwnode_handle(node), RALINK_INTC_IRQ_COUNT,
arch/mips/ralink/irq.c
182
if (!domain)
arch/mips/ralink/irq.c
187
irq_set_chained_handler_and_data(irq, ralink_intc_irq_handler, domain);
arch/mips/ralink/irq.c
190
rt_perfcount_irq = irq_create_mapping(domain, 9);
arch/mips/sgi-ip27/ip27-irq.c
120
static int hub_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/mips/sgi-ip27/ip27-irq.c
141
irq_domain_set_info(domain, virq, swlevel, &hub_irq_type, hd,
arch/mips/sgi-ip27/ip27-irq.c
159
static void hub_domain_free(struct irq_domain *domain,
arch/mips/sgi-ip27/ip27-irq.c
167
irqd = irq_domain_get_irq_data(domain, virq);
arch/mips/sgi-ip27/ip27-irq.c
193
struct irq_domain *domain;
arch/mips/sgi-ip27/ip27-irq.c
220
domain = irq_desc_get_handler_data(desc);
arch/mips/sgi-ip27/ip27-irq.c
221
ret = generic_handle_domain_irq(domain, __ffs(pend0));
arch/mips/sgi-ip27/ip27-irq.c
233
struct irq_domain *domain;
arch/mips/sgi-ip27/ip27-irq.c
244
domain = irq_desc_get_handler_data(desc);
arch/mips/sgi-ip27/ip27-irq.c
245
ret = generic_handle_domain_irq(domain, __ffs(pend1) + 64);
arch/mips/sgi-ip27/ip27-irq.c
278
struct irq_domain *domain;
arch/mips/sgi-ip27/ip27-irq.c
295
domain = irq_domain_create_linear(fn, IP27_HUB_IRQ_COUNT,
arch/mips/sgi-ip27/ip27-irq.c
297
if (WARN_ON(domain == NULL))
arch/mips/sgi-ip27/ip27-irq.c
300
irq_set_default_domain(domain);
arch/mips/sgi-ip27/ip27-irq.c
304
domain);
arch/mips/sgi-ip27/ip27-irq.c
307
domain);
arch/mips/sgi-ip30/ip30-irq.c
100
struct irq_domain *domain;
arch/mips/sgi-ip30/ip30-irq.c
132
domain = irq_desc_get_handler_data(desc);
arch/mips/sgi-ip30/ip30-irq.c
133
ret = generic_handle_domain_irq(domain, __ffs(pend));
arch/mips/sgi-ip30/ip30-irq.c
202
static int heart_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/mips/sgi-ip30/ip30-irq.c
221
irq_domain_set_info(domain, virq, hwirq, &heart_irq_chip, hd,
arch/mips/sgi-ip30/ip30-irq.c
227
static void heart_domain_free(struct irq_domain *domain,
arch/mips/sgi-ip30/ip30-irq.c
235
irqd = irq_domain_get_irq_data(domain, virq);
arch/mips/sgi-ip30/ip30-irq.c
264
struct irq_domain *domain;
arch/mips/sgi-ip30/ip30-irq.c
310
domain = irq_domain_create_linear(fn, HEART_NUM_IRQS,
arch/mips/sgi-ip30/ip30-irq.c
312
WARN_ON(domain == NULL);
arch/mips/sgi-ip30/ip30-irq.c
313
if (!domain)
arch/mips/sgi-ip30/ip30-irq.c
316
irq_set_default_domain(domain);
arch/mips/sgi-ip30/ip30-irq.c
320
domain);
arch/mips/sgi-ip30/ip30-irq.c
323
domain);
arch/mips/sgi-ip30/ip30-irq.c
326
domain);
arch/mips/sgi-ip30/ip30-irq.c
329
domain);
arch/nios2/kernel/irq.c
63
struct irq_domain *domain;
arch/nios2/kernel/irq.c
72
domain = irq_domain_create_linear(of_fwnode_handle(node),
arch/nios2/kernel/irq.c
74
BUG_ON(!domain);
arch/nios2/kernel/irq.c
76
irq_set_default_domain(domain);
arch/powerpc/include/asm/fsl_pamu_stash.h
19
int fsl_pamu_configure_l1_stash(struct iommu_domain *domain, u32 cpu);
arch/powerpc/include/asm/imc-pmu.h
127
int domain;
arch/powerpc/kernel/eeh.c
1679
uint32_t domain, bus, dev, fn;
arch/powerpc/kernel/eeh.c
1689
ret = sscanf(buf, "%x:%x:%x.%x", &domain, &bus, &dev, &fn);
arch/powerpc/kernel/eeh.c
1695
pdev = pci_get_domain_bus_and_slot(domain, bus, (dev << 3) | fn);
arch/powerpc/kernel/iommu.c
1162
struct iommu_domain *domain = iommu_driver_get_domain_for_dev(dev);
arch/powerpc/kernel/iommu.c
1167
if (!domain)
arch/powerpc/perf/hv-24x7-catalog.h
37
__u8 domain; /* Chip = 1, Core = 2 */
arch/powerpc/perf/hv-24x7.c
110
static bool domain_needs_aggregation(unsigned int domain)
arch/powerpc/perf/hv-24x7.c
113
(domain == HV_PERF_DOMAIN_PHYS_CORE ||
arch/powerpc/perf/hv-24x7.c
114
(domain >= HV_PERF_DOMAIN_VCPU_HOME_CORE &&
arch/powerpc/perf/hv-24x7.c
115
domain <= HV_PERF_DOMAIN_VCPU_REMOTE_NODE));
arch/powerpc/perf/hv-24x7.c
118
static const char *domain_name(unsigned int domain)
arch/powerpc/perf/hv-24x7.c
120
if (!domain_is_valid(domain))
arch/powerpc/perf/hv-24x7.c
123
switch (domain) {
arch/powerpc/perf/hv-24x7.c
132
WARN_ON_ONCE(domain);
arch/powerpc/perf/hv-24x7.c
136
static bool catalog_entry_domain_is_valid(unsigned int domain)
arch/powerpc/perf/hv-24x7.c
1376
unsigned int domain;
arch/powerpc/perf/hv-24x7.c
140
return is_physical_domain(domain);
arch/powerpc/perf/hv-24x7.c
1408
domain = event_get_domain(event);
arch/powerpc/perf/hv-24x7.c
1409
if (domain == 0 || domain >= HV_PERF_DOMAIN_MAX) {
arch/powerpc/perf/hv-24x7.c
1410
pr_devel("invalid domain %d\n", domain);
arch/powerpc/perf/hv-24x7.c
142
return domain_is_valid(domain);
arch/powerpc/perf/hv-24x7.c
1421
if (!caps.collect_privileged && (is_physical_domain(domain) ||
arch/powerpc/perf/hv-24x7.c
1424
is_physical_domain(domain),
arch/powerpc/perf/hv-24x7.c
170
EVENT_DEFINE_RANGE_FORMAT(domain, config, 0, 3);
arch/powerpc/perf/hv-24x7.c
37
static bool domain_is_valid(unsigned int domain)
arch/powerpc/perf/hv-24x7.c
388
static char *event_fmt(struct hv_24x7_event_data *event, unsigned int domain)
arch/powerpc/perf/hv-24x7.c
39
switch (domain) {
arch/powerpc/perf/hv-24x7.c
395
switch (domain) {
arch/powerpc/perf/hv-24x7.c
397
snprintf(buf, sizeof(buf), "%d", domain);
arch/powerpc/perf/hv-24x7.c
51
static bool is_physical_domain(unsigned int domain)
arch/powerpc/perf/hv-24x7.c
511
unsigned int domain,
arch/powerpc/perf/hv-24x7.c
518
if (!domain_is_valid(domain)) {
arch/powerpc/perf/hv-24x7.c
520
ix, domain);
arch/powerpc/perf/hv-24x7.c
524
val = event_fmt(event, domain);
arch/powerpc/perf/hv-24x7.c
53
switch (domain) {
arch/powerpc/perf/hv-24x7.c
582
*attrs = event_to_attr(ix, event, event->domain, nonce);
arch/powerpc/perf/hv-24x7.c
595
unsigned int domain;
arch/powerpc/perf/hv-24x7.c
623
unsigned int domain)
arch/powerpc/perf/hv-24x7.c
634
result = ev_uniq_ord(name, nl, domain, it->name, it->nl,
arch/powerpc/perf/hv-24x7.c
635
it->domain);
arch/powerpc/perf/hv-24x7.c
658
.domain = domain,
arch/powerpc/perf/hv-24x7.c
893
if (!catalog_entry_domain_is_valid(event->domain)) {
arch/powerpc/perf/hv-24x7.c
895
event_idx, nl, name, event->domain);
arch/powerpc/perf/hv-24x7.c
941
if (!catalog_entry_domain_is_valid(event->domain))
arch/powerpc/perf/hv-24x7.c
948
nonce = event_uniq_add(&ev_uniq, name, nl, event->domain);
arch/powerpc/perf/imc-pmu.c
109
switch(imc_pmu->domain){
arch/powerpc/perf/imc-pmu.c
1492
switch (pmu->domain) {
arch/powerpc/perf/imc-pmu.c
1637
if (pmu_ptr->domain == IMC_DOMAIN_NEST) {
arch/powerpc/perf/imc-pmu.c
1652
if (pmu_ptr->domain == IMC_DOMAIN_CORE) {
arch/powerpc/perf/imc-pmu.c
1658
if (pmu_ptr->domain == IMC_DOMAIN_THREAD) {
arch/powerpc/perf/imc-pmu.c
1663
if (pmu_ptr->domain == IMC_DOMAIN_TRACE) {
arch/powerpc/perf/imc-pmu.c
1692
switch (pmu_ptr->domain) {
arch/powerpc/perf/imc-pmu.c
1793
switch (pmu_ptr->domain) {
arch/powerpc/platforms/powernv/ocxl.c
144
if (link->domain == pci_domain_nr(dev->bus) &&
arch/powerpc/platforms/powernv/ocxl.c
155
link->domain = pci_domain_nr(dev->bus);
arch/powerpc/platforms/powernv/ocxl.c
242
link->domain, link->bus, link->dev, i,
arch/powerpc/platforms/powernv/ocxl.c
27
int domain;
arch/powerpc/platforms/powernv/opal-imc.c
137
static struct imc_pmu *imc_pmu_create(struct device_node *parent, int pmu_index, int domain)
arch/powerpc/platforms/powernv/opal-imc.c
144
if (domain < 0)
arch/powerpc/platforms/powernv/opal-imc.c
153
pmu_ptr->domain = domain;
arch/powerpc/platforms/powernv/opal-imc.c
169
if (pmu_ptr->domain == IMC_DOMAIN_NEST)
arch/powerpc/platforms/powernv/opal-imc.c
237
int pmu_count = 0, domain;
arch/powerpc/platforms/powernv/opal-imc.c
260
domain = IMC_DOMAIN_NEST;
arch/powerpc/platforms/powernv/opal-imc.c
263
domain =IMC_DOMAIN_CORE;
arch/powerpc/platforms/powernv/opal-imc.c
266
domain = IMC_DOMAIN_THREAD;
arch/powerpc/platforms/powernv/opal-imc.c
269
domain = IMC_DOMAIN_TRACE;
arch/powerpc/platforms/powernv/opal-imc.c
273
domain = -1;
arch/powerpc/platforms/powernv/opal-imc.c
277
pmu = imc_pmu_create(imc_dev, pmu_count, domain);
arch/powerpc/platforms/powernv/opal-imc.c
279
if (domain == IMC_DOMAIN_NEST) {
arch/powerpc/platforms/powernv/opal-imc.c
284
if (domain == IMC_DOMAIN_CORE)
arch/powerpc/platforms/powernv/opal-imc.c
286
if (domain == IMC_DOMAIN_THREAD)
arch/powerpc/platforms/powernv/opal-irqchip.c
194
opal_event_irqchip.domain = irq_domain_create_linear(of_fwnode_handle(dn),
arch/powerpc/platforms/powernv/opal-irqchip.c
198
if (!opal_event_irqchip.domain) {
arch/powerpc/platforms/powernv/opal-irqchip.c
309
if (WARN_ON_ONCE(!opal_event_irqchip.domain))
arch/powerpc/platforms/powernv/opal-irqchip.c
312
return irq_create_mapping(opal_event_irqchip.domain, opal_event_nr);
arch/powerpc/platforms/powernv/opal-irqchip.c
33
struct irq_domain *domain;
arch/powerpc/platforms/powernv/opal-irqchip.c
56
generic_handle_domain_irq(opal_event_irqchip.domain, hwirq);
arch/powerpc/platforms/powernv/pci-ioda.c
1717
static bool pnv_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
arch/powerpc/platforms/powernv/pci-ioda.c
1722
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
arch/powerpc/platforms/powernv/pci-ioda.c
1792
static int pnv_irq_parent_domain_alloc(struct irq_domain *domain,
arch/powerpc/platforms/powernv/pci-ioda.c
1798
parent_fwspec.fwnode = domain->parent->fwnode;
arch/powerpc/platforms/powernv/pci-ioda.c
1803
ret = irq_domain_alloc_irqs_parent(domain, virq, 1, &parent_fwspec);
arch/powerpc/platforms/powernv/pci-ioda.c
1810
static int pnv_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/powerpc/platforms/powernv/pci-ioda.c
1813
struct pci_controller *hose = domain->host_data;
arch/powerpc/platforms/powernv/pci-ioda.c
1830
ret = pnv_irq_parent_domain_alloc(domain, virq + i,
arch/powerpc/platforms/powernv/pci-ioda.c
1835
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
arch/powerpc/platforms/powernv/pci-ioda.c
1842
irq_domain_free_irqs_parent(domain, virq, i);
arch/powerpc/platforms/powernv/pci-ioda.c
1847
static void pnv_irq_domain_free(struct irq_domain *domain, unsigned int virq,
arch/powerpc/platforms/powernv/pci-ioda.c
1850
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
arch/powerpc/platforms/powernv/pci-ioda.c
1858
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
arch/powerpc/platforms/pseries/msi.c
435
static int pseries_msi_ops_prepare(struct irq_domain *domain, struct device *dev,
arch/powerpc/platforms/pseries/msi.c
438
struct msi_domain_info *info = domain->host_data;
arch/powerpc/platforms/pseries/msi.c
469
static void pseries_msi_ops_teardown(struct irq_domain *domain, msi_alloc_info_t *arg)
arch/powerpc/platforms/pseries/msi.c
472
struct pci_dev *pdev = to_pci_dev(domain->dev);
arch/powerpc/platforms/pseries/msi.c
501
static bool pseries_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
arch/powerpc/platforms/pseries/msi.c
506
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
arch/powerpc/platforms/pseries/msi.c
556
static int pseries_irq_parent_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/powerpc/platforms/pseries/msi.c
562
parent_fwspec.fwnode = domain->parent->fwnode;
arch/powerpc/platforms/pseries/msi.c
567
ret = irq_domain_alloc_irqs_parent(domain, virq, 1, &parent_fwspec);
arch/powerpc/platforms/pseries/msi.c
574
static int pseries_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/powerpc/platforms/pseries/msi.c
577
struct pci_controller *phb = domain->host_data;
arch/powerpc/platforms/pseries/msi.c
600
ret = pseries_irq_parent_domain_alloc(domain, virq + i, hwirq + i);
arch/powerpc/platforms/pseries/msi.c
604
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
arch/powerpc/platforms/pseries/msi.c
613
irq_domain_free_irqs_parent(domain, virq, i);
arch/powerpc/platforms/pseries/msi.c
617
static void pseries_irq_domain_free(struct irq_domain *domain, unsigned int virq,
arch/powerpc/platforms/pseries/msi.c
620
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
arch/powerpc/platforms/pseries/msi.c
622
struct pci_controller *phb = domain->host_data;
arch/powerpc/platforms/pseries/msi.c
626
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
arch/powerpc/platforms/pseries/vas.c
263
u64 *domain, u8 wintype)
arch/powerpc/platforms/pseries/vas.c
267
rc = h_allocate_vas_window(txwin, domain, wintype, DEF_WIN_CREDS);
arch/powerpc/platforms/pseries/vas.c
321
long domain[PLPAR_HCALL9_BUFSIZE] = {VAS_DEFAULT_DOMAIN_ID};
arch/powerpc/platforms/pseries/vas.c
382
rc = plpar_hcall9(H_HOME_NODE_ASSOCIATIVITY, domain,
arch/powerpc/platforms/pseries/vas.c
411
rc = allocate_setup_window(txwin, (u64 *)&domain[0],
arch/powerpc/platforms/pseries/vas.c
654
long domain[PLPAR_HCALL9_BUFSIZE] = {VAS_DEFAULT_DOMAIN_ID};
arch/powerpc/platforms/pseries/vas.c
721
rc = allocate_setup_window(win, (u64 *)&domain[0],
arch/powerpc/platforms/pseries/vas.c
73
static int h_allocate_vas_window(struct pseries_vas_window *win, u64 *domain,
arch/powerpc/platforms/pseries/vas.c
81
credits, domain[0], domain[1], domain[2],
arch/powerpc/platforms/pseries/vas.c
82
domain[3], domain[4], domain[5]);
arch/powerpc/platforms/pseries/vas.h
117
__be64 domain[6];
arch/powerpc/platforms/pseries/vas.h
127
u64 domain[6]; /* Associativity domain Ids */
arch/powerpc/sysdev/fsl_msi.c
71
struct fsl_msi *msi_data = irqd->domain->host_data;
arch/powerpc/sysdev/xics/xics-common.c
326
static int xics_host_map(struct irq_domain *domain, unsigned int virq,
arch/powerpc/sysdev/xics/xics-common.c
352
irq_domain_set_info(domain, virq, hwirq, xics_ics->chip,
arch/powerpc/sysdev/xics/xics-common.c
422
static int xics_host_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/powerpc/sysdev/xics/xics-common.c
430
rc = xics_host_domain_translate(domain, fwspec, &hwirq, &type);
arch/powerpc/sysdev/xics/xics-common.c
437
irq_domain_set_info(domain, virq + i, hwirq + i, xics_ics->chip,
arch/powerpc/sysdev/xics/xics-common.c
443
static void xics_host_domain_free(struct irq_domain *domain,
arch/powerpc/sysdev/xive/common.c
1113
static int xive_ipi_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/powerpc/sysdev/xive/common.c
1120
irq_domain_set_info(domain, virq + i, info->hwirq + i, &xive_ipi_chip,
arch/powerpc/sysdev/xive/common.c
1121
domain->host_data, handle_percpu_irq,
arch/powerpc/sysdev/xive/common.c
1402
static int xive_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/powerpc/sysdev/xive/common.c
1411
rc = xive_irq_domain_translate(domain, fwspec, &hwirq, &type);
arch/powerpc/sysdev/xive/common.c
1431
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i, &xive_irq_chip, xd);
arch/powerpc/sysdev/xive/common.c
1438
static void xive_irq_domain_free(struct irq_domain *domain,
arch/powerpc/sysdev/xive/common.c
1571
if (d->domain != xive_irq_domain)
arch/riscv/kernel/acpi.c
319
int raw_pci_read(unsigned int domain, unsigned int bus,
arch/riscv/kernel/acpi.c
322
struct pci_bus *b = pci_find_bus(domain, bus);
arch/riscv/kernel/acpi.c
329
int raw_pci_write(unsigned int domain, unsigned int bus,
arch/riscv/kernel/acpi.c
332
struct pci_bus *b = pci_find_bus(domain, bus);
arch/riscv/kernel/sbi-ipi.c
42
struct irq_domain *domain;
arch/riscv/kernel/sbi-ipi.c
47
domain = irq_find_matching_fwnode(riscv_get_intc_hwnode(),
arch/riscv/kernel/sbi-ipi.c
49
if (!domain) {
arch/riscv/kernel/sbi-ipi.c
54
sbi_ipi_virq = irq_create_mapping(domain, RV_IRQ_SOFT);
arch/riscv/kvm/aia.c
490
struct irq_domain *domain;
arch/riscv/kvm/aia.c
510
domain = irq_find_matching_fwnode(riscv_get_intc_hwnode(),
arch/riscv/kvm/aia.c
512
if (!domain) {
arch/riscv/kvm/aia.c
518
hgei_parent_irq = irq_create_mapping(domain, IRQ_S_GEXT);
arch/s390/include/uapi/asm/pkey.h
125
__u16 domain; /* in: domain or FFFF for any */
arch/s390/include/uapi/asm/pkey.h
137
__u16 domain; /* in: domain or FFFF for any */
arch/s390/include/uapi/asm/pkey.h
150
__u16 domain; /* in: domain or FFFF for any */
arch/s390/include/uapi/asm/pkey.h
175
__u16 domain; /* out: domain number */
arch/s390/include/uapi/asm/pkey.h
201
__u16 domain; /* out: domain number */
arch/s390/include/uapi/asm/pkey.h
338
__u16 domain; /* in/out: domain number */
arch/s390/include/uapi/asm/pkey.h
86
__u16 domain;
arch/s390/include/uapi/asm/zcrypt.h
120
__u16 domain; /* Domain */
arch/s390/pci/pci.c
654
static int __zpci_register_domain(int domain)
arch/s390/pci/pci.c
657
if (test_bit(domain, zpci_domain)) {
arch/s390/pci/pci.c
659
pr_err("Domain %04x is already assigned\n", domain);
arch/s390/pci/pci.c
662
set_bit(domain, zpci_domain);
arch/s390/pci/pci.c
664
return domain;
arch/s390/pci/pci.c
669
int domain;
arch/s390/pci/pci.c
677
domain = find_first_zero_bit(zpci_domain, ZPCI_NR_DEVICES);
arch/s390/pci/pci.c
678
set_bit(domain, zpci_domain);
arch/s390/pci/pci.c
680
return domain;
arch/s390/pci/pci.c
683
int zpci_alloc_domain(int domain)
arch/s390/pci/pci.c
686
if (domain)
arch/s390/pci/pci.c
687
return __zpci_register_domain(domain);
arch/s390/pci/pci.c
694
void zpci_free_domain(int domain)
arch/s390/pci/pci.c
697
clear_bit(domain, zpci_domain);
arch/s390/pci/pci_bus.c
175
int domain;
arch/s390/pci/pci_bus.c
177
domain = zpci_alloc_domain((u16)fr->uid);
arch/s390/pci/pci_bus.c
178
if (domain < 0)
arch/s390/pci/pci_bus.c
179
return domain;
arch/s390/pci/pci_bus.c
181
zbus->domain_nr = domain;
arch/s390/pci/pci_bus.h
45
int zpci_alloc_domain(int domain);
arch/s390/pci/pci_bus.h
46
void zpci_free_domain(int domain);
arch/s390/pci/pci_irq.c
357
static void zpci_msi_teardown(struct irq_domain *domain, msi_alloc_info_t *arg)
arch/s390/pci/pci_irq.c
359
struct zpci_dev *zdev = to_zpci_dev(domain->dev);
arch/s390/pci/pci_irq.c
368
static int zpci_msi_prepare(struct irq_domain *domain,
arch/s390/pci/pci_irq.c
405
static int zpci_msi_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/s390/pci/pci_irq.c
422
irq_domain_set_info(domain, virq + i, hwirq + i,
arch/s390/pci/pci_irq.c
464
static void zpci_msi_domain_free(struct irq_domain *domain, unsigned int virq,
arch/s390/pci/pci_irq.c
471
d = irq_domain_get_irq_data(domain, virq + i);
arch/s390/pci/pci_irq.c
482
static bool zpci_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
arch/s390/pci/pci_irq.c
486
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
arch/sh/boards/mach-x3proto/gpio.c
85
static int x3proto_gpio_irq_map(struct irq_domain *domain, unsigned int virq,
arch/um/drivers/virt-pci.c
362
static int um_pci_inner_domain_alloc(struct irq_domain *domain,
arch/um/drivers/virt-pci.c
380
irq_domain_set_info(domain, virq, bit, &um_pci_msi_bottom_irq_chip,
arch/um/drivers/virt-pci.c
381
domain->host_data, handle_simple_irq,
arch/um/drivers/virt-pci.c
387
static void um_pci_inner_domain_free(struct irq_domain *domain,
arch/um/drivers/virt-pci.c
390
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
arch/x86/events/intel/uncore.c
1815
.domain[0].discovery_base = UNCORE_DISCOVERY_MSR,
arch/x86/events/intel/uncore.c
1816
.domain[0].global_init = uncore_mmio_global_init,
arch/x86/events/intel/uncore.c
1822
.domain[0].discovery_base = PACKAGE_UNCORE_DISCOVERY_MSR,
arch/x86/events/intel/uncore.c
1823
.domain[0].global_init = uncore_mmio_global_init,
arch/x86/events/intel/uncore.c
1842
.domain[0].base_is_pci = true,
arch/x86/events/intel/uncore.c
1843
.domain[0].discovery_base = UNCORE_DISCOVERY_TABLE_DEVICE,
arch/x86/events/intel/uncore.c
1844
.domain[0].units_ignore = spr_uncore_units_ignore,
arch/x86/events/intel/uncore.c
1851
.domain[0].base_is_pci = true,
arch/x86/events/intel/uncore.c
1852
.domain[0].discovery_base = UNCORE_DISCOVERY_TABLE_DEVICE,
arch/x86/events/intel/uncore.c
1853
.domain[0].units_ignore = gnr_uncore_units_ignore,
arch/x86/events/intel/uncore.c
1859
.domain[0].base_is_pci = true,
arch/x86/events/intel/uncore.c
1860
.domain[0].discovery_base = DMR_UNCORE_DISCOVERY_TABLE_DEVICE,
arch/x86/events/intel/uncore.c
1861
.domain[0].units_ignore = dmr_uncore_imh_units_ignore,
arch/x86/events/intel/uncore.c
1862
.domain[1].discovery_base = CBB_UNCORE_DISCOVERY_MSR,
arch/x86/events/intel/uncore.c
1863
.domain[1].units_ignore = dmr_uncore_cbb_units_ignore,
arch/x86/events/intel/uncore.c
1864
.domain[1].global_init = uncore_mmio_global_init,
arch/x86/events/intel/uncore.c
1871
.domain[0].base_is_pci = true,
arch/x86/events/intel/uncore.c
1872
.domain[0].discovery_base = PCI_ANY_ID,
arch/x86/events/intel/uncore.c
1873
.domain[1].discovery_base = UNCORE_DISCOVERY_MSR,
arch/x86/events/intel/uncore.c
1944
if (config->domain[i].discovery_base)
arch/x86/events/intel/uncore.h
68
struct uncore_discovery_domain domain[UNCORE_DISCOVERY_DOMAINS];
arch/x86/events/intel/uncore_discovery.c
245
struct uncore_discovery_domain *domain)
arch/x86/events/intel/uncore_discovery.c
249
if (!domain || !domain->units_ignore)
arch/x86/events/intel/uncore_discovery.c
252
for (i = 0; domain->units_ignore[i] != UNCORE_IGNORE_END ; i++) {
arch/x86/events/intel/uncore_discovery.c
253
if (unit->box_type == domain->units_ignore[i])
arch/x86/events/intel/uncore_discovery.c
260
static int __parse_discovery_table(struct uncore_discovery_domain *domain,
arch/x86/events/intel/uncore_discovery.c
289
if (domain->global_init && domain->global_init(global.ctl))
arch/x86/events/intel/uncore_discovery.c
303
if (uncore_ignore_unit(&unit, domain))
arch/x86/events/intel/uncore_discovery.c
314
static int parse_discovery_table(struct uncore_discovery_domain *domain,
arch/x86/events/intel/uncore_discovery.c
336
return __parse_discovery_table(domain, addr, die, parsed);
arch/x86/events/intel/uncore_discovery.c
339
static bool uncore_discovery_pci(struct uncore_discovery_domain *domain)
arch/x86/events/intel/uncore_discovery.c
346
device = domain->discovery_base;
arch/x86/events/intel/uncore_discovery.c
372
parse_discovery_table(domain, dev, die, bar_offset, &parsed);
arch/x86/events/intel/uncore_discovery.c
385
static bool uncore_discovery_msr(struct uncore_discovery_domain *domain)
arch/x86/events/intel/uncore_discovery.c
403
if (rdmsrq_safe_on_cpu(cpu, domain->discovery_base, &base))
arch/x86/events/intel/uncore_discovery.c
409
__parse_discovery_table(domain, base, die, &parsed);
arch/x86/events/intel/uncore_discovery.c
420
struct uncore_discovery_domain *domain;
arch/x86/events/intel/uncore_discovery.c
425
domain = &init->domain[i];
arch/x86/events/intel/uncore_discovery.c
426
if (domain->discovery_base) {
arch/x86/events/intel/uncore_discovery.c
427
if (!domain->base_is_pci)
arch/x86/events/intel/uncore_discovery.c
428
ret |= uncore_discovery_msr(domain);
arch/x86/events/intel/uncore_discovery.c
430
ret |= uncore_discovery_pci(domain);
arch/x86/hyperv/irqdomain.c
304
static bool hv_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
arch/x86/hyperv/irqdomain.c
309
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
arch/x86/include/asm/irqdomain.h
43
extern int mp_irqdomain_alloc(struct irq_domain *domain, unsigned int virq,
arch/x86/include/asm/irqdomain.h
45
extern void mp_irqdomain_free(struct irq_domain *domain, unsigned int virq,
arch/x86/include/asm/irqdomain.h
47
extern int mp_irqdomain_activate(struct irq_domain *domain,
arch/x86/include/asm/irqdomain.h
49
extern void mp_irqdomain_deactivate(struct irq_domain *domain,
arch/x86/include/asm/irqdomain.h
51
extern int mp_irqdomain_ioapic_idx(struct irq_domain *domain);
arch/x86/include/asm/msi.h
9
int pci_msi_prepare(struct irq_domain *domain, struct device *dev, int nvec,
arch/x86/include/asm/pci.h
15
int domain; /* PCI domain */
arch/x86/include/asm/pci.h
45
return to_pci_sysdata(bus)->domain;
arch/x86/include/asm/pci_x86.h
119
int (*read)(unsigned int domain, unsigned int bus, unsigned int devfn,
arch/x86/include/asm/pci_x86.h
121
int (*write)(unsigned int domain, unsigned int bus, unsigned int devfn,
arch/x86/include/asm/uv/bios.h
185
extern int uv_bios_set_legacy_vga_target(bool decode, int domain, int bus);
arch/x86/kernel/apic/io_apic.c
1052
if (!irq_data || !irq_data->domain)
arch/x86/kernel/apic/io_apic.c
2011
struct irq_domain *domain = mp_ioapic_irqdomain(ioapic);
arch/x86/kernel/apic/io_apic.c
2014
if (domain) {
arch/x86/kernel/apic/io_apic.c
2021
irq = alloc_isa_irq_from_domain(domain, 0, ioapic, pin, &info);
arch/x86/kernel/apic/io_apic.c
2859
int mp_irqdomain_alloc(struct irq_domain *domain, unsigned int virq,
arch/x86/kernel/apic/io_apic.c
2870
irq_data = irq_domain_get_irq_data(domain, virq);
arch/x86/kernel/apic/io_apic.c
2874
ioapic = mp_irqdomain_ioapic_idx(domain);
arch/x86/kernel/apic/io_apic.c
2876
if (irq_resolve_mapping(domain, (irq_hw_number_t)pin))
arch/x86/kernel/apic/io_apic.c
2883
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, info);
arch/x86/kernel/apic/io_apic.c
2889
irq_data->chip = (domain->parent == x86_vector_domain) ?
arch/x86/kernel/apic/io_apic.c
2912
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
arch/x86/kernel/apic/io_apic.c
2918
void mp_irqdomain_free(struct irq_domain *domain, unsigned int virq,
arch/x86/kernel/apic/io_apic.c
2925
irq_data = irq_domain_get_irq_data(domain, virq);
arch/x86/kernel/apic/io_apic.c
2928
__remove_pin_from_irq(data, mp_irqdomain_ioapic_idx(domain), (int)irq_data->hwirq);
arch/x86/kernel/apic/io_apic.c
2932
irq_domain_free_irqs_top(domain, virq, nr_irqs);
arch/x86/kernel/apic/io_apic.c
2935
int mp_irqdomain_activate(struct irq_domain *domain, struct irq_data *irq_data, bool reserve)
arch/x86/kernel/apic/io_apic.c
2942
void mp_irqdomain_deactivate(struct irq_domain *domain,
arch/x86/kernel/apic/io_apic.c
2946
ioapic_mask_entry(mp_irqdomain_ioapic_idx(domain), (int)irq_data->hwirq);
arch/x86/kernel/apic/io_apic.c
2949
int mp_irqdomain_ioapic_idx(struct irq_domain *domain)
arch/x86/kernel/apic/io_apic.c
2951
return (int)(long)domain->host_data;
arch/x86/kernel/apic/io_apic.c
882
static int alloc_irq_from_domain(struct irq_domain *domain, int ioapic, u32 gsi,
arch/x86/kernel/apic/io_apic.c
909
return __irq_domain_alloc_irqs(domain, irq, 1, ioapic_alloc_attr_node(info),
arch/x86/kernel/apic/io_apic.c
923
static int alloc_isa_irq_from_domain(struct irq_domain *domain, int irq, int ioapic, int pin,
arch/x86/kernel/apic/io_apic.c
942
irq = __irq_domain_alloc_irqs(domain, irq, 1, node, info, true, NULL);
arch/x86/kernel/apic/io_apic.c
944
irq_data = irq_domain_get_irq_data(domain, irq);
arch/x86/kernel/apic/io_apic.c
956
struct irq_domain *domain = mp_ioapic_irqdomain(ioapic);
arch/x86/kernel/apic/io_apic.c
962
if (!domain)
arch/x86/kernel/apic/io_apic.c
983
irq = irq_find_mapping(domain, pin);
arch/x86/kernel/apic/io_apic.c
990
irq = alloc_isa_irq_from_domain(domain, irq,
arch/x86/kernel/apic/io_apic.c
992
else if ((irq = irq_find_mapping(domain, pin)) == 0)
arch/x86/kernel/apic/io_apic.c
993
irq = alloc_irq_from_domain(domain, ioapic, gsi, &tmp);
arch/x86/kernel/apic/msi.c
152
struct irq_domain *domain = dev_get_msi_domain(&dev->dev);
arch/x86/kernel/apic/msi.c
154
if (!domain)
arch/x86/kernel/apic/msi.c
155
domain = dev_get_msi_domain(&dev->bus->dev);
arch/x86/kernel/apic/msi.c
156
if (!domain)
arch/x86/kernel/apic/msi.c
159
return domain == x86_vector_domain;
arch/x86/kernel/apic/msi.c
175
static int x86_msi_prepare(struct irq_domain *domain, struct device *dev,
arch/x86/kernel/apic/msi.c
178
struct msi_domain_info *info = domain->host_data;
arch/x86/kernel/apic/msi.c
205
static bool x86_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
arch/x86/kernel/apic/msi.c
214
if (WARN_ON_ONCE(domain != real_parent))
arch/x86/kernel/apic/msi.c
280
int pci_msi_prepare(struct irq_domain *domain, struct device *dev, int nvec,
arch/x86/kernel/apic/msi.c
323
static int dmar_msi_init(struct irq_domain *domain,
arch/x86/kernel/apic/msi.c
327
irq_domain_set_info(domain, virq, arg->devid, info->chip, NULL,
arch/x86/kernel/apic/msi.c
367
struct irq_domain *domain = dmar_get_irq_domain();
arch/x86/kernel/apic/msi.c
370
if (!domain)
arch/x86/kernel/apic/msi.c
379
return irq_domain_alloc_irqs(domain, 1, node, &info);
arch/x86/kernel/apic/vector.c
498
static void x86_vector_free_irqs(struct irq_domain *domain,
arch/x86/kernel/apic/vector.c
548
static int x86_vector_alloc_irqs(struct irq_domain *domain, unsigned int virq,
arch/x86/kernel/apic/vector.c
568
irqd = irq_domain_get_irq_data(domain, virq + i);
arch/x86/kernel/apic/vector.c
617
x86_vector_free_irqs(domain, virq, i);
arch/x86/kernel/apic/x2apic_uv_x.c
1131
int domain, bus, rc;
arch/x86/kernel/apic/x2apic_uv_x.c
1139
domain = pci_domain_nr(pdev->bus);
arch/x86/kernel/apic/x2apic_uv_x.c
1142
rc = uv_bios_set_legacy_vga_target(decode, domain, bus);
arch/x86/kernel/devicetree.c
209
static int dt_irqdomain_alloc(struct irq_domain *domain, unsigned int virq,
arch/x86/kernel/devicetree.c
226
tmp.devid = mpc_ioapic_id(mp_irqdomain_ioapic_idx(domain));
arch/x86/kernel/devicetree.c
229
return mp_irqdomain_alloc(domain, virq, nr_irqs, &tmp);
arch/x86/kernel/hpet.c
517
static int hpet_msi_init(struct irq_domain *domain,
arch/x86/kernel/hpet.c
521
irq_domain_set_info(domain, virq, arg->hwirq, info->chip, NULL,
arch/x86/kernel/hpet.c
582
static inline int hpet_dev_id(struct irq_domain *domain)
arch/x86/kernel/hpet.c
584
struct msi_domain_info *info = msi_get_domain_info(domain);
arch/x86/kernel/hpet.c
589
static int hpet_assign_irq(struct irq_domain *domain, struct hpet_channel *hc,
arch/x86/kernel/hpet.c
597
info.devid = hpet_dev_id(domain);
arch/x86/kernel/hpet.c
600
return irq_domain_alloc_irqs(domain, 1, NUMA_NO_NODE, &info);
arch/x86/pci/acpi.c
403
seg = info->sd.domain;
arch/x86/pci/acpi.c
435
pci_mmconfig_delete(info->sd.domain,
arch/x86/pci/acpi.c
535
int domain = root->segment;
arch/x86/pci/acpi.c
541
root->segment = domain = 0;
arch/x86/pci/acpi.c
543
if (domain && !pci_domains_supported) {
arch/x86/pci/acpi.c
545
domain, busnum);
arch/x86/pci/acpi.c
549
bus = pci_find_bus(domain, busnum);
arch/x86/pci/acpi.c
556
.domain = domain,
arch/x86/pci/acpi.c
569
domain, busnum);
arch/x86/pci/acpi.c
571
info->sd.domain = domain;
arch/x86/pci/common.c
40
int raw_pci_read(unsigned int domain, unsigned int bus, unsigned int devfn,
arch/x86/pci/common.c
43
if (domain == 0 && reg < 256 && raw_pci_ops)
arch/x86/pci/common.c
44
return raw_pci_ops->read(domain, bus, devfn, reg, len, val);
arch/x86/pci/common.c
46
return raw_pci_ext_ops->read(domain, bus, devfn, reg, len, val);
arch/x86/pci/common.c
50
int raw_pci_write(unsigned int domain, unsigned int bus, unsigned int devfn,
arch/x86/pci/common.c
53
if (domain == 0 && reg < 256 && raw_pci_ops)
arch/x86/pci/common.c
54
return raw_pci_ops->write(domain, bus, devfn, reg, len, val);
arch/x86/pci/common.c
56
return raw_pci_ext_ops->write(domain, bus, devfn, reg, len, val);
arch/x86/pci/intel_mid.c
101
unsigned int domain, busnum;
arch/x86/pci/intel_mid.c
104
domain = pci_domain_nr(bus);
arch/x86/pci/intel_mid.c
110
raw_pci_ext_ops->read(domain, busnum, devfn,
arch/x86/pci/intel_mid.c
133
return raw_pci_ext_ops->write(domain, busnum, devfn, reg, 4,
arch/x86/pci/intel_mid.c
138
return raw_pci_ext_ops->write(domain, busnum, devfn, reg, len, val);
arch/x86/pci/xen.c
411
static int xen_msi_domain_alloc_irqs(struct irq_domain *domain,
arch/x86/pci/xen.c
424
static void xen_msi_domain_free_irqs(struct irq_domain *domain,
arch/x86/platform/uv/bios_uv.c
169
int uv_bios_set_legacy_vga_target(bool decode, int domain, int bus)
arch/x86/platform/uv/bios_uv.c
172
(u64)decode, (u64)domain, (u64)bus, 0, 0);
arch/x86/platform/uv/uv_irq.c
107
static void uv_domain_free(struct irq_domain *domain, unsigned int virq,
arch/x86/platform/uv/uv_irq.c
110
struct irq_data *irq_data = irq_domain_get_irq_data(domain, virq);
arch/x86/platform/uv/uv_irq.c
115
irq_domain_free_irqs_top(domain, virq, nr_irqs);
arch/x86/platform/uv/uv_irq.c
122
static int uv_domain_activate(struct irq_domain *domain,
arch/x86/platform/uv/uv_irq.c
133
static void uv_domain_deactivate(struct irq_domain *domain,
arch/x86/platform/uv/uv_irq.c
185
struct irq_domain *domain = uv_get_irq_domain();
arch/x86/platform/uv/uv_irq.c
187
if (!domain)
arch/x86/platform/uv/uv_irq.c
197
return irq_domain_alloc_irqs(domain, 1,
arch/x86/platform/uv/uv_irq.c
75
static int uv_domain_alloc(struct irq_domain *domain, unsigned int virq,
arch/x86/platform/uv/uv_irq.c
80
struct irq_data *irq_data = irq_domain_get_irq_data(domain, virq);
arch/x86/platform/uv/uv_irq.c
91
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, arg);
arch/x86/platform/uv/uv_irq.c
98
irq_domain_set_info(domain, virq, virq, &uv_irq_chip, chip_data,
block/kyber-iosched.c
849
#define KYBER_LAT_SHOW_STORE(domain, name) \
block/kyber-iosched.c
855
return sprintf(page, "%llu\n", kqd->latency_targets[domain]); \
block/kyber-iosched.c
869
kqd->latency_targets[domain] = nsec; \
block/kyber-iosched.c
886
#define KYBER_DEBUGFS_DOMAIN_ATTRS(domain, name) \
block/kyber-iosched.c
892
sbitmap_queue_show(&kqd->domain_tokens[domain], m); \
block/kyber-iosched.c
903
return seq_list_start(&khd->rqs[domain], *pos); \
block/kyber-iosched.c
912
return seq_list_next(v, &khd->rqs[domain], pos); \
block/kyber-iosched.c
935
wait_queue_entry_t *wait = &khd->domain_wait[domain].wait; \
drivers/accel/ivpu/vpu_jsm_api.h
1541
u32 domain;
drivers/accel/rocket/rocket_drv.c
111
rocket_iommu_domain_put(rocket_priv->domain);
drivers/accel/rocket/rocket_drv.c
127
rocket_iommu_domain_put(rocket_priv->domain);
drivers/accel/rocket/rocket_drv.c
31
struct rocket_iommu_domain *domain = container_of(kref, struct rocket_iommu_domain, kref);
drivers/accel/rocket/rocket_drv.c
33
iommu_domain_free(domain->domain);
drivers/accel/rocket/rocket_drv.c
34
domain->domain = NULL;
drivers/accel/rocket/rocket_drv.c
35
kfree(domain);
drivers/accel/rocket/rocket_drv.c
41
struct rocket_iommu_domain *domain = kmalloc_obj(*domain);
drivers/accel/rocket/rocket_drv.c
44
if (!domain)
drivers/accel/rocket/rocket_drv.c
47
domain->domain = iommu_paging_domain_alloc(dev);
drivers/accel/rocket/rocket_drv.c
48
if (IS_ERR(domain->domain)) {
drivers/accel/rocket/rocket_drv.c
49
err = ERR_CAST(domain->domain);
drivers/accel/rocket/rocket_drv.c
50
kfree(domain);
drivers/accel/rocket/rocket_drv.c
53
kref_init(&domain->kref);
drivers/accel/rocket/rocket_drv.c
55
return domain;
drivers/accel/rocket/rocket_drv.c
61
kref_get(&rocket_priv->domain->kref);
drivers/accel/rocket/rocket_drv.c
62
return rocket_priv->domain;
drivers/accel/rocket/rocket_drv.c
66
rocket_iommu_domain_put(struct rocket_iommu_domain *domain)
drivers/accel/rocket/rocket_drv.c
68
kref_put(&domain->kref, rocket_iommu_domain_destroy);
drivers/accel/rocket/rocket_drv.c
89
rocket_priv->domain = rocket_iommu_domain_create(rdev->cores[0].dev);
drivers/accel/rocket/rocket_drv.c
90
if (IS_ERR(rocket_priv->domain)) {
drivers/accel/rocket/rocket_drv.c
91
ret = PTR_ERR(rocket_priv->domain);
drivers/accel/rocket/rocket_drv.c
97
start = rocket_priv->domain->domain->geometry.aperture_start;
drivers/accel/rocket/rocket_drv.c
98
end = rocket_priv->domain->domain->geometry.aperture_end;
drivers/accel/rocket/rocket_drv.h
15
struct iommu_domain *domain;
drivers/accel/rocket/rocket_drv.h
22
struct rocket_iommu_domain *domain;
drivers/accel/rocket/rocket_drv.h
30
void rocket_iommu_domain_put(struct rocket_iommu_domain *domain);
drivers/accel/rocket/rocket_gem.c
22
unmapped = iommu_unmap(bo->domain->domain, bo->mm.start, bo->size);
drivers/accel/rocket/rocket_gem.c
29
rocket_iommu_domain_put(bo->domain);
drivers/accel/rocket/rocket_gem.c
30
bo->domain = NULL;
drivers/accel/rocket/rocket_gem.c
78
rkt_obj->domain = rocket_iommu_domain_get(rocket_priv);
drivers/accel/rocket/rocket_gem.c
99
ret = iommu_map_sgtable(rocket_priv->domain->domain,
drivers/accel/rocket/rocket_gem.h
14
struct rocket_iommu_domain *domain;
drivers/accel/rocket/rocket_job.c
236
rocket_iommu_domain_put(job->domain);
drivers/accel/rocket/rocket_job.c
317
ret = iommu_attach_group(job->domain->domain, core->iommu_group);
drivers/accel/rocket/rocket_job.c
577
rjob->domain = rocket_iommu_domain_get(file_priv);
drivers/accel/rocket/rocket_job.h
39
struct rocket_iommu_domain *domain;
drivers/acpi/acpi_extlog.c
145
int domain;
drivers/acpi/acpi_extlog.c
153
domain = pcie_err->device_id.segment;
drivers/acpi/acpi_extlog.c
157
pdev = pci_get_domain_bus_and_slot(domain, bus, devfn);
drivers/acpi/arm64/iort.c
828
struct irq_domain *domain;
drivers/acpi/arm64/iort.c
859
domain = irq_find_matching_fwnode(iort_fwnode, DOMAIN_BUS_PLATFORM_MSI);
drivers/acpi/arm64/iort.c
860
if (domain)
drivers/acpi/arm64/iort.c
861
dev_set_msi_domain(dev, domain);
drivers/acpi/cppc_acpi.c
536
if (match_pdomain->domain != pdomain->domain)
drivers/acpi/irq.c
279
struct irq_domain *domain;
drivers/acpi/irq.c
287
domain = irq_find_matching_fwnode(fwspec.fwnode, DOMAIN_BUS_ANY);
drivers/acpi/irq.c
288
if (!domain)
drivers/acpi/processor_perflib.c
682
if (match_pdomain->domain != pdomain->domain)
drivers/acpi/processor_perflib.c
710
if (match_pdomain->domain != pdomain->domain)
drivers/acpi/processor_throttling.c
124
if (match_pdomain->domain != pdomain->domain)
drivers/acpi/processor_throttling.c
156
if (match_pdomain->domain != pdomain->domain)
drivers/base/map.c
102
mutex_lock(domain->lock);
drivers/base/map.c
103
for (p = domain->probes[MAJOR(dev) % 255]; p; p = p->next) {
drivers/base/map.c
123
mutex_unlock(domain->lock);
drivers/base/map.c
131
mutex_unlock(domain->lock);
drivers/base/map.c
32
int kobj_map(struct kobj_map *domain, dev_t dev, unsigned long range,
drivers/base/map.c
56
mutex_lock(domain->lock);
drivers/base/map.c
58
struct probe **s = &domain->probes[index % 255];
drivers/base/map.c
64
mutex_unlock(domain->lock);
drivers/base/map.c
68
void kobj_unmap(struct kobj_map *domain, dev_t dev, unsigned long range)
drivers/base/map.c
78
mutex_lock(domain->lock);
drivers/base/map.c
81
for (s = &domain->probes[index % 255]; *s; s = &(*s)->next) {
drivers/base/map.c
91
mutex_unlock(domain->lock);
drivers/base/map.c
95
struct kobject *kobj_lookup(struct kobj_map *domain, dev_t dev, int *index)
drivers/base/platform-msi.c
72
struct irq_domain *domain = dev->msi.domain;
drivers/base/platform-msi.c
74
if (!domain || !write_msi_msg)
drivers/base/regmap/regmap-irq.c
1020
virq = irq_find_mapping(d->domain, hwirq);
drivers/base/regmap/regmap-irq.c
1025
irq_domain_remove(d->domain);
drivers/base/regmap/regmap-irq.c
1189
return irq_create_mapping(data->domain, irq);
drivers/base/regmap/regmap-irq.c
1206
return data->domain;
drivers/base/regmap/regmap-irq.c
31
struct irq_domain *domain;
drivers/base/regmap/regmap-irq.c
515
handle_nested_irq(irq_find_mapping(data->domain, i));
drivers/base/regmap/regmap-irq.c
645
d->domain = irq_domain_instantiate(&info);
drivers/base/regmap/regmap-irq.c
646
if (IS_ERR(d->domain)) {
drivers/base/regmap/regmap-irq.c
648
return PTR_ERR(d->domain);
drivers/bcma/driver_gpio.c
124
generic_handle_domain_irq_safe(gc->irq.domain, gpio);
drivers/bluetooth/btintel.c
2485
u32 domain, mode;
drivers/bluetooth/btintel.c
2527
domain = (u32)p->package.elements[0].integer.value;
drivers/bluetooth/btintel.c
2531
if (domain != 0x12) {
drivers/bluetooth/btintel.c
2854
union acpi_object *domain, *bt_pkg;
drivers/bluetooth/btintel.c
2859
domain = &bt_pkg->package.elements[0];
drivers/bluetooth/btintel.c
2860
if (domain->type == ACPI_TYPE_INTEGER &&
drivers/bluetooth/btintel.c
2861
domain->integer.value == BTINTEL_BT_DOMAIN)
drivers/bus/fsl-mc/fsl-mc-msi.c
163
struct irq_domain *domain;
drivers/bus/fsl-mc/fsl-mc-msi.c
173
domain = msi_create_irq_domain(fwnode, info, parent);
drivers/bus/fsl-mc/fsl-mc-msi.c
174
if (domain)
drivers/bus/fsl-mc/fsl-mc-msi.c
175
irq_domain_update_bus_token(domain, DOMAIN_BUS_FSL_MC_MSI);
drivers/bus/fsl-mc/fsl-mc-msi.c
177
return domain;
drivers/bus/moxtet.c
706
sub_irq = irq_find_mapping(moxtet->irq.domain, i);
drivers/bus/moxtet.c
729
irq = irq_find_mapping(moxtet->irq.domain, i);
drivers/bus/moxtet.c
734
irq_domain_remove(moxtet->irq.domain);
drivers/bus/moxtet.c
741
moxtet->irq.domain = irq_domain_create_simple(dev_fwnode(moxtet->dev), MOXTET_NIRQS, 0,
drivers/bus/moxtet.c
743
if (moxtet->irq.domain == NULL) {
drivers/bus/moxtet.c
750
irq_create_mapping(moxtet->irq.domain, i);
drivers/clk/qcom/gdsc.c
259
static int gdsc_enable(struct generic_pm_domain *domain)
drivers/clk/qcom/gdsc.c
261
struct gdsc *sc = domain_to_gdsc(domain);
drivers/clk/qcom/gdsc.c
317
static int gdsc_disable(struct generic_pm_domain *domain)
drivers/clk/qcom/gdsc.c
319
struct gdsc *sc = domain_to_gdsc(domain);
drivers/clk/qcom/gdsc.c
356
ret = gdsc_toggle_logic(sc, GDSC_OFF, domain->synced_poweroff);
drivers/clk/qcom/gdsc.c
366
static int gdsc_set_hwmode(struct generic_pm_domain *domain, struct device *dev, bool mode)
drivers/clk/qcom/gdsc.c
368
struct gdsc *sc = domain_to_gdsc(domain);
drivers/clk/qcom/gdsc.c
393
static bool gdsc_get_hwmode(struct generic_pm_domain *domain, struct device *dev)
drivers/clk/qcom/gdsc.c
395
struct gdsc *sc = domain_to_gdsc(domain);
drivers/clk/qcom/gdsc.c
52
#define domain_to_gdsc(domain) container_of(domain, struct gdsc, pd)
drivers/clk/qcom/gdsc.c
654
int gdsc_gx_do_nothing_enable(struct generic_pm_domain *domain)
drivers/clk/qcom/gdsc.c
656
struct gdsc *sc = domain_to_gdsc(domain);
drivers/clk/qcom/gdsc.h
90
int gdsc_gx_do_nothing_enable(struct generic_pm_domain *domain);
drivers/clk/renesas/rzg2l-cpg.c
1903
static int rzg2l_cpg_attach_dev(struct generic_pm_domain *domain, struct device *dev)
drivers/clk/renesas/rzg2l-cpg.c
1905
struct rzg2l_cpg_priv *priv = container_of(domain, struct rzg2l_cpg_priv, genpd);
drivers/clk/renesas/rzv2h-cpg.c
1488
static int rzv2h_cpg_attach_dev(struct generic_pm_domain *domain, struct device *dev)
drivers/clk/renesas/rzv2h-cpg.c
1490
struct rzv2h_cpg_pd *pd = container_of(domain, struct rzv2h_cpg_pd, genpd);
drivers/clocksource/ingenic-timer.c
145
struct irq_domain *domain;
drivers/clocksource/ingenic-timer.c
163
domain = irq_find_host(tcu->np);
drivers/clocksource/ingenic-timer.c
164
if (!domain) {
drivers/clocksource/ingenic-timer.c
169
timer_virq = irq_create_mapping(domain, timer->channel);
drivers/clocksource/timer-riscv.c
161
struct irq_domain *domain;
drivers/clocksource/timer-riscv.c
164
domain = irq_find_matching_fwnode(intc_fwnode, DOMAIN_BUS_ANY);
drivers/clocksource/timer-riscv.c
165
if (!domain) {
drivers/clocksource/timer-riscv.c
171
riscv_clock_event_irq = irq_create_mapping(domain, RV_IRQ_TIMER);
drivers/cpufreq/scmi-cpufreq.c
109
scmi_get_sharing_cpus(struct device *cpu_dev, int domain,
drivers/cpufreq/scmi-cpufreq.c
124
if (tdomain == domain)
drivers/cpufreq/scmi-cpufreq.c
137
int ret, domain;
drivers/cpufreq/scmi-cpufreq.c
139
domain = scmi_cpu_domain_id(cpu_dev);
drivers/cpufreq/scmi-cpufreq.c
140
if (domain < 0)
drivers/cpufreq/scmi-cpufreq.c
141
return domain;
drivers/cpufreq/scmi-cpufreq.c
145
ret = perf_ops->est_power_get(ph, domain, &Hz, power);
drivers/cpufreq/scmi-cpufreq.c
160
scmi_get_rate_limit(u32 domain, bool has_fast_switch)
drivers/cpufreq/scmi-cpufreq.c
169
ret = perf_ops->fast_switch_rate_limit(ph, domain,
drivers/cpufreq/scmi-cpufreq.c
175
ret = perf_ops->rate_limit_get(ph, domain, &rate_limit);
drivers/cpufreq/scmi-cpufreq.c
200
int ret, nr_opp, domain;
drivers/cpufreq/scmi-cpufreq.c
213
domain = scmi_cpu_domain_id(cpu_dev);
drivers/cpufreq/scmi-cpufreq.c
214
if (domain < 0)
drivers/cpufreq/scmi-cpufreq.c
215
return domain;
drivers/cpufreq/scmi-cpufreq.c
227
ret = scmi_get_sharing_cpus(cpu_dev, domain, policy->cpus);
drivers/cpufreq/scmi-cpufreq.c
255
ret = perf_ops->device_opps_add(ph, cpu_dev, domain);
drivers/cpufreq/scmi-cpufreq.c
288
priv->domain_id = domain;
drivers/cpufreq/scmi-cpufreq.c
296
latency = perf_ops->transition_latency_get(ph, domain);
drivers/cpufreq/scmi-cpufreq.c
303
perf_ops->fast_switch_possible(ph, domain);
drivers/cpufreq/scmi-cpufreq.c
306
scmi_get_rate_limit(domain, policy->fast_switch_possible);
drivers/cpufreq/scpi-cpufreq.c
68
int cpu, domain, tdomain;
drivers/cpufreq/scpi-cpufreq.c
71
domain = scpi_ops->device_domain_id(cpu_dev);
drivers/cpufreq/scpi-cpufreq.c
72
if (domain < 0)
drivers/cpufreq/scpi-cpufreq.c
73
return domain;
drivers/cpufreq/scpi-cpufreq.c
84
if (tdomain == domain)
drivers/crypto/caam/caamalg_qi2.c
100
phys_addr = priv->domain ? iommu_iova_to_phys(priv->domain, iova_addr) :
drivers/crypto/caam/caamalg_qi2.c
5222
priv->domain = iommu_get_domain_for_dev(dev);
drivers/crypto/caam/caamalg_qi2.h
65
struct iommu_domain *domain;
drivers/crypto/caam/ctrl.c
1080
ctrlpriv->domain = iommu_get_domain_for_dev(dev);
drivers/crypto/caam/intern.h
107
struct iommu_domain *domain;
drivers/crypto/caam/qi.c
101
phys_addr = domain ? iommu_iova_to_phys(domain, iova_addr) : iova_addr;
drivers/crypto/caam/qi.c
152
drv_req = caam_iova_to_virt(priv->domain, qm_fd_addr_get64(fd));
drivers/crypto/caam/qi.c
583
drv_req = caam_iova_to_virt(priv->domain, qm_fd_addr_get64(fd));
drivers/crypto/caam/qi.c
96
static void *caam_iova_to_virt(struct iommu_domain *domain,
drivers/crypto/hisilicon/sec/sec_drv.c
373
struct iommu_domain *domain;
drivers/crypto/hisilicon/sec/sec_drv.c
376
domain = iommu_get_domain_for_dev(info->dev);
drivers/crypto/hisilicon/sec/sec_drv.c
379
if (domain && (domain->type & __IOMMU_DOMAIN_PAGING))
drivers/crypto/hisilicon/sec/sec_drv.c
922
struct iommu_domain *domain;
drivers/crypto/hisilicon/sec/sec_drv.c
928
domain = iommu_get_domain_for_dev(info->dev);
drivers/crypto/hisilicon/sec/sec_drv.c
934
if (domain && (domain->type & __IOMMU_DOMAIN_PAGING))
drivers/crypto/hisilicon/sec/sec_drv.c
974
if (domain && (domain->type & __IOMMU_DOMAIN_PAGING)) {
drivers/crypto/hisilicon/sec2/sec_main.c
1383
struct iommu_domain *domain;
drivers/crypto/hisilicon/sec2/sec_main.c
1386
domain = iommu_get_domain_for_dev(dev);
drivers/crypto/hisilicon/sec2/sec_main.c
1390
if (domain) {
drivers/crypto/hisilicon/sec2/sec_main.c
1391
if (domain->type & __IOMMU_DOMAIN_PAGING)
drivers/crypto/hisilicon/sec2/sec_main.c
1394
domain->type);
drivers/dca/dca-core.c
100
list_for_each_entry_safe(dca, _dca, &domain->dca_providers, node)
drivers/dca/dca-core.c
103
dca_free_domain(domain);
drivers/dca/dca-core.c
115
struct dca_domain *domain;
drivers/dca/dca-core.c
117
list_for_each_entry(domain, &dca_domains, node)
drivers/dca/dca-core.c
118
if (domain->pci_rc == rc)
drivers/dca/dca-core.c
119
return domain;
drivers/dca/dca-core.c
127
struct dca_domain *domain;
drivers/dca/dca-core.c
130
domain = dca_find_domain(rc);
drivers/dca/dca-core.c
132
if (!domain) {
drivers/dca/dca-core.c
137
return domain;
drivers/dca/dca-core.c
144
struct dca_domain *domain;
drivers/dca/dca-core.c
148
domain = dca_find_domain(rc);
drivers/dca/dca-core.c
149
if (!domain)
drivers/dca/dca-core.c
153
domain = list_first_entry(&dca_domains,
drivers/dca/dca-core.c
160
list_for_each_entry(dca, &domain->dca_providers, node)
drivers/dca/dca-core.c
177
struct dca_domain *domain;
drivers/dca/dca-core.c
192
domain = dca_find_domain(pci_rc);
drivers/dca/dca-core.c
193
if (!domain) {
drivers/dca/dca-core.c
198
list_for_each_entry(dca, &domain->dca_providers, node) {
drivers/dca/dca-core.c
343
struct dca_domain *domain, *newdomain = NULL;
drivers/dca/dca-core.c
357
domain = dca_get_domain(dev);
drivers/dca/dca-core.c
358
if (!domain) {
drivers/dca/dca-core.c
375
domain = dca_get_domain(dev);
drivers/dca/dca-core.c
376
if (!domain) {
drivers/dca/dca-core.c
377
domain = newdomain;
drivers/dca/dca-core.c
379
list_add(&domain->node, &dca_domains);
drivers/dca/dca-core.c
382
list_add(&dca->node, &domain->dca_providers);
drivers/dca/dca-core.c
400
struct dca_domain *domain;
drivers/dca/dca-core.c
415
domain = dca_find_domain(pci_rc);
drivers/dca/dca-core.c
416
if (list_empty(&domain->dca_providers))
drivers/dca/dca-core.c
417
dca_free_domain(domain);
drivers/dca/dca-core.c
45
struct dca_domain *domain;
drivers/dca/dca-core.c
47
domain = kzalloc_obj(*domain, GFP_NOWAIT);
drivers/dca/dca-core.c
48
if (!domain)
drivers/dca/dca-core.c
51
INIT_LIST_HEAD(&domain->dca_providers);
drivers/dca/dca-core.c
52
domain->pci_rc = rc;
drivers/dca/dca-core.c
54
return domain;
drivers/dca/dca-core.c
57
static void dca_free_domain(struct dca_domain *domain)
drivers/dca/dca-core.c
59
list_del(&domain->node);
drivers/dca/dca-core.c
60
kfree(domain);
drivers/dca/dca-core.c
82
struct dca_domain *domain;
drivers/dca/dca-core.c
98
domain = list_first_entry(&dca_domains, struct dca_domain, node);
drivers/dma/idxd/init.c
666
struct iommu_domain *domain;
drivers/dma/idxd/init.c
674
domain = iommu_get_domain_for_dev(dev);
drivers/dma/idxd/init.c
675
if (!domain)
drivers/dma/idxd/init.c
686
ret = iommu_attach_device_pasid(domain, dev, pasid, NULL);
drivers/dma/idxd/init.c
689
pasid, domain->type);
drivers/dma/idxd/init.c
705
struct iommu_domain *domain;
drivers/dma/idxd/init.c
707
domain = iommu_get_domain_for_dev(dev);
drivers/dma/idxd/init.c
708
if (!domain)
drivers/dma/idxd/init.c
711
iommu_detach_device_pasid(domain, dev, idxd->pasid);
drivers/dma/ti/k3-udma.c
5506
dev->msi.domain = of_msi_get_domain(dev, dev->of_node,
drivers/dma/ti/k3-udma.c
5508
if (!dev->msi.domain) {
drivers/edac/altera_edac.c
1850
generic_handle_domain_irq(edac->domain, dberr * 32 + bit);
drivers/edac/altera_edac.c
2142
edac->domain = irq_domain_create_linear(dev_fwnode(&pdev->dev), 64, &a10_eccmgr_ic_ops,
drivers/edac/altera_edac.c
2144
if (!edac->domain) {
drivers/edac/altera_edac.h
394
struct irq_domain *domain;
drivers/edac/sb_edac.c
351
enum domain dom;
drivers/edac/sb_edac.c
368
enum domain dom;
drivers/edac/sb_edac.c
408
#define PCI_DESCR(device_id, opt, domain) \
drivers/edac/sb_edac.c
411
.dom = domain
drivers/edac/sb_edac.c
742
static struct sbridge_dev *get_sbridge_dev(int seg, u8 bus, enum domain dom,
drivers/edac/sb_edac.c
769
static struct sbridge_dev *alloc_sbridge_dev(int seg, u8 bus, enum domain dom,
drivers/firmware/arm_scmi/driver.c
1873
__le32 domain;
drivers/firmware/arm_scmi/driver.c
1903
u32 domain, void __iomem **p_addr,
drivers/firmware/arm_scmi/driver.c
1924
pi->proto->id, message_id, domain, ret);
drivers/firmware/arm_scmi/driver.c
1939
info->domain = cpu_to_le32(domain);
drivers/firmware/arm_scmi/driver.c
2002
pi->proto->id, message_id, domain);
drivers/firmware/arm_scmi/driver.c
2018
pi->proto->id, message_id, domain, ret);
drivers/firmware/arm_scmi/perf.c
102
__le32 domain;
drivers/firmware/arm_scmi/perf.c
1021
u32 domain)
drivers/firmware/arm_scmi/perf.c
1025
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
1033
u32 domain, u32 *rate_limit)
drivers/firmware/arm_scmi/perf.c
1040
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
107
__le32 domain;
drivers/firmware/arm_scmi/perf.c
1271
int domain, ret;
drivers/firmware/arm_scmi/perf.c
1290
for (domain = 0; domain < pinfo->num_domains; domain++) {
drivers/firmware/arm_scmi/perf.c
1291
struct perf_dom_info *dom = pinfo->dom_info + domain;
drivers/firmware/arm_scmi/perf.c
1293
dom->id = domain;
drivers/firmware/arm_scmi/perf.c
241
int domain;
drivers/firmware/arm_scmi/perf.c
244
for (domain = 0; domain < pinfo->num_domains; domain++) {
drivers/firmware/arm_scmi/perf.c
245
xa_destroy(&((pinfo->dom_info + domain)->opps_by_idx));
drivers/firmware/arm_scmi/perf.c
246
xa_destroy(&((pinfo->dom_info + domain)->opps_by_lvl));
drivers/firmware/arm_scmi/perf.c
353
msg->domain = cpu_to_le32(perf_dom->id);
drivers/firmware/arm_scmi/perf.c
502
scmi_perf_domain_lookup(const struct scmi_protocol_handle *ph, u32 domain)
drivers/firmware/arm_scmi/perf.c
506
if (domain >= pi->num_domains)
drivers/firmware/arm_scmi/perf.c
509
return pi->dom_info + domain;
drivers/firmware/arm_scmi/perf.c
513
scmi_perf_info_get(const struct scmi_protocol_handle *ph, u32 domain)
drivers/firmware/arm_scmi/perf.c
517
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
525
u32 domain, u32 max_perf, u32 min_perf)
drivers/firmware/arm_scmi/perf.c
537
limits->domain = cpu_to_le32(domain);
drivers/firmware/arm_scmi/perf.c
566
u32 domain, u32 max_perf, u32 min_perf)
drivers/firmware/arm_scmi/perf.c
570
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
604
u32 domain, u32 *max_perf, u32 *min_perf)
drivers/firmware/arm_scmi/perf.c
615
put_unaligned_le32(domain, t->tx.buf);
drivers/firmware/arm_scmi/perf.c
647
u32 domain, u32 *max_perf, u32 *min_perf)
drivers/firmware/arm_scmi/perf.c
652
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
680
u32 domain, u32 level, bool poll)
drivers/firmware/arm_scmi/perf.c
692
lvl->domain = cpu_to_le32(domain);
drivers/firmware/arm_scmi/perf.c
719
u32 domain, u32 level, bool poll)
drivers/firmware/arm_scmi/perf.c
723
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
744
u32 domain, u32 *level, bool poll)
drivers/firmware/arm_scmi/perf.c
755
put_unaligned_le32(domain, t->tx.buf);
drivers/firmware/arm_scmi/perf.c
780
u32 domain, u32 *level, bool poll)
drivers/firmware/arm_scmi/perf.c
785
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
807
u32 domain, int message_id,
drivers/firmware/arm_scmi/perf.c
819
notify->domain = cpu_to_le32(domain);
drivers/firmware/arm_scmi/perf.c
86
__le32 domain;
drivers/firmware/arm_scmi/perf.c
865
struct device *dev, u32 domain)
drivers/firmware/arm_scmi/perf.c
872
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
891
domain, dom->info.name, idx, freq);
drivers/firmware/arm_scmi/perf.c
897
domain, dom->info.name, idx, freq);
drivers/firmware/arm_scmi/perf.c
904
u32 domain)
drivers/firmware/arm_scmi/perf.c
908
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
91
__le32 domain;
drivers/firmware/arm_scmi/perf.c
918
u32 domain, u32 *rate_limit)
drivers/firmware/arm_scmi/perf.c
925
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
933
static int scmi_dvfs_freq_set(const struct scmi_protocol_handle *ph, u32 domain,
drivers/firmware/arm_scmi/perf.c
939
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
959
static int scmi_dvfs_freq_get(const struct scmi_protocol_handle *ph, u32 domain,
drivers/firmware/arm_scmi/perf.c
966
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/perf.c
990
u32 domain, unsigned long *freq,
drivers/firmware/arm_scmi/perf.c
998
dom = scmi_perf_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/power.c
110
u32 domain, struct power_dom_info *dom_info,
drivers/firmware/arm_scmi/power.c
119
sizeof(domain), sizeof(*attr), &t);
drivers/firmware/arm_scmi/power.c
123
put_unaligned_le32(domain, t->tx.buf);
drivers/firmware/arm_scmi/power.c
146
domain, NULL, dom_info->name,
drivers/firmware/arm_scmi/power.c
154
u32 domain, u32 state)
drivers/firmware/arm_scmi/power.c
166
st->domain = cpu_to_le32(domain);
drivers/firmware/arm_scmi/power.c
176
u32 domain, u32 *state)
drivers/firmware/arm_scmi/power.c
185
put_unaligned_le32(domain, t->tx.buf);
drivers/firmware/arm_scmi/power.c
204
u32 domain)
drivers/firmware/arm_scmi/power.c
207
struct power_dom_info *dom = pi->dom_info + domain;
drivers/firmware/arm_scmi/power.c
220
u32 domain, bool enable)
drivers/firmware/arm_scmi/power.c
232
notify->domain = cpu_to_le32(domain);
drivers/firmware/arm_scmi/power.c
324
int domain, ret;
drivers/firmware/arm_scmi/power.c
343
for (domain = 0; domain < pinfo->num_domains; domain++) {
drivers/firmware/arm_scmi/power.c
344
struct power_dom_info *dom = pinfo->dom_info + domain;
drivers/firmware/arm_scmi/power.c
346
scmi_power_domain_attributes_get(ph, domain, dom,
drivers/firmware/arm_scmi/power.c
47
__le32 domain;
drivers/firmware/arm_scmi/power.c
52
__le32 domain;
drivers/firmware/arm_scmi/powercap.c
1000
&pinfo->powercaps[domain].fc_info);
drivers/firmware/arm_scmi/powercap.c
1005
&pinfo->powercaps[domain],
drivers/firmware/arm_scmi/powercap.c
1006
&pinfo->states[domain].last_pcap);
drivers/firmware/arm_scmi/powercap.c
1010
pinfo->states[domain].enabled =
drivers/firmware/arm_scmi/powercap.c
1011
!!pinfo->states[domain].last_pcap;
drivers/firmware/arm_scmi/powercap.c
138
u32 domain, int message_id, bool enable);
drivers/firmware/arm_scmi/powercap.c
195
struct powercap_info *pinfo, u32 domain)
drivers/firmware/arm_scmi/powercap.c
200
struct scmi_powercap_info *dom_info = pinfo->powercaps + domain;
drivers/firmware/arm_scmi/powercap.c
204
sizeof(domain), sizeof(*resp), &t);
drivers/firmware/arm_scmi/powercap.c
208
put_unaligned_le32(domain, t->tx.buf);
drivers/firmware/arm_scmi/powercap.c
215
dom_info->id = domain;
drivers/firmware/arm_scmi/powercap.c
291
domain, NULL, dom_info->name,
drivers/firmware/arm_scmi/powercap.c
379
msg->domain = cpu_to_le32(pc->id);
drivers/firmware/arm_scmi/powercap.c
393
if (le32_to_cpu(resp->domain) == pc->id)
drivers/firmware/arm_scmi/powercap.c
520
msg->domain = cpu_to_le32(domain_id);
drivers/firmware/arm_scmi/powercap.c
71
__le32 domain;
drivers/firmware/arm_scmi/powercap.c
710
u32 domain, struct scmi_fc_info **p_fc)
drivers/firmware/arm_scmi/powercap.c
719
POWERCAP_CAP_SET, 4, domain,
drivers/firmware/arm_scmi/powercap.c
725
POWERCAP_CAP_GET, 4, domain,
drivers/firmware/arm_scmi/powercap.c
730
POWERCAP_PAI_SET, 4, domain,
drivers/firmware/arm_scmi/powercap.c
736
POWERCAP_PAI_GET, 4, domain,
drivers/firmware/arm_scmi/powercap.c
744
u32 domain, int message_id, bool enable)
drivers/firmware/arm_scmi/powercap.c
760
notify->domain = cpu_to_le32(domain);
drivers/firmware/arm_scmi/powercap.c
776
ret = scmi_powercap_measurements_threshold_get(ph, domain,
drivers/firmware/arm_scmi/powercap.c
79
__le32 domain;
drivers/firmware/arm_scmi/powercap.c
794
notify->domain = cpu_to_le32(domain);
drivers/firmware/arm_scmi/powercap.c
89
__le32 domain;
drivers/firmware/arm_scmi/powercap.c
94
__le32 domain;
drivers/firmware/arm_scmi/powercap.c
962
int domain, ret;
drivers/firmware/arm_scmi/powercap.c
993
for (domain = 0; domain < pinfo->num_domains; domain++) {
drivers/firmware/arm_scmi/powercap.c
994
ret = scmi_powercap_domain_attributes_get(ph, pinfo, domain);
drivers/firmware/arm_scmi/powercap.c
998
if (pinfo->powercaps[domain].fastchannels)
drivers/firmware/arm_scmi/powercap.c
999
scmi_powercap_domain_init_fc(ph, domain,
drivers/firmware/arm_scmi/protocols.h
283
u32 valid_size, u32 domain,
drivers/firmware/arm_scmi/reset.c
101
scmi_reset_domain_lookup(const struct scmi_protocol_handle *ph, u32 domain)
drivers/firmware/arm_scmi/reset.c
105
if (domain >= pi->num_domains)
drivers/firmware/arm_scmi/reset.c
108
return pi->dom_info + domain;
drivers/firmware/arm_scmi/reset.c
113
struct scmi_reset_info *pinfo, u32 domain)
drivers/firmware/arm_scmi/reset.c
119
struct reset_dom_info *dom_info = pinfo->dom_info + domain;
drivers/firmware/arm_scmi/reset.c
122
sizeof(domain), sizeof(*attr), &t);
drivers/firmware/arm_scmi/reset.c
126
put_unaligned_le32(domain, t->tx.buf);
drivers/firmware/arm_scmi/reset.c
151
ph->hops->extended_name_get(ph, RESET_DOMAIN_NAME_GET, domain,
drivers/firmware/arm_scmi/reset.c
166
scmi_reset_name_get(const struct scmi_protocol_handle *ph, u32 domain)
drivers/firmware/arm_scmi/reset.c
170
dom_info = scmi_reset_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/reset.c
178
u32 domain)
drivers/firmware/arm_scmi/reset.c
182
dom_info = scmi_reset_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/reset.c
189
static int scmi_domain_reset(const struct scmi_protocol_handle *ph, u32 domain,
drivers/firmware/arm_scmi/reset.c
197
dom_info = scmi_reset_domain_lookup(ph, domain);
drivers/firmware/arm_scmi/reset.c
209
dom->domain_id = cpu_to_le32(domain);
drivers/firmware/arm_scmi/reset.c
223
u32 domain)
drivers/firmware/arm_scmi/reset.c
225
return scmi_domain_reset(ph, domain, AUTONOMOUS_RESET,
drivers/firmware/arm_scmi/reset.c
230
scmi_reset_domain_assert(const struct scmi_protocol_handle *ph, u32 domain)
drivers/firmware/arm_scmi/reset.c
232
return scmi_domain_reset(ph, domain, EXPLICIT_RESET_ASSERT,
drivers/firmware/arm_scmi/reset.c
237
scmi_reset_domain_deassert(const struct scmi_protocol_handle *ph, u32 domain)
drivers/firmware/arm_scmi/reset.c
239
return scmi_domain_reset(ph, domain, 0, ARCH_COLD_RESET);
drivers/firmware/arm_scmi/reset.c
356
int domain, ret;
drivers/firmware/arm_scmi/reset.c
375
for (domain = 0; domain < pinfo->num_domains; domain++)
drivers/firmware/arm_scmi/reset.c
376
scmi_reset_domain_attributes_get(ph, pinfo, domain);
drivers/firmware/arm_scpi.c
305
u8 domain;
drivers/firmware/arm_scpi.c
315
u8 domain;
drivers/firmware/arm_scpi.c
589
static int scpi_dvfs_get_idx(u8 domain)
drivers/firmware/arm_scpi.c
594
ret = scpi_send_message(CMD_GET_DVFS, &domain, sizeof(domain),
drivers/firmware/arm_scpi.c
600
static int scpi_dvfs_set_idx(u8 domain, u8 index)
drivers/firmware/arm_scpi.c
603
struct dvfs_set dvfs = {domain, index};
drivers/firmware/arm_scpi.c
616
static struct scpi_dvfs_info *scpi_dvfs_get_info(u8 domain)
drivers/firmware/arm_scpi.c
623
if (domain >= MAX_DVFS_DOMAINS)
drivers/firmware/arm_scpi.c
626
if (scpi_info->dvfs[domain]) /* data already populated */
drivers/firmware/arm_scpi.c
627
return scpi_info->dvfs[domain];
drivers/firmware/arm_scpi.c
629
ret = scpi_send_message(CMD_GET_DVFS_INFO, &domain, sizeof(domain),
drivers/firmware/arm_scpi.c
657
scpi_info->dvfs[domain] = info;
drivers/firmware/arm_scpi.c
674
int domain = scpi_dev_domain_id(dev);
drivers/firmware/arm_scpi.c
676
if (domain < 0)
drivers/firmware/arm_scpi.c
677
return ERR_PTR(domain);
drivers/firmware/arm_scpi.c
679
return scpi_dvfs_get_info(domain);
drivers/gpio/gpio-104-dio-48e.c
326
config.domain = regmap_irq_get_domain(chip_data);
drivers/gpio/gpio-adnp.c
270
child_irq = irq_find_mapping(adnp->gpio.irq.domain,
drivers/gpio/gpio-adp5585.c
286
irq = irq_find_mapping(adp5585_gpio->gpio_chip.irq.domain, off);
drivers/gpio/gpio-altera.c
179
struct irq_domain *irqdomain = gc->irq.domain;
drivers/gpio/gpio-altera.c
203
struct irq_domain *irqdomain = gc->irq.domain;
drivers/gpio/gpio-aspeed-sgpio.c
381
generic_handle_domain_irq(gc->irq.domain, (i * 32 + p) * 2);
drivers/gpio/gpio-aspeed.c
643
generic_handle_domain_irq(gc->irq.domain, i * 32 + p);
drivers/gpio/gpio-ath79.c
205
generic_handle_domain_irq(gc->irq.domain, irq);
drivers/gpio/gpio-blzp1600.c
191
generic_handle_domain_irq(gpio->gen_gc.gc.irq.domain, hwirq);
drivers/gpio/gpio-brcmstb.c
267
struct irq_domain *domain = priv->irq_domain;
drivers/gpio/gpio-brcmstb.c
279
generic_handle_domain_irq(domain, hwbase + offset);
drivers/gpio/gpio-cadence.c
156
generic_handle_domain_irq(chip->irq.domain, hwirq);
drivers/gpio/gpio-crystalcove.c
295
virq = irq_find_mapping(cg->chip.irq.domain, gpio);
drivers/gpio/gpio-crystalcove.c
383
irq_domain_update_bus_token(cg->chip.irq.domain, DOMAIN_BUS_WIRED);
drivers/gpio/gpio-dln2.c
435
ret = generic_handle_domain_irq(dln2->gpio.irq.domain, pin);
drivers/gpio/gpio-dwapb.c
211
int gpio_irq = irq_find_mapping(gen_gc->gc.irq.domain, hwirq);
drivers/gpio/gpio-eic-sprd.c
560
girq = irq_find_mapping(chip->irq.domain, offset);
drivers/gpio/gpio-em.c
261
struct irq_domain *domain = data;
drivers/gpio/gpio-em.c
263
irq_domain_remove(domain);
drivers/gpio/gpio-ep93xx.c
122
generic_handle_domain_irq(gc->irq.domain, i);
drivers/gpio/gpio-ep93xx.c
98
generic_handle_domain_irq(gc->irq.domain, offset);
drivers/gpio/gpio-ftgpio010.c
154
generic_handle_domain_irq(gc->irq.domain, offset);
drivers/gpio/gpio-graniterapids.c
305
generic_handle_domain_irq(priv->gc.irq.domain, hwirq);
drivers/gpio/gpio-grgpio.c
113
return irq_create_mapping(priv->domain, offset);
drivers/gpio/gpio-grgpio.c
313
struct irq_domain *domain = data;
drivers/gpio/gpio-grgpio.c
315
irq_domain_remove(domain);
drivers/gpio/gpio-grgpio.c
397
priv->domain = irq_domain_create_linear(dev_fwnode(&ofdev->dev), gc->ngpio,
drivers/gpio/gpio-grgpio.c
399
if (!priv->domain) {
drivers/gpio/gpio-grgpio.c
405
priv->domain);
drivers/gpio/gpio-grgpio.c
439
priv->regs, gc->base, gc->ngpio, str_on_off(priv->domain));
drivers/gpio/gpio-grgpio.c
75
struct irq_domain *domain;
drivers/gpio/gpio-hisi.c
192
generic_handle_domain_irq(hisi_gpio->chip.gc.irq.domain,
drivers/gpio/gpio-hlwd.c
102
generic_handle_domain_irq(hlwd->gpioc.gc.irq.domain, hwirq);
drivers/gpio/gpio-i8255.c
132
gpio_config.irq_domain = config->domain;
drivers/gpio/gpio-i8255.h
28
struct irq_domain *domain;
drivers/gpio/gpio-idt3243x.c
41
virq = irq_find_mapping(gc->irq.domain, bit);
drivers/gpio/gpio-ljca.c
295
generic_handle_domain_irq(ljca_gpio->gc.irq.domain,
drivers/gpio/gpio-loongson-64bit.c
235
generic_handle_domain_irq(lgpio->chip.gc.irq.domain, i);
drivers/gpio/gpio-lpc18xx.c
169
static int lpc18xx_gpio_pin_ic_domain_alloc(struct irq_domain *domain,
drivers/gpio/gpio-lpc18xx.c
174
struct lpc18xx_gpio_pin_ic *ic = domain->host_data;
drivers/gpio/gpio-lpc18xx.c
189
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/gpio/gpio-lpc18xx.c
193
ret = irq_domain_alloc_irqs_parent(domain, virq, 1, &parent_fwspec);
drivers/gpio/gpio-lpc18xx.c
200
return irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
drivers/gpio/gpio-lpc18xx.c
251
ic->domain = irq_domain_create_hierarchy(parent_domain, 0, NR_LPC18XX_GPIO_PIN_IC_IRQS,
drivers/gpio/gpio-lpc18xx.c
254
if (!ic->domain) {
drivers/gpio/gpio-lpc18xx.c
391
irq_domain_remove(gc->pin_ic->domain);
drivers/gpio/gpio-lpc18xx.c
43
struct irq_domain *domain;
drivers/gpio/gpio-max732x.c
491
handle_nested_irq(irq_find_mapping(chip->gpio_chip.irq.domain,
drivers/gpio/gpio-max77620.c
44
virq = irq_find_mapping(gpio->gpio_chip.irq.domain, offset);
drivers/gpio/gpio-max77759.c
422
handle_nested_irq(irq_find_mapping(gc->irq.domain,
drivers/gpio/gpio-mlxbf2.c
280
generic_handle_domain_irq_safe(gc->irq.domain, level);
drivers/gpio/gpio-mlxbf3.c
104
generic_handle_domain_irq(gc->irq.domain, level);
drivers/gpio/gpio-mpsse.c
464
irq = irq_find_mapping(priv->gpio.irq.domain,
drivers/gpio/gpio-mt7621.c
98
generic_handle_domain_irq(gc->irq.domain, bit);
drivers/gpio/gpio-mvebu.c
1111
struct irq_domain *domain = data;
drivers/gpio/gpio-mvebu.c
1113
irq_domain_remove(domain);
drivers/gpio/gpio-mvebu.c
118
struct irq_domain *domain;
drivers/gpio/gpio-mvebu.c
1237
mvchip->domain = irq_domain_create_linear(dev_fwnode(&pdev->dev), ngpios,
drivers/gpio/gpio-mvebu.c
1239
if (!mvchip->domain) {
drivers/gpio/gpio-mvebu.c
1246
mvchip->domain);
drivers/gpio/gpio-mvebu.c
1251
mvchip->domain, ngpios, 2, np->name, handle_level_irq,
drivers/gpio/gpio-mvebu.c
1263
gc = irq_get_domain_generic_chip(mvchip->domain, 0);
drivers/gpio/gpio-mvebu.c
399
return irq_create_mapping(mvchip->domain, pin);
drivers/gpio/gpio-mvebu.c
579
irq = irq_find_mapping(mvchip->domain, i);
drivers/gpio/gpio-mxc.c
265
generic_handle_domain_irq(port->domain, irqoffset);
drivers/gpio/gpio-mxc.c
378
return irq_find_mapping(port->domain, offset);
drivers/gpio/gpio-mxc.c
512
port->domain = irq_domain_create_legacy(dev_fwnode(&pdev->dev), 32, irq_base, 0,
drivers/gpio/gpio-mxc.c
514
if (!port->domain) {
drivers/gpio/gpio-mxc.c
519
irq_domain_set_pm_device(port->domain, &pdev->dev);
drivers/gpio/gpio-mxc.c
534
irq_domain_remove(port->domain);
drivers/gpio/gpio-mxc.c
68
struct irq_domain *domain;
drivers/gpio/gpio-mxs.c
160
generic_handle_domain_irq(port->domain, irqoffset);
drivers/gpio/gpio-mxs.c
236
return irq_find_mapping(port->domain, offset);
drivers/gpio/gpio-mxs.c
308
port->domain = irq_domain_create_legacy(dev_fwnode(&pdev->dev), 32, irq_base, 0,
drivers/gpio/gpio-mxs.c
310
if (!port->domain) {
drivers/gpio/gpio-mxs.c
348
irq_domain_remove(port->domain);
drivers/gpio/gpio-mxs.c
51
struct irq_domain *domain;
drivers/gpio/gpio-nct6694.c
265
handle_nested_irq(irq_find_mapping(data->gpio.irq.domain, bit));
drivers/gpio/gpio-nct6694.c
428
data->irq = irq_create_mapping(nct6694->domain,
drivers/gpio/gpio-nomadik.c
300
generic_handle_domain_irq_safe(chip->irq.domain, bit);
drivers/gpio/gpio-npcm-sgpio.c
447
generic_handle_domain_irq(gc->irq.domain,
drivers/gpio/gpio-omap.c
1083
irq_domain_set_pm_device(bank->chip.irq.domain, pm_dev);
drivers/gpio/gpio-omap.c
615
generic_handle_domain_irq(bank->chip.irq.domain, bit);
drivers/gpio/gpio-pca953x.c
1033
int nested_irq = irq_find_mapping(gc->irq.domain, level);
drivers/gpio/gpio-pcf857x.c
213
handle_nested_irq(irq_find_mapping(gpio->chip.irq.domain, i));
drivers/gpio/gpio-pl061.c
224
generic_handle_domain_irq(gc->irq.domain,
drivers/gpio/gpio-pmic-eic-sprd.c
281
girq = irq_find_mapping(chip->irq.domain, n);
drivers/gpio/gpio-rcar.c
229
generic_handle_domain_irq(p->gpio_chip.irq.domain,
drivers/gpio/gpio-rcar.c
561
irq_domain_set_pm_device(gpio_chip->irq.domain, dev);
drivers/gpio/gpio-rda.c
195
generic_handle_domain_irq(chip->irq.domain, n);
drivers/gpio/gpio-realtek-otto.c
263
generic_handle_domain_irq(gc->irq.domain, offset);
drivers/gpio/gpio-rockchip.c
312
if (!bank->domain)
drivers/gpio/gpio-rockchip.c
315
virq = irq_create_mapping(bank->domain, offset);
drivers/gpio/gpio-rockchip.c
379
generic_handle_domain_irq(bank->domain, irq);
drivers/gpio/gpio-rockchip.c
519
bank->domain = irq_domain_create_linear(dev_fwnode(bank->dev), 32, &irq_generic_chip_ops,
drivers/gpio/gpio-rockchip.c
521
if (!bank->domain) {
drivers/gpio/gpio-rockchip.c
527
ret = irq_alloc_domain_generic_chips(bank->domain, 32, 1,
drivers/gpio/gpio-rockchip.c
534
irq_domain_remove(bank->domain);
drivers/gpio/gpio-rockchip.c
538
gc = irq_get_domain_generic_chip(bank->domain, 0);
drivers/gpio/gpio-rtd.c
380
struct irq_domain *domain = data->gpio_chip.irq.domain;
drivers/gpio/gpio-rtd.c
410
int girq = irq_find_mapping(domain, hwirq);
drivers/gpio/gpio-rtd.c
415
generic_handle_domain_irq(domain, hwirq);
drivers/gpio/gpio-sch.c
280
generic_handle_domain_irq(gc->irq.domain, offset);
drivers/gpio/gpio-sifive.c
217
parent = irq_get_irq_data(chip->irq_number[0])->domain;
drivers/gpio/gpio-sim.c
235
static int gpio_sim_irq_requested(struct irq_domain *domain,
drivers/gpio/gpio-sim.c
243
static void gpio_sim_irq_released(struct irq_domain *domain,
drivers/gpio/gpio-siox.c
78
struct irq_domain *irqdomain = ddata->gchip.irq.domain;
drivers/gpio/gpio-spacemit-k1.c
263
irq_domain_update_bus_token(girq->domain, DOMAIN_BUS_WIRED);
drivers/gpio/gpio-spacemit-k1.c
97
handle_nested_irq(irq_find_mapping(gb->chip.gc.irq.domain, n));
drivers/gpio/gpio-sprd.c
206
generic_handle_domain_irq(chip->irq.domain,
drivers/gpio/gpio-stmpe.c
428
int child_irq = irq_find_mapping(stmpe_gpio->chip.irq.domain,
drivers/gpio/gpio-tangier.c
367
generic_handle_domain_irq(gc->irq.domain, base + gpio);
drivers/gpio/gpio-tb10x.c
166
tb10x_gpio->domain = irq_domain_create_linear(dev_fwnode(dev),
drivers/gpio/gpio-tb10x.c
169
if (!tb10x_gpio->domain) {
drivers/gpio/gpio-tb10x.c
173
ret = irq_alloc_domain_generic_chips(tb10x_gpio->domain,
drivers/gpio/gpio-tb10x.c
180
gc = tb10x_gpio->domain->gc->gc[0];
drivers/gpio/gpio-tb10x.c
194
irq_domain_remove(tb10x_gpio->domain);
drivers/gpio/gpio-tb10x.c
203
irq_remove_generic_chip(tb10x_gpio->domain->gc->gc[0],
drivers/gpio/gpio-tb10x.c
205
kfree(tb10x_gpio->domain->gc);
drivers/gpio/gpio-tb10x.c
206
irq_domain_remove(tb10x_gpio->domain);
drivers/gpio/gpio-tb10x.c
43
struct irq_domain *domain;
drivers/gpio/gpio-tb10x.c
57
return irq_create_mapping(tb10x_gpio->domain, offset);
drivers/gpio/gpio-tb10x.c
81
generic_handle_domain_irq(tb10x_gpio->domain, i);
drivers/gpio/gpio-tc3589x.c
284
int irq = irq_find_mapping(tc3589x_gpio->chip.irq.domain,
drivers/gpio/gpio-tegra.c
384
struct irq_domain *domain = tgi->gc.irq.domain;
drivers/gpio/gpio-tegra.c
425
ret = generic_handle_domain_irq(domain, gpio + pin);
drivers/gpio/gpio-tegra186.c
662
struct irq_domain *domain = gpio->gpio.irq.domain;
drivers/gpio/gpio-tegra186.c
689
int ret = generic_handle_domain_irq(domain, offset + pin);
drivers/gpio/gpio-tegra186.c
700
static int tegra186_gpio_irq_domain_translate(struct irq_domain *domain,
drivers/gpio/gpio-tegra186.c
705
struct tegra_gpio *gpio = gpiochip_get_data(domain->host_data);
drivers/gpio/gpio-thunderx.c
543
irq_get_irq_data(txgpio->msix_entries[0].vector)->domain;
drivers/gpio/gpio-thunderx.c
561
err = irq_domain_push_irq(girq->domain,
drivers/gpio/gpio-thunderx.c
582
irq_domain_pop_irq(txgpio->chip.irq.domain,
drivers/gpio/gpio-thunderx.c
585
irq_domain_remove(txgpio->chip.irq.domain);
drivers/gpio/gpio-tqmx86.c
275
generic_handle_domain_irq(gpio->chip.irq.domain,
drivers/gpio/gpio-tqmx86.c
404
irq_domain_set_pm_device(girq->domain, dev);
drivers/gpio/gpio-uniphier.c
248
static int uniphier_gpio_irq_domain_translate(struct irq_domain *domain,
drivers/gpio/gpio-uniphier.c
262
static int uniphier_gpio_irq_domain_alloc(struct irq_domain *domain,
drivers/gpio/gpio-uniphier.c
266
struct uniphier_gpio_priv *priv = domain->host_data;
drivers/gpio/gpio-uniphier.c
275
ret = uniphier_gpio_irq_domain_translate(domain, arg, &hwirq, &type);
drivers/gpio/gpio-uniphier.c
284
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/gpio/gpio-uniphier.c
290
ret = irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
drivers/gpio/gpio-uniphier.c
295
return irq_domain_alloc_irqs_parent(domain, virq, 1, &parent_fwspec);
drivers/gpio/gpio-uniphier.c
298
static int uniphier_gpio_irq_domain_activate(struct irq_domain *domain,
drivers/gpio/gpio-uniphier.c
30
struct irq_domain *domain;
drivers/gpio/gpio-uniphier.c
301
struct uniphier_gpio_priv *priv = domain->host_data;
drivers/gpio/gpio-uniphier.c
308
static void uniphier_gpio_irq_domain_deactivate(struct irq_domain *domain,
drivers/gpio/gpio-uniphier.c
311
struct uniphier_gpio_priv *priv = domain->host_data;
drivers/gpio/gpio-uniphier.c
409
priv->domain = irq_domain_create_hierarchy(
drivers/gpio/gpio-uniphier.c
414
if (!priv->domain)
drivers/gpio/gpio-uniphier.c
426
irq_domain_remove(priv->domain);
drivers/gpio/gpio-vf610.c
112
generic_handle_domain_irq(port->chip.gc.irq.domain, pin);
drivers/gpio/gpio-virtio.c
425
ret = generic_handle_domain_irq(vgpio->gc.irq.domain, gpio);
drivers/gpio/gpio-wcove.c
354
virq = irq_find_mapping(wg->chip.irq.domain, gpio);
drivers/gpio/gpio-xgene-sb.c
199
static int xgene_gpio_sb_domain_alloc(struct irq_domain *domain,
drivers/gpio/gpio-xgene-sb.c
205
struct xgene_gpio_sb *priv = domain->host_data;
drivers/gpio/gpio-xgene-sb.c
211
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
drivers/gpio/gpio-xgene-sb.c
214
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/gpio/gpio-xgene-sb.c
228
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs,
drivers/gpio/gpio-xgene-sb.c
262
parent_domain = irq_get_irq_data(ret)->domain;
drivers/gpio/gpio-xgene-sb.c
309
priv->chip.gc.irq.domain = priv->irq_domain;
drivers/gpio/gpio-xgs-iproc.c
191
generic_handle_domain_irq(gc->irq.domain, bit);
drivers/gpio/gpio-xilinx.c
517
generic_handle_domain_irq(gc->irq.domain, irq_offset);
drivers/gpio/gpio-xlp.c
200
generic_handle_domain_irq(priv->chip.irq.domain, gpio);
drivers/gpio/gpio-zynq.c
633
struct irq_domain *irqdomain = gpio->chip.irq.domain;
drivers/gpio/gpiolib.c
1554
ret = irq_domain_alloc_irqs(gc->irq.domain, 1,
drivers/gpio/gpiolib.c
1682
static int gpiochip_irq_domain_activate(struct irq_domain *domain,
drivers/gpio/gpiolib.c
1685
struct gpio_chip *gc = domain->host_data;
drivers/gpio/gpiolib.c
1700
static void gpiochip_irq_domain_deactivate(struct irq_domain *domain,
drivers/gpio/gpiolib.c
1703
struct gpio_chip *gc = domain->host_data;
drivers/gpio/gpiolib.c
1730
struct irq_domain *domain;
drivers/gpio/gpiolib.c
1747
domain = irq_domain_create_hierarchy(
drivers/gpio/gpiolib.c
1755
if (!domain)
drivers/gpio/gpiolib.c
1760
return domain;
drivers/gpio/gpiolib.c
1903
struct irq_domain *domain;
drivers/gpio/gpiolib.c
1905
domain = irq_domain_create_simple(fwnode, gc->ngpio, gc->irq.first,
drivers/gpio/gpiolib.c
1907
if (!domain)
drivers/gpio/gpiolib.c
1910
return domain;
drivers/gpio/gpiolib.c
1915
struct irq_domain *domain = gc->irq.domain;
drivers/gpio/gpiolib.c
1929
if (irq_domain_is_hierarchy(domain)) {
drivers/gpio/gpiolib.c
1932
spec.fwnode = domain->fwnode;
drivers/gpio/gpiolib.c
1941
return irq_create_mapping(domain, offset);
drivers/gpio/gpiolib.c
2046
struct irq_domain *domain,
drivers/gpio/gpiolib.c
2049
if (!domain)
drivers/gpio/gpiolib.c
2057
gc->irq.domain = domain;
drivers/gpio/gpiolib.c
2086
struct irq_domain *domain;
drivers/gpio/gpiolib.c
2116
domain = gpiochip_hierarchy_create_domain(gc);
drivers/gpio/gpiolib.c
2118
domain = gpiochip_simple_create_domain(gc);
drivers/gpio/gpiolib.c
2120
if (IS_ERR(domain))
drivers/gpio/gpiolib.c
2121
return PTR_ERR(domain);
drivers/gpio/gpiolib.c
2145
ret = gpiochip_irqchip_add_allocated_domain(gc, domain, false);
drivers/gpio/gpiolib.c
2177
if (!gc->irq.domain_is_allocated_externally && gc->irq.domain) {
drivers/gpio/gpiolib.c
2184
irq = irq_find_mapping(gc->irq.domain, offset);
drivers/gpio/gpiolib.c
2188
irq_domain_remove(gc->irq.domain);
drivers/gpio/gpiolib.c
2219
struct irq_domain *domain)
drivers/gpio/gpiolib.c
2221
return gpiochip_irqchip_add_allocated_domain(gc, domain, true);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
324
u32 domain, void **mem_obj, uint64_t *gpu_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
335
bp.domain = domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
359
r = amdgpu_bo_pin(bo, domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.c
421
bp.domain = AMDGPU_GEM_DOMAIN_GWS;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h
209
uint32_t domain,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h
237
uint32_t domain,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h
245
u32 domain, void **mem_obj, uint64_t *gpu_addr,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd.h
78
uint32_t domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
1123
amdgpu_bo_placement_from_domain(bo, mem->domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
1509
static int amdgpu_amdkfd_gpuvm_pin_bo(struct amdgpu_bo *bo, u32 domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
1534
ret = amdgpu_bo_pin(bo, domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
1536
pr_err("Error in Pinning BO to domain: %d\n", domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
1721
u32 domain, alloc_domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
1731
domain = alloc_domain = AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
1734
domain = AMDGPU_GEM_DOMAIN_GTT;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
1749
domain = alloc_domain = AMDGPU_GEM_DOMAIN_GTT;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
1752
domain = AMDGPU_GEM_DOMAIN_GTT;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
1838
(*mem)->domain = domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
1862
ret = amdgpu_amdkfd_bo_validate_and_fence(bo, domain,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
2022
uint32_t domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
2052
domain = mem->domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
2058
avm, domain_string(domain));
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
2408
(*mem)->domain = (bo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM) &&
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
2421
ret = amdgpu_amdkfd_bo_validate_and_fence(bo, (*mem)->domain,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
2713
amdgpu_bo_placement_from_domain(bo, mem->domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
2968
uint32_t domain = mem->domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
2974
ret = amdgpu_amdkfd_bo_validate(bo, domain, false);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
3143
(*mem)->domain = AMDGPU_GEM_DOMAIN_GWS;
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
417
static int amdgpu_amdkfd_bo_validate(struct amdgpu_bo *bo, uint32_t domain,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
431
amdgpu_bo_placement_from_domain(bo, domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
444
uint32_t domain,
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
452
ret = amdgpu_amdkfd_bo_validate(bo, domain, true);
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
73
#define domain_string(domain) domain_bit_to_string[ffs(domain)-1]
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
898
((mem->domain == AMDGPU_GEM_DOMAIN_VRAM) ||
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
901
if (mem->domain == AMDGPU_GEM_DOMAIN_VRAM)
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
919
(mem->domain == AMDGPU_GEM_DOMAIN_GTT && reuse_dmamap(adev, bo_adev)) ||
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
949
} else if (mem->domain == AMDGPU_GEM_DOMAIN_GTT ||
drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c
950
mem->domain == AMDGPU_GEM_DOMAIN_VRAM) {
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
806
uint32_t domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
825
domain = bo->preferred_domains;
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
827
domain = bo->allowed_domains;
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
829
domain = bo->preferred_domains;
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
832
domain = bo->allowed_domains;
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
836
amdgpu_bo_placement_from_domain(bo, domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
844
if (unlikely(r == -ENOMEM) && domain != bo->allowed_domains) {
drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c
845
domain = bo->allowed_domains;
drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c
39
u32 domain, uint32_t size)
drivers/gpu/drm/amd/amdgpu/amdgpu_csa.c
44
domain, bo,
drivers/gpu/drm/amd/amdgpu/amdgpu_csa.h
33
u32 domain, uint32_t size);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
4284
struct iommu_domain *domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
4286
domain = iommu_get_domain_for_dev(adev->dev);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
4287
if (!domain || domain->type == IOMMU_DOMAIN_IDENTITY)
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
4301
struct iommu_domain *domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
4303
domain = iommu_get_domain_for_dev(adev->dev);
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
4304
if (domain && (domain->type == IOMMU_DOMAIN_DMA ||
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c
4305
domain->type == IOMMU_DOMAIN_DMA_FQ))
drivers/gpu/drm/amd/amdgpu/amdgpu_display.c
560
uint32_t domain = AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/amdgpu/amdgpu_display.c
575
domain |= AMDGPU_GEM_DOMAIN_GTT;
drivers/gpu/drm/amd/amdgpu/amdgpu_display.c
578
return domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c
302
u32 domain = amdgpu_display_supported_domains(adev, bo->flags);
drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c
307
if (!reads || !(domain & AMDGPU_GEM_DOMAIN_GTT))
drivers/gpu/drm/amd/amdgpu/amdgpu_gart.c
174
bp.domain = AMDGPU_GEM_DOMAIN_CPU;
drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c
1263
u32 domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c
1278
domain = amdgpu_bo_get_preferred_domain(adev,
drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c
1280
r = amdgpu_gem_object_create(adev, args->size, 0, domain, flags,
drivers/gpu/drm/amd/amdgpu/amdgpu_gem.c
184
bp.domain = initial_domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
387
u32 domain = AMDGPU_GEM_DOMAIN_GTT;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
394
domain |= AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
430
PAGE_SIZE, domain, &ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.c
454
PAGE_SIZE, domain, &ring->mqd_obj,
drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c
67
bp.domain = AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c
506
generic_handle_domain_irq(adev->irq.domain, src_id);
drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c
769
adev->irq.domain = irq_domain_create_linear(NULL, AMDGPU_MAX_IRQ_SRC_ID,
drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c
771
if (!adev->irq.domain) {
drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c
789
if (adev->irq.domain) {
drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c
790
irq_domain_remove(adev->irq.domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c
791
adev->irq.domain = NULL;
drivers/gpu/drm/amd/amdgpu/amdgpu_irq.c
810
adev->irq.virq[src_id] = irq_create_mapping(adev->irq.domain, src_id);
drivers/gpu/drm/amd/amdgpu/amdgpu_irq.h
99
struct irq_domain *domain; /* GPU irq controller domain */
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
110
void amdgpu_bo_placement_from_domain(struct amdgpu_bo *abo, u32 domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
118
if (domain & AMDGPU_GEM_DOMAIN_VRAM) {
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
148
if (domain & AMDGPU_GEM_DOMAIN_DOORBELL) {
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
1541
u32 domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
1551
domain = bo->preferred_domains & AMDGPU_GEM_DOMAIN_MASK;
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
1552
if (!domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
1555
switch (rounddown_pow_of_two(domain)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
156
if (domain & AMDGPU_GEM_DOMAIN_GTT) {
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
1584
uint32_t domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
1586
if ((domain == (AMDGPU_GEM_DOMAIN_VRAM | AMDGPU_GEM_DOMAIN_GTT)) &&
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
1588
domain = AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
1590
domain = AMDGPU_GEM_DOMAIN_GTT;
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
1592
return domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
168
domain & abo->preferred_domains & AMDGPU_GEM_DOMAIN_VRAM)
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
173
if (domain & AMDGPU_GEM_DOMAIN_CPU) {
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
181
if (domain & AMDGPU_GEM_DOMAIN_GDS) {
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
189
if (domain & AMDGPU_GEM_DOMAIN_GWS) {
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
197
if (domain & AMDGPU_GEM_DOMAIN_OA) {
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
240
u32 domain, struct amdgpu_bo **bo_ptr,
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
255
bp.domain = domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
279
r = amdgpu_bo_pin(*bo_ptr, domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
339
u32 domain, struct amdgpu_bo **bo_ptr,
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
344
r = amdgpu_bo_create_reserved(adev, size, align, domain, bo_ptr,
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
376
struct dma_buf *dma_buf, u32 domain, struct amdgpu_bo **bo,
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
396
r = amdgpu_bo_pin(*bo, domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
551
unsigned long size, u32 domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
559
if (domain & AMDGPU_GEM_DOMAIN_GTT)
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
561
else if (domain & AMDGPU_GEM_DOMAIN_VRAM)
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
567
if (domain & AMDGPU_GEM_DOMAIN_GTT)
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
642
if (bp->domain & (AMDGPU_GEM_DOMAIN_GWS | AMDGPU_GEM_DOMAIN_OA)) {
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
647
} else if (bp->domain & AMDGPU_GEM_DOMAIN_GDS) {
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
657
if (!amdgpu_bo_validate_size(adev, size, bp->domain))
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
670
bp->domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
690
if (bp->domain & (AMDGPU_GEM_DOMAIN_GWS | AMDGPU_GEM_DOMAIN_OA |
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
694
amdgpu_bo_placement_from_domain(bo, bp->domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
925
int amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
935
if (bo->preferred_domains & domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
936
domain = bo->preferred_domains & domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
940
if (domain & AMDGPU_GEM_DOMAIN_GTT)
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
941
domain = AMDGPU_GEM_DOMAIN_GTT;
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
950
if (!(domain & amdgpu_mem_type_to_domain(mem_type)))
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
965
domain = amdgpu_bo_get_preferred_domain(adev, domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_object.c
973
amdgpu_bo_placement_from_domain(bo, domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_object.h
251
void amdgpu_bo_placement_from_domain(struct amdgpu_bo *abo, u32 domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_object.h
258
u32 domain, struct amdgpu_bo **bo_ptr,
drivers/gpu/drm/amd/amdgpu/amdgpu_object.h
262
u32 domain, struct amdgpu_bo **bo_ptr,
drivers/gpu/drm/amd/amdgpu/amdgpu_object.h
265
struct dma_buf *dbuf, u32 domain,
drivers/gpu/drm/amd/amdgpu/amdgpu_object.h
285
int amdgpu_bo_pin(struct amdgpu_bo *bo, u32 domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_object.h
312
uint32_t domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_object.h
337
unsigned size, u32 align, u32 domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_object.h
52
u32 domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
123
static inline bool amdgpu_reset_get_reset_domain(struct amdgpu_reset_domain *domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
125
return kref_get_unless_zero(&domain->refcount) != 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
128
static inline void amdgpu_reset_put_reset_domain(struct amdgpu_reset_domain *domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
130
if (domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
131
kref_put(&domain->refcount, amdgpu_reset_destroy_reset_domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
134
static inline bool amdgpu_reset_domain_schedule(struct amdgpu_reset_domain *domain,
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
137
return queue_work(domain->wq, work);
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
140
static inline bool amdgpu_reset_pending(struct amdgpu_reset_domain *domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
142
lockdep_assert_held(&domain->sem);
drivers/gpu/drm/amd/amdgpu/amdgpu_reset.h
143
return rwsem_is_contended(&domain->sem);
drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c
49
unsigned int size, u32 suballoc_align, u32 domain)
drivers/gpu/drm/amd/amdgpu/amdgpu_sa.c
53
r = amdgpu_bo_create_kernel(adev, size, AMDGPU_GPU_PAGE_SIZE, domain,
drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c
1936
bp.domain = 0;
drivers/gpu/drm/amd/amdgpu/amdgpu_userq.c
506
bp.domain = AMDGPU_GEM_DOMAIN_GTT;
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
600
uint32_t domain = AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/amdgpu/amdgpu_uvd.c
602
amdgpu_bo_placement_from_domain(bo, domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_vkms.c
307
uint32_t domain;
drivers/gpu/drm/amd/amdgpu/amdgpu_vkms.c
338
domain = amdgpu_display_supported_domains(adev, rbo->flags);
drivers/gpu/drm/amd/amdgpu/amdgpu_vkms.c
340
domain = AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/amdgpu/amdgpu_vkms.c
343
r = amdgpu_bo_pin(rbo, domain);
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_pt.c
454
bp.domain = AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_pt.c
456
bp.domain = AMDGPU_GEM_DOMAIN_GTT;
drivers/gpu/drm/amd/amdgpu/amdgpu_vm_pt.c
458
bp.domain = amdgpu_bo_get_preferred_domain(adev, bp.domain);
drivers/gpu/drm/amd/amdkfd/kfd_chardev.c
1340
((struct kgd_mem *)mem)->domain);
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1212
struct svm_range *prange, int domain)
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1219
bool snoop = (domain != SVM_RANGE_VRAM_DOMAIN);
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1224
if (domain == SVM_RANGE_VRAM_DOMAIN)
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1229
if (domain == SVM_RANGE_VRAM_DOMAIN) {
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1245
if (domain == SVM_RANGE_VRAM_DOMAIN) {
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1271
if (domain == SVM_RANGE_VRAM_DOMAIN) {
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1311
if (domain == SVM_RANGE_VRAM_DOMAIN) {
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
1335
pte_flags |= (domain == SVM_RANGE_VRAM_DOMAIN) ? 0 : AMDGPU_PTE_SYSTEM;
drivers/gpu/drm/amd/amdkfd/kfd_svm.c
596
bp.domain = AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/amdkfd/kfd_topology.c
2105
dev->node_props.domain = pci_domain_nr(gpu->adev->pdev->bus);
drivers/gpu/drm/amd/amdkfd/kfd_topology.c
481
dev->node_props.domain);
drivers/gpu/drm/amd/amdkfd/kfd_topology.h
71
uint32_t domain;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
1722
u32 domain = (type == DC_MEM_ALLOC_TYPE_GART) ?
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm.c
1731
domain, &da->bo,
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_plane.c
933
uint32_t domain;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_plane.c
963
domain = amdgpu_display_supported_domains(adev, rbo->flags);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_plane.c
965
domain = AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_plane.c
968
r = amdgpu_bo_pin(rbo, domain);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_wb.c
115
domain = amdgpu_display_supported_domains(adev, rbo->flags);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_wb.c
118
r = amdgpu_bo_pin(rbo, domain);
drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_wb.c
90
uint32_t domain;
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
1000
driver_table->domain = AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
1015
driver_table->domain,
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
1086
memory_pool->domain =
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
1099
memory_pool->domain,
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
1144
dummy_read_1_table->domain,
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
978
tables[SMU_TABLE_PMSTATUSLOG].domain,
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
988
driver_table->domain = AMDGPU_GEM_DOMAIN_VRAM | AMDGPU_GEM_DOMAIN_GTT;
drivers/gpu/drm/amd/pm/swsmu/amdgpu_smu.c
997
if ((tables[i].domain &
drivers/gpu/drm/amd/pm/swsmu/inc/amdgpu_smu.h
252
tables[table_id].domain = d; \
drivers/gpu/drm/amd/pm/swsmu/inc/amdgpu_smu.h
266
uint8_t domain;
drivers/gpu/drm/amd/pm/swsmu/smu11/navi10_ppt.c
517
dummy_read_1_table->domain = AMDGPU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/amd/ras/rascore/ras_cmd.h
126
uint32_t domain : 16;
drivers/gpu/drm/etnaviv/etnaviv_gem_submit.c
348
submit->pmrs[i].domain = r->domain;
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
100
const struct etnaviv_pm_domain *domain,
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
114
const struct etnaviv_pm_domain *domain,
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
19
const struct etnaviv_pm_domain *domain,
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
41
const struct etnaviv_pm_domain *domain,
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
44
gpu_write(gpu, domain->profile_config, signal->data);
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
46
return gpu_read(gpu, domain->profile_read);
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
483
const struct etnaviv_pm_domain *domain = NULL;
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
497
domain = meta->domains + (index - offset);
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
500
return domain;
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
504
struct drm_etnaviv_pm_domain *domain)
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
509
if (domain->iter >= nr_domains)
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
512
dom = pm_domain(gpu, domain->iter);
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
516
domain->id = domain->iter;
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
517
domain->nr_signals = dom->nr_signals;
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
518
strscpy_pad(domain->name, dom->name, sizeof(domain->name));
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
520
domain->iter++;
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
521
if (domain->iter == nr_domains)
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
522
domain->iter = 0xff;
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
534
if (signal->domain >= nr_domains)
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
537
dom = pm_domain(gpu, signal->domain);
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
562
if (r->domain >= meta->nr_domains)
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
565
dom = meta->domains + r->domain;
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
58
const struct etnaviv_pm_domain *domain,
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
582
dom = meta->domains + pmr->domain;
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
69
value += perf_reg_read(gpu, domain, signal);
drivers/gpu/drm/etnaviv/etnaviv_perfmon.c
79
const struct etnaviv_pm_domain *domain,
drivers/gpu/drm/etnaviv/etnaviv_perfmon.h
17
u8 domain;
drivers/gpu/drm/etnaviv/etnaviv_perfmon.h
27
struct drm_etnaviv_pm_domain *domain);
drivers/gpu/drm/gma500/cdv_device.c
148
static inline u32 CDV_MSG_READ32(int domain, uint port, uint offset)
drivers/gpu/drm/gma500/cdv_device.c
152
struct pci_dev *pci_root = pci_get_domain_bus_and_slot(domain, 0, 0);
drivers/gpu/drm/gma500/cdv_device.c
159
static inline void CDV_MSG_WRITE32(int domain, uint port, uint offset,
drivers/gpu/drm/gma500/cdv_device.c
163
struct pci_dev *pci_root = pci_get_domain_bus_and_slot(domain, 0, 0);
drivers/gpu/drm/gma500/cdv_device.c
181
int domain = pci_domain_nr(pdev->bus);
drivers/gpu/drm/gma500/cdv_device.c
184
dev_priv->apm_base = CDV_MSG_READ32(domain, PSB_PUNIT_PORT,
drivers/gpu/drm/gma500/cdv_device.c
186
dev_priv->ospm_base = CDV_MSG_READ32(domain, PSB_PUNIT_PORT,
drivers/gpu/drm/gma500/mid_bios.c
101
pci_get_domain_bus_and_slot(domain, 0, PCI_DEVFN(2, 0));
drivers/gpu/drm/gma500/mid_bios.c
99
int domain = pci_domain_nr(pdev->bus);
drivers/gpu/drm/gma500/psb_drv.c
268
int domain = pci_domain_nr(pdev->bus);
drivers/gpu/drm/gma500/psb_drv.c
271
pci_get_domain_bus_and_slot(domain, 0,
drivers/gpu/drm/gma500/psb_drv.c
293
pci_get_domain_bus_and_slot(domain, 0,
drivers/gpu/drm/i915/display/intel_ddi.c
1003
enum intel_display_power_domain domain =
drivers/gpu/drm/i915/display/intel_ddi.c
1011
intel_display_power_put(display, domain, wf);
drivers/gpu/drm/i915/display/intel_ddi.c
987
enum intel_display_power_domain domain =
drivers/gpu/drm/i915/display/intel_ddi.c
992
if (domain == POWER_DOMAIN_INVALID)
drivers/gpu/drm/i915/display/intel_ddi.c
995
dig_port->aux_wakeref = intel_display_power_get(display, domain);
drivers/gpu/drm/i915/display/intel_display.c
1989
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_display.c
2003
for_each_power_domain(domain, &new_domains)
drivers/gpu/drm/i915/display/intel_display.c
2006
domain);
drivers/gpu/drm/i915/display/intel_display_irq.c
2132
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_display_irq.c
2134
domain = POWER_DOMAIN_TRANSCODER(trans);
drivers/gpu/drm/i915/display/intel_display_irq.c
2135
if (!intel_display_power_is_enabled(display, domain))
drivers/gpu/drm/i915/display/intel_display_irq.c
2379
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_display_irq.c
2381
domain = POWER_DOMAIN_TRANSCODER(trans);
drivers/gpu/drm/i915/display/intel_display_irq.c
2382
if (!intel_display_power_is_enabled(display, domain))
drivers/gpu/drm/i915/display/intel_display_power.c
203
MISSING_CASE(domain);
drivers/gpu/drm/i915/display/intel_display_power.c
209
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
2176
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_display_power.c
2181
for_each_power_domain(domain, intel_power_well_domains(power_well))
drivers/gpu/drm/i915/display/intel_display_power.c
2183
intel_display_power_domain_str(domain),
drivers/gpu/drm/i915/display/intel_display_power.c
2184
power_domains->domain_use_count[domain]);
drivers/gpu/drm/i915/display/intel_display_power.c
219
for_each_power_domain_well_reverse(display, power_well, domain) {
drivers/gpu/drm/i915/display/intel_display_power.c
2210
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_display_power.c
2224
for_each_power_domain(domain, intel_power_well_domains(power_well))
drivers/gpu/drm/i915/display/intel_display_power.c
2225
domains_count += power_domains->domain_use_count[domain];
drivers/gpu/drm/i915/display/intel_display_power.c
250
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
256
ret = __intel_display_power_is_enabled(display, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
392
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_display_power.c
401
for_each_power_domain(domain, &async_put_mask)
drivers/gpu/drm/i915/display/intel_display_power.c
403
power_domains->domain_use_count[domain] != 1);
drivers/gpu/drm/i915/display/intel_display_power.c
414
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_display_power.c
417
for_each_power_domain(domain, mask)
drivers/gpu/drm/i915/display/intel_display_power.c
419
intel_display_power_domain_str(domain),
drivers/gpu/drm/i915/display/intel_display_power.c
420
power_domains->domain_use_count[domain]);
drivers/gpu/drm/i915/display/intel_display_power.c
47
intel_display_power_domain_str(enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
471
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
475
clear_bit(domain, power_domains->async_put_domains[0].bits);
drivers/gpu/drm/i915/display/intel_display_power.c
476
clear_bit(domain, power_domains->async_put_domains[1].bits);
drivers/gpu/drm/i915/display/intel_display_power.c
49
switch (domain) {
drivers/gpu/drm/i915/display/intel_display_power.c
492
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
499
if (!test_bit(domain, async_put_mask.bits))
drivers/gpu/drm/i915/display/intel_display_power.c
502
async_put_domains_clear_domain(power_domains, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
521
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
526
if (intel_display_power_grab_async_put_ref(display, domain))
drivers/gpu/drm/i915/display/intel_display_power.c
529
for_each_power_domain_well(display, power_well, domain)
drivers/gpu/drm/i915/display/intel_display_power.c
532
power_domains->domain_use_count[domain]++;
drivers/gpu/drm/i915/display/intel_display_power.c
548
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
556
__intel_display_power_get_domain(display, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
576
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
588
if (__intel_display_power_is_enabled(display, domain)) {
drivers/gpu/drm/i915/display/intel_display_power.c
589
__intel_display_power_get_domain(display, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
607
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
611
const char *name = intel_display_power_domain_str(domain);
drivers/gpu/drm/i915/display/intel_display_power.c
614
drm_WARN(display->drm, !power_domains->domain_use_count[domain],
drivers/gpu/drm/i915/display/intel_display_power.c
619
test_bit(domain, async_put_mask.bits),
drivers/gpu/drm/i915/display/intel_display_power.c
623
power_domains->domain_use_count[domain]--;
drivers/gpu/drm/i915/display/intel_display_power.c
625
for_each_power_domain_well_reverse(display, power_well, domain)
drivers/gpu/drm/i915/display/intel_display_power.c
630
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
635
__intel_display_power_put_domain(display, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
661
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_display_power.c
666
for_each_power_domain(domain, mask) {
drivers/gpu/drm/i915/display/intel_display_power.c
668
async_put_domains_clear_domain(power_domains, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
669
__intel_display_power_put_domain(display, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
742
enum intel_display_power_domain domain,
drivers/gpu/drm/i915/display/intel_display_power.c
755
if (power_domains->domain_use_count[domain] > 1) {
drivers/gpu/drm/i915/display/intel_display_power.c
756
__intel_display_power_put_domain(display, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
761
drm_WARN_ON(display->drm, power_domains->domain_use_count[domain] != 1);
drivers/gpu/drm/i915/display/intel_display_power.c
765
set_bit(domain, power_domains->async_put_domains[1].bits);
drivers/gpu/drm/i915/display/intel_display_power.c
769
set_bit(domain, power_domains->async_put_domains[0].bits);
drivers/gpu/drm/i915/display/intel_display_power.c
855
enum intel_display_power_domain domain,
drivers/gpu/drm/i915/display/intel_display_power.c
858
__intel_display_power_put(display, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
876
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
878
__intel_display_power_put(display, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
886
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
890
drm_WARN_ON(display->drm, test_bit(domain, power_domain_set->mask.bits));
drivers/gpu/drm/i915/display/intel_display_power.c
892
wf = intel_display_power_get(display, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
894
power_domain_set->wakerefs[domain] = wf;
drivers/gpu/drm/i915/display/intel_display_power.c
896
set_bit(domain, power_domain_set->mask.bits);
drivers/gpu/drm/i915/display/intel_display_power.c
902
enum intel_display_power_domain domain)
drivers/gpu/drm/i915/display/intel_display_power.c
906
drm_WARN_ON(display->drm, test_bit(domain, power_domain_set->mask.bits));
drivers/gpu/drm/i915/display/intel_display_power.c
908
wf = intel_display_power_get_if_enabled(display, domain);
drivers/gpu/drm/i915/display/intel_display_power.c
913
power_domain_set->wakerefs[domain] = wf;
drivers/gpu/drm/i915/display/intel_display_power.c
915
set_bit(domain, power_domain_set->mask.bits);
drivers/gpu/drm/i915/display/intel_display_power.c
925
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_display_power.c
930
for_each_power_domain(domain, mask) {
drivers/gpu/drm/i915/display/intel_display_power.c
934
wf = fetch_and_zero(&power_domain_set->wakerefs[domain]);
drivers/gpu/drm/i915/display/intel_display_power.c
936
intel_display_power_put(display, domain, wf);
drivers/gpu/drm/i915/display/intel_display_power.c
937
clear_bit(domain, power_domain_set->mask.bits);
drivers/gpu/drm/i915/display/intel_display_power.h
191
enum intel_display_power_domain domain);
drivers/gpu/drm/i915/display/intel_display_power.h
193
enum intel_display_power_domain domain);
drivers/gpu/drm/i915/display/intel_display_power.h
196
enum intel_display_power_domain domain);
drivers/gpu/drm/i915/display/intel_display_power.h
198
enum intel_display_power_domain domain,
drivers/gpu/drm/i915/display/intel_display_power.h
204
enum intel_display_power_domain domain,
drivers/gpu/drm/i915/display/intel_display_power.h
208
enum intel_display_power_domain domain,
drivers/gpu/drm/i915/display/intel_display_power.h
211
__intel_display_power_put_async(display, domain, wakeref, -1);
drivers/gpu/drm/i915/display/intel_display_power.h
216
enum intel_display_power_domain domain,
drivers/gpu/drm/i915/display/intel_display_power.h
220
__intel_display_power_put_async(display, domain, wakeref, delay_ms);
drivers/gpu/drm/i915/display/intel_display_power.h
224
enum intel_display_power_domain domain);
drivers/gpu/drm/i915/display/intel_display_power.h
228
enum intel_display_power_domain domain,
drivers/gpu/drm/i915/display/intel_display_power.h
231
intel_display_power_put_unchecked(display, domain);
drivers/gpu/drm/i915/display/intel_display_power.h
236
enum intel_display_power_domain domain,
drivers/gpu/drm/i915/display/intel_display_power.h
239
__intel_display_power_put_async(display, domain, INTEL_WAKEREF_DEF, -1);
drivers/gpu/drm/i915/display/intel_display_power.h
244
enum intel_display_power_domain domain,
drivers/gpu/drm/i915/display/intel_display_power.h
248
__intel_display_power_put_async(display, domain, INTEL_WAKEREF_DEF, delay_ms);
drivers/gpu/drm/i915/display/intel_display_power.h
255
enum intel_display_power_domain domain);
drivers/gpu/drm/i915/display/intel_display_power.h
260
enum intel_display_power_domain domain);
drivers/gpu/drm/i915/display/intel_display_power.h
302
#define __with_intel_display_power(display, domain, wf) \
drivers/gpu/drm/i915/display/intel_display_power.h
303
for (struct ref_tracker *(wf) = intel_display_power_get((display), (domain)); (wf); \
drivers/gpu/drm/i915/display/intel_display_power.h
304
intel_display_power_put_async((display), (domain), (wf)), (wf) = NULL)
drivers/gpu/drm/i915/display/intel_display_power.h
306
#define with_intel_display_power(display, domain) \
drivers/gpu/drm/i915/display/intel_display_power.h
307
__with_intel_display_power(display, domain, __UNIQUE_ID(wakeref))
drivers/gpu/drm/i915/display/intel_display_power.h
309
#define __with_intel_display_power_if_enabled(display, domain, wf) \
drivers/gpu/drm/i915/display/intel_display_power.h
310
for (struct ref_tracker *(wf) = intel_display_power_get_if_enabled((display), (domain)); (wf); \
drivers/gpu/drm/i915/display/intel_display_power.h
311
intel_display_power_put_async((display), (domain), (wf)), (wf) = NULL)
drivers/gpu/drm/i915/display/intel_display_power.h
313
#define with_intel_display_power_if_enabled(display, domain) \
drivers/gpu/drm/i915/display/intel_display_power.h
314
__with_intel_display_power_if_enabled(display, domain, __UNIQUE_ID(wakeref))
drivers/gpu/drm/i915/display/intel_tc.c
1173
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_tc.c
1175
tc_cold_wref = __tc_cold_block(tc, &domain);
drivers/gpu/drm/i915/display/intel_tc.c
1194
__tc_cold_unblock(tc, domain, tc_cold_wref);
drivers/gpu/drm/i915/display/intel_tc.c
186
__tc_cold_block(struct intel_tc_port *tc, enum intel_display_power_domain *domain)
drivers/gpu/drm/i915/display/intel_tc.c
190
*domain = tc_phy_cold_off_domain(tc);
drivers/gpu/drm/i915/display/intel_tc.c
192
return intel_display_power_get(display, *domain);
drivers/gpu/drm/i915/display/intel_tc.c
198
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_tc.c
201
wakeref = __tc_cold_block(tc, &domain);
drivers/gpu/drm/i915/display/intel_tc.c
203
tc->lock_power_domain = domain;
drivers/gpu/drm/i915/display/intel_tc.c
209
__tc_cold_unblock(struct intel_tc_port *tc, enum intel_display_power_domain domain,
drivers/gpu/drm/i915/display/intel_tc.c
214
intel_display_power_put(display, domain, wakeref);
drivers/gpu/drm/i915/display/intel_tc.c
221
enum intel_display_power_domain domain = tc_phy_cold_off_domain(tc);
drivers/gpu/drm/i915/display/intel_tc.c
224
drm_WARN_ON(display->drm, tc->lock_power_domain != domain);
drivers/gpu/drm/i915/display/intel_tc.c
226
__tc_cold_unblock(tc, domain, wakeref);
drivers/gpu/drm/i915/display/intel_tc.c
627
enum intel_display_power_domain domain;
drivers/gpu/drm/i915/display/intel_tc.c
630
tc_cold_wref = __tc_cold_block(tc, &domain);
drivers/gpu/drm/i915/display/intel_tc.c
639
__tc_cold_unblock(tc, domain, tc_cold_wref);
drivers/gpu/drm/i915/i915_gmch.c
20
int domain = pci_domain_nr(to_pci_dev(i915->drm.dev)->bus);
drivers/gpu/drm/i915/i915_gmch.c
22
i915->gmch.pdev = pci_get_domain_bus_and_slot(domain, 0, PCI_DEVFN(0, 0));
drivers/gpu/drm/i915/intel_uncore.c
1888
struct intel_uncore_forcewake_domain *domain;
drivers/gpu/drm/i915/intel_uncore.c
1893
for_each_fw_domain_masked(domain, fw_domains, uncore, tmp)
drivers/gpu/drm/i915/intel_uncore.c
1894
fw_domain_arm_timer(domain);
drivers/gpu/drm/i915/intel_uncore.c
436
struct intel_uncore_forcewake_domain *domain =
drivers/gpu/drm/i915/intel_uncore.c
438
struct intel_uncore *uncore = domain->uncore;
drivers/gpu/drm/i915/intel_uncore.c
443
if (xchg(&domain->active, false))
drivers/gpu/drm/i915/intel_uncore.c
448
uncore->fw_domains_timer &= ~domain->mask;
drivers/gpu/drm/i915/intel_uncore.c
450
GEM_BUG_ON(!domain->wake_count);
drivers/gpu/drm/i915/intel_uncore.c
451
if (--domain->wake_count == 0)
drivers/gpu/drm/i915/intel_uncore.c
452
fw_domains_put(uncore, domain->mask);
drivers/gpu/drm/i915/intel_uncore.c
464
struct intel_uncore_forcewake_domain *domain;
drivers/gpu/drm/i915/intel_uncore.c
479
for_each_fw_domain(domain, uncore, tmp) {
drivers/gpu/drm/i915/intel_uncore.c
480
smp_store_mb(domain->active, false);
drivers/gpu/drm/i915/intel_uncore.c
481
if (hrtimer_cancel(&domain->timer) == 0)
drivers/gpu/drm/i915/intel_uncore.c
484
intel_uncore_fw_release_timer(&domain->timer);
drivers/gpu/drm/i915/intel_uncore.c
489
for_each_fw_domain(domain, uncore, tmp) {
drivers/gpu/drm/i915/intel_uncore.c
490
if (hrtimer_active(&domain->timer))
drivers/gpu/drm/i915/intel_uncore.c
491
active_domains |= domain->mask;
drivers/gpu/drm/i915/intel_uncore.c
666
struct intel_uncore_forcewake_domain *domain;
drivers/gpu/drm/i915/intel_uncore.c
671
for_each_fw_domain_masked(domain, fw_domains, uncore, tmp) {
drivers/gpu/drm/i915/intel_uncore.c
672
if (domain->wake_count++) {
drivers/gpu/drm/i915/intel_uncore.c
673
fw_domains &= ~domain->mask;
drivers/gpu/drm/i915/intel_uncore.c
674
domain->active = true;
drivers/gpu/drm/i915/intel_uncore.c
768
struct intel_uncore_forcewake_domain *domain;
drivers/gpu/drm/i915/intel_uncore.c
773
for_each_fw_domain_masked(domain, fw_domains, uncore, tmp) {
drivers/gpu/drm/i915/intel_uncore.c
774
GEM_BUG_ON(!domain->wake_count);
drivers/gpu/drm/i915/intel_uncore.c
776
if (--domain->wake_count) {
drivers/gpu/drm/i915/intel_uncore.c
777
domain->active = true;
drivers/gpu/drm/i915/intel_uncore.c
782
!(domain->uncore->fw_domains_timer & domain->mask))
drivers/gpu/drm/i915/intel_uncore.c
783
fw_domain_arm_timer(domain);
drivers/gpu/drm/i915/intel_uncore.c
785
fw_domains_put(uncore, domain->mask);
drivers/gpu/drm/i915/intel_uncore.c
831
struct intel_uncore_forcewake_domain *domain;
drivers/gpu/drm/i915/intel_uncore.c
838
for_each_fw_domain_masked(domain, fw_domains, uncore, tmp) {
drivers/gpu/drm/i915/intel_uncore.c
839
WRITE_ONCE(domain->active, false);
drivers/gpu/drm/i915/intel_uncore.c
840
if (hrtimer_cancel(&domain->timer))
drivers/gpu/drm/i915/intel_uncore.c
841
intel_uncore_fw_release_timer(&domain->timer);
drivers/gpu/drm/i915/intel_uncore.c
877
struct intel_uncore_forcewake_domain *domain;
drivers/gpu/drm/i915/intel_uncore.c
899
for_each_fw_domain_masked(domain, fw_domains, uncore, tmp) {
drivers/gpu/drm/i915/intel_uncore.c
900
unsigned int actual = READ_ONCE(domain->wake_count);
drivers/gpu/drm/i915/intel_uncore.c
903
if (uncore->fw_domains_timer & domain->mask)
drivers/gpu/drm/i915/intel_uncore.c
908
domain->id, actual))
drivers/gpu/drm/i915/selftests/intel_uncore.c
163
struct intel_uncore_forcewake_domain *domain;
drivers/gpu/drm/i915/selftests/intel_uncore.c
202
for_each_fw_domain(domain, uncore, tmp) {
drivers/gpu/drm/i915/selftests/intel_uncore.c
203
smp_store_mb(domain->active, false);
drivers/gpu/drm/i915/selftests/intel_uncore.c
204
if (!hrtimer_cancel(&domain->timer))
drivers/gpu/drm/i915/selftests/intel_uncore.c
207
intel_uncore_fw_release_timer(&domain->timer);
drivers/gpu/drm/i915/selftests/intel_uncore.c
224
for_each_fw_domain_masked(domain, fw_domains, uncore, tmp) {
drivers/gpu/drm/i915/selftests/intel_uncore.c
225
if (!domain->wake_count)
drivers/gpu/drm/i915/selftests/intel_uncore.c
229
intel_uncore_forcewake_domain_to_str(domain->id));
drivers/gpu/drm/i915/selftests/intel_uncore.c
239
for_each_fw_domain_masked(domain, fw_domains, uncore, tmp) {
drivers/gpu/drm/i915/selftests/intel_uncore.c
240
smp_store_mb(domain->active, false);
drivers/gpu/drm/i915/selftests/intel_uncore.c
241
if (hrtimer_cancel(&domain->timer))
drivers/gpu/drm/i915/selftests/intel_uncore.c
242
intel_uncore_fw_release_timer(&domain->timer);
drivers/gpu/drm/i915/selftests/intel_uncore.c
245
err = wait_ack_clear(domain, FORCEWAKE_KERNEL);
drivers/gpu/drm/i915/selftests/intel_uncore.c
249
intel_uncore_forcewake_domain_to_str(domain->id));
drivers/gpu/drm/imx/dc/dc-ic.c
101
virq = irq_find_mapping(data->domain, entry->irq);
drivers/gpu/drm/imx/dc/dc-ic.c
177
data->domain = irq_domain_add_linear(dev->of_node, IRQ_COUNT,
drivers/gpu/drm/imx/dc/dc-ic.c
179
if (!data->domain) {
drivers/gpu/drm/imx/dc/dc-ic.c
184
irq_domain_set_pm_device(data->domain, dev);
drivers/gpu/drm/imx/dc/dc-ic.c
186
ret = irq_alloc_domain_generic_chips(data->domain, 32, 1, "DC",
drivers/gpu/drm/imx/dc/dc-ic.c
190
irq_domain_remove(data->domain);
drivers/gpu/drm/imx/dc/dc-ic.c
196
gc = irq_get_domain_generic_chip(data->domain, i);
drivers/gpu/drm/imx/dc/dc-ic.c
236
irq_domain_remove(data->domain);
drivers/gpu/drm/imx/dc/dc-ic.c
35
struct irq_domain *domain;
drivers/gpu/drm/loongson/lsdc_gem.c
143
u32 domain,
drivers/gpu/drm/loongson/lsdc_gem.c
154
lbo = lsdc_bo_create(ddev, domain, size, kerenl, sg, resv);
drivers/gpu/drm/loongson/lsdc_gem.c
207
u32 domain = LSDC_GEM_DOMAIN_VRAM;
drivers/gpu/drm/loongson/lsdc_gem.c
221
gobj = lsdc_gem_object_create(ddev, domain, args->size, false, NULL, NULL);
drivers/gpu/drm/loongson/lsdc_gem.h
31
u32 domain,
drivers/gpu/drm/loongson/lsdc_ttm.c
287
int lsdc_bo_pin(struct lsdc_bo *lbo, u32 domain, u64 *gpu_addr)
drivers/gpu/drm/loongson/lsdc_ttm.c
297
if (lbo->sharing_count && domain == LSDC_GEM_DOMAIN_VRAM)
drivers/gpu/drm/loongson/lsdc_ttm.c
300
if (domain)
drivers/gpu/drm/loongson/lsdc_ttm.c
301
lsdc_bo_set_placement(lbo, domain);
drivers/gpu/drm/loongson/lsdc_ttm.c
309
if (domain == LSDC_GEM_DOMAIN_VRAM)
drivers/gpu/drm/loongson/lsdc_ttm.c
311
else if (domain == LSDC_GEM_DOMAIN_GTT)
drivers/gpu/drm/loongson/lsdc_ttm.c
32
const char *lsdc_domain_to_str(u32 domain)
drivers/gpu/drm/loongson/lsdc_ttm.c
34
switch (domain) {
drivers/gpu/drm/loongson/lsdc_ttm.c
431
u32 domain,
drivers/gpu/drm/loongson/lsdc_ttm.c
450
lbo->initial_domain = domain & (LSDC_GEM_DOMAIN_VRAM |
drivers/gpu/drm/loongson/lsdc_ttm.c
473
lsdc_bo_set_placement(lbo, domain);
drivers/gpu/drm/loongson/lsdc_ttm.c
48
static void lsdc_bo_set_placement(struct lsdc_bo *lbo, u32 domain)
drivers/gpu/drm/loongson/lsdc_ttm.c
487
u32 domain,
drivers/gpu/drm/loongson/lsdc_ttm.c
493
lbo = lsdc_bo_create(ddev, domain, size, true, NULL, NULL);
drivers/gpu/drm/loongson/lsdc_ttm.c
503
ret = lsdc_bo_pin(lbo, domain, NULL);
drivers/gpu/drm/loongson/lsdc_ttm.c
59
if (domain & LSDC_GEM_DOMAIN_VRAM) {
drivers/gpu/drm/loongson/lsdc_ttm.c
64
if (domain & LSDC_GEM_DOMAIN_GTT) {
drivers/gpu/drm/loongson/lsdc_ttm.c
69
if (domain & LSDC_GEM_DOMAIN_SYSTEM) {
drivers/gpu/drm/loongson/lsdc_ttm.h
63
const char *lsdc_domain_to_str(u32 domain);
drivers/gpu/drm/loongson/lsdc_ttm.h
66
u32 domain,
drivers/gpu/drm/loongson/lsdc_ttm.h
73
u32 domain,
drivers/gpu/drm/loongson/lsdc_ttm.h
81
int lsdc_bo_pin(struct lsdc_bo *lbo, u32 domain, u64 *gpu_addr);
drivers/gpu/drm/msm/msm_iommu.c
129
iommu_flush_iotlb_all(to_msm_iommu(pagetable->parent)->domain);
drivers/gpu/drm/msm/msm_iommu.c
16
struct iommu_domain *domain;
drivers/gpu/drm/msm/msm_iommu.c
271
return &iommu->domain->geometry;
drivers/gpu/drm/msm/msm_iommu.c
490
static int msm_gpu_fault_handler(struct iommu_domain *domain, struct device *dev,
drivers/gpu/drm/msm/msm_iommu.c
629
static int msm_gpu_fault_handler(struct iommu_domain *domain, struct device *dev,
drivers/gpu/drm/msm/msm_iommu.c
649
static int msm_disp_fault_handler(struct iommu_domain *domain, struct device *dev,
drivers/gpu/drm/msm/msm_iommu.c
672
iommu_detach_device(iommu->domain, mmu->dev);
drivers/gpu/drm/msm/msm_iommu.c
688
ret = iommu_map_sgtable(iommu->domain, iova, sgt, prot);
drivers/gpu/drm/msm/msm_iommu.c
701
iommu_unmap(iommu->domain, iova, len);
drivers/gpu/drm/msm/msm_iommu.c
709
iommu_domain_free(iommu->domain);
drivers/gpu/drm/msm/msm_iommu.c
724
struct iommu_domain *domain;
drivers/gpu/drm/msm/msm_iommu.c
731
domain = iommu_paging_domain_alloc(dev);
drivers/gpu/drm/msm/msm_iommu.c
732
if (IS_ERR(domain))
drivers/gpu/drm/msm/msm_iommu.c
733
return ERR_CAST(domain);
drivers/gpu/drm/msm/msm_iommu.c
735
iommu_set_pgtable_quirks(domain, quirks);
drivers/gpu/drm/msm/msm_iommu.c
739
iommu_domain_free(domain);
drivers/gpu/drm/msm/msm_iommu.c
743
iommu->domain = domain;
drivers/gpu/drm/msm/msm_iommu.c
748
ret = iommu_attach_device(iommu->domain, dev);
drivers/gpu/drm/msm/msm_iommu.c
750
iommu_domain_free(domain);
drivers/gpu/drm/msm/msm_iommu.c
768
iommu_set_fault_handler(iommu->domain, msm_disp_fault_handler, iommu);
drivers/gpu/drm/msm/msm_iommu.c
792
iommu_set_fault_handler(iommu->domain, msm_gpu_fault_handler, iommu);
drivers/gpu/drm/msm/msm_mdss.c
134
static int msm_mdss_irqdomain_map(struct irq_domain *domain,
drivers/gpu/drm/msm/msm_mdss.c
137
struct msm_mdss *msm_mdss = domain->host_data;
drivers/gpu/drm/msm/msm_mdss.c
153
struct irq_domain *domain;
drivers/gpu/drm/msm/msm_mdss.c
157
domain = irq_domain_create_linear(dev_fwnode(dev), 32, &msm_mdss_irqdomain_ops, msm_mdss);
drivers/gpu/drm/msm/msm_mdss.c
158
if (!domain) {
drivers/gpu/drm/msm/msm_mdss.c
164
msm_mdss->irq_controller.domain = domain;
drivers/gpu/drm/msm/msm_mdss.c
341
irq_domain_remove(msm_mdss->irq_controller.domain);
drivers/gpu/drm/msm/msm_mdss.c
342
msm_mdss->irq_controller.domain = NULL;
drivers/gpu/drm/msm/msm_mdss.c
40
struct irq_domain *domain;
drivers/gpu/drm/msm/msm_mdss.c
90
rc = generic_handle_domain_irq(msm_mdss->irq_controller.domain,
drivers/gpu/drm/nouveau/dispnv04/arb.c
213
int domain = pci_domain_nr(pdev->bus);
drivers/gpu/drm/nouveau/dispnv04/arb.c
215
pci_read_config_dword(pci_get_domain_bus_and_slot(domain, 0, 1),
drivers/gpu/drm/nouveau/dispnv04/hw.c
220
int domain;
drivers/gpu/drm/nouveau/dispnv04/hw.c
222
domain = pci_domain_nr(pdev->bus);
drivers/gpu/drm/nouveau/dispnv04/hw.c
227
pci_read_config_dword(pci_get_domain_bus_and_slot(domain, 0, 3),
drivers/gpu/drm/nouveau/dispnv04/hw.c
239
pci_read_config_dword(pci_get_domain_bus_and_slot(domain, 0, 5),
drivers/gpu/drm/nouveau/include/nvkm/core/tegra.h
28
struct iommu_domain *domain;
drivers/gpu/drm/nouveau/include/nvkm/subdev/bios/boost.h
19
u8 domain;
drivers/gpu/drm/nouveau/include/nvkm/subdev/clk.h
58
u32 domain[nv_clk_src_max];
drivers/gpu/drm/nouveau/nouveau_bo.c
211
nouveau_bo_alloc(struct nouveau_cli *cli, u64 *size, int *align, u32 domain,
drivers/gpu/drm/nouveau/nouveau_bo.c
238
if (domain & NOUVEAU_GEM_DOMAIN_COHERENT) {
drivers/gpu/drm/nouveau/nouveau_bo.c
279
(domain & NOUVEAU_GEM_DOMAIN_VRAM) && !vmm->page[i].vram)
drivers/gpu/drm/nouveau/nouveau_bo.c
281
if ((domain & NOUVEAU_GEM_DOMAIN_GART) &&
drivers/gpu/drm/nouveau/nouveau_bo.c
319
if ((domain & NOUVEAU_GEM_DOMAIN_VRAM) && !vmm->page[i].vram)
drivers/gpu/drm/nouveau/nouveau_bo.c
321
if ((domain & NOUVEAU_GEM_DOMAIN_GART) &&
drivers/gpu/drm/nouveau/nouveau_bo.c
345
nouveau_bo_init(struct nouveau_bo *nvbo, u64 size, int align, u32 domain,
drivers/gpu/drm/nouveau/nouveau_bo.c
356
nouveau_bo_placement_set(nvbo, domain, 0);
drivers/gpu/drm/nouveau/nouveau_bo.c
375
uint32_t domain, uint32_t tile_mode, uint32_t tile_flags,
drivers/gpu/drm/nouveau/nouveau_bo.c
382
nvbo = nouveau_bo_alloc(cli, &size, &align, domain, tile_mode,
drivers/gpu/drm/nouveau/nouveau_bo.c
396
ret = nouveau_bo_init(nvbo, size, align, domain, sg, robj);
drivers/gpu/drm/nouveau/nouveau_bo.c
420
nouveau_bo_new_pin(struct nouveau_cli *cli, u32 domain, u32 size, struct nouveau_bo **pnvbo)
drivers/gpu/drm/nouveau/nouveau_bo.c
425
ret = nouveau_bo_new(cli, size, 0, domain, 0, 0, NULL, NULL, &nvbo);
drivers/gpu/drm/nouveau/nouveau_bo.c
429
ret = nouveau_bo_pin(nvbo, domain, false);
drivers/gpu/drm/nouveau/nouveau_bo.c
440
nouveau_bo_new_map(struct nouveau_cli *cli, u32 domain, u32 size, struct nouveau_bo **pnvbo)
drivers/gpu/drm/nouveau/nouveau_bo.c
445
ret = nouveau_bo_new_pin(cli, domain, size, &nvbo);
drivers/gpu/drm/nouveau/nouveau_bo.c
460
nouveau_bo_new_map_gpu(struct nouveau_cli *cli, u32 domain, u32 size,
drivers/gpu/drm/nouveau/nouveau_bo.c
467
ret = nouveau_bo_new_map(cli, domain, size, &nvbo);
drivers/gpu/drm/nouveau/nouveau_bo.c
482
set_placement_range(struct nouveau_bo *nvbo, uint32_t domain)
drivers/gpu/drm/nouveau/nouveau_bo.c
489
nvbo->mode && (domain & NOUVEAU_GEM_DOMAIN_VRAM) &&
drivers/gpu/drm/nouveau/nouveau_bo.c
512
nouveau_bo_placement_set(struct nouveau_bo *nvbo, uint32_t domain,
drivers/gpu/drm/nouveau/nouveau_bo.c
518
domain |= busy;
drivers/gpu/drm/nouveau/nouveau_bo.c
521
if (domain & NOUVEAU_GEM_DOMAIN_VRAM) {
drivers/gpu/drm/nouveau/nouveau_bo.c
527
if (domain & NOUVEAU_GEM_DOMAIN_GART) {
drivers/gpu/drm/nouveau/nouveau_bo.c
533
if (domain & NOUVEAU_GEM_DOMAIN_CPU) {
drivers/gpu/drm/nouveau/nouveau_bo.c
541
set_placement_range(nvbo, domain);
drivers/gpu/drm/nouveau/nouveau_bo.c
544
int nouveau_bo_pin_locked(struct nouveau_bo *nvbo, uint32_t domain, bool contig)
drivers/gpu/drm/nouveau/nouveau_bo.c
554
domain == NOUVEAU_GEM_DOMAIN_VRAM && contig) {
drivers/gpu/drm/nouveau/nouveau_bo.c
567
error |= !(domain & NOUVEAU_GEM_DOMAIN_VRAM);
drivers/gpu/drm/nouveau/nouveau_bo.c
570
error |= !(domain & NOUVEAU_GEM_DOMAIN_GART);
drivers/gpu/drm/nouveau/nouveau_bo.c
579
bo->resource->mem_type, domain);
drivers/gpu/drm/nouveau/nouveau_bo.c
593
nouveau_bo_placement_set(nvbo, domain, 0);
drivers/gpu/drm/nouveau/nouveau_bo.c
639
int nouveau_bo_pin(struct nouveau_bo *nvbo, uint32_t domain, bool contig)
drivers/gpu/drm/nouveau/nouveau_bo.c
647
ret = nouveau_bo_pin_locked(nvbo, domain, contig);
drivers/gpu/drm/nouveau/nouveau_bo.h
67
u32 domain, u32 tile_mode, u32 tile_flags, bool internal);
drivers/gpu/drm/nouveau/nouveau_bo.h
68
int nouveau_bo_init(struct nouveau_bo *, u64 size, int align, u32 domain,
drivers/gpu/drm/nouveau/nouveau_bo.h
70
int nouveau_bo_new(struct nouveau_cli *, u64 size, int align, u32 domain,
drivers/gpu/drm/nouveau/nouveau_bo.h
74
int nouveau_bo_pin_locked(struct nouveau_bo *nvbo, uint32_t domain, bool contig);
drivers/gpu/drm/nouveau/nouveau_bo.h
93
int nouveau_bo_new_pin(struct nouveau_cli *, u32 domain, u32 size, struct nouveau_bo **);
drivers/gpu/drm/nouveau/nouveau_bo.h
94
int nouveau_bo_new_map(struct nouveau_cli *, u32 domain, u32 size, struct nouveau_bo **);
drivers/gpu/drm/nouveau/nouveau_bo.h
95
int nouveau_bo_new_map_gpu(struct nouveau_cli *, u32 domain, u32 size,
drivers/gpu/drm/nouveau/nouveau_display.c
808
uint32_t domain;
drivers/gpu/drm/nouveau/nouveau_display.c
817
domain = NOUVEAU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/nouveau/nouveau_display.c
819
domain = NOUVEAU_GEM_DOMAIN_GART;
drivers/gpu/drm/nouveau/nouveau_display.c
821
ret = nouveau_gem_new(cli, args->size, 0, domain, 0, 0, &bo);
drivers/gpu/drm/nouveau/nouveau_gem.c
232
nouveau_gem_new(struct nouveau_cli *cli, u64 size, int align, uint32_t domain,
drivers/gpu/drm/nouveau/nouveau_gem.c
242
if (domain & NOUVEAU_GEM_DOMAIN_NO_SHARE) {
drivers/gpu/drm/nouveau/nouveau_gem.c
249
if (!(domain & (NOUVEAU_GEM_DOMAIN_VRAM | NOUVEAU_GEM_DOMAIN_GART)))
drivers/gpu/drm/nouveau/nouveau_gem.c
250
domain |= NOUVEAU_GEM_DOMAIN_CPU;
drivers/gpu/drm/nouveau/nouveau_gem.c
252
nvbo = nouveau_bo_alloc(cli, &size, &align, domain, tile_mode,
drivers/gpu/drm/nouveau/nouveau_gem.c
258
nvbo->no_share = domain & NOUVEAU_GEM_DOMAIN_NO_SHARE;
drivers/gpu/drm/nouveau/nouveau_gem.c
272
ret = nouveau_bo_init(nvbo, size, align, domain, NULL, resv);
drivers/gpu/drm/nouveau/nouveau_gem.c
287
nvbo->valid_domains &= domain;
drivers/gpu/drm/nouveau/nouveau_gem.c
308
rep->domain = nvbo->valid_domains;
drivers/gpu/drm/nouveau/nouveau_gem.c
310
rep->domain = NOUVEAU_GEM_DOMAIN_GART;
drivers/gpu/drm/nouveau/nouveau_gem.c
312
rep->domain = NOUVEAU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/nouveau/nouveau_gem.c
353
req->info.domain, req->info.tile_mode,
drivers/gpu/drm/nouveau/nouveau_gem.c
603
b->presumed.domain & NOUVEAU_GEM_DOMAIN_VRAM) ||
drivers/gpu/drm/nouveau/nouveau_gem.c
605
b->presumed.domain & NOUVEAU_GEM_DOMAIN_GART)))
drivers/gpu/drm/nouveau/nouveau_gem.c
609
b->presumed.domain = NOUVEAU_GEM_DOMAIN_GART;
drivers/gpu/drm/nouveau/nouveau_gem.c
611
b->presumed.domain = NOUVEAU_GEM_DOMAIN_VRAM;
drivers/gpu/drm/nouveau/nouveau_gem.c
715
if (b->presumed.domain == NOUVEAU_GEM_DOMAIN_GART)
drivers/gpu/drm/nouveau/nouveau_gem.h
18
uint32_t domain, uint32_t tile_mode,
drivers/gpu/drm/nouveau/nv84_fence.c
198
u32 domain;
drivers/gpu/drm/nouveau/nv84_fence.c
216
domain = drm->client.device.info.ram_size != 0 ?
drivers/gpu/drm/nouveau/nv84_fence.c
224
ret = nouveau_bo_new_map(&drm->client, domain, 16 * drm->chan_total, &priv->bo);
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
102
if (domain->name == nv_clk_src_max)
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
111
lo = pstate->base.domain[domain->name];
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
114
lo = min(lo, cstate->domain[domain->name]);
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
115
hi = max(hi, cstate->domain[domain->name]);
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
120
lo = max(nvkm_clk_read(clk, domain->name), 0);
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
124
snprintf(args->v0.name, sizeof(args->v0.name), "%s", domain->mname);
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
126
args->v0.min = lo / domain->mdiv;
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
127
args->v0.max = hi / domain->mdiv;
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
130
while ((++domain)->name != nv_clk_src_max) {
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
131
if (domain->mname) {
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
74
const struct nvkm_domain *domain;
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
94
domain = clk->domains;
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
96
while (domain->name != nv_clk_src_max) {
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
97
if (domain->mname && ++j == args->v0.index)
drivers/gpu/drm/nouveau/nvkm/engine/device/ctrl.c
99
domain++;
drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c
123
tdev->iommu.domain = iommu_paging_domain_alloc(dev);
drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c
124
if (IS_ERR(tdev->iommu.domain))
drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c
132
pgsize_bitmap = tdev->iommu.domain->pgsize_bitmap;
drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c
144
ret = iommu_attach_device(tdev->iommu.domain, dev);
drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c
158
iommu_detach_device(tdev->iommu.domain, dev);
drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c
161
iommu_domain_free(tdev->iommu.domain);
drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c
164
tdev->iommu.domain = NULL;
drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c
174
if (tdev->iommu.domain) {
drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c
176
iommu_detach_device(tdev->iommu.domain, tdev->device.dev);
drivers/gpu/drm/nouveau/nvkm/engine/device/tegra.c
177
iommu_domain_free(tdev->iommu.domain);
drivers/gpu/drm/nouveau/nvkm/subdev/bios/boost.c
120
info->domain = nvbios_rd08(bios, data + 0x00);
drivers/gpu/drm/nouveau/nvkm/subdev/bios/therm.c
68
enum nvbios_therm_domain domain,
drivers/gpu/drm/nouveau/nvkm/subdev/bios/therm.c
76
if (domain != NVBIOS_THERM_DOMAIN_CORE)
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
229
const struct nvkm_domain *domain = clk->domains;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
250
while (domain && domain->name != nv_clk_src_max) {
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
251
if (domain->flags & NVKM_CLK_DOM_FLAG_CORE) {
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
253
domain->bios, cstepX.freq);
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
254
cstate->domain[domain->name] = freq;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
256
domain++;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
287
int khz = pstate->base.domain[nv_clk_src_mem];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
359
u32 lo = pstate->base.domain[clock->name];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
366
u32 freq = cstate->domain[clock->name];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
405
const struct nvkm_domain *domain = clk->domains - 1;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
42
u8 pstate, u8 domain, u32 input)
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
431
cstate->domain[nv_clk_src_core] = perfE.core;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
432
cstate->domain[nv_clk_src_shader] = perfE.shader;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
433
cstate->domain[nv_clk_src_mem] = perfE.memory;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
434
cstate->domain[nv_clk_src_vdec] = perfE.vdec;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
435
cstate->domain[nv_clk_src_dom6] = perfE.disp;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
437
while (ver >= 0x40 && (++domain)->name != nv_clk_src_max) {
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
440
u32 perfSe = nvbios_perfSp(bios, data, domain->bios,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
445
if (domain->flags & NVKM_CLK_DOM_FLAG_CORE) {
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
448
domain->bios,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
452
cstate->domain[domain->name] = perfS.v40.freq;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
606
clk->bstate.base.domain[clock->name] = ret;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
62
if (subd && boostS.domain == domain) {
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
82
const struct nvkm_domain *domain = clk->domains;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
86
while (domain && domain->name != nv_clk_src_max) {
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
87
if (domain->flags & NVKM_CLK_DOM_FLAG_VPSTATE) {
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
88
u32 freq = cstate->domain[domain->name];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/base.c
99
domain++;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c
277
u32 freq = cstate->domain[dom];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gf100.c
294
clk1 = cstate->domain[nv_clk_src_hubk06];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c
291
u32 freq = cstate->domain[dom];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk104.c
308
clk1 = cstate->domain[nv_clk_src_hubk06];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
370
.domain[nv_clk_src_gpc] = 72000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
376
.domain[nv_clk_src_gpc] = 108000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
382
.domain[nv_clk_src_gpc] = 180000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
388
.domain[nv_clk_src_gpc] = 252000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
394
.domain[nv_clk_src_gpc] = 324000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
400
.domain[nv_clk_src_gpc] = 396000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
406
.domain[nv_clk_src_gpc] = 468000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
412
.domain[nv_clk_src_gpc] = 540000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
418
.domain[nv_clk_src_gpc] = 612000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
424
.domain[nv_clk_src_gpc] = 648000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
430
.domain[nv_clk_src_gpc] = 684000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
436
.domain[nv_clk_src_gpc] = 708000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
442
.domain[nv_clk_src_gpc] = 756000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
448
.domain[nv_clk_src_gpc] = 804000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
454
.domain[nv_clk_src_gpc] = 852000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a.c
485
return gk20a_pllg_calc_mnp(clk, cstate->domain[nv_clk_src_gpc] *
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a_devfreq.c
193
if (pstates[i].base.domain[nv_clk_src_gpc] * GK20A_CLK_GPC_MDIV >= *freq)
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a_devfreq.c
203
*freq = pstates[i].base.domain[nv_clk_src_gpc] * GK20A_CLK_GPC_MDIV;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gk20a_devfreq.c
279
pstates[i].base.domain[nv_clk_src_gpc] * GK20A_CLK_GPC_MDIV, 0);
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
1004
pstates[i].base.domain[nv_clk_src_gpc]);
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
472
ret = gk20a_pllg_calc_mnp(&clk->base, cstate->domain[nv_clk_src_gpc] *
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
642
.domain[nv_clk_src_gpc] = 76800,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
648
.domain[nv_clk_src_gpc] = 153600,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
654
.domain[nv_clk_src_gpc] = 230400,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
660
.domain[nv_clk_src_gpc] = 307200,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
666
.domain[nv_clk_src_gpc] = 384000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
672
.domain[nv_clk_src_gpc] = 460800,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
678
.domain[nv_clk_src_gpc] = 537600,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
684
.domain[nv_clk_src_gpc] = 614400,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
690
.domain[nv_clk_src_gpc] = 691200,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
696
.domain[nv_clk_src_gpc] = 768000,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
702
.domain[nv_clk_src_gpc] = 844800,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
708
.domain[nv_clk_src_gpc] = 921600,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gm20b.c
714
.domain[nv_clk_src_gpc] = 998400,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
104
.domain[nv_clk_src_gpc] = 624750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
109
.domain[nv_clk_src_gpc] = 726750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
114
.domain[nv_clk_src_gpc] = 828750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
119
.domain[nv_clk_src_gpc] = 930750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
124
.domain[nv_clk_src_gpc] = 1032750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
129
.domain[nv_clk_src_gpc] = 1134750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
134
.domain[nv_clk_src_gpc] = 1236750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
139
.domain[nv_clk_src_gpc] = 1300500,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
53
u32 target_rate = cstate->domain[nv_clk_src_gpc] * GK20A_CLK_GPC_MDIV;
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
79
.domain[nv_clk_src_gpc] = 114750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
84
.domain[nv_clk_src_gpc] = 216750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
89
.domain[nv_clk_src_gpc] = 318750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
94
.domain[nv_clk_src_gpc] = 420750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gp10b.c
99
.domain[nv_clk_src_gpc] = 522750,
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c
277
int ret = gt215_pll_info(&clk->base, idx, pll, cstate->domain[dom],
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c
288
u32 kHz = cstate->domain[nv_clk_src_host];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/gt215.c
476
cstate->domain[nv_clk_src_core_intm],
drivers/gpu/drm/nouveau/nvkm/subdev/clk/mcp77.c
203
const int shader = cstate->domain[nv_clk_src_shader];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/mcp77.c
204
const int core = cstate->domain[nv_clk_src_core];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/mcp77.c
205
const int vdec = cstate->domain[nv_clk_src_vdec];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv40.c
149
int gclk = cstate->domain[nv_clk_src_core];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv40.c
150
int sclk = cstate->domain[nv_clk_src_shader];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv50.c
375
const int shader = cstate->domain[nv_clk_src_shader];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv50.c
376
const int core = cstate->domain[nv_clk_src_core];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv50.c
377
const int vdec = cstate->domain[nv_clk_src_vdec];
drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv50.c
378
const int dom6 = cstate->domain[nv_clk_src_dom6];
drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv1a.c
31
int domain = 0;
drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv1a.c
38
domain = pci_domain_nr(pdev->bus);
drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv1a.c
40
bridge = pci_get_domain_bus_and_slot(domain, 0, PCI_DEVFN(0, 1));
drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c
106
struct iommu_domain *domain;
drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c
336
iommu_unmap(imem->domain,
drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c
477
ret = iommu_map(imem->domain, offset, node->dma_addrs[i],
drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c
485
iommu_unmap(imem->domain, offset, PAGE_SIZE);
drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c
524
imem->domain ? "IOMMU" : "DMA", size, align);
drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c
530
if (imem->domain)
drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c
591
if (tdev->iommu.domain) {
drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c
594
imem->domain = tdev->iommu.domain;
drivers/gpu/drm/panthor/panthor_pwr.c
119
static const char *get_domain_name(u8 domain)
drivers/gpu/drm/panthor/panthor_pwr.c
121
switch (domain) {
drivers/gpu/drm/panthor/panthor_pwr.c
136
static u32 get_domain_base(u8 domain)
drivers/gpu/drm/panthor/panthor_pwr.c
138
switch (domain) {
drivers/gpu/drm/panthor/panthor_pwr.c
153
static u32 get_domain_ready_reg(u32 domain)
drivers/gpu/drm/panthor/panthor_pwr.c
155
return get_domain_base(domain) + (PWR_L2_READY - PWR_L2_PRESENT);
drivers/gpu/drm/panthor/panthor_pwr.c
158
static u32 get_domain_pwrtrans_reg(u32 domain)
drivers/gpu/drm/panthor/panthor_pwr.c
160
return get_domain_base(domain) + (PWR_L2_PWRTRANS - PWR_L2_PRESENT);
drivers/gpu/drm/panthor/panthor_pwr.c
163
static bool is_valid_domain(u32 domain)
drivers/gpu/drm/panthor/panthor_pwr.c
165
return get_domain_base(domain) != 0;
drivers/gpu/drm/panthor/panthor_pwr.c
173
static u8 get_domain_subdomain(struct panthor_device *ptdev, u32 domain)
drivers/gpu/drm/panthor/panthor_pwr.c
175
if (domain == PWR_COMMAND_DOMAIN_SHADER && has_rtu(ptdev))
drivers/gpu/drm/panthor/panthor_pwr.c
181
static int panthor_pwr_domain_wait_transition(struct panthor_device *ptdev, u32 domain,
drivers/gpu/drm/panthor/panthor_pwr.c
184
u32 pwrtrans_reg = get_domain_pwrtrans_reg(domain);
drivers/gpu/drm/panthor/panthor_pwr.c
192
get_domain_name(domain), val);
drivers/gpu/drm/panthor/panthor_pwr.c
214
static int panthor_pwr_domain_transition(struct panthor_device *ptdev, u32 cmd, u32 domain,
drivers/gpu/drm/panthor/panthor_pwr.c
217
u32 ready_reg = get_domain_ready_reg(domain);
drivers/gpu/drm/panthor/panthor_pwr.c
218
u32 pwr_cmd = PWR_COMMAND_DEF(cmd, domain, get_domain_subdomain(ptdev, domain));
drivers/gpu/drm/panthor/panthor_pwr.c
223
if (drm_WARN_ON(&ptdev->base, !is_valid_domain(domain)))
drivers/gpu/drm/panthor/panthor_pwr.c
238
ret = panthor_pwr_domain_wait_transition(ptdev, domain, timeout_us);
drivers/gpu/drm/panthor/panthor_pwr.c
253
get_domain_name(domain), pwr_cmd, mask);
drivers/gpu/drm/panthor/panthor_pwr.c
279
static int retract_domain(struct panthor_device *ptdev, u32 domain)
drivers/gpu/drm/panthor/panthor_pwr.c
281
const u32 pwr_cmd = PWR_COMMAND_DEF(PWR_COMMAND_RETRACT, domain, 0);
drivers/gpu/drm/panthor/panthor_pwr.c
283
const u64 delegated_mask = PWR_STATUS_DOMAIN_DELEGATED(domain);
drivers/gpu/drm/panthor/panthor_pwr.c
284
const u64 allow_mask = PWR_STATUS_DOMAIN_ALLOWED(domain);
drivers/gpu/drm/panthor/panthor_pwr.c
288
if (drm_WARN_ON(&ptdev->base, domain == PWR_COMMAND_DOMAIN_L2))
drivers/gpu/drm/panthor/panthor_pwr.c
294
drm_err(&ptdev->base, "%s domain retract pending", get_domain_name(domain));
drivers/gpu/drm/panthor/panthor_pwr.c
299
drm_dbg(&ptdev->base, "%s domain already retracted", get_domain_name(domain));
drivers/gpu/drm/panthor/panthor_pwr.c
314
get_domain_name(domain), pwr_cmd);
drivers/gpu/drm/panthor/panthor_pwr.c
333
static int delegate_domain(struct panthor_device *ptdev, u32 domain)
drivers/gpu/drm/panthor/panthor_pwr.c
335
const u32 pwr_cmd = PWR_COMMAND_DEF(PWR_COMMAND_DELEGATE, domain, 0);
drivers/gpu/drm/panthor/panthor_pwr.c
337
const u64 allow_mask = PWR_STATUS_DOMAIN_ALLOWED(domain);
drivers/gpu/drm/panthor/panthor_pwr.c
338
const u64 delegated_mask = PWR_STATUS_DOMAIN_DELEGATED(domain);
drivers/gpu/drm/panthor/panthor_pwr.c
342
if (drm_WARN_ON(&ptdev->base, domain == PWR_COMMAND_DOMAIN_L2))
drivers/gpu/drm/panthor/panthor_pwr.c
351
drm_warn(&ptdev->base, "Delegating %s domain not allowed", get_domain_name(domain));
drivers/gpu/drm/panthor/panthor_pwr.c
355
ret = panthor_pwr_domain_wait_transition(ptdev, domain, PWR_TRANSITION_TIMEOUT_US);
drivers/gpu/drm/panthor/panthor_pwr.c
370
get_domain_name(domain), pwr_cmd);
drivers/gpu/drm/panthor/panthor_pwr.c
411
static int panthor_pwr_domain_force_off(struct panthor_device *ptdev, u32 domain)
drivers/gpu/drm/panthor/panthor_pwr.c
413
const u64 domain_ready = gpu_read64(ptdev, get_domain_ready_reg(domain));
drivers/gpu/drm/panthor/panthor_pwr.c
421
ret = retract_domain(ptdev, domain);
drivers/gpu/drm/panthor/panthor_pwr.c
425
return panthor_pwr_domain_power_off(ptdev, domain, domain_ready, PWR_TRANSITION_TIMEOUT_US);
drivers/gpu/drm/panthor/panthor_regs.h
266
#define PWR_COMMAND_DEF(cmd, domain, subdomain) \
drivers/gpu/drm/panthor/panthor_regs.h
267
(((subdomain) << 16) | ((domain) << 8) | (cmd))
drivers/gpu/drm/qxl/qxl_drv.h
307
u32 domain,
drivers/gpu/drm/qxl/qxl_gem.c
84
u32 domain,
drivers/gpu/drm/qxl/qxl_gem.c
96
domain,
drivers/gpu/drm/qxl/qxl_ioctl.c
44
u32 domain = QXL_GEM_DOMAIN_VRAM;
drivers/gpu/drm/qxl/qxl_ioctl.c
51
domain,
drivers/gpu/drm/qxl/qxl_object.c
105
bool kernel, bool pinned, u32 domain, u32 priority,
drivers/gpu/drm/qxl/qxl_object.c
129
bo->type = domain;
drivers/gpu/drm/qxl/qxl_object.c
136
qxl_ttm_placement_from_domain(bo, domain);
drivers/gpu/drm/qxl/qxl_object.c
146
size, domain);
drivers/gpu/drm/qxl/qxl_object.c
56
void qxl_ttm_placement_from_domain(struct qxl_bo *qbo, u32 domain)
drivers/gpu/drm/qxl/qxl_object.c
66
if (domain == QXL_GEM_DOMAIN_VRAM) {
drivers/gpu/drm/qxl/qxl_object.c
70
if (domain == QXL_GEM_DOMAIN_SURFACE) {
drivers/gpu/drm/qxl/qxl_object.c
76
if (domain == QXL_GEM_DOMAIN_CPU) {
drivers/gpu/drm/qxl/qxl_object.h
58
bool kernel, bool pinned, u32 domain,
drivers/gpu/drm/qxl/qxl_object.h
74
extern void qxl_ttm_placement_from_domain(struct qxl_bo *qbo, u32 domain);
drivers/gpu/drm/radeon/radeon.h
2804
extern void radeon_ttm_placement_from_domain(struct radeon_bo *rbo, u32 domain);
drivers/gpu/drm/radeon/radeon.h
518
u32 domain;
drivers/gpu/drm/radeon/radeon_cs.c
146
uint32_t domain = r->write_domain ?
drivers/gpu/drm/radeon/radeon_cs.c
149
if (domain & RADEON_GEM_DOMAIN_CPU) {
drivers/gpu/drm/radeon/radeon_cs.c
155
p->relocs[i].preferred_domains = domain;
drivers/gpu/drm/radeon/radeon_cs.c
156
if (domain == RADEON_GEM_DOMAIN_VRAM)
drivers/gpu/drm/radeon/radeon_cs.c
157
domain |= RADEON_GEM_DOMAIN_GTT;
drivers/gpu/drm/radeon/radeon_cs.c
158
p->relocs[i].allowed_domains = domain;
drivers/gpu/drm/radeon/radeon_cs.c
162
uint32_t domain = p->relocs[i].preferred_domains;
drivers/gpu/drm/radeon/radeon_cs.c
163
if (!(domain & RADEON_GEM_DOMAIN_GTT)) {
drivers/gpu/drm/radeon/radeon_cs.c
169
domain = RADEON_GEM_DOMAIN_GTT;
drivers/gpu/drm/radeon/radeon_cs.c
170
p->relocs[i].preferred_domains = domain;
drivers/gpu/drm/radeon/radeon_cs.c
171
p->relocs[i].allowed_domains = domain;
drivers/gpu/drm/radeon/radeon_gem.c
146
uint32_t domain;
drivers/gpu/drm/radeon/radeon_gem.c
152
domain = wdomain;
drivers/gpu/drm/radeon/radeon_gem.c
153
if (!domain) {
drivers/gpu/drm/radeon/radeon_gem.c
154
domain = rdomain;
drivers/gpu/drm/radeon/radeon_gem.c
156
if (!domain) {
drivers/gpu/drm/radeon/radeon_gem.c
161
if (domain == RADEON_GEM_DOMAIN_CPU) {
drivers/gpu/drm/radeon/radeon_gem.c
174
if (domain == RADEON_GEM_DOMAIN_VRAM && robj->prime_shared_count) {
drivers/gpu/drm/radeon/radeon_gem.c
516
args->domain = radeon_mem_type_to_domain(cur_placement);
drivers/gpu/drm/radeon/radeon_gem.c
611
unsigned domain;
drivers/gpu/drm/radeon/radeon_gem.c
637
domain = radeon_mem_type_to_domain(entry->robj->tbo.resource->mem_type);
drivers/gpu/drm/radeon/radeon_gem.c
640
if (domain == RADEON_GEM_DOMAIN_CPU)
drivers/gpu/drm/radeon/radeon_gem.c
873
unsigned domain;
drivers/gpu/drm/radeon/radeon_gem.c
876
domain = radeon_mem_type_to_domain(rbo->tbo.resource->mem_type);
drivers/gpu/drm/radeon/radeon_gem.c
877
switch (domain) {
drivers/gpu/drm/radeon/radeon_object.c
104
if (domain & RADEON_GEM_DOMAIN_CPU) {
drivers/gpu/drm/radeon/radeon_object.c
130
u32 domain, u32 flags, struct sg_table *sg,
drivers/gpu/drm/radeon/radeon_object.c
159
bo->initial_domain = domain & (RADEON_GEM_DOMAIN_VRAM |
drivers/gpu/drm/radeon/radeon_object.c
201
radeon_ttm_placement_from_domain(bo, domain);
drivers/gpu/drm/radeon/radeon_object.c
272
int radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain, u64 max_offset,
drivers/gpu/drm/radeon/radeon_object.c
289
if (domain == RADEON_GEM_DOMAIN_VRAM)
drivers/gpu/drm/radeon/radeon_object.c
299
if (bo->prime_shared_count && domain == RADEON_GEM_DOMAIN_VRAM) {
drivers/gpu/drm/radeon/radeon_object.c
304
radeon_ttm_placement_from_domain(bo, domain);
drivers/gpu/drm/radeon/radeon_object.c
321
if (domain == RADEON_GEM_DOMAIN_VRAM)
drivers/gpu/drm/radeon/radeon_object.c
331
int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr)
drivers/gpu/drm/radeon/radeon_object.c
333
return radeon_bo_pin_restricted(bo, domain, 0, gpu_addr);
drivers/gpu/drm/radeon/radeon_object.c
489
u32 domain = lobj->preferred_domains;
drivers/gpu/drm/radeon/radeon_object.c
503
(domain & current_domain) == 0 && /* will be moved */
drivers/gpu/drm/radeon/radeon_object.c
506
domain = current_domain;
drivers/gpu/drm/radeon/radeon_object.c
510
radeon_ttm_placement_from_domain(bo, domain);
drivers/gpu/drm/radeon/radeon_object.c
521
domain != lobj->allowed_domains) {
drivers/gpu/drm/radeon/radeon_object.c
522
domain = lobj->allowed_domains;
drivers/gpu/drm/radeon/radeon_object.c
76
void radeon_ttm_placement_from_domain(struct radeon_bo *rbo, u32 domain)
drivers/gpu/drm/radeon/radeon_object.c
81
if (domain & RADEON_GEM_DOMAIN_VRAM) {
drivers/gpu/drm/radeon/radeon_object.c
98
if (domain & RADEON_GEM_DOMAIN_GTT) {
drivers/gpu/drm/radeon/radeon_object.h
138
bool kernel, u32 domain, u32 flags,
drivers/gpu/drm/radeon/radeon_object.h
146
extern int radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr);
drivers/gpu/drm/radeon/radeon_object.h
147
extern int radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain,
drivers/gpu/drm/radeon/radeon_object.h
192
unsigned size, u32 align, u32 domain,
drivers/gpu/drm/radeon/radeon_sa.c
49
unsigned int size, u32 sa_align, u32 domain,
drivers/gpu/drm/radeon/radeon_sa.c
55
domain, flags, NULL, NULL, &sa_manager->bo);
drivers/gpu/drm/radeon/radeon_sa.c
61
sa_manager->domain = domain;
drivers/gpu/drm/radeon/radeon_sa.c
91
r = radeon_bo_pin(sa_manager->bo, sa_manager->domain, &sa_manager->gpu_addr);
drivers/gpu/drm/rockchip/rockchip_drm_drv.c
115
private->domain = iommu_paging_domain_alloc(private->iommu_dev);
drivers/gpu/drm/rockchip/rockchip_drm_drv.c
116
if (IS_ERR(private->domain)) {
drivers/gpu/drm/rockchip/rockchip_drm_drv.c
117
ret = PTR_ERR(private->domain);
drivers/gpu/drm/rockchip/rockchip_drm_drv.c
118
private->domain = NULL;
drivers/gpu/drm/rockchip/rockchip_drm_drv.c
122
geometry = &private->domain->geometry;
drivers/gpu/drm/rockchip/rockchip_drm_drv.c
138
if (!private->domain)
drivers/gpu/drm/rockchip/rockchip_drm_drv.c
142
iommu_domain_free(private->domain);
drivers/gpu/drm/rockchip/rockchip_drm_drv.c
59
if (!private->domain)
drivers/gpu/drm/rockchip/rockchip_drm_drv.c
71
ret = iommu_attach_device(private->domain, dev);
drivers/gpu/drm/rockchip/rockchip_drm_drv.c
85
if (!private->domain)
drivers/gpu/drm/rockchip/rockchip_drm_drv.c
88
iommu_detach_device(private->domain, dev);
drivers/gpu/drm/rockchip/rockchip_drm_drv.h
68
struct iommu_domain *domain;
drivers/gpu/drm/rockchip/rockchip_drm_gem.c
187
if (private->domain)
drivers/gpu/drm/rockchip/rockchip_drm_gem.c
338
if (private->domain) {
drivers/gpu/drm/rockchip/rockchip_drm_gem.c
43
ret = iommu_map_sgtable(private->domain, rk_obj->dma_addr, rk_obj->sgt,
drivers/gpu/drm/rockchip/rockchip_drm_gem.c
497
if (private->domain)
drivers/gpu/drm/rockchip/rockchip_drm_gem.c
69
iommu_unmap(private->domain, rk_obj->dma_addr, rk_obj->size);
drivers/gpu/drm/tegra/drm.c
1003
domain = iommu_get_domain_for_dev(client->dev);
drivers/gpu/drm/tegra/drm.c
1004
if (domain)
drivers/gpu/drm/tegra/drm.c
1005
iommu_detach_group(tegra->domain, client->group);
drivers/gpu/drm/tegra/drm.c
1019
if (tegra->domain)
drivers/gpu/drm/tegra/drm.c
1020
size = iova_align(&tegra->carveout.domain, size);
drivers/gpu/drm/tegra/drm.c
1025
if (!tegra->domain) {
drivers/gpu/drm/tegra/drm.c
1039
if (!tegra->domain) {
drivers/gpu/drm/tegra/drm.c
1048
alloc = alloc_iova(&tegra->carveout.domain,
drivers/gpu/drm/tegra/drm.c
1056
*dma = iova_dma_addr(&tegra->carveout.domain, alloc);
drivers/gpu/drm/tegra/drm.c
1057
err = iommu_map(tegra->domain, *dma, virt_to_phys(virt),
drivers/gpu/drm/tegra/drm.c
1065
__free_iova(&tegra->carveout.domain, alloc);
drivers/gpu/drm/tegra/drm.c
1075
if (tegra->domain)
drivers/gpu/drm/tegra/drm.c
1076
size = iova_align(&tegra->carveout.domain, size);
drivers/gpu/drm/tegra/drm.c
1080
if (tegra->domain) {
drivers/gpu/drm/tegra/drm.c
1081
iommu_unmap(tegra->domain, dma, size);
drivers/gpu/drm/tegra/drm.c
1082
free_iova(&tegra->carveout.domain,
drivers/gpu/drm/tegra/drm.c
1083
iova_pfn(&tegra->carveout.domain, dma));
drivers/gpu/drm/tegra/drm.c
1092
struct iommu_domain *domain;
drivers/gpu/drm/tegra/drm.c
1125
domain = iommu_get_domain_for_dev(dev->dev.parent);
drivers/gpu/drm/tegra/drm.c
1133
if (!domain && host1x_get_dma_mask(host1x) <= DMA_BIT_MASK(32))
drivers/gpu/drm/tegra/drm.c
1136
return domain != NULL;
drivers/gpu/drm/tegra/drm.c
1157
tegra->domain = iommu_paging_domain_alloc(dma_dev);
drivers/gpu/drm/tegra/drm.c
1158
if (IS_ERR(tegra->domain)) {
drivers/gpu/drm/tegra/drm.c
1159
err = PTR_ERR(tegra->domain);
drivers/gpu/drm/tegra/drm.c
1165
goto domain;
drivers/gpu/drm/tegra/drm.c
1207
start = tegra->domain->geometry.aperture_start & dma_mask;
drivers/gpu/drm/tegra/drm.c
1208
end = tegra->domain->geometry.aperture_end & dma_mask;
drivers/gpu/drm/tegra/drm.c
1215
order = __ffs(tegra->domain->pgsize_bitmap);
drivers/gpu/drm/tegra/drm.c
1216
init_iova_domain(&tegra->carveout.domain, 1UL << order,
drivers/gpu/drm/tegra/drm.c
1219
tegra->carveout.shift = iova_shift(&tegra->carveout.domain);
drivers/gpu/drm/tegra/drm.c
1229
} else if (tegra->domain) {
drivers/gpu/drm/tegra/drm.c
1230
iommu_domain_free(tegra->domain);
drivers/gpu/drm/tegra/drm.c
1231
tegra->domain = NULL;
drivers/gpu/drm/tegra/drm.c
1283
if (tegra->domain) {
drivers/gpu/drm/tegra/drm.c
1286
put_iova_domain(&tegra->carveout.domain);
drivers/gpu/drm/tegra/drm.c
1294
domain:
drivers/gpu/drm/tegra/drm.c
1295
if (tegra->domain)
drivers/gpu/drm/tegra/drm.c
1296
iommu_domain_free(tegra->domain);
drivers/gpu/drm/tegra/drm.c
1323
if (tegra->domain) {
drivers/gpu/drm/tegra/drm.c
1326
put_iova_domain(&tegra->carveout.domain);
drivers/gpu/drm/tegra/drm.c
1328
iommu_domain_free(tegra->domain);
drivers/gpu/drm/tegra/drm.c
860
if (tegra->domain) {
drivers/gpu/drm/tegra/drm.c
944
struct iommu_domain *domain = iommu_get_domain_for_dev(client->dev);
drivers/gpu/drm/tegra/drm.c
957
domain = iommu_get_domain_for_dev(client->dev);
drivers/gpu/drm/tegra/drm.c
966
if (domain && domain->type != IOMMU_DOMAIN_IDENTITY &&
drivers/gpu/drm/tegra/drm.c
967
domain != tegra->domain)
drivers/gpu/drm/tegra/drm.c
970
if (tegra->domain) {
drivers/gpu/drm/tegra/drm.c
975
if (domain != tegra->domain) {
drivers/gpu/drm/tegra/drm.c
976
err = iommu_attach_group(tegra->domain, group);
drivers/gpu/drm/tegra/drm.c
995
struct iommu_domain *domain;
drivers/gpu/drm/tegra/drm.h
37
struct iommu_domain *domain;
drivers/gpu/drm/tegra/drm.h
43
struct iova_domain domain;
drivers/gpu/drm/tegra/gem.c
259
bo->size = iommu_map_sgtable(tegra->domain, bo->iova, bo->sgt, prot);
drivers/gpu/drm/tegra/gem.c
284
iommu_unmap(tegra->domain, bo->iova, bo->size);
drivers/gpu/drm/tegra/gem.c
378
if (tegra->domain) {
drivers/gpu/drm/tegra/gem.c
472
if (tegra->domain) {
drivers/gpu/drm/tegra/gem.c
524
if (tegra->domain) {
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
431
vmw_bo_placement_set(vmw_bo, params->domain, params->busy_domain);
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
737
static u32 placement_flags(u32 domain, u32 desired, u32 fallback)
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
739
if (desired & fallback & domain)
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
742
if (desired & domain)
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
751
u32 domain = desired | fallback;
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
757
if (domain & VMW_BO_DOMAIN_MOB) {
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
765
if (domain & VMW_BO_DOMAIN_GMR) {
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
773
if (domain & VMW_BO_DOMAIN_VRAM) {
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
781
if (domain & VMW_BO_DOMAIN_WAITABLE_SYS) {
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
789
if (domain & VMW_BO_DOMAIN_SYS) {
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
809
void vmw_bo_placement_set(struct vmw_bo *bo, u32 domain, u32 busy_domain)
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
818
pl->num_placement = set_placement_list(bo->places, domain, busy_domain);
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
830
__func__, bo->tbo.resource->mem_type, domain);
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
839
u32 domain = VMW_BO_DOMAIN_GMR | VMW_BO_DOMAIN_VRAM;
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
842
domain = VMW_BO_DOMAIN_MOB;
drivers/gpu/drm/vmwgfx/vmwgfx_bo.c
844
vmw_bo_placement_set(bo, domain, domain);
drivers/gpu/drm/vmwgfx/vmwgfx_bo.h
105
void vmw_bo_placement_set(struct vmw_bo *bo, u32 domain, u32 busy_domain);
drivers/gpu/drm/vmwgfx/vmwgfx_bo.h
56
u32 domain;
drivers/gpu/drm/vmwgfx/vmwgfx_cmdbuf.c
1240
.domain = VMW_BO_DOMAIN_MOB,
drivers/gpu/drm/vmwgfx/vmwgfx_context.c
109
.domain = VMW_BO_DOMAIN_MOB,
drivers/gpu/drm/vmwgfx/vmwgfx_context.c
79
.domain = VMW_BO_DOMAIN_SYS,
drivers/gpu/drm/vmwgfx/vmwgfx_context.c
94
.domain = VMW_BO_DOMAIN_MOB,
drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c
139
.domain = VMW_BO_DOMAIN_MOB,
drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c
413
.domain = VMW_BO_DOMAIN_MOB,
drivers/gpu/drm/vmwgfx/vmwgfx_drv.c
382
.domain = VMW_BO_DOMAIN_SYS,
drivers/gpu/drm/vmwgfx/vmwgfx_drv.h
935
u32 domain,
drivers/gpu/drm/vmwgfx/vmwgfx_gem.c
165
.domain = (dev_priv->has_mob) ? VMW_BO_DOMAIN_SYS : VMW_BO_DOMAIN_VRAM,
drivers/gpu/drm/vmwgfx/vmwgfx_gem.c
190
.domain = (dev_priv->has_mob) ? VMW_BO_DOMAIN_SYS : VMW_BO_DOMAIN_VRAM,
drivers/gpu/drm/vmwgfx/vmwgfx_resource.c
337
.domain = res->func->domain,
drivers/gpu/drm/vmwgfx/vmwgfx_resource.c
546
vmw_bo_placement_set(res->guest_memory_bo, res->func->domain,
drivers/gpu/drm/vmwgfx/vmwgfx_resource.c
988
res->func->domain,
drivers/gpu/drm/vmwgfx/vmwgfx_resource_priv.h
87
u32 domain;
drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c
411
.domain = VMW_BO_DOMAIN_VRAM,
drivers/gpu/drm/vmwgfx/vmwgfx_shader.c
112
.domain = VMW_BO_DOMAIN_MOB,
drivers/gpu/drm/vmwgfx/vmwgfx_shader.c
895
.domain = VMW_BO_DOMAIN_SYS,
drivers/gpu/drm/vmwgfx/vmwgfx_shader.c
97
.domain = VMW_BO_DOMAIN_MOB,
drivers/gpu/drm/vmwgfx/vmwgfx_so.c
88
.domain = VMW_BO_DOMAIN_SYS,
drivers/gpu/drm/vmwgfx/vmwgfx_streamoutput.c
69
.domain = VMW_BO_DOMAIN_MOB,
drivers/gpu/drm/vmwgfx/vmwgfx_surface.c
113
.domain = VMW_BO_DOMAIN_GMR,
drivers/gpu/drm/vmwgfx/vmwgfx_surface.c
128
.domain = VMW_BO_DOMAIN_MOB,
drivers/gpu/drm/vmwgfx/vmwgfx_surface.c
826
.domain = VMW_BO_DOMAIN_SYS,
drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c
561
size_t bo_size, u32 domain,
drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c
571
.domain = domain,
drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c
572
.busy_domain = domain,
drivers/gpu/drm/vmwgfx/vmwgfx_va.c
87
.domain = VMW_BO_DOMAIN_SYS,
drivers/gpu/drm/vmwgfx/vmwgfx_validation.c
428
res->func->domain,
drivers/gpu/drm/vmwgfx/vmwgfx_validation.c
604
vmw_bo_placement_set(vbo, res->func->domain,
drivers/gpu/drm/xe/xe_configfs.c
334
unsigned int domain, bus, slot, function;
drivers/gpu/drm/xe/xe_configfs.c
342
if (sscanf(name, "%x:%x:%x.%x", &domain, &bus, &slot, &function) != 4)
drivers/gpu/drm/xe/xe_configfs.c
345
pdev = pci_get_domain_bus_and_slot(domain, bus, PCI_DEVFN(slot, function));
drivers/gpu/drm/xe/xe_configfs.c
950
unsigned int domain, bus, slot, function;
drivers/gpu/drm/xe/xe_configfs.c
959
ret = sscanf(name, "%x:%x:%x.%x", &domain, &bus, &slot, &function);
drivers/gpu/drm/xe/xe_configfs.c
963
ret = scnprintf(canonical, sizeof(canonical), "%04x:%02x:%02x.%d", domain, bus,
drivers/gpu/drm/xe/xe_configfs.c
969
pdev = pci_get_domain_bus_and_slot(domain, bus, PCI_DEVFN(slot, function));
drivers/gpu/drm/xe/xe_configfs.c
974
pdev = pci_get_domain_bus_and_slot(domain, bus, PCI_DEVFN(slot, 0));
drivers/gpu/drm/xe/xe_configfs.c
976
pdev = pci_get_domain_bus_and_slot(domain, bus, PCI_DEVFN(0, 0));
drivers/gpu/drm/xe/xe_drm_client.c
296
enum xe_force_wake_domains domain;
drivers/gpu/drm/xe/xe_drm_client.c
306
domain = xe_hw_engine_to_fw_domain(hwe);
drivers/gpu/drm/xe/xe_drm_client.c
308
fw_ref = xe_force_wake_constructor(gt_to_fw(hwe->gt), domain);
drivers/gpu/drm/xe/xe_drm_client.c
309
if (xe_force_wake_ref_has_domain(fw_ref.domains, domain))
drivers/gpu/drm/xe/xe_force_wake.c
101
xe_mmio_write32(>->mmio, domain->reg_ctl, domain->mask | (wake ? domain->val : 0));
drivers/gpu/drm/xe/xe_force_wake.c
104
static int __domain_wait(struct xe_gt *gt, struct xe_force_wake_domain *domain, bool wake)
drivers/gpu/drm/xe/xe_force_wake.c
112
ret = xe_mmio_wait32(>->mmio, domain->reg_ack, domain->val, wake ? domain->val : 0,
drivers/gpu/drm/xe/xe_force_wake.c
117
domain->id, str_wake_sleep(wake), ERR_PTR(ret),
drivers/gpu/drm/xe/xe_force_wake.c
118
domain->reg_ack.addr, value);
drivers/gpu/drm/xe/xe_force_wake.c
122
domain->id, str_wake_sleep(wake));
drivers/gpu/drm/xe/xe_force_wake.c
129
static void domain_wake(struct xe_gt *gt, struct xe_force_wake_domain *domain)
drivers/gpu/drm/xe/xe_force_wake.c
131
__domain_ctl(gt, domain, true);
drivers/gpu/drm/xe/xe_force_wake.c
135
struct xe_force_wake_domain *domain)
drivers/gpu/drm/xe/xe_force_wake.c
137
return __domain_wait(gt, domain, true);
drivers/gpu/drm/xe/xe_force_wake.c
140
static void domain_sleep(struct xe_gt *gt, struct xe_force_wake_domain *domain)
drivers/gpu/drm/xe/xe_force_wake.c
142
__domain_ctl(gt, domain, false);
drivers/gpu/drm/xe/xe_force_wake.c
146
struct xe_force_wake_domain *domain)
drivers/gpu/drm/xe/xe_force_wake.c
148
return __domain_wait(gt, domain, false);
drivers/gpu/drm/xe/xe_force_wake.c
183
struct xe_force_wake_domain *domain;
drivers/gpu/drm/xe/xe_force_wake.c
194
for_each_fw_domain_masked(domain, ref_rqst, fw, tmp) {
drivers/gpu/drm/xe/xe_force_wake.c
195
if (!domain->ref++) {
drivers/gpu/drm/xe/xe_force_wake.c
196
awake_rqst |= BIT(domain->id);
drivers/gpu/drm/xe/xe_force_wake.c
197
domain_wake(gt, domain);
drivers/gpu/drm/xe/xe_force_wake.c
199
ref_incr |= BIT(domain->id);
drivers/gpu/drm/xe/xe_force_wake.c
201
for_each_fw_domain_masked(domain, awake_rqst, fw, tmp) {
drivers/gpu/drm/xe/xe_force_wake.c
202
if (domain_wake_wait(gt, domain) == 0) {
drivers/gpu/drm/xe/xe_force_wake.c
203
fw->awake_domains |= BIT(domain->id);
drivers/gpu/drm/xe/xe_force_wake.c
205
awake_failed |= BIT(domain->id);
drivers/gpu/drm/xe/xe_force_wake.c
206
--domain->ref;
drivers/gpu/drm/xe/xe_force_wake.c
234
struct xe_force_wake_domain *domain;
drivers/gpu/drm/xe/xe_force_wake.c
250
for_each_fw_domain_masked(domain, fw_ref, fw, tmp) {
drivers/gpu/drm/xe/xe_force_wake.c
251
xe_gt_assert(gt, domain->ref);
drivers/gpu/drm/xe/xe_force_wake.c
253
if (!--domain->ref) {
drivers/gpu/drm/xe/xe_force_wake.c
254
sleep |= BIT(domain->id);
drivers/gpu/drm/xe/xe_force_wake.c
255
domain_sleep(gt, domain);
drivers/gpu/drm/xe/xe_force_wake.c
258
for_each_fw_domain_masked(domain, sleep, fw, tmp) {
drivers/gpu/drm/xe/xe_force_wake.c
259
if (domain_sleep_wait(gt, domain) == 0)
drivers/gpu/drm/xe/xe_force_wake.c
260
fw->awake_domains &= ~BIT(domain->id);
drivers/gpu/drm/xe/xe_force_wake.c
262
ack_fail |= BIT(domain->id);
drivers/gpu/drm/xe/xe_force_wake.c
34
struct xe_force_wake_domain *domain = &fw->domains[id];
drivers/gpu/drm/xe/xe_force_wake.c
36
domain->id = id;
drivers/gpu/drm/xe/xe_force_wake.c
37
domain->reg_ctl = reg;
drivers/gpu/drm/xe/xe_force_wake.c
38
domain->reg_ack = ack;
drivers/gpu/drm/xe/xe_force_wake.c
39
domain->val = FORCEWAKE_MT(FORCEWAKE_KERNEL);
drivers/gpu/drm/xe/xe_force_wake.c
40
domain->mask = FORCEWAKE_MT_MASK(FORCEWAKE_KERNEL);
drivers/gpu/drm/xe/xe_force_wake.c
96
static void __domain_ctl(struct xe_gt *gt, struct xe_force_wake_domain *domain, bool wake)
drivers/gpu/drm/xe/xe_force_wake.h
24
enum xe_force_wake_domains domain)
drivers/gpu/drm/xe/xe_force_wake.h
26
xe_gt_assert(fw->gt, domain != XE_FORCEWAKE_ALL);
drivers/gpu/drm/xe/xe_force_wake.h
27
return fw->domains[ffs(domain) - 1].ref;
drivers/gpu/drm/xe/xe_force_wake.h
42
enum xe_force_wake_domains domain)
drivers/gpu/drm/xe/xe_force_wake.h
44
xe_gt_assert(fw->gt, domain != XE_FORCEWAKE_ALL);
drivers/gpu/drm/xe/xe_force_wake.h
45
xe_gt_assert(fw->gt, fw->awake_domains & domain);
drivers/gpu/drm/xe/xe_force_wake.h
59
xe_force_wake_ref_has_domain(unsigned int fw_ref, enum xe_force_wake_domains domain)
drivers/gpu/drm/xe/xe_force_wake.h
61
return fw_ref & domain;
drivers/gpu/drm/xe/xe_guc_capture.c
1839
snapshot->forcewake.domain, snapshot->forcewake.ref);
drivers/gpu/drm/xe/xe_hw_engine.c
102
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
1070
return engine_infos[hwe->engine_id].domain;
drivers/gpu/drm/xe/xe_hw_engine.c
110
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
118
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
126
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
134
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
143
.domain = XE_FW_MEDIA_VDBOX0,
drivers/gpu/drm/xe/xe_hw_engine.c
151
.domain = XE_FW_MEDIA_VDBOX1,
drivers/gpu/drm/xe/xe_hw_engine.c
159
.domain = XE_FW_MEDIA_VDBOX2,
drivers/gpu/drm/xe/xe_hw_engine.c
167
.domain = XE_FW_MEDIA_VDBOX3,
drivers/gpu/drm/xe/xe_hw_engine.c
175
.domain = XE_FW_MEDIA_VDBOX4,
drivers/gpu/drm/xe/xe_hw_engine.c
183
.domain = XE_FW_MEDIA_VDBOX5,
drivers/gpu/drm/xe/xe_hw_engine.c
191
.domain = XE_FW_MEDIA_VDBOX6,
drivers/gpu/drm/xe/xe_hw_engine.c
199
.domain = XE_FW_MEDIA_VDBOX7,
drivers/gpu/drm/xe/xe_hw_engine.c
207
.domain = XE_FW_MEDIA_VEBOX0,
drivers/gpu/drm/xe/xe_hw_engine.c
215
.domain = XE_FW_MEDIA_VEBOX1,
drivers/gpu/drm/xe/xe_hw_engine.c
223
.domain = XE_FW_MEDIA_VEBOX2,
drivers/gpu/drm/xe/xe_hw_engine.c
231
.domain = XE_FW_MEDIA_VEBOX3,
drivers/gpu/drm/xe/xe_hw_engine.c
239
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
247
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
255
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
263
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
270
.domain = XE_FW_GSC,
drivers/gpu/drm/xe/xe_hw_engine.c
299
xe_force_wake_assert_held(gt_to_fw(hwe->gt), hwe->domain);
drivers/gpu/drm/xe/xe_hw_engine.c
319
xe_force_wake_assert_held(gt_to_fw(hwe->gt), hwe->domain);
drivers/gpu/drm/xe/xe_hw_engine.c
52
enum xe_force_wake_domains domain;
drivers/gpu/drm/xe/xe_hw_engine.c
524
hwe->domain = info->domain;
drivers/gpu/drm/xe/xe_hw_engine.c
62
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
70
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
78
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
86
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
938
snapshot->forcewake.domain = hwe->domain;
drivers/gpu/drm/xe/xe_hw_engine.c
94
.domain = XE_FW_RENDER,
drivers/gpu/drm/xe/xe_hw_engine.c
940
hwe->domain);
drivers/gpu/drm/xe/xe_hw_engine_types.h
136
enum xe_force_wake_domains domain;
drivers/gpu/drm/xe/xe_hw_engine_types.h
175
enum xe_force_wake_domains domain;
drivers/gpu/drm/xe/xe_i2c.c
220
struct irq_domain *domain;
drivers/gpu/drm/xe/xe_i2c.c
226
domain = irq_domain_create_linear(dev_fwnode(i2c->drm_dev), 1, &xe_i2c_irq_ops, NULL);
drivers/gpu/drm/xe/xe_i2c.c
227
if (!domain)
drivers/gpu/drm/xe/xe_i2c.c
230
i2c->adapter_irq = irq_create_mapping(domain, 0);
drivers/gpu/drm/xe/xe_i2c.c
231
i2c->irqdomain = domain;
drivers/gpu/drm/xe/xe_mocs.c
810
enum xe_force_wake_domains domain;
drivers/gpu/drm/xe/xe_mocs.c
816
domain = flags & HAS_LNCF_MOCS ? XE_FORCEWAKE_ALL : XE_FW_GT;
drivers/gpu/drm/xe/xe_mocs.c
819
CLASS(xe_force_wake, fw_ref)(gt_to_fw(gt), domain);
drivers/gpu/drm/xe/xe_mocs.c
820
if (!xe_force_wake_ref_has_domain(fw_ref.domains, domain))
drivers/gpu/host1x/cdma.c
107
err = iommu_map(host1x->domain, pb->dma, pb->phys, size,
drivers/gpu/host1x/cdma.c
56
if (host1x->domain) {
drivers/gpu/host1x/cdma.c
57
iommu_unmap(host1x->domain, pb->dma, pb->alloc_size);
drivers/gpu/host1x/cdma.c
88
if (host1x->domain) {
drivers/gpu/host1x/dev.c
379
struct iommu_domain *domain = iommu_get_domain_for_dev(host->dev);
drivers/gpu/host1x/dev.c
389
domain = iommu_get_domain_for_dev(host->dev);
drivers/gpu/host1x/dev.c
401
if (domain && domain->type == IOMMU_DOMAIN_IDENTITY)
drivers/gpu/host1x/dev.c
402
domain = NULL;
drivers/gpu/host1x/dev.c
403
if (!host1x_wants_iommu(host) || domain)
drivers/gpu/host1x/dev.c
404
return domain;
drivers/gpu/host1x/dev.c
416
host->domain = iommu_paging_domain_alloc(host->dev);
drivers/gpu/host1x/dev.c
417
if (IS_ERR(host->domain)) {
drivers/gpu/host1x/dev.c
418
err = PTR_ERR(host->domain);
drivers/gpu/host1x/dev.c
419
host->domain = NULL;
drivers/gpu/host1x/dev.c
423
err = iommu_attach_group(host->domain, host->group);
drivers/gpu/host1x/dev.c
431
geometry = &host->domain->geometry;
drivers/gpu/host1x/dev.c
435
order = __ffs(host->domain->pgsize_bitmap);
drivers/gpu/host1x/dev.c
439
domain = host->domain;
drivers/gpu/host1x/dev.c
442
return domain;
drivers/gpu/host1x/dev.c
445
iommu_domain_free(host->domain);
drivers/gpu/host1x/dev.c
446
host->domain = NULL;
drivers/gpu/host1x/dev.c
459
struct iommu_domain *domain;
drivers/gpu/host1x/dev.c
462
domain = host1x_iommu_attach(host);
drivers/gpu/host1x/dev.c
463
if (IS_ERR(domain)) {
drivers/gpu/host1x/dev.c
464
err = PTR_ERR(domain);
drivers/gpu/host1x/dev.c
477
if (!domain && !host->info->has_wide_gather)
drivers/gpu/host1x/dev.c
491
if (host->domain) {
drivers/gpu/host1x/dev.c
493
iommu_detach_group(host->domain, host->group);
drivers/gpu/host1x/dev.c
495
iommu_domain_free(host->domain);
drivers/gpu/host1x/dev.c
496
host->domain = NULL;
drivers/gpu/host1x/dev.h
145
struct iommu_domain *domain;
drivers/gpu/host1x/job.c
237
if (host->domain) {
drivers/gpu/host1x/job.c
251
err = iommu_map_sgtable(host->domain, iova_dma_addr(&host->iova, alloc),
drivers/gpu/host1x/job.c
662
if (!job->enable_firewall && map->size && host->domain) {
drivers/gpu/host1x/job.c
663
iommu_unmap(host->domain, job->addr_phys[i], map->size);
drivers/gpu/ipu-v3/ipu-common.c
1011
virq = irq_find_mapping(ipu->domain, irq);
drivers/gpu/ipu-v3/ipu-common.c
1013
virq = irq_create_mapping(ipu->domain, irq);
drivers/gpu/ipu-v3/ipu-common.c
1172
ipu->domain = irq_domain_create_linear(of_fwnode_handle(ipu->dev->of_node), IPU_NUM_IRQS,
drivers/gpu/ipu-v3/ipu-common.c
1174
if (!ipu->domain) {
drivers/gpu/ipu-v3/ipu-common.c
1179
ret = irq_alloc_domain_generic_chips(ipu->domain, 32, 1, "IPU",
drivers/gpu/ipu-v3/ipu-common.c
1183
irq_domain_remove(ipu->domain);
drivers/gpu/ipu-v3/ipu-common.c
1194
gc = irq_get_domain_generic_chip(ipu->domain, i);
drivers/gpu/ipu-v3/ipu-common.c
1222
irq = irq_find_mapping(ipu->domain, i);
drivers/gpu/ipu-v3/ipu-common.c
1227
irq_domain_remove(ipu->domain);
drivers/gpu/ipu-v3/ipu-common.c
976
generic_handle_domain_irq(ipu->domain,
drivers/gpu/ipu-v3/ipu-prv.h
188
struct irq_domain *domain;
drivers/gpu/vga/vga_switcheroo.c
1057
struct dev_pm_domain *domain)
drivers/gpu/vga/vga_switcheroo.c
1061
domain->ops = *dev->bus->pm;
drivers/gpu/vga/vga_switcheroo.c
1062
domain->ops.runtime_suspend = vga_switcheroo_runtime_suspend;
drivers/gpu/vga/vga_switcheroo.c
1063
domain->ops.runtime_resume = vga_switcheroo_runtime_resume;
drivers/gpu/vga/vga_switcheroo.c
1065
dev_pm_domain_set(dev, domain);
drivers/hid/hid-cp2112.c
1125
irq = irq_find_mapping(dev->gc.irq.domain, virq);
drivers/hid/hid-rmi.c
595
struct irq_domain *domain = hdata->domain;
drivers/hid/hid-rmi.c
597
if (!domain)
drivers/hid/hid-rmi.c
600
irq_dispose_mapping(irq_find_mapping(domain, 0));
drivers/hid/hid-rmi.c
602
irq_domain_remove(domain);
drivers/hid/hid-rmi.c
603
hdata->domain = NULL;
drivers/hid/hid-rmi.c
624
hdata->domain = irq_domain_create_linear(hdev->dev.fwnode, 1,
drivers/hid/hid-rmi.c
626
if (!hdata->domain)
drivers/hid/hid-rmi.c
633
hdata->rmi_irq = irq_create_mapping(hdata->domain, 0);
drivers/hid/hid-rmi.c
99
struct irq_domain *domain;
drivers/hid/surface-hid/surface_kbd.c
251
shid->uid.domain = SSAM_DOMAIN_SERIALHUB;
drivers/i2c/i2c-core-base.c
1446
struct irq_domain *domain = adap->host_notify_domain;
drivers/i2c/i2c-core-base.c
1449
if (!domain)
drivers/i2c/i2c-core-base.c
1453
irq_dispose_mapping(irq_find_mapping(domain, hwirq));
drivers/i2c/i2c-core-base.c
1455
irq_domain_remove(domain);
drivers/i2c/i2c-core-base.c
1474
struct irq_domain *domain;
drivers/i2c/i2c-core-base.c
1479
domain = irq_domain_create_linear(dev_fwnode(adap->dev.parent),
drivers/i2c/i2c-core-base.c
1482
if (!domain)
drivers/i2c/i2c-core-base.c
1485
adap->host_notify_domain = domain;
drivers/iio/adc/stm32-adc-core.c
107
struct irq_domain *domain;
drivers/iio/adc/stm32-adc-core.c
380
generic_handle_domain_irq(priv->domain, i);
drivers/iio/adc/stm32-adc-core.c
423
priv->domain = irq_domain_create_simple(dev_fwnode(&pdev->dev),
drivers/iio/adc/stm32-adc-core.c
427
if (!priv->domain) {
drivers/iio/adc/stm32-adc-core.c
446
irq_dispose_mapping(irq_find_mapping(priv->domain, hwirq));
drivers/iio/adc/stm32-adc-core.c
447
irq_domain_remove(priv->domain);
drivers/infiniband/hw/mlx4/main.c
1521
int domain,
drivers/infiniband/hw/mlx4/main.c
1546
ctrl->prio = cpu_to_be16(domain | flow_attr->priority);
drivers/infiniband/hw/mlx5/wr.c
329
static void mlx5_fill_inl_bsf(struct ib_sig_domain *domain,
drivers/infiniband/hw/mlx5/wr.c
335
inl->dif_apptag = cpu_to_be16(domain->sig.dif.app_tag);
drivers/infiniband/hw/mlx5/wr.c
336
inl->dif_reftag = cpu_to_be32(domain->sig.dif.ref_tag);
drivers/infiniband/hw/mlx5/wr.c
339
inl->sig_type = domain->sig.dif.bg_type == IB_T10DIF_CRC ?
drivers/infiniband/hw/mlx5/wr.c
342
if (domain->sig.dif.ref_remap)
drivers/infiniband/hw/mlx5/wr.c
345
if (domain->sig.dif.app_escape) {
drivers/infiniband/hw/mlx5/wr.c
346
if (domain->sig.dif.ref_escape)
drivers/infiniband/hw/mlx5/wr.c
353
cpu_to_be16(domain->sig.dif.apptag_check_mask);
drivers/infiniband/hw/mlx5/wr.c
608
static int set_psv_wr(struct ib_sig_domain *domain,
drivers/infiniband/hw/mlx5/wr.c
615
switch (domain->sig_type) {
drivers/infiniband/hw/mlx5/wr.c
619
psv_seg->transient_sig = cpu_to_be32(domain->sig.dif.bg << 16 |
drivers/infiniband/hw/mlx5/wr.c
620
domain->sig.dif.app_tag);
drivers/infiniband/hw/mlx5/wr.c
621
psv_seg->ref_tag = cpu_to_be32(domain->sig.dif.ref_tag);
drivers/infiniband/hw/mlx5/wr.c
625
domain->sig_type);
drivers/infiniband/hw/mlx5/wr.c
816
struct ib_sig_domain *domain, u32 psv_index,
drivers/infiniband/hw/mlx5/wr.c
831
err = set_psv_wr(domain, psv_index, seg, size);
drivers/infiniband/hw/usnic/usnic_uiom.c
201
iommu_unmap(pd->domain, va, PAGE_SIZE);
drivers/infiniband/hw/usnic/usnic_uiom.c
278
err = iommu_map(pd->domain, va_start, pa_start,
drivers/infiniband/hw/usnic/usnic_uiom.c
295
err = iommu_map(pd->domain, va_start, pa_start,
drivers/infiniband/hw/usnic/usnic_uiom.c
439
void *domain;
drivers/infiniband/hw/usnic/usnic_uiom.c
445
pd->domain = domain = iommu_paging_domain_alloc(dev);
drivers/infiniband/hw/usnic/usnic_uiom.c
446
if (IS_ERR(domain)) {
drivers/infiniband/hw/usnic/usnic_uiom.c
449
return ERR_CAST(domain);
drivers/infiniband/hw/usnic/usnic_uiom.c
452
iommu_set_fault_handler(pd->domain, usnic_uiom_dma_fault, NULL);
drivers/infiniband/hw/usnic/usnic_uiom.c
462
iommu_domain_free(pd->domain);
drivers/infiniband/hw/usnic/usnic_uiom.c
476
err = iommu_attach_device(pd->domain, dev);
drivers/infiniband/hw/usnic/usnic_uiom.c
495
iommu_detach_device(pd->domain, dev);
drivers/infiniband/hw/usnic/usnic_uiom.c
525
return iommu_detach_device(pd->domain, dev);
drivers/infiniband/hw/usnic/usnic_uiom.c
54
static int usnic_uiom_dma_fault(struct iommu_domain *domain,
drivers/infiniband/hw/usnic/usnic_uiom.c
61
domain, iova, flags);
drivers/infiniband/hw/usnic/usnic_uiom.h
58
struct iommu_domain *domain;
drivers/infiniband/ulp/iser/iser_memory.c
177
struct ib_sig_domain *domain)
drivers/infiniband/ulp/iser/iser_memory.c
179
domain->sig_type = IB_SIG_TYPE_T10_DIF;
drivers/infiniband/ulp/iser/iser_memory.c
180
domain->sig.dif.pi_interval = scsi_prot_interval(sc);
drivers/infiniband/ulp/iser/iser_memory.c
181
domain->sig.dif.ref_tag = t10_pi_ref_tag(scsi_cmd_to_rq(sc));
drivers/infiniband/ulp/iser/iser_memory.c
186
domain->sig.dif.apptag_check_mask = 0xffff;
drivers/infiniband/ulp/iser/iser_memory.c
187
domain->sig.dif.app_escape = true;
drivers/infiniband/ulp/iser/iser_memory.c
188
domain->sig.dif.ref_escape = true;
drivers/infiniband/ulp/iser/iser_memory.c
190
domain->sig.dif.ref_remap = true;
drivers/infiniband/ulp/isert/ib_isert.c
1978
isert_set_dif_domain(struct se_cmd *se_cmd, struct ib_sig_domain *domain)
drivers/infiniband/ulp/isert/ib_isert.c
1980
domain->sig_type = IB_SIG_TYPE_T10_DIF;
drivers/infiniband/ulp/isert/ib_isert.c
1981
domain->sig.dif.bg_type = IB_T10DIF_CRC;
drivers/infiniband/ulp/isert/ib_isert.c
1982
domain->sig.dif.pi_interval = se_cmd->se_dev->dev_attrib.block_size;
drivers/infiniband/ulp/isert/ib_isert.c
1983
domain->sig.dif.ref_tag = se_cmd->reftag_seed;
drivers/infiniband/ulp/isert/ib_isert.c
1989
domain->sig.dif.apptag_check_mask = 0xffff;
drivers/infiniband/ulp/isert/ib_isert.c
1990
domain->sig.dif.app_escape = true;
drivers/infiniband/ulp/isert/ib_isert.c
1991
domain->sig.dif.ref_escape = true;
drivers/infiniband/ulp/isert/ib_isert.c
1994
domain->sig.dif.ref_remap = true;
drivers/input/keyboard/adp5588-keys.c
497
irq = irq_find_mapping(kpad->gc.irq.domain, hwirq);
drivers/iommu/amd/amd_iommu.h
169
return container_of(dom, struct protection_domain, domain);
drivers/iommu/amd/amd_iommu.h
187
void amd_iommu_domain_set_pgtable(struct protection_domain *domain,
drivers/iommu/amd/amd_iommu.h
193
struct protection_domain *domain, u16 domid,
drivers/iommu/amd/amd_iommu.h
54
int iommu_sva_set_dev_pasid(struct iommu_domain *domain,
drivers/iommu/amd/amd_iommu.h
58
struct iommu_domain *domain);
drivers/iommu/amd/amd_iommu.h
90
void amd_iommu_domain_flush_pages(struct protection_domain *domain,
drivers/iommu/amd/amd_iommu_types.h
529
struct iommu_domain domain; /* generic domain handle used by iommu core code */
drivers/iommu/amd/amd_iommu_types.h
542
struct iommu_domain domain;
drivers/iommu/amd/amd_iommu_types.h
563
PT_IOMMU_CHECK_DOMAIN(struct protection_domain, iommu, domain);
drivers/iommu/amd/amd_iommu_types.h
564
PT_IOMMU_CHECK_DOMAIN(struct protection_domain, amdv1.iommu, domain);
drivers/iommu/amd/amd_iommu_types.h
565
PT_IOMMU_CHECK_DOMAIN(struct protection_domain, amdv2.iommu, domain);
drivers/iommu/amd/amd_iommu_types.h
819
struct protection_domain *domain; /* Domain the device is bound to */
drivers/iommu/amd/init.c
2390
static int intcapxt_irqdomain_activate(struct irq_domain *domain,
drivers/iommu/amd/init.c
2396
static void intcapxt_irqdomain_deactivate(struct irq_domain *domain,
drivers/iommu/amd/init.c
2402
static int intcapxt_irqdomain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/iommu/amd/init.c
2411
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, arg);
drivers/iommu/amd/init.c
2416
struct irq_data *irqd = irq_domain_get_irq_data(domain, i);
drivers/iommu/amd/init.c
2427
static void intcapxt_irqdomain_free(struct irq_domain *domain, unsigned int virq,
drivers/iommu/amd/init.c
2430
irq_domain_free_irqs_top(domain, virq, nr_irqs);
drivers/iommu/amd/init.c
2519
struct irq_domain *domain;
drivers/iommu/amd/init.c
2524
domain = iommu_get_irqdomain();
drivers/iommu/amd/init.c
2525
if (!domain)
drivers/iommu/amd/init.c
2533
irq = irq_domain_alloc_irqs(domain, 1, node, &info);
drivers/iommu/amd/init.c
2535
irq_domain_remove(domain);
drivers/iommu/amd/init.c
2543
irq_domain_remove(domain);
drivers/iommu/amd/iommu.c
1473
static void domain_flush_complete(struct protection_domain *domain)
drivers/iommu/amd/iommu.c
1478
lockdep_assert_held(&domain->lock);
drivers/iommu/amd/iommu.c
1484
xa_for_each(&domain->iommu_array, i, pdom_iommu_info)
drivers/iommu/amd/iommu.c
1739
static void __domain_flush_pages(struct protection_domain *domain,
drivers/iommu/amd/iommu.c
1747
lockdep_assert_held(&domain->lock);
drivers/iommu/amd/iommu.c
1749
if (pdom_is_v2_pgtbl_mode(domain)) {
drivers/iommu/amd/iommu.c
1751
ret = domain_flush_pages_v2(domain, address, size);
drivers/iommu/amd/iommu.c
1753
ret = domain_flush_pages_v1(domain, address, size);
drivers/iommu/amd/iommu.c
1756
list_for_each_entry(dev_data, &domain->dev_list, list) {
drivers/iommu/amd/iommu.c
1767
void amd_iommu_domain_flush_pages(struct protection_domain *domain,
drivers/iommu/amd/iommu.c
1770
lockdep_assert_held(&domain->lock);
drivers/iommu/amd/iommu.c
1773
__domain_flush_pages(domain, address, size);
drivers/iommu/amd/iommu.c
1776
domain_flush_complete(domain);
drivers/iommu/amd/iommu.c
1810
__domain_flush_pages(domain, address, flush_size);
drivers/iommu/amd/iommu.c
1816
domain_flush_complete(domain);
drivers/iommu/amd/iommu.c
1820
static void amd_iommu_domain_flush_all(struct protection_domain *domain)
drivers/iommu/amd/iommu.c
1822
amd_iommu_domain_flush_pages(domain, 0,
drivers/iommu/amd/iommu.c
2084
(pdom_is_v2_pgtbl_mode(dev_data->domain) ? DTE_FLAG_GIOV : 0) |
drivers/iommu/amd/iommu.c
2103
struct protection_domain *domain, u16 domid,
drivers/iommu/amd/iommu.c
2112
(domain->dirty_tracking ? DTE_FLAG_HAD : 0) |
drivers/iommu/amd/iommu.c
2121
struct protection_domain *domain, u16 domid,
drivers/iommu/amd/iommu.c
2138
pt_iommu_amdv1_hw_info(&domain->amdv1, &pt_info);
drivers/iommu/amd/iommu.c
2141
amd_iommu_set_dte_v1(dev_data, domain, domid, &pt_info, new);
drivers/iommu/amd/iommu.c
2145
struct protection_domain *domain,
drivers/iommu/amd/iommu.c
2150
new->data[1] |= FIELD_PREP(DTE_DOMID_MASK, domain->id) |
drivers/iommu/amd/iommu.c
2160
struct protection_domain *domain = dev_data->domain;
drivers/iommu/amd/iommu.c
2169
else if (domain->domain.type == IOMMU_DOMAIN_IDENTITY)
drivers/iommu/amd/iommu.c
2170
set_dte_passthrough(dev_data, domain, &new);
drivers/iommu/amd/iommu.c
2171
else if ((domain->domain.type & __IOMMU_DOMAIN_PAGING) &&
drivers/iommu/amd/iommu.c
2172
domain->pd_mode == PD_MODE_V1)
drivers/iommu/amd/iommu.c
2173
set_dte_v1(dev_data, domain, domain->id, top_paddr, top_level, &new);
drivers/iommu/amd/iommu.c
2330
struct protection_domain *domain)
drivers/iommu/amd/iommu.c
2340
if (dev_data->domain != NULL) {
drivers/iommu/amd/iommu.c
2346
ret = pdom_attach_iommu(iommu, domain);
drivers/iommu/amd/iommu.c
2351
if (pdom_is_sva_capable(domain)) {
drivers/iommu/amd/iommu.c
2352
ret = init_gcr3_table(dev_data, domain);
drivers/iommu/amd/iommu.c
2354
pdom_detach_iommu(iommu, domain);
drivers/iommu/amd/iommu.c
2360
if (pdev && pdom_is_sva_capable(domain)) {
drivers/iommu/amd/iommu.c
2375
dev_data->domain = domain;
drivers/iommu/amd/iommu.c
2376
spin_lock_irqsave(&domain->lock, flags);
drivers/iommu/amd/iommu.c
2377
list_add(&dev_data->list, &domain->dev_list);
drivers/iommu/amd/iommu.c
2378
spin_unlock_irqrestore(&domain->lock, flags);
drivers/iommu/amd/iommu.c
2396
struct protection_domain *domain = dev_data->domain;
drivers/iommu/amd/iommu.c
2407
if (WARN_ON(!dev_data->domain))
drivers/iommu/amd/iommu.c
2423
spin_lock_irqsave(&domain->lock, flags);
drivers/iommu/amd/iommu.c
2424
amd_iommu_domain_flush_all(domain);
drivers/iommu/amd/iommu.c
2426
spin_unlock_irqrestore(&domain->lock, flags);
drivers/iommu/amd/iommu.c
2429
if (pdom_is_sva_capable(domain))
drivers/iommu/amd/iommu.c
2430
destroy_gcr3_table(dev_data, domain);
drivers/iommu/amd/iommu.c
2433
dev_data->domain = NULL;
drivers/iommu/amd/iommu.c
2436
pdom_detach_iommu(iommu, domain);
drivers/iommu/amd/iommu.c
244
return (pdom->domain.type == IOMMU_DOMAIN_IDENTITY);
drivers/iommu/amd/iommu.c
2510
WARN_ON(dev_data->domain);
drivers/iommu/amd/iommu.c
2536
static void protection_domain_init(struct protection_domain *domain)
drivers/iommu/amd/iommu.c
2538
spin_lock_init(&domain->lock);
drivers/iommu/amd/iommu.c
2539
INIT_LIST_HEAD(&domain->dev_list);
drivers/iommu/amd/iommu.c
2540
INIT_LIST_HEAD(&domain->dev_data_list);
drivers/iommu/amd/iommu.c
2541
INIT_LIST_HEAD(&domain->viommu_list);
drivers/iommu/amd/iommu.c
2542
xa_init(&domain->iommu_array);
drivers/iommu/amd/iommu.c
2547
struct protection_domain *domain;
drivers/iommu/amd/iommu.c
2550
domain = kzalloc_obj(*domain);
drivers/iommu/amd/iommu.c
2551
if (!domain)
drivers/iommu/amd/iommu.c
2556
kfree(domain);
drivers/iommu/amd/iommu.c
2559
domain->id = domid;
drivers/iommu/amd/iommu.c
2561
protection_domain_init(domain);
drivers/iommu/amd/iommu.c
2563
return domain;
drivers/iommu/amd/iommu.c
2617
struct protection_domain *domain = to_pdomain(dom);
drivers/iommu/amd/iommu.c
2623
spin_lock_irqsave(&domain->lock, flags);
drivers/iommu/amd/iommu.c
2624
amd_iommu_domain_flush_pages(domain, iova, size);
drivers/iommu/amd/iommu.c
2625
spin_unlock_irqrestore(&domain->lock, flags);
drivers/iommu/amd/iommu.c
2629
static void amd_iommu_flush_iotlb_all(struct iommu_domain *domain)
drivers/iommu/amd/iommu.c
2631
struct protection_domain *dom = to_pdomain(domain);
drivers/iommu/amd/iommu.c
2639
static void amd_iommu_iotlb_sync(struct iommu_domain *domain,
drivers/iommu/amd/iommu.c
2642
struct protection_domain *dom = to_pdomain(domain);
drivers/iommu/amd/iommu.c
2676
struct protection_domain *domain;
drivers/iommu/amd/iommu.c
2682
domain = protection_domain_alloc();
drivers/iommu/amd/iommu.c
2683
if (!domain)
drivers/iommu/amd/iommu.c
2686
domain->pd_mode = PD_MODE_V1;
drivers/iommu/amd/iommu.c
2687
domain->iommu.driver_ops = &amd_hw_driver_ops_v1;
drivers/iommu/amd/iommu.c
2688
domain->iommu.nid = dev_to_node(dev);
drivers/iommu/amd/iommu.c
2690
domain->domain.dirty_ops = &amdv1_dirty_ops;
drivers/iommu/amd/iommu.c
2719
domain->domain.ops = &amdv1_ops;
drivers/iommu/amd/iommu.c
2721
ret = pt_iommu_amdv1_init(&domain->amdv1, &cfg, GFP_KERNEL);
drivers/iommu/amd/iommu.c
2723
amd_iommu_domain_free(&domain->domain);
drivers/iommu/amd/iommu.c
2731
domain->domain.pgsize_bitmap &= amd_iommu_pgsize_bitmap;
drivers/iommu/amd/iommu.c
2732
return &domain->domain;
drivers/iommu/amd/iommu.c
2758
struct protection_domain *domain;
drivers/iommu/amd/iommu.c
2764
domain = protection_domain_alloc();
drivers/iommu/amd/iommu.c
2765
if (!domain)
drivers/iommu/amd/iommu.c
2768
domain->pd_mode = PD_MODE_V2;
drivers/iommu/amd/iommu.c
2769
domain->iommu.nid = dev_to_node(dev);
drivers/iommu/amd/iommu.c
2793
domain->domain.ops = &amdv2_ops;
drivers/iommu/amd/iommu.c
2795
ret = pt_iommu_x86_64_init(&domain->amdv2, &cfg, GFP_KERNEL);
drivers/iommu/amd/iommu.c
2797
amd_iommu_domain_free(&domain->domain);
drivers/iommu/amd/iommu.c
2800
return &domain->domain;
drivers/iommu/amd/iommu.c
2869
struct protection_domain *domain = to_pdomain(dom);
drivers/iommu/amd/iommu.c
2871
WARN_ON(!list_empty(&domain->dev_list));
drivers/iommu/amd/iommu.c
2872
pt_iommu_deinit(&domain->iommu);
drivers/iommu/amd/iommu.c
2873
amd_iommu_pdom_id_free(domain->id);
drivers/iommu/amd/iommu.c
2874
kfree(domain);
drivers/iommu/amd/iommu.c
2877
static int blocked_domain_attach_device(struct iommu_domain *domain,
drivers/iommu/amd/iommu.c
2883
if (dev_data->domain)
drivers/iommu/amd/iommu.c
2894
static int blocked_domain_set_dev_pasid(struct iommu_domain *domain,
drivers/iommu/amd/iommu.c
2931
struct iommu_domain *domain = &identity_domain.domain;
drivers/iommu/amd/iommu.c
2933
domain->type = IOMMU_DOMAIN_IDENTITY;
drivers/iommu/amd/iommu.c
2934
domain->ops = &identity_domain_ops;
drivers/iommu/amd/iommu.c
2935
domain->owner = &amd_iommu_ops;
drivers/iommu/amd/iommu.c
2946
struct protection_domain *domain = to_pdomain(dom);
drivers/iommu/amd/iommu.c
2954
if (dev_data->domain == domain)
drivers/iommu/amd/iommu.c
2966
if (dev_data->domain)
drivers/iommu/amd/iommu.c
2969
ret = attach_device(dev, domain);
drivers/iommu/amd/iommu.c
3008
static int amd_iommu_set_dirty_tracking(struct iommu_domain *domain,
drivers/iommu/amd/iommu.c
3011
struct protection_domain *pdomain = to_pdomain(domain);
drivers/iommu/amd/iommu.c
3146
static bool amd_iommu_enforce_cache_coherency(struct iommu_domain *domain)
drivers/iommu/amd/iommu.c
3157
.identity_domain = &identity_domain.domain,
drivers/iommu/amd/iommu.c
3734
static int irq_remapping_alloc(struct irq_domain *domain, unsigned int virq,
drivers/iommu/amd/iommu.c
3765
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, arg);
drivers/iommu/amd/iommu.c
3807
irq_data = irq_domain_get_irq_data(domain, virq + i);
drivers/iommu/amd/iommu.c
3839
irq_data = irq_domain_get_irq_data(domain, virq + i);
drivers/iommu/amd/iommu.c
3846
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/iommu/amd/iommu.c
3850
static void irq_remapping_free(struct irq_domain *domain, unsigned int virq,
drivers/iommu/amd/iommu.c
3859
irq_data = irq_domain_get_irq_data(domain, virq + i);
drivers/iommu/amd/iommu.c
3868
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/iommu/amd/iommu.c
3876
static int irq_remapping_activate(struct irq_domain *domain,
drivers/iommu/amd/iommu.c
3893
static void irq_remapping_deactivate(struct irq_domain *domain,
drivers/iommu/amd/iommu.c
87
static bool amd_iommu_enforce_cache_coherency(struct iommu_domain *domain);
drivers/iommu/amd/iommu.c
872
if (dev_data->domain == NULL) {
drivers/iommu/amd/iommu.c
88
static int amd_iommu_set_dirty_tracking(struct iommu_domain *domain,
drivers/iommu/amd/iommu.c
880
if (!report_iommu_fault(&dev_data->domain->domain,
drivers/iommu/amd/nested.c
120
ndom->domain.ops = &nested_domain_ops;
drivers/iommu/amd/nested.c
121
ndom->domain.type = IOMMU_DOMAIN_NESTED;
drivers/iommu/amd/nested.c
156
return &ndom->domain;
drivers/iommu/amd/nested.c
177
return &ndom->domain;
drivers/iommu/amd/nested.c
18
return container_of(dom, struct nested_domain, domain);
drivers/iommu/amd/pasid.c
102
int iommu_sva_set_dev_pasid(struct iommu_domain *domain,
drivers/iommu/amd/pasid.c
107
struct protection_domain *sva_pdom = to_pdomain(domain);
drivers/iommu/amd/pasid.c
135
iommu_virt_to_phys(domain->mm->pgd));
drivers/iommu/amd/pasid.c
149
struct iommu_domain *domain)
drivers/iommu/amd/pasid.c
157
sva_pdom = to_pdomain(domain);
drivers/iommu/amd/pasid.c
167
static void iommu_sva_domain_free(struct iommu_domain *domain)
drivers/iommu/amd/pasid.c
169
struct protection_domain *sva_pdom = to_pdomain(domain);
drivers/iommu/amd/pasid.c
172
mmu_notifier_unregister(&sva_pdom->mn, domain->mm);
drivers/iommu/amd/pasid.c
174
amd_iommu_domain_free(domain);
drivers/iommu/amd/pasid.c
192
pdom->domain.ops = &amd_sva_domain_ops;
drivers/iommu/amd/pasid.c
194
pdom->domain.type = IOMMU_DOMAIN_SVA;
drivers/iommu/amd/pasid.c
198
amd_iommu_domain_free(&pdom->domain);
drivers/iommu/amd/pasid.c
202
return &pdom->domain;
drivers/iommu/apple-dart.c
273
struct iommu_domain domain;
drivers/iommu/apple-dart.c
309
return container_of(dom, struct apple_dart_domain, domain);
drivers/iommu/apple-dart.c
497
static void apple_dart_domain_flush_tlb(struct apple_dart_domain *domain)
drivers/iommu/apple-dart.c
503
for_each_stream_map(i, domain, domain_stream_map) {
drivers/iommu/apple-dart.c
513
static void apple_dart_flush_iotlb_all(struct iommu_domain *domain)
drivers/iommu/apple-dart.c
515
apple_dart_domain_flush_tlb(to_dart_domain(domain));
drivers/iommu/apple-dart.c
518
static void apple_dart_iotlb_sync(struct iommu_domain *domain,
drivers/iommu/apple-dart.c
521
apple_dart_domain_flush_tlb(to_dart_domain(domain));
drivers/iommu/apple-dart.c
524
static int apple_dart_iotlb_sync_map(struct iommu_domain *domain,
drivers/iommu/apple-dart.c
527
apple_dart_domain_flush_tlb(to_dart_domain(domain));
drivers/iommu/apple-dart.c
531
static phys_addr_t apple_dart_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/apple-dart.c
534
struct apple_dart_domain *dart_domain = to_dart_domain(domain);
drivers/iommu/apple-dart.c
543
static int apple_dart_map_pages(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/apple-dart.c
548
struct apple_dart_domain *dart_domain = to_dart_domain(domain);
drivers/iommu/apple-dart.c
558
static size_t apple_dart_unmap_pages(struct iommu_domain *domain,
drivers/iommu/apple-dart.c
563
struct apple_dart_domain *dart_domain = to_dart_domain(domain);
drivers/iommu/apple-dart.c
570
apple_dart_setup_translation(struct apple_dart_domain *domain,
drivers/iommu/apple-dart.c
575
&io_pgtable_ops_to_pgtable(domain->pgtbl_ops)->cfg;
drivers/iommu/apple-dart.c
620
&dart_domain->domain);
drivers/iommu/apple-dart.c
626
dart_domain->domain.pgsize_bitmap = pgtbl_cfg.pgsize_bitmap;
drivers/iommu/apple-dart.c
627
dart_domain->domain.geometry.aperture_start = 0;
drivers/iommu/apple-dart.c
628
dart_domain->domain.geometry.aperture_end =
drivers/iommu/apple-dart.c
630
dart_domain->domain.geometry.force_aperture = true;
drivers/iommu/apple-dart.c
667
static int apple_dart_domain_add_streams(struct apple_dart_domain *domain,
drivers/iommu/apple-dart.c
670
return apple_dart_mod_streams(domain->stream_maps, cfg->stream_maps,
drivers/iommu/apple-dart.c
674
static int apple_dart_attach_dev_paging(struct iommu_domain *domain,
drivers/iommu/apple-dart.c
681
struct apple_dart_domain *dart_domain = to_dart_domain(domain);
drivers/iommu/apple-dart.c
696
static int apple_dart_attach_dev_identity(struct iommu_domain *domain,
drivers/iommu/apple-dart.c
721
static int apple_dart_attach_dev_blocked(struct iommu_domain *domain,
drivers/iommu/apple-dart.c
787
return &dart_domain->domain;
drivers/iommu/apple-dart.c
790
static void apple_dart_domain_free(struct iommu_domain *domain)
drivers/iommu/apple-dart.c
792
struct apple_dart_domain *dart_domain = to_dart_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-iommufd.c
151
static int arm_smmu_attach_dev_nested(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-iommufd.c
156
to_smmu_nested_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-iommufd.c
183
ret = arm_smmu_attach_prepare(&state, domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-iommufd.c
197
static void arm_smmu_domain_nested_free(struct iommu_domain *domain)
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-iommufd.c
199
kfree(to_smmu_nested_domain(domain));
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-iommufd.c
268
nested_domain->domain.type = IOMMU_DOMAIN_NESTED;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-iommufd.c
269
nested_domain->domain.ops = &arm_smmu_nested_ops;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-iommufd.c
275
return &nested_domain->domain;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-sva.c
270
static int arm_smmu_sva_set_dev_pasid(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-sva.c
274
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-sva.c
283
if (!mmget_not_zero(domain->mm))
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-sva.c
290
arm_smmu_make_sva_cd(&target, master, domain->mm, smmu_domain->cd.asid);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-sva.c
293
mmput(domain->mm);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-sva.c
297
static void arm_smmu_sva_domain_free(struct iommu_domain *domain)
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-sva.c
299
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-sva.c
341
smmu_domain->domain.type = IOMMU_DOMAIN_SVA;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-sva.c
342
smmu_domain->domain.ops = &arm_smmu_sva_domain_ops;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-sva.c
348
smmu_domain->domain.pgsize_bitmap = PAGE_SIZE;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3-sva.c
362
return &smmu_domain->domain;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2344
tg = __ffs(smmu_domain->domain.pgsize_bitmap);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2457
struct iommu_domain *domain = &smmu_domain->domain;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2459
iommu_iotlb_gather_add_page(domain, gather, iova, granule);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2502
static bool arm_smmu_enforce_cache_coherency(struct iommu_domain *domain)
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2504
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2536
static void arm_smmu_domain_free_paging(struct iommu_domain *domain)
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2538
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2640
smmu_domain->domain.pgsize_bitmap = pgtbl_cfg.pgsize_bitmap;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2641
smmu_domain->domain.geometry.aperture_end = (1UL << pgtbl_cfg.ias) - 1;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2642
smmu_domain->domain.geometry.force_aperture = true;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2644
smmu_domain->domain.dirty_ops = &arm_smmu_dirty_ops;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2783
struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2794
master_domain->domain == domain &&
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2808
to_smmu_domain_devices(struct iommu_domain *domain)
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2811
if (!domain)
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2813
if ((domain->type & __IOMMU_DOMAIN_PAGING) ||
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2814
domain->type == IOMMU_DOMAIN_SVA)
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2815
return to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2816
if (domain->type == IOMMU_DOMAIN_NESTED)
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2817
return to_smmu_nested_domain(domain)->vsmmu->s2_parent;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2874
struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2877
struct arm_smmu_domain *smmu_domain = to_smmu_domain_devices(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2885
if (domain->type == IOMMU_DOMAIN_NESTED)
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2886
nested_ats_flush = to_smmu_nested_domain(domain)->enable_ats;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2889
master_domain = arm_smmu_find_master_domain(smmu_domain, domain, master,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
2972
master_domain->domain = new_domain;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3065
static int arm_smmu_attach_dev(struct iommu_domain *domain, struct device *dev,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3072
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3104
ret = arm_smmu_attach_prepare(&state, domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3135
static int arm_smmu_s1_set_dev_pasid(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3139
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3155
return arm_smmu_set_pasid(master, to_smmu_domain(domain), id,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3213
ret = arm_smmu_attach_prepare(&state, &smmu_domain->domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3246
arm_smmu_remove_master_domain(master, &smmu_domain->domain, pasid);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3265
static void arm_smmu_attach_dev_ste(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3297
arm_smmu_attach_prepare(&state, domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3300
arm_smmu_attach_prepare(&state, domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3314
static int arm_smmu_attach_dev_identity(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3323
arm_smmu_attach_dev_ste(domain, old_domain, dev, &ste,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3337
static int arm_smmu_attach_dev_blocked(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3346
arm_smmu_attach_dev_ste(domain, old_domain, dev, &ste,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3412
smmu_domain->domain.type = IOMMU_DOMAIN_UNMANAGED;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3413
smmu_domain->domain.ops = arm_smmu_ops.default_domain_ops;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3417
return &smmu_domain->domain;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3424
static int arm_smmu_map_pages(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3428
struct io_pgtable_ops *ops = to_smmu_domain(domain)->pgtbl_ops;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3436
static size_t arm_smmu_unmap_pages(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3440
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3449
static void arm_smmu_flush_iotlb_all(struct iommu_domain *domain)
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3451
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3457
static void arm_smmu_iotlb_sync(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3460
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3471
arm_smmu_iova_to_phys(struct iommu_domain *domain, dma_addr_t iova)
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3473
struct io_pgtable_ops *ops = to_smmu_domain(domain)->pgtbl_ops;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3658
static int arm_smmu_read_and_clear_dirty(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3663
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
3669
static int arm_smmu_set_dirty_tracking(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.c
4033
if (!dev->msi.domain) {
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.h
873
struct iommu_domain domain;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.h
885
struct iommu_domain domain;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.h
936
struct iommu_domain *domain;
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.h
944
return container_of(dom, struct arm_smmu_domain, domain);
drivers/iommu/arm/arm-smmu-v3/arm-smmu-v3.h
950
return container_of(dom, struct arm_smmu_nested_domain, domain);
drivers/iommu/arm/arm-smmu/arm-smmu-qcom-debug.c
402
ret = report_iommu_fault(&smmu_domain->domain, NULL, cfi.iova,
drivers/iommu/arm/arm-smmu/arm-smmu-qcom-debug.c
420
tmp = report_iommu_fault(&smmu_domain->domain, NULL, cfi.iova,
drivers/iommu/arm/arm-smmu/arm-smmu.c
108
return container_of(dom, struct arm_smmu_domain, domain);
drivers/iommu/arm/arm-smmu/arm-smmu.c
1168
static int arm_smmu_attach_dev(struct iommu_domain *domain, struct device *dev,
drivers/iommu/arm/arm-smmu/arm-smmu.c
1171
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu/arm-smmu.c
1237
static int arm_smmu_attach_dev_identity(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu/arm-smmu.c
1253
static int arm_smmu_attach_dev_blocked(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu/arm-smmu.c
1269
static int arm_smmu_map_pages(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/arm/arm-smmu/arm-smmu.c
1273
struct io_pgtable_ops *ops = to_smmu_domain(domain)->pgtbl_ops;
drivers/iommu/arm/arm-smmu/arm-smmu.c
1274
struct arm_smmu_device *smmu = to_smmu_domain(domain)->smmu;
drivers/iommu/arm/arm-smmu/arm-smmu.c
1287
static size_t arm_smmu_unmap_pages(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/arm/arm-smmu/arm-smmu.c
1291
struct io_pgtable_ops *ops = to_smmu_domain(domain)->pgtbl_ops;
drivers/iommu/arm/arm-smmu/arm-smmu.c
1292
struct arm_smmu_device *smmu = to_smmu_domain(domain)->smmu;
drivers/iommu/arm/arm-smmu/arm-smmu.c
1305
static void arm_smmu_flush_iotlb_all(struct iommu_domain *domain)
drivers/iommu/arm/arm-smmu/arm-smmu.c
1307
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu/arm-smmu.c
1317
static void arm_smmu_iotlb_sync(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu/arm-smmu.c
1320
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu/arm-smmu.c
1335
static phys_addr_t arm_smmu_iova_to_phys_hard(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu/arm-smmu.c
1338
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu/arm-smmu.c
1387
static phys_addr_t arm_smmu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu/arm-smmu.c
1390
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu/arm-smmu.c
1398
return arm_smmu_iova_to_phys_hard(domain, iova);
drivers/iommu/arm/arm-smmu/arm-smmu.c
1574
static int arm_smmu_set_pgtable_quirks(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu/arm-smmu.c
1577
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu/arm-smmu.c
470
ret = report_iommu_fault(&smmu_domain->domain, NULL, cfi.iova,
drivers/iommu/arm/arm-smmu/arm-smmu.c
684
struct iommu_domain *domain = &smmu_domain->domain;
drivers/iommu/arm/arm-smmu/arm-smmu.c
832
domain->pgsize_bitmap = pgtbl_cfg.pgsize_bitmap;
drivers/iommu/arm/arm-smmu/arm-smmu.c
835
domain->geometry.aperture_start = ~0UL << ias;
drivers/iommu/arm/arm-smmu/arm-smmu.c
836
domain->geometry.aperture_end = ~0UL;
drivers/iommu/arm/arm-smmu/arm-smmu.c
838
domain->geometry.aperture_end = (1UL << ias) - 1;
drivers/iommu/arm/arm-smmu/arm-smmu.c
841
domain->geometry.force_aperture = true;
drivers/iommu/arm/arm-smmu/arm-smmu.c
936
smmu_domain->domain.pgsize_bitmap = smmu->pgsize_bitmap;
drivers/iommu/arm/arm-smmu/arm-smmu.c
938
return &smmu_domain->domain;
drivers/iommu/arm/arm-smmu/arm-smmu.c
941
static void arm_smmu_domain_free(struct iommu_domain *domain)
drivers/iommu/arm/arm-smmu/arm-smmu.c
943
struct arm_smmu_domain *smmu_domain = to_smmu_domain(domain);
drivers/iommu/arm/arm-smmu/arm-smmu.h
387
struct iommu_domain domain;
drivers/iommu/arm/arm-smmu/qcom_iommu.c
203
if (!report_iommu_fault(ctx->domain, ctx->dev, iova, 0)) {
drivers/iommu/arm/arm-smmu/qcom_iommu.c
216
static int qcom_iommu_init_domain(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu/qcom_iommu.c
220
struct qcom_iommu_domain *qcom_domain = to_qcom_iommu_domain(domain);
drivers/iommu/arm/arm-smmu/qcom_iommu.c
232
.pgsize_bitmap = domain->pgsize_bitmap,
drivers/iommu/arm/arm-smmu/qcom_iommu.c
249
domain->geometry.aperture_end = (1ULL << pgtbl_cfg.ias) - 1;
drivers/iommu/arm/arm-smmu/qcom_iommu.c
250
domain->geometry.force_aperture = true;
drivers/iommu/arm/arm-smmu/qcom_iommu.c
266
ctx->domain = domain;
drivers/iommu/arm/arm-smmu/qcom_iommu.c
306
ctx->domain = domain;
drivers/iommu/arm/arm-smmu/qcom_iommu.c
338
qcom_domain->domain.pgsize_bitmap = SZ_4K;
drivers/iommu/arm/arm-smmu/qcom_iommu.c
340
return &qcom_domain->domain;
drivers/iommu/arm/arm-smmu/qcom_iommu.c
343
static void qcom_iommu_domain_free(struct iommu_domain *domain)
drivers/iommu/arm/arm-smmu/qcom_iommu.c
345
struct qcom_iommu_domain *qcom_domain = to_qcom_iommu_domain(domain);
drivers/iommu/arm/arm-smmu/qcom_iommu.c
362
static int qcom_iommu_attach_dev(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu/qcom_iommu.c
366
struct qcom_iommu_domain *qcom_domain = to_qcom_iommu_domain(domain);
drivers/iommu/arm/arm-smmu/qcom_iommu.c
376
ret = qcom_iommu_init_domain(domain, qcom_iommu, dev);
drivers/iommu/arm/arm-smmu/qcom_iommu.c
414
ctx->domain = NULL;
drivers/iommu/arm/arm-smmu/qcom_iommu.c
429
static int qcom_iommu_map(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/arm/arm-smmu/qcom_iommu.c
435
struct qcom_iommu_domain *qcom_domain = to_qcom_iommu_domain(domain);
drivers/iommu/arm/arm-smmu/qcom_iommu.c
447
static size_t qcom_iommu_unmap(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/arm/arm-smmu/qcom_iommu.c
453
struct qcom_iommu_domain *qcom_domain = to_qcom_iommu_domain(domain);
drivers/iommu/arm/arm-smmu/qcom_iommu.c
473
static void qcom_iommu_flush_iotlb_all(struct iommu_domain *domain)
drivers/iommu/arm/arm-smmu/qcom_iommu.c
475
struct qcom_iommu_domain *qcom_domain = to_qcom_iommu_domain(domain);
drivers/iommu/arm/arm-smmu/qcom_iommu.c
486
static void qcom_iommu_iotlb_sync(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu/qcom_iommu.c
489
qcom_iommu_flush_iotlb_all(domain);
drivers/iommu/arm/arm-smmu/qcom_iommu.c
492
static phys_addr_t qcom_iommu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/arm/arm-smmu/qcom_iommu.c
497
struct qcom_iommu_domain *qcom_domain = to_qcom_iommu_domain(domain);
drivers/iommu/arm/arm-smmu/qcom_iommu.c
63
struct iommu_domain *domain;
drivers/iommu/arm/arm-smmu/qcom_iommu.c
70
struct iommu_domain domain;
drivers/iommu/arm/arm-smmu/qcom_iommu.c
77
return container_of(dom, struct qcom_iommu_domain, domain);
drivers/iommu/dma-iommu.c
1153
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
1154
struct iova_domain *iovad = &domain->iova_cookie->iovad;
drivers/iommu/dma-iommu.c
1203
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
1204
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
1392
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
1393
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
1405
ret = iommu_deferred_attach(dev, domain);
drivers/iommu/dma-iommu.c
1484
iova = iommu_dma_alloc_iova(domain, iova_len, dma_get_mask(dev), dev);
drivers/iommu/dma-iommu.c
1494
ret = iommu_map_sg(domain, iova, sg, nents, prot, GFP_ATOMIC);
drivers/iommu/dma-iommu.c
1501
iommu_dma_free_iova(domain, iova, iova_len, NULL);
drivers/iommu/dma-iommu.c
1729
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
1731
return (1UL << __ffs(domain->pgsize_bitmap)) - 1;
drivers/iommu/dma-iommu.c
1767
struct iommu_domain *domain;
drivers/iommu/dma-iommu.c
1776
domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
1777
cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
1796
addr = iommu_dma_alloc_iova(domain,
drivers/iommu/dma-iommu.c
1822
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
1823
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
1828
iommu_dma_free_iova(domain, state->addr - iova_start_pad,
drivers/iommu/dma-iommu.c
1852
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
1853
struct iova_domain *iovad = &domain->iova_cookie->iovad;
drivers/iommu/dma-iommu.c
1874
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
1875
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
1941
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
1942
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
1990
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
1991
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
1996
return iommu_sync_map(domain, addr - iova_start_pad,
drivers/iommu/dma-iommu.c
2005
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
2006
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
2015
phys = iommu_iova_to_phys(domain, addr);
drivers/iommu/dma-iommu.c
2039
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
2040
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
2057
unmapped = iommu_unmap_fast(domain, addr, size, &iotlb_gather);
drivers/iommu/dma-iommu.c
2061
iommu_iotlb_sync(domain, &iotlb_gather);
drivers/iommu/dma-iommu.c
2063
iommu_dma_free_iova(domain, addr, size, &iotlb_gather);
drivers/iommu/dma-iommu.c
2113
void iommu_setup_dma_ops(struct device *dev, struct iommu_domain *domain)
drivers/iommu/dma-iommu.c
2118
dev->dma_iommu = iommu_is_dma_domain(domain);
drivers/iommu/dma-iommu.c
2119
if (dev->dma_iommu && iommu_dma_init_domain(domain, dev))
drivers/iommu/dma-iommu.c
2129
static bool has_msi_cookie(const struct iommu_domain *domain)
drivers/iommu/dma-iommu.c
2131
return domain && (domain->cookie_type == IOMMU_COOKIE_DMA_IOVA ||
drivers/iommu/dma-iommu.c
2132
domain->cookie_type == IOMMU_COOKIE_DMA_MSI);
drivers/iommu/dma-iommu.c
2135
static size_t cookie_msi_granule(const struct iommu_domain *domain)
drivers/iommu/dma-iommu.c
2137
switch (domain->cookie_type) {
drivers/iommu/dma-iommu.c
2139
return domain->iova_cookie->iovad.granule;
drivers/iommu/dma-iommu.c
2147
static struct list_head *cookie_msi_pages(const struct iommu_domain *domain)
drivers/iommu/dma-iommu.c
2149
switch (domain->cookie_type) {
drivers/iommu/dma-iommu.c
2151
return &domain->iova_cookie->msi_page_list;
drivers/iommu/dma-iommu.c
2153
return &domain->msi_cookie->msi_page_list;
drivers/iommu/dma-iommu.c
2160
phys_addr_t msi_addr, struct iommu_domain *domain)
drivers/iommu/dma-iommu.c
2162
struct list_head *msi_page_list = cookie_msi_pages(domain);
drivers/iommu/dma-iommu.c
2166
size_t size = cookie_msi_granule(domain);
drivers/iommu/dma-iommu.c
2177
iova = iommu_dma_alloc_iova(domain, size, dma_get_mask(dev), dev);
drivers/iommu/dma-iommu.c
2181
if (iommu_map(domain, iova, msi_addr, size, prot, GFP_KERNEL))
drivers/iommu/dma-iommu.c
2191
iommu_dma_free_iova(domain, iova, size, NULL);
drivers/iommu/dma-iommu.c
2197
int iommu_dma_sw_msi(struct iommu_domain *domain, struct msi_desc *desc,
drivers/iommu/dma-iommu.c
2203
if (!has_msi_cookie(domain)) {
drivers/iommu/dma-iommu.c
2209
msi_page = iommu_dma_get_msi_page(dev, msi_addr, domain);
drivers/iommu/dma-iommu.c
2214
ilog2(cookie_msi_granule(domain)));
drivers/iommu/dma-iommu.c
332
int iommu_dma_init_fq(struct iommu_domain *domain)
drivers/iommu/dma-iommu.c
334
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
360
WRITE_ONCE(cookie->fq_domain, domain);
drivers/iommu/dma-iommu.c
368
int iommu_get_dma_cookie(struct iommu_domain *domain)
drivers/iommu/dma-iommu.c
372
if (domain->cookie_type != IOMMU_COOKIE_NONE)
drivers/iommu/dma-iommu.c
380
domain->cookie_type = IOMMU_COOKIE_DMA_IOVA;
drivers/iommu/dma-iommu.c
381
domain->iova_cookie = cookie;
drivers/iommu/dma-iommu.c
397
int iommu_get_msi_cookie(struct iommu_domain *domain, dma_addr_t base)
drivers/iommu/dma-iommu.c
401
if (domain->type != IOMMU_DOMAIN_UNMANAGED)
drivers/iommu/dma-iommu.c
404
if (domain->cookie_type != IOMMU_COOKIE_NONE)
drivers/iommu/dma-iommu.c
413
domain->cookie_type = IOMMU_COOKIE_DMA_MSI;
drivers/iommu/dma-iommu.c
414
domain->msi_cookie = cookie;
drivers/iommu/dma-iommu.c
423
void iommu_put_dma_cookie(struct iommu_domain *domain)
drivers/iommu/dma-iommu.c
425
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
441
void iommu_put_msi_cookie(struct iommu_domain *domain)
drivers/iommu/dma-iommu.c
443
struct iommu_dma_msi_cookie *cookie = domain->msi_cookie;
drivers/iommu/dma-iommu.c
553
struct iommu_domain *domain)
drivers/iommu/dma-iommu.c
555
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
660
static int iommu_dma_init_domain(struct iommu_domain *domain, struct device *dev)
drivers/iommu/dma-iommu.c
662
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
668
if (!cookie || domain->cookie_type != IOMMU_COOKIE_DMA_IOVA)
drivers/iommu/dma-iommu.c
674
order = __ffs(domain->pgsize_bitmap);
drivers/iommu/dma-iommu.c
679
if (dma_range_map_min(map) > domain->geometry.aperture_end ||
drivers/iommu/dma-iommu.c
680
dma_range_map_max(map) < domain->geometry.aperture_start) {
drivers/iommu/dma-iommu.c
687
domain->geometry.aperture_start >> order);
drivers/iommu/dma-iommu.c
708
if (domain->type == IOMMU_DOMAIN_DMA_FQ &&
drivers/iommu/dma-iommu.c
709
(!device_iommu_capable(dev, IOMMU_CAP_DEFERRED_FLUSH) || iommu_dma_init_fq(domain)))
drivers/iommu/dma-iommu.c
710
domain->type = IOMMU_DOMAIN_DMA;
drivers/iommu/dma-iommu.c
712
return iova_reserve_iommu_regions(dev, domain);
drivers/iommu/dma-iommu.c
749
static dma_addr_t iommu_dma_alloc_iova(struct iommu_domain *domain,
drivers/iommu/dma-iommu.c
752
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
756
if (domain->cookie_type == IOMMU_COOKIE_DMA_MSI) {
drivers/iommu/dma-iommu.c
757
domain->msi_cookie->msi_iova += size;
drivers/iommu/dma-iommu.c
758
return domain->msi_cookie->msi_iova - size;
drivers/iommu/dma-iommu.c
766
if (domain->geometry.force_aperture)
drivers/iommu/dma-iommu.c
767
dma_limit = min(dma_limit, (u64)domain->geometry.aperture_end);
drivers/iommu/dma-iommu.c
795
static void iommu_dma_free_iova(struct iommu_domain *domain, dma_addr_t iova,
drivers/iommu/dma-iommu.c
798
struct iova_domain *iovad = &domain->iova_cookie->iovad;
drivers/iommu/dma-iommu.c
801
if (domain->cookie_type == IOMMU_COOKIE_DMA_MSI)
drivers/iommu/dma-iommu.c
802
domain->msi_cookie->msi_iova -= size;
drivers/iommu/dma-iommu.c
804
queue_iova(domain->iova_cookie, iova_pfn(iovad, iova),
drivers/iommu/dma-iommu.c
815
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
816
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
827
unmapped = iommu_unmap_fast(domain, dma_addr, size, &iotlb_gather);
drivers/iommu/dma-iommu.c
831
iommu_iotlb_sync(domain, &iotlb_gather);
drivers/iommu/dma-iommu.c
832
iommu_dma_free_iova(domain, dma_addr, size, &iotlb_gather);
drivers/iommu/dma-iommu.c
838
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
839
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
845
iommu_deferred_attach(dev, domain))
drivers/iommu/dma-iommu.c
855
iova = iommu_dma_alloc_iova(domain, size, dma_mask, dev);
drivers/iommu/dma-iommu.c
859
if (iommu_map(domain, iova, phys - iova_off, size, prot, GFP_ATOMIC)) {
drivers/iommu/dma-iommu.c
860
iommu_dma_free_iova(domain, iova, size, NULL);
drivers/iommu/dma-iommu.c
932
struct iommu_domain *domain = iommu_get_dma_domain(dev);
drivers/iommu/dma-iommu.c
933
struct iommu_dma_cookie *cookie = domain->iova_cookie;
drivers/iommu/dma-iommu.c
937
unsigned int count, min_size, alloc_sizes = domain->pgsize_bitmap;
drivers/iommu/dma-iommu.c
943
iommu_deferred_attach(dev, domain))
drivers/iommu/dma-iommu.c
963
iova = iommu_dma_alloc_iova(domain, size, dev->coherent_dma_mask, dev);
drivers/iommu/dma-iommu.c
985
ret = iommu_map_sg(domain, iova, sgt->sgl, sgt->orig_nents, ioprot,
drivers/iommu/dma-iommu.c
997
iommu_dma_free_iova(domain, iova, size, NULL);
drivers/iommu/dma-iommu.h
12
void iommu_setup_dma_ops(struct device *dev, struct iommu_domain *domain);
drivers/iommu/dma-iommu.h
14
int iommu_get_dma_cookie(struct iommu_domain *domain);
drivers/iommu/dma-iommu.h
15
void iommu_put_dma_cookie(struct iommu_domain *domain);
drivers/iommu/dma-iommu.h
16
void iommu_put_msi_cookie(struct iommu_domain *domain);
drivers/iommu/dma-iommu.h
18
int iommu_dma_init_fq(struct iommu_domain *domain);
drivers/iommu/dma-iommu.h
22
int iommu_dma_sw_msi(struct iommu_domain *domain, struct msi_desc *desc,
drivers/iommu/dma-iommu.h
30
struct iommu_domain *domain)
drivers/iommu/dma-iommu.h
34
static inline int iommu_dma_init_fq(struct iommu_domain *domain)
drivers/iommu/dma-iommu.h
39
static inline int iommu_get_dma_cookie(struct iommu_domain *domain)
drivers/iommu/dma-iommu.h
44
static inline void iommu_put_dma_cookie(struct iommu_domain *domain)
drivers/iommu/dma-iommu.h
48
static inline void iommu_put_msi_cookie(struct iommu_domain *domain)
drivers/iommu/dma-iommu.h
56
static inline int iommu_dma_sw_msi(struct iommu_domain *domain,
drivers/iommu/exynos-iommu.c
1000
pagetable = virt_to_phys(domain->pgtable);
drivers/iommu/exynos-iommu.c
1011
spin_lock_irqsave(&domain->lock, flags);
drivers/iommu/exynos-iommu.c
1012
list_for_each_entry_safe(data, next, &domain->clients, domain_node) {
drivers/iommu/exynos-iommu.c
1015
data->domain = NULL;
drivers/iommu/exynos-iommu.c
1019
owner->domain = identity_domain;
drivers/iommu/exynos-iommu.c
1020
spin_unlock_irqrestore(&domain->lock, flags);
drivers/iommu/exynos-iommu.c
1042
struct exynos_iommu_domain *domain = to_exynos_domain(iommu_domain);
drivers/iommu/exynos-iommu.c
1045
phys_addr_t pagetable = virt_to_phys(domain->pgtable);
drivers/iommu/exynos-iommu.c
1055
spin_lock_irqsave(&domain->lock, flags);
drivers/iommu/exynos-iommu.c
1059
data->domain = domain;
drivers/iommu/exynos-iommu.c
1060
list_add_tail(&data->domain_node, &domain->clients);
drivers/iommu/exynos-iommu.c
1063
owner->domain = iommu_domain;
drivers/iommu/exynos-iommu.c
1064
spin_unlock_irqrestore(&domain->lock, flags);
drivers/iommu/exynos-iommu.c
1081
static sysmmu_pte_t *alloc_lv2entry(struct exynos_iommu_domain *domain,
drivers/iommu/exynos-iommu.c
1129
spin_lock(&domain->lock);
drivers/iommu/exynos-iommu.c
1130
list_for_each_entry(data, &domain->clients, domain_node)
drivers/iommu/exynos-iommu.c
1132
spin_unlock(&domain->lock);
drivers/iommu/exynos-iommu.c
1139
static int lv1set_section(struct exynos_iommu_domain *domain,
drivers/iommu/exynos-iommu.c
1162
spin_lock(&domain->lock);
drivers/iommu/exynos-iommu.c
1169
list_for_each_entry(data, &domain->clients, domain_node)
drivers/iommu/exynos-iommu.c
1172
spin_unlock(&domain->lock);
drivers/iommu/exynos-iommu.c
1241
struct exynos_iommu_domain *domain = to_exynos_domain(iommu_domain);
drivers/iommu/exynos-iommu.c
1247
BUG_ON(domain->pgtable == NULL);
drivers/iommu/exynos-iommu.c
1250
spin_lock_irqsave(&domain->pgtablelock, flags);
drivers/iommu/exynos-iommu.c
1252
entry = section_entry(domain->pgtable, iova);
drivers/iommu/exynos-iommu.c
1255
ret = lv1set_section(domain, entry, iova, paddr, prot,
drivers/iommu/exynos-iommu.c
1256
&domain->lv2entcnt[lv1ent_offset(iova)]);
drivers/iommu/exynos-iommu.c
1260
pent = alloc_lv2entry(domain, entry, iova,
drivers/iommu/exynos-iommu.c
1261
&domain->lv2entcnt[lv1ent_offset(iova)]);
drivers/iommu/exynos-iommu.c
1267
&domain->lv2entcnt[lv1ent_offset(iova)]);
drivers/iommu/exynos-iommu.c
1276
spin_unlock_irqrestore(&domain->pgtablelock, flags);
drivers/iommu/exynos-iommu.c
1281
static void exynos_iommu_tlb_invalidate_entry(struct exynos_iommu_domain *domain,
drivers/iommu/exynos-iommu.c
1287
spin_lock_irqsave(&domain->lock, flags);
drivers/iommu/exynos-iommu.c
1289
list_for_each_entry(data, &domain->clients, domain_node)
drivers/iommu/exynos-iommu.c
1292
spin_unlock_irqrestore(&domain->lock, flags);
drivers/iommu/exynos-iommu.c
1299
struct exynos_iommu_domain *domain = to_exynos_domain(iommu_domain);
drivers/iommu/exynos-iommu.c
1305
BUG_ON(domain->pgtable == NULL);
drivers/iommu/exynos-iommu.c
1307
spin_lock_irqsave(&domain->pgtablelock, flags);
drivers/iommu/exynos-iommu.c
1309
ent = section_entry(domain->pgtable, iova);
drivers/iommu/exynos-iommu.c
1341
domain->lv2entcnt[lv1ent_offset(iova)] += 1;
drivers/iommu/exynos-iommu.c
1359
domain->lv2entcnt[lv1ent_offset(iova)] += SPAGES_PER_LPAGE;
drivers/iommu/exynos-iommu.c
1361
spin_unlock_irqrestore(&domain->pgtablelock, flags);
drivers/iommu/exynos-iommu.c
1363
exynos_iommu_tlb_invalidate_entry(domain, iova, size);
drivers/iommu/exynos-iommu.c
1367
spin_unlock_irqrestore(&domain->pgtablelock, flags);
drivers/iommu/exynos-iommu.c
1378
struct exynos_iommu_domain *domain = to_exynos_domain(iommu_domain);
drivers/iommu/exynos-iommu.c
1383
spin_lock_irqsave(&domain->pgtablelock, flags);
drivers/iommu/exynos-iommu.c
1385
entry = section_entry(domain->pgtable, iova);
drivers/iommu/exynos-iommu.c
1398
spin_unlock_irqrestore(&domain->pgtablelock, flags);
drivers/iommu/exynos-iommu.c
1460
owner->domain = &exynos_identity_domain;
drivers/iommu/exynos-iommu.c
239
struct iommu_domain *domain; /* domain this device is attached */
drivers/iommu/exynos-iommu.c
255
struct iommu_domain domain; /* generic domain data structure */
drivers/iommu/exynos-iommu.c
298
struct exynos_iommu_domain *domain; /* domain we belong to */
drivers/iommu/exynos-iommu.c
425
return container_of(dom, struct exynos_iommu_domain, domain);
drivers/iommu/exynos-iommu.c
587
if (data->domain) {
drivers/iommu/exynos-iommu.c
588
ret = report_iommu_fault(&data->domain->domain, data->master,
drivers/iommu/exynos-iommu.c
836
if (data->domain) {
drivers/iommu/exynos-iommu.c
854
if (data->domain) {
drivers/iommu/exynos-iommu.c
895
struct exynos_iommu_domain *domain;
drivers/iommu/exynos-iommu.c
902
domain = kzalloc_obj(*domain);
drivers/iommu/exynos-iommu.c
903
if (!domain)
drivers/iommu/exynos-iommu.c
906
domain->pgtable = iommu_alloc_pages_sz(GFP_KERNEL, SZ_16K);
drivers/iommu/exynos-iommu.c
907
if (!domain->pgtable)
drivers/iommu/exynos-iommu.c
910
domain->lv2entcnt = iommu_alloc_pages_sz(GFP_KERNEL, SZ_8K);
drivers/iommu/exynos-iommu.c
911
if (!domain->lv2entcnt)
drivers/iommu/exynos-iommu.c
916
domain->pgtable[i] = ZERO_LV2LINK;
drivers/iommu/exynos-iommu.c
918
handle = dma_map_single(dma_dev, domain->pgtable, LV1TABLE_SIZE,
drivers/iommu/exynos-iommu.c
921
BUG_ON(handle != virt_to_phys(domain->pgtable));
drivers/iommu/exynos-iommu.c
925
spin_lock_init(&domain->lock);
drivers/iommu/exynos-iommu.c
926
spin_lock_init(&domain->pgtablelock);
drivers/iommu/exynos-iommu.c
927
INIT_LIST_HEAD(&domain->clients);
drivers/iommu/exynos-iommu.c
929
domain->domain.pgsize_bitmap = SECT_SIZE | LPAGE_SIZE | SPAGE_SIZE;
drivers/iommu/exynos-iommu.c
931
domain->domain.geometry.aperture_start = 0;
drivers/iommu/exynos-iommu.c
932
domain->domain.geometry.aperture_end = ~0UL;
drivers/iommu/exynos-iommu.c
933
domain->domain.geometry.force_aperture = true;
drivers/iommu/exynos-iommu.c
935
return &domain->domain;
drivers/iommu/exynos-iommu.c
938
iommu_free_pages(domain->lv2entcnt);
drivers/iommu/exynos-iommu.c
940
iommu_free_pages(domain->pgtable);
drivers/iommu/exynos-iommu.c
942
kfree(domain);
drivers/iommu/exynos-iommu.c
948
struct exynos_iommu_domain *domain = to_exynos_domain(iommu_domain);
drivers/iommu/exynos-iommu.c
953
WARN_ON(!list_empty(&domain->clients));
drivers/iommu/exynos-iommu.c
955
spin_lock_irqsave(&domain->lock, flags);
drivers/iommu/exynos-iommu.c
957
list_for_each_entry_safe(data, next, &domain->clients, domain_node) {
drivers/iommu/exynos-iommu.c
961
data->domain = NULL;
drivers/iommu/exynos-iommu.c
966
spin_unlock_irqrestore(&domain->lock, flags);
drivers/iommu/exynos-iommu.c
968
dma_unmap_single(dma_dev, virt_to_phys(domain->pgtable), LV1TABLE_SIZE,
drivers/iommu/exynos-iommu.c
972
if (lv1ent_page(domain->pgtable + i)) {
drivers/iommu/exynos-iommu.c
973
phys_addr_t base = lv2table_base(domain->pgtable + i);
drivers/iommu/exynos-iommu.c
981
iommu_free_pages(domain->pgtable);
drivers/iommu/exynos-iommu.c
982
iommu_free_pages(domain->lv2entcnt);
drivers/iommu/exynos-iommu.c
983
kfree(domain);
drivers/iommu/exynos-iommu.c
991
struct exynos_iommu_domain *domain;
drivers/iommu/exynos-iommu.c
996
if (owner->domain == identity_domain)
drivers/iommu/exynos-iommu.c
999
domain = to_exynos_domain(owner->domain);
drivers/iommu/fsl_pamu_domain.c
149
if (old_domain_info && old_domain_info->domain != dma_domain) {
drivers/iommu/fsl_pamu_domain.c
151
detach_device(dev, old_domain_info->domain);
drivers/iommu/fsl_pamu_domain.c
159
info->domain = dma_domain;
drivers/iommu/fsl_pamu_domain.c
172
static phys_addr_t fsl_pamu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/fsl_pamu_domain.c
175
if (iova < domain->geometry.aperture_start ||
drivers/iommu/fsl_pamu_domain.c
176
iova > domain->geometry.aperture_end)
drivers/iommu/fsl_pamu_domain.c
186
static void fsl_pamu_domain_free(struct iommu_domain *domain)
drivers/iommu/fsl_pamu_domain.c
188
struct fsl_dma_domain *dma_domain = to_fsl_dma_domain(domain);
drivers/iommu/fsl_pamu_domain.c
240
static int fsl_pamu_attach_device(struct iommu_domain *domain,
drivers/iommu/fsl_pamu_domain.c
243
struct fsl_dma_domain *dma_domain = to_fsl_dma_domain(domain);
drivers/iommu/fsl_pamu_domain.c
353
int fsl_pamu_configure_l1_stash(struct iommu_domain *domain, u32 cpu)
drivers/iommu/fsl_pamu_domain.c
355
struct fsl_dma_domain *dma_domain = to_fsl_dma_domain(domain);
drivers/iommu/fsl_pamu_domain.h
25
struct fsl_dma_domain *domain; /* pointer to domain */
drivers/iommu/generic_pt/iommu_pt.h
1042
size_t DOMAIN_NS(unmap_pages)(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/generic_pt/iommu_pt.h
1047
container_of(domain, struct pt_iommu, domain);
drivers/iommu/generic_pt/iommu_pt.h
1165
struct iommu_domain *domain)
drivers/iommu/generic_pt/iommu_pt.h
1173
domain->type = __IOMMU_DOMAIN_PAGING;
drivers/iommu/generic_pt/iommu_pt.h
1174
domain->pgsize_bitmap = info.pgsize_bitmap;
drivers/iommu/generic_pt/iommu_pt.h
1183
domain->geometry.aperture_start = (unsigned long)range.va;
drivers/iommu/generic_pt/iommu_pt.h
1184
if ((pt_vaddr_t)domain->geometry.aperture_start != range.va)
drivers/iommu/generic_pt/iommu_pt.h
1194
domain->geometry.aperture_end = (unsigned long)range.last_va;
drivers/iommu/generic_pt/iommu_pt.h
1195
if ((pt_vaddr_t)domain->geometry.aperture_end != range.last_va) {
drivers/iommu/generic_pt/iommu_pt.h
1196
domain->geometry.aperture_end = ULONG_MAX;
drivers/iommu/generic_pt/iommu_pt.h
1197
domain->pgsize_bitmap &= ULONG_MAX;
drivers/iommu/generic_pt/iommu_pt.h
1199
domain->geometry.force_aperture = true;
drivers/iommu/generic_pt/iommu_pt.h
1209
static_assert(offsetof(struct pt_iommu_table, iommu.domain) == 0);
drivers/iommu/generic_pt/iommu_pt.h
1210
memset_after(fmt_table, 0, iommu.domain);
drivers/iommu/generic_pt/iommu_pt.h
1263
ret = pt_iommu_init_domain(iommu_table, &iommu_table->domain);
drivers/iommu/generic_pt/iommu_pt.h
133
iommu_table->domain.pgsize_bitmap,
drivers/iommu/generic_pt/iommu_pt.h
170
phys_addr_t DOMAIN_NS(iova_to_phys)(struct iommu_domain *domain,
drivers/iommu/generic_pt/iommu_pt.h
174
container_of(domain, struct pt_iommu, domain);
drivers/iommu/generic_pt/iommu_pt.h
269
int DOMAIN_NS(read_and_clear_dirty)(struct iommu_domain *domain,
drivers/iommu/generic_pt/iommu_pt.h
275
container_of(domain, struct pt_iommu, domain);
drivers/iommu/generic_pt/iommu_pt.h
56
iommu_iotlb_sync(&iommu_table->domain, iotlb_gather);
drivers/iommu/generic_pt/iommu_pt.h
843
int DOMAIN_NS(map_pages)(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/generic_pt/iommu_pt.h
848
container_of(domain, struct pt_iommu, domain);
drivers/iommu/generic_pt/iommu_pt.h
849
pt_vaddr_t pgsize_bitmap = iommu_table->domain.pgsize_bitmap;
drivers/iommu/generic_pt/iommu_pt.h
914
iommu_iotlb_sync(&iommu_table->domain, &iotlb_gather);
drivers/iommu/generic_pt/kunit_generic_pt.h
19
ret = iommu_map(&priv->domain, va, pa, len, IOMMU_READ | IOMMU_WRITE,
drivers/iommu/generic_pt/kunit_iommu.h
143
priv->domain.ops = &kunit_pt_ops;
drivers/iommu/generic_pt/kunit_iommu.h
60
struct iommu_domain domain;
drivers/iommu/generic_pt/kunit_iommu.h
77
PT_IOMMU_CHECK_DOMAIN(struct kunit_iommu_priv, fmt_table.iommu, domain);
drivers/iommu/generic_pt/kunit_iommu.h
79
static void pt_kunit_iotlb_sync(struct iommu_domain *domain,
drivers/iommu/generic_pt/kunit_iommu_pt.h
266
gnmapped = iommu_unmap(&priv->domain, vaddr, base_len);
drivers/iommu/generic_pt/kunit_iommu_pt.h
272
gnmapped = iommu_unmap(&priv->domain, vaddr, base_len);
drivers/iommu/generic_pt/kunit_iommu_pt.h
274
gnmapped = iommu_unmap(&priv->domain, vaddr + next_len,
drivers/iommu/generic_pt/kunit_iommu_pt.h
371
ret = iommu_map(&priv->domain, start, paddr, end - start,
drivers/iommu/generic_pt/kunit_iommu_pt.h
82
ret = iommu_unmap(&priv->domain, va, len);
drivers/iommu/generic_pt/kunit_iommu_pt.h
94
phys_addr_t res = iommu_iova_to_phys(&priv->domain,
drivers/iommu/hyperv-iommu.c
100
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/iommu/hyperv-iommu.c
263
static int hyperv_root_irq_remapping_alloc(struct irq_domain *domain,
drivers/iommu/hyperv-iommu.c
275
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, arg);
drivers/iommu/hyperv-iommu.c
281
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/iommu/hyperv-iommu.c
285
irq_data = irq_domain_get_irq_data(domain, virq);
drivers/iommu/hyperv-iommu.c
288
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/iommu/hyperv-iommu.c
301
static void hyperv_root_irq_remapping_free(struct irq_domain *domain,
drivers/iommu/hyperv-iommu.c
310
irq_data = irq_domain_get_irq_data(domain, virq + i);
drivers/iommu/hyperv-iommu.c
325
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/iommu/hyperv-iommu.c
65
static int hyperv_irq_remapping_alloc(struct irq_domain *domain,
drivers/iommu/hyperv-iommu.c
76
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, arg);
drivers/iommu/hyperv-iommu.c
80
irq_data = irq_domain_get_irq_data(domain, virq);
drivers/iommu/hyperv-iommu.c
82
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/iommu/hyperv-iommu.c
97
static void hyperv_irq_remapping_free(struct irq_domain *domain,
drivers/iommu/intel/cache.c
102
spin_lock_irqsave(&domain->cache_lock, flags);
drivers/iommu/intel/cache.c
103
list_for_each_entry(tag, &domain->cache_tags, node) {
drivers/iommu/intel/cache.c
113
spin_unlock_irqrestore(&domain->cache_lock, flags);
drivers/iommu/intel/cache.c
117
static int domain_qi_batch_alloc(struct dmar_domain *domain)
drivers/iommu/intel/cache.c
122
spin_lock_irqsave(&domain->cache_lock, flags);
drivers/iommu/intel/cache.c
123
if (domain->qi_batch)
drivers/iommu/intel/cache.c
126
domain->qi_batch = kzalloc_obj(*domain->qi_batch, GFP_ATOMIC);
drivers/iommu/intel/cache.c
127
if (!domain->qi_batch)
drivers/iommu/intel/cache.c
130
spin_unlock_irqrestore(&domain->cache_lock, flags);
drivers/iommu/intel/cache.c
135
static int __cache_tag_assign_domain(struct dmar_domain *domain, u16 did,
drivers/iommu/intel/cache.c
141
ret = domain_qi_batch_alloc(domain);
drivers/iommu/intel/cache.c
145
ret = cache_tag_assign(domain, did, dev, pasid, CACHE_TAG_IOTLB);
drivers/iommu/intel/cache.c
149
ret = cache_tag_assign(domain, did, dev, pasid, CACHE_TAG_DEVTLB);
drivers/iommu/intel/cache.c
151
cache_tag_unassign(domain, did, dev, pasid, CACHE_TAG_IOTLB);
drivers/iommu/intel/cache.c
156
static void __cache_tag_unassign_domain(struct dmar_domain *domain, u16 did,
drivers/iommu/intel/cache.c
161
cache_tag_unassign(domain, did, dev, pasid, CACHE_TAG_IOTLB);
drivers/iommu/intel/cache.c
164
cache_tag_unassign(domain, did, dev, pasid, CACHE_TAG_DEVTLB);
drivers/iommu/intel/cache.c
167
static int __cache_tag_assign_parent_domain(struct dmar_domain *domain, u16 did,
drivers/iommu/intel/cache.c
173
ret = domain_qi_batch_alloc(domain);
drivers/iommu/intel/cache.c
177
ret = cache_tag_assign(domain, did, dev, pasid, CACHE_TAG_NESTING_IOTLB);
drivers/iommu/intel/cache.c
181
ret = cache_tag_assign(domain, did, dev, pasid, CACHE_TAG_NESTING_DEVTLB);
drivers/iommu/intel/cache.c
183
cache_tag_unassign(domain, did, dev, pasid, CACHE_TAG_NESTING_IOTLB);
drivers/iommu/intel/cache.c
188
static void __cache_tag_unassign_parent_domain(struct dmar_domain *domain, u16 did,
drivers/iommu/intel/cache.c
193
cache_tag_unassign(domain, did, dev, pasid, CACHE_TAG_NESTING_IOTLB);
drivers/iommu/intel/cache.c
196
cache_tag_unassign(domain, did, dev, pasid, CACHE_TAG_NESTING_DEVTLB);
drivers/iommu/intel/cache.c
199
static u16 domain_get_id_for_dev(struct dmar_domain *domain, struct device *dev)
drivers/iommu/intel/cache.c
208
if (domain->domain.type == IOMMU_DOMAIN_SVA)
drivers/iommu/intel/cache.c
211
return domain_id_iommu(domain, iommu);
drivers/iommu/intel/cache.c
222
int cache_tag_assign_domain(struct dmar_domain *domain,
drivers/iommu/intel/cache.c
225
u16 did = domain_get_id_for_dev(domain, dev);
drivers/iommu/intel/cache.c
228
ret = __cache_tag_assign_domain(domain, did, dev, pasid);
drivers/iommu/intel/cache.c
229
if (ret || domain->domain.type != IOMMU_DOMAIN_NESTED)
drivers/iommu/intel/cache.c
232
ret = __cache_tag_assign_parent_domain(domain->s2_domain, did, dev, pasid);
drivers/iommu/intel/cache.c
234
__cache_tag_unassign_domain(domain, did, dev, pasid);
drivers/iommu/intel/cache.c
246
void cache_tag_unassign_domain(struct dmar_domain *domain,
drivers/iommu/intel/cache.c
249
u16 did = domain_get_id_for_dev(domain, dev);
drivers/iommu/intel/cache.c
251
__cache_tag_unassign_domain(domain, did, dev, pasid);
drivers/iommu/intel/cache.c
252
if (domain->domain.type == IOMMU_DOMAIN_NESTED)
drivers/iommu/intel/cache.c
253
__cache_tag_unassign_parent_domain(domain->s2_domain, did, dev, pasid);
drivers/iommu/intel/cache.c
366
static bool intel_domain_use_piotlb(struct dmar_domain *domain)
drivers/iommu/intel/cache.c
368
return domain->domain.type == IOMMU_DOMAIN_SVA ||
drivers/iommu/intel/cache.c
369
domain->domain.type == IOMMU_DOMAIN_NESTED ||
drivers/iommu/intel/cache.c
370
intel_domain_is_fs_paging(domain);
drivers/iommu/intel/cache.c
373
static void cache_tag_flush_iotlb(struct dmar_domain *domain, struct cache_tag *tag,
drivers/iommu/intel/cache.c
380
if (intel_domain_use_piotlb(domain)) {
drivers/iommu/intel/cache.c
382
pages, ih, domain->qi_batch);
drivers/iommu/intel/cache.c
400
domain->qi_batch);
drivers/iommu/intel/cache.c
405
static void cache_tag_flush_devtlb_psi(struct dmar_domain *domain, struct cache_tag *tag,
drivers/iommu/intel/cache.c
417
addr, mask, domain->qi_batch);
drivers/iommu/intel/cache.c
420
addr, mask, domain->qi_batch);
drivers/iommu/intel/cache.c
425
info->ats_qdep, addr, mask, domain->qi_batch);
drivers/iommu/intel/cache.c
429
domain->qi_batch);
drivers/iommu/intel/cache.c
43
int cache_tag_assign(struct dmar_domain *domain, u16 did, struct device *dev,
drivers/iommu/intel/cache.c
436
void cache_tag_flush_range(struct dmar_domain *domain, unsigned long start,
drivers/iommu/intel/cache.c
452
spin_lock_irqsave(&domain->cache_lock, flags);
drivers/iommu/intel/cache.c
453
list_for_each_entry(tag, &domain->cache_tags, node) {
drivers/iommu/intel/cache.c
455
qi_batch_flush_descs(iommu, domain->qi_batch);
drivers/iommu/intel/cache.c
461
cache_tag_flush_iotlb(domain, tag, addr, pages, mask, ih);
drivers/iommu/intel/cache.c
475
cache_tag_flush_devtlb_psi(domain, tag, addr, mask);
drivers/iommu/intel/cache.c
481
qi_batch_flush_descs(iommu, domain->qi_batch);
drivers/iommu/intel/cache.c
482
spin_unlock_irqrestore(&domain->cache_lock, flags);
drivers/iommu/intel/cache.c
489
void cache_tag_flush_all(struct dmar_domain *domain)
drivers/iommu/intel/cache.c
491
cache_tag_flush_range(domain, 0, ULONG_MAX, 0);
drivers/iommu/intel/cache.c
505
void cache_tag_flush_range_np(struct dmar_domain *domain, unsigned long start,
drivers/iommu/intel/cache.c
515
spin_lock_irqsave(&domain->cache_lock, flags);
drivers/iommu/intel/cache.c
516
list_for_each_entry(tag, &domain->cache_tags, node) {
drivers/iommu/intel/cache.c
518
qi_batch_flush_descs(iommu, domain->qi_batch);
drivers/iommu/intel/cache.c
522
intel_domain_is_fs_paging(domain)) {
drivers/iommu/intel/cache.c
529
cache_tag_flush_iotlb(domain, tag, addr, pages, mask, 0);
drivers/iommu/intel/cache.c
533
qi_batch_flush_descs(iommu, domain->qi_batch);
drivers/iommu/intel/cache.c
534
spin_unlock_irqrestore(&domain->cache_lock, flags);
drivers/iommu/intel/cache.c
67
spin_lock_irqsave(&domain->cache_lock, flags);
drivers/iommu/intel/cache.c
68
prev = &domain->cache_tags;
drivers/iommu/intel/cache.c
69
list_for_each_entry(temp, &domain->cache_tags, node) {
drivers/iommu/intel/cache.c
72
spin_unlock_irqrestore(&domain->cache_lock, flags);
drivers/iommu/intel/cache.c
86
spin_unlock_irqrestore(&domain->cache_lock, flags);
drivers/iommu/intel/cache.c
93
static void cache_tag_unassign(struct dmar_domain *domain, u16 did,
drivers/iommu/intel/iommu.c
1025
int domain_attach_iommu(struct dmar_domain *domain, struct intel_iommu *iommu)
drivers/iommu/intel/iommu.c
1030
if (domain->domain.type == IOMMU_DOMAIN_SVA)
drivers/iommu/intel/iommu.c
1038
curr = xa_load(&domain->iommu_array, iommu->seq_id);
drivers/iommu/intel/iommu.c
1055
curr = xa_cmpxchg(&domain->iommu_array, iommu->seq_id,
drivers/iommu/intel/iommu.c
1071
void domain_detach_iommu(struct dmar_domain *domain, struct intel_iommu *iommu)
drivers/iommu/intel/iommu.c
1075
if (domain->domain.type == IOMMU_DOMAIN_SVA)
drivers/iommu/intel/iommu.c
1079
info = xa_load(&domain->iommu_array, iommu->seq_id);
drivers/iommu/intel/iommu.c
1082
xa_erase(&domain->iommu_array, iommu->seq_id);
drivers/iommu/intel/iommu.c
1142
static int domain_context_mapping_one(struct dmar_domain *domain,
drivers/iommu/intel/iommu.c
1147
domain_lookup_dev_info(domain, iommu, bus, devfn);
drivers/iommu/intel/iommu.c
1148
u16 did = domain_id_iommu(domain, iommu);
drivers/iommu/intel/iommu.c
1154
if (WARN_ON(!intel_domain_is_ss_paging(domain)))
drivers/iommu/intel/iommu.c
1157
pt_iommu_vtdss_hw_info(&domain->sspt, &pt_info);
drivers/iommu/intel/iommu.c
1202
struct dmar_domain *domain = opaque;
drivers/iommu/intel/iommu.c
1204
return domain_context_mapping_one(domain, iommu,
drivers/iommu/intel/iommu.c
1209
domain_context_mapping(struct dmar_domain *domain, struct device *dev)
drivers/iommu/intel/iommu.c
1217
return domain_context_mapping_one(domain, iommu, bus, devfn);
drivers/iommu/intel/iommu.c
1220
domain_context_mapping_cb, domain);
drivers/iommu/intel/iommu.c
1262
struct dmar_domain *domain,
drivers/iommu/intel/iommu.c
1269
return intel_pasid_setup_second_level(iommu, domain, dev, pasid);
drivers/iommu/intel/iommu.c
1283
struct dmar_domain *domain,
drivers/iommu/intel/iommu.c
1290
pt_iommu_x86_64_hw_info(&domain->fspt, &pt_info);
drivers/iommu/intel/iommu.c
1297
if (domain->force_snooping)
drivers/iommu/intel/iommu.c
1300
if (!(domain->fspt.x86_64_pt.common.features &
drivers/iommu/intel/iommu.c
1305
domain_id_iommu(domain, iommu),
drivers/iommu/intel/iommu.c
1309
static int dmar_domain_attach_device(struct dmar_domain *domain,
drivers/iommu/intel/iommu.c
1317
ret = domain_attach_iommu(domain, iommu);
drivers/iommu/intel/iommu.c
1321
info->domain = domain;
drivers/iommu/intel/iommu.c
1323
spin_lock_irqsave(&domain->lock, flags);
drivers/iommu/intel/iommu.c
1324
list_add(&info->link, &domain->devices);
drivers/iommu/intel/iommu.c
1325
spin_unlock_irqrestore(&domain->lock, flags);
drivers/iommu/intel/iommu.c
1331
ret = domain_context_mapping(domain, dev);
drivers/iommu/intel/iommu.c
1332
else if (intel_domain_is_fs_paging(domain))
drivers/iommu/intel/iommu.c
1333
ret = domain_setup_first_level(iommu, domain, dev,
drivers/iommu/intel/iommu.c
1335
else if (intel_domain_is_ss_paging(domain))
drivers/iommu/intel/iommu.c
1336
ret = domain_setup_second_level(iommu, domain, dev,
drivers/iommu/intel/iommu.c
1344
ret = cache_tag_assign_domain(domain, dev, IOMMU_NO_PASID);
drivers/iommu/intel/iommu.c
197
static void intel_iommu_domain_free(struct iommu_domain *domain);
drivers/iommu/intel/iommu.c
2730
if (info->domain)
drivers/iommu/intel/iommu.c
2731
cache_tag_unassign_domain(info->domain, dev, IOMMU_NO_PASID);
drivers/iommu/intel/iommu.c
2744
if (!info->domain)
drivers/iommu/intel/iommu.c
2747
spin_lock_irqsave(&info->domain->lock, flags);
drivers/iommu/intel/iommu.c
2749
spin_unlock_irqrestore(&info->domain->lock, flags);
drivers/iommu/intel/iommu.c
2751
domain_detach_iommu(info->domain, iommu);
drivers/iommu/intel/iommu.c
2752
info->domain = NULL;
drivers/iommu/intel/iommu.c
2755
static int blocking_domain_attach_dev(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
2761
iopf_for_domain_remove(info->domain ? &info->domain->domain : NULL, dev);
drivers/iommu/intel/iommu.c
2766
static int blocking_domain_set_dev_pasid(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
2780
struct dmar_domain *domain;
drivers/iommu/intel/iommu.c
2782
domain = kzalloc_obj(*domain);
drivers/iommu/intel/iommu.c
2783
if (!domain)
drivers/iommu/intel/iommu.c
2786
INIT_LIST_HEAD(&domain->devices);
drivers/iommu/intel/iommu.c
2787
INIT_LIST_HEAD(&domain->dev_pasids);
drivers/iommu/intel/iommu.c
2788
INIT_LIST_HEAD(&domain->cache_tags);
drivers/iommu/intel/iommu.c
2789
spin_lock_init(&domain->lock);
drivers/iommu/intel/iommu.c
2790
spin_lock_init(&domain->cache_lock);
drivers/iommu/intel/iommu.c
2791
xa_init(&domain->iommu_array);
drivers/iommu/intel/iommu.c
2792
INIT_LIST_HEAD(&domain->s1_domains);
drivers/iommu/intel/iommu.c
2793
spin_lock_init(&domain->s1_lock);
drivers/iommu/intel/iommu.c
2795
return domain;
drivers/iommu/intel/iommu.c
2849
dmar_domain->domain.ops = &intel_fs_paging_domain_ops;
drivers/iommu/intel/iommu.c
2865
dmar_domain->domain.pgsize_bitmap &= ~(u64)SZ_1G;
drivers/iommu/intel/iommu.c
2867
dmar_domain->domain.pgsize_bitmap = SZ_4K;
drivers/iommu/intel/iommu.c
2869
return &dmar_domain->domain;
drivers/iommu/intel/iommu.c
2946
dmar_domain->domain.ops = &intel_ss_paging_domain_ops;
drivers/iommu/intel/iommu.c
2950
dmar_domain->domain.dirty_ops = &intel_second_stage_dirty_ops;
drivers/iommu/intel/iommu.c
2961
dmar_domain->domain.pgsize_bitmap &= ~(u64)SZ_2M;
drivers/iommu/intel/iommu.c
2963
dmar_domain->domain.pgsize_bitmap &= ~(u64)SZ_1G;
drivers/iommu/intel/iommu.c
2965
dmar_domain->domain.pgsize_bitmap = SZ_4K;
drivers/iommu/intel/iommu.c
2975
return &dmar_domain->domain;
drivers/iommu/intel/iommu.c
2984
struct iommu_domain *domain;
drivers/iommu/intel/iommu.c
2990
domain = intel_iommu_domain_alloc_first_stage(dev, iommu, flags);
drivers/iommu/intel/iommu.c
2991
if (domain != ERR_PTR(-EOPNOTSUPP))
drivers/iommu/intel/iommu.c
2992
return domain;
drivers/iommu/intel/iommu.c
2996
static void intel_iommu_domain_free(struct iommu_domain *domain)
drivers/iommu/intel/iommu.c
2998
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/iommu.c
3016
if (WARN_ON(dmar_domain->domain.dirty_ops ||
drivers/iommu/intel/iommu.c
3036
(dmar_domain->domain.pgsize_bitmap & SZ_1G))
drivers/iommu/intel/iommu.c
3056
if (dmar_domain->domain.dirty_ops && !ssads_supported(iommu))
drivers/iommu/intel/iommu.c
3079
if (!(sslps & BIT(0)) && (dmar_domain->domain.pgsize_bitmap & SZ_2M))
drivers/iommu/intel/iommu.c
3081
if (!(sslps & BIT(1)) && (dmar_domain->domain.pgsize_bitmap & SZ_1G))
drivers/iommu/intel/iommu.c
3100
int paging_domain_compatible(struct iommu_domain *domain, struct device *dev)
drivers/iommu/intel/iommu.c
3103
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/iommu.c
3123
static int intel_iommu_attach_device(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
3131
ret = paging_domain_compatible(domain, dev);
drivers/iommu/intel/iommu.c
3135
ret = iopf_for_domain_set(domain, dev);
drivers/iommu/intel/iommu.c
3139
ret = dmar_domain_attach_device(to_dmar_domain(domain), dev);
drivers/iommu/intel/iommu.c
3141
iopf_for_domain_remove(domain, dev);
drivers/iommu/intel/iommu.c
3146
static void intel_iommu_tlb_sync(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
3149
cache_tag_flush_range(to_dmar_domain(domain), gather->start,
drivers/iommu/intel/iommu.c
3155
static bool domain_support_force_snooping(struct dmar_domain *domain)
drivers/iommu/intel/iommu.c
3160
assert_spin_locked(&domain->lock);
drivers/iommu/intel/iommu.c
3161
list_for_each_entry(info, &domain->devices, link) {
drivers/iommu/intel/iommu.c
3171
static bool intel_iommu_enforce_cache_coherency_fs(struct iommu_domain *domain)
drivers/iommu/intel/iommu.c
3173
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/iommu.c
3191
static bool intel_iommu_enforce_cache_coherency_ss(struct iommu_domain *domain)
drivers/iommu/intel/iommu.c
3193
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/iommu.c
3341
if (info->ats_enabled && info->domain) {
drivers/iommu/intel/iommu.c
3342
u16 did = domain_id_iommu(info->domain, iommu);
drivers/iommu/intel/iommu.c
3344
if (cache_tag_assign(info->domain, did, dev,
drivers/iommu/intel/iommu.c
3489
return translation_pre_enabled(info->iommu) && !info->domain;
drivers/iommu/intel/iommu.c
3509
static int intel_iommu_iotlb_sync_map(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
3512
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/iommu.c
3520
void domain_remove_dev_pasid(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
3529
if (!domain)
drivers/iommu/intel/iommu.c
3533
if (domain->type == IOMMU_DOMAIN_IDENTITY)
drivers/iommu/intel/iommu.c
3536
dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/iommu.c
3555
static int blocking_domain_set_dev_pasid(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
3569
domain_add_dev_pasid(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
3573
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/iommu.c
3605
static int intel_iommu_set_dev_pasid(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
3610
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/iommu.c
3615
if (WARN_ON_ONCE(!(domain->type & __IOMMU_DOMAIN_PAGING)))
drivers/iommu/intel/iommu.c
3621
if (domain->dirty_ops)
drivers/iommu/intel/iommu.c
3627
ret = paging_domain_compatible(domain, dev);
drivers/iommu/intel/iommu.c
3631
dev_pasid = domain_add_dev_pasid(domain, dev, pasid);
drivers/iommu/intel/iommu.c
3635
ret = iopf_for_domain_replace(domain, old, dev);
drivers/iommu/intel/iommu.c
3658
iopf_for_domain_replace(old, domain, dev);
drivers/iommu/intel/iommu.c
3660
domain_remove_dev_pasid(domain, dev, pasid);
drivers/iommu/intel/iommu.c
3706
static int parent_domain_set_dirty_tracking(struct dmar_domain *domain,
drivers/iommu/intel/iommu.c
3713
spin_lock(&domain->s1_lock);
drivers/iommu/intel/iommu.c
3714
list_for_each_entry(s1_domain, &domain->s1_domains, s2_link) {
drivers/iommu/intel/iommu.c
3721
spin_unlock(&domain->s1_lock);
drivers/iommu/intel/iommu.c
3725
list_for_each_entry(s1_domain, &domain->s1_domains, s2_link) {
drivers/iommu/intel/iommu.c
3728
domain->dirty_tracking);
drivers/iommu/intel/iommu.c
3731
spin_unlock(&domain->s1_lock);
drivers/iommu/intel/iommu.c
3735
static int intel_iommu_set_dirty_tracking(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
3738
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/iommu.c
3824
static int identity_domain_attach_dev(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
3853
static int identity_domain_set_dev_pasid(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
3864
ret = iopf_for_domain_replace(domain, old, dev);
drivers/iommu/intel/iommu.c
3870
iopf_for_domain_replace(old, domain, dev);
drivers/iommu/intel/iommu.c
49
static int intel_iommu_set_dirty_tracking(struct iommu_domain *domain,
drivers/iommu/intel/iommu.c
828
domain_lookup_dev_info(struct dmar_domain *domain,
drivers/iommu/intel/iommu.c
834
spin_lock_irqsave(&domain->lock, flags);
drivers/iommu/intel/iommu.c
835
list_for_each_entry(info, &domain->devices, link) {
drivers/iommu/intel/iommu.c
838
spin_unlock_irqrestore(&domain->lock, flags);
drivers/iommu/intel/iommu.c
842
spin_unlock_irqrestore(&domain->lock, flags);
drivers/iommu/intel/iommu.c
920
static void intel_flush_iotlb_all(struct iommu_domain *domain)
drivers/iommu/intel/iommu.c
922
cache_tag_flush_all(to_dmar_domain(domain));
drivers/iommu/intel/iommu.h
1194
int domain_attach_iommu(struct dmar_domain *domain, struct intel_iommu *iommu);
drivers/iommu/intel/iommu.h
1195
void domain_detach_iommu(struct dmar_domain *domain, struct intel_iommu *iommu);
drivers/iommu/intel/iommu.h
1197
int paging_domain_compatible(struct iommu_domain *domain, struct device *dev);
drivers/iommu/intel/iommu.h
1200
domain_add_dev_pasid(struct iommu_domain *domain,
drivers/iommu/intel/iommu.h
1202
void domain_remove_dev_pasid(struct iommu_domain *domain,
drivers/iommu/intel/iommu.h
1241
int cache_tag_assign(struct dmar_domain *domain, u16 did, struct device *dev,
drivers/iommu/intel/iommu.h
1243
int cache_tag_assign_domain(struct dmar_domain *domain,
drivers/iommu/intel/iommu.h
1245
void cache_tag_unassign_domain(struct dmar_domain *domain,
drivers/iommu/intel/iommu.h
1247
void cache_tag_flush_range(struct dmar_domain *domain, unsigned long start,
drivers/iommu/intel/iommu.h
1249
void cache_tag_flush_all(struct dmar_domain *domain);
drivers/iommu/intel/iommu.h
1250
void cache_tag_flush_range_np(struct dmar_domain *domain, unsigned long start,
drivers/iommu/intel/iommu.h
1265
static inline int iopf_for_domain_set(struct iommu_domain *domain,
drivers/iommu/intel/iommu.h
1268
if (!domain || !domain->iopf_handler)
drivers/iommu/intel/iommu.h
1274
static inline void iopf_for_domain_remove(struct iommu_domain *domain,
drivers/iommu/intel/iommu.h
1277
if (!domain || !domain->iopf_handler)
drivers/iommu/intel/iommu.h
1333
static inline bool intel_domain_is_fs_paging(struct dmar_domain *domain)
drivers/iommu/intel/iommu.h
1335
return domain->domain.ops == &intel_fs_paging_domain_ops;
drivers/iommu/intel/iommu.h
1338
static inline bool intel_domain_is_ss_paging(struct dmar_domain *domain)
drivers/iommu/intel/iommu.h
1340
return domain->domain.ops == &intel_ss_paging_domain_ops;
drivers/iommu/intel/iommu.h
599
struct iommu_domain domain;
drivers/iommu/intel/iommu.h
649
PT_IOMMU_CHECK_DOMAIN(struct dmar_domain, iommu, domain);
drivers/iommu/intel/iommu.h
650
PT_IOMMU_CHECK_DOMAIN(struct dmar_domain, sspt.iommu, domain);
drivers/iommu/intel/iommu.h
651
PT_IOMMU_CHECK_DOMAIN(struct dmar_domain, fspt.iommu, domain);
drivers/iommu/intel/iommu.h
763
struct dmar_domain *domain; /* pointer to domain */
drivers/iommu/intel/iommu.h
791
return container_of(dom, struct dmar_domain, domain);
drivers/iommu/intel/iommu.h
814
domain_id_iommu(struct dmar_domain *domain, struct intel_iommu *iommu)
drivers/iommu/intel/iommu.h
817
xa_load(&domain->iommu_array, iommu->seq_id);
drivers/iommu/intel/iommu.h
823
iommu_domain_did(struct iommu_domain *domain, struct intel_iommu *iommu)
drivers/iommu/intel/iommu.h
825
if (domain->type == IOMMU_DOMAIN_SVA ||
drivers/iommu/intel/iommu.h
826
domain->type == IOMMU_DOMAIN_IDENTITY)
drivers/iommu/intel/iommu.h
828
return domain_id_iommu(to_dmar_domain(domain), iommu);
drivers/iommu/intel/irq_remapping.c
1386
static void intel_free_irq_resources(struct irq_domain *domain,
drivers/iommu/intel/irq_remapping.c
1395
irq_data = irq_domain_get_irq_data(domain, virq + i);
drivers/iommu/intel/irq_remapping.c
1408
static int intel_irq_remapping_alloc(struct irq_domain *domain,
drivers/iommu/intel/irq_remapping.c
1412
struct intel_iommu *iommu = domain->host_data;
drivers/iommu/intel/irq_remapping.c
1424
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, arg);
drivers/iommu/intel/irq_remapping.c
1441
irq_data = irq_domain_get_irq_data(domain, virq + i);
drivers/iommu/intel/irq_remapping.c
1474
intel_free_irq_resources(domain, virq, i);
drivers/iommu/intel/irq_remapping.c
1476
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/iommu/intel/irq_remapping.c
1480
static void intel_irq_remapping_free(struct irq_domain *domain,
drivers/iommu/intel/irq_remapping.c
1483
intel_free_irq_resources(domain, virq, nr_irqs);
drivers/iommu/intel/irq_remapping.c
1484
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/iommu/intel/irq_remapping.c
1487
static int intel_irq_remapping_activate(struct irq_domain *domain,
drivers/iommu/intel/irq_remapping.c
1494
static void intel_irq_remapping_deactivate(struct irq_domain *domain,
drivers/iommu/intel/nested.c
135
struct dmar_domain *domain,
drivers/iommu/intel/nested.c
142
return intel_pasid_setup_nested(iommu, dev, pasid, domain);
drivers/iommu/intel/nested.c
145
static int intel_nested_set_dev_pasid(struct iommu_domain *domain,
drivers/iommu/intel/nested.c
150
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/nested.c
161
ret = paging_domain_compatible(&dmar_domain->s2_domain->domain, dev);
drivers/iommu/intel/nested.c
165
dev_pasid = domain_add_dev_pasid(domain, dev, pasid);
drivers/iommu/intel/nested.c
169
ret = iopf_for_domain_replace(domain, old, dev);
drivers/iommu/intel/nested.c
182
iopf_for_domain_replace(old, domain, dev);
drivers/iommu/intel/nested.c
184
domain_remove_dev_pasid(domain, dev, pasid);
drivers/iommu/intel/nested.c
204
struct dmar_domain *domain;
drivers/iommu/intel/nested.c
21
static int intel_nested_attach_dev(struct iommu_domain *domain,
drivers/iommu/intel/nested.c
221
domain = kzalloc_obj(*domain, GFP_KERNEL_ACCOUNT);
drivers/iommu/intel/nested.c
222
if (!domain)
drivers/iommu/intel/nested.c
225
domain->s2_domain = s2_domain;
drivers/iommu/intel/nested.c
226
domain->s1_cfg = vtd;
drivers/iommu/intel/nested.c
227
domain->domain.ops = &intel_nested_domain_ops;
drivers/iommu/intel/nested.c
228
domain->domain.type = IOMMU_DOMAIN_NESTED;
drivers/iommu/intel/nested.c
229
INIT_LIST_HEAD(&domain->devices);
drivers/iommu/intel/nested.c
230
INIT_LIST_HEAD(&domain->dev_pasids);
drivers/iommu/intel/nested.c
231
INIT_LIST_HEAD(&domain->cache_tags);
drivers/iommu/intel/nested.c
232
spin_lock_init(&domain->lock);
drivers/iommu/intel/nested.c
233
spin_lock_init(&domain->cache_lock);
drivers/iommu/intel/nested.c
234
xa_init(&domain->iommu_array);
drivers/iommu/intel/nested.c
237
list_add(&domain->s2_link, &s2_domain->s1_domains);
drivers/iommu/intel/nested.c
240
return &domain->domain;
drivers/iommu/intel/nested.c
25
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/nested.c
37
ret = paging_domain_compatible(&dmar_domain->s2_domain->domain, dev);
drivers/iommu/intel/nested.c
53
ret = iopf_for_domain_set(domain, dev);
drivers/iommu/intel/nested.c
62
info->domain = dmar_domain;
drivers/iommu/intel/nested.c
70
iopf_for_domain_remove(domain, dev);
drivers/iommu/intel/nested.c
79
static void intel_nested_domain_free(struct iommu_domain *domain)
drivers/iommu/intel/nested.c
81
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/nested.c
91
static int intel_nested_cache_invalidate_user(struct iommu_domain *domain,
drivers/iommu/intel/nested.c
94
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/pasid.c
425
struct dmar_domain *domain, u16 did)
drivers/iommu/intel/pasid.c
431
pt_iommu_vtdss_hw_info(&domain->sspt, &pt_info);
drivers/iommu/intel/pasid.c
438
pasid_set_page_snoop(pte, !(domain->sspt.vtdss_pt.common.features &
drivers/iommu/intel/pasid.c
440
if (domain->dirty_tracking)
drivers/iommu/intel/pasid.c
447
struct dmar_domain *domain,
drivers/iommu/intel/pasid.c
464
did = domain_id_iommu(domain, iommu);
drivers/iommu/intel/pasid.c
478
pasid_pte_config_second_level(iommu, pte, domain, did);
drivers/iommu/intel/pasid.c
676
u32 pasid, struct dmar_domain *domain)
drivers/iommu/intel/pasid.c
678
struct iommu_hwpt_vtd_s1 *s1_cfg = &domain->s1_cfg;
drivers/iommu/intel/pasid.c
679
struct dmar_domain *s2_domain = domain->s2_domain;
drivers/iommu/intel/pasid.c
680
u16 did = domain_id_iommu(domain, iommu);
drivers/iommu/intel/pasid.h
310
struct dmar_domain *domain,
drivers/iommu/intel/pasid.h
318
u32 pasid, struct dmar_domain *domain);
drivers/iommu/intel/perfmon.c
419
iommu_pmu_set_filter(domain, event->attr.config1,
drivers/iommu/intel/prq.c
63
struct dmar_domain *domain;
drivers/iommu/intel/prq.c
74
domain = info->domain;
drivers/iommu/intel/prq.c
76
did = domain ? domain_id_iommu(domain, iommu) : FLPT_DEFAULT_DID;
drivers/iommu/intel/svm.c
101
struct dmar_domain *domain = container_of(mn, struct dmar_domain, notifier);
drivers/iommu/intel/svm.c
103
kfree(domain->qi_batch);
drivers/iommu/intel/svm.c
104
kfree(domain);
drivers/iommu/intel/svm.c
148
static int intel_svm_set_dev_pasid(struct iommu_domain *domain,
drivers/iommu/intel/svm.c
154
struct mm_struct *mm = domain->mm;
drivers/iommu/intel/svm.c
163
dev_pasid = domain_add_dev_pasid(domain, dev, pasid);
drivers/iommu/intel/svm.c
169
ret = iopf_for_domain_replace(domain, old, dev);
drivers/iommu/intel/svm.c
188
iopf_for_domain_replace(old, domain, dev);
drivers/iommu/intel/svm.c
190
domain_remove_dev_pasid(domain, dev, pasid);
drivers/iommu/intel/svm.c
194
static void intel_svm_domain_free(struct iommu_domain *domain)
drivers/iommu/intel/svm.c
196
struct dmar_domain *dmar_domain = to_dmar_domain(domain);
drivers/iommu/intel/svm.c
210
struct dmar_domain *domain;
drivers/iommu/intel/svm.c
217
domain = kzalloc_obj(*domain);
drivers/iommu/intel/svm.c
218
if (!domain)
drivers/iommu/intel/svm.c
221
domain->domain.ops = &intel_svm_domain_ops;
drivers/iommu/intel/svm.c
222
INIT_LIST_HEAD(&domain->dev_pasids);
drivers/iommu/intel/svm.c
223
INIT_LIST_HEAD(&domain->cache_tags);
drivers/iommu/intel/svm.c
224
spin_lock_init(&domain->cache_lock);
drivers/iommu/intel/svm.c
225
spin_lock_init(&domain->lock);
drivers/iommu/intel/svm.c
227
domain->notifier.ops = &intel_mmuops;
drivers/iommu/intel/svm.c
228
ret = mmu_notifier_register(&domain->notifier, mm);
drivers/iommu/intel/svm.c
230
kfree(domain);
drivers/iommu/intel/svm.c
234
return &domain->domain;
drivers/iommu/intel/svm.c
55
struct dmar_domain *domain = container_of(mn, struct dmar_domain, notifier);
drivers/iommu/intel/svm.c
58
cache_tag_flush_all(domain);
drivers/iommu/intel/svm.c
67
cache_tag_flush_range(domain, start, end - 1, 0);
drivers/iommu/intel/svm.c
72
struct dmar_domain *domain = container_of(mn, struct dmar_domain, notifier);
drivers/iommu/intel/svm.c
89
spin_lock_irqsave(&domain->lock, flags);
drivers/iommu/intel/svm.c
90
list_for_each_entry(dev_pasid, &domain->dev_pasids, link_domain) {
drivers/iommu/intel/svm.c
95
spin_unlock_irqrestore(&domain->lock, flags);
drivers/iommu/io-pgfault.c
152
if (!attach_handle->domain->iopf_handler)
drivers/iommu/io-pgfault.c
262
if (group->attach_handle->domain->iopf_handler(group))
drivers/iommu/iommu-debug-pagealloc.c
102
size_t page_size = iommu_debug_page_size(domain);
drivers/iommu/iommu-debug-pagealloc.c
111
static void __iommu_debug_update_iova(struct iommu_domain *domain,
drivers/iommu/iommu-debug-pagealloc.c
115
size_t page_size = iommu_debug_page_size(domain);
drivers/iommu/iommu-debug-pagealloc.c
121
phys_addr_t phys = iommu_iova_to_phys(domain, iova + off);
drivers/iommu/iommu-debug-pagealloc.c
133
void __iommu_debug_unmap_begin(struct iommu_domain *domain,
drivers/iommu/iommu-debug-pagealloc.c
136
__iommu_debug_update_iova(domain, iova, size, false);
drivers/iommu/iommu-debug-pagealloc.c
139
void __iommu_debug_unmap_end(struct iommu_domain *domain,
drivers/iommu/iommu-debug-pagealloc.c
147
__iommu_debug_update_iova(domain, iova + unmapped,
drivers/iommu/iommu-debug-pagealloc.c
72
static size_t iommu_debug_page_size(struct iommu_domain *domain)
drivers/iommu/iommu-debug-pagealloc.c
74
return 1UL << __ffs(domain->pgsize_bitmap);
drivers/iommu/iommu-debug-pagealloc.c
99
void __iommu_debug_map(struct iommu_domain *domain, phys_addr_t phys, size_t size)
drivers/iommu/iommu-priv.h
104
static inline void iommu_debug_map(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
109
static inline void iommu_debug_unmap_begin(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
114
static inline void iommu_debug_unmap_end(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
46
int iommu_attach_group_handle(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
49
void iommu_detach_group_handle(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
56
int iommufd_sw_msi(struct iommu_domain *domain, struct msi_desc *desc,
drivers/iommu/iommu-priv.h
59
static inline int iommufd_sw_msi(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
66
int iommu_replace_device_pasid(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
72
void __iommu_debug_map(struct iommu_domain *domain, phys_addr_t phys,
drivers/iommu/iommu-priv.h
74
void __iommu_debug_unmap_begin(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
76
void __iommu_debug_unmap_end(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
79
static inline void iommu_debug_map(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
83
__iommu_debug_map(domain, phys, size);
drivers/iommu/iommu-priv.h
86
static inline void iommu_debug_unmap_begin(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
90
__iommu_debug_unmap_begin(domain, iova, size);
drivers/iommu/iommu-priv.h
93
static inline void iommu_debug_unmap_end(struct iommu_domain *domain,
drivers/iommu/iommu-priv.h
98
__iommu_debug_unmap_end(domain, iova, size, unmapped);
drivers/iommu/iommu-sva.c
118
list_for_each_entry(domain, &mm->iommu_mm->sva_domains, next) {
drivers/iommu/iommu-sva.c
119
ret = iommu_attach_device_pasid(domain, dev, iommu_mm->pasid,
drivers/iommu/iommu-sva.c
122
domain->users++;
drivers/iommu/iommu-sva.c
128
domain = iommu_sva_domain_alloc(dev, mm);
drivers/iommu/iommu-sva.c
129
if (IS_ERR(domain)) {
drivers/iommu/iommu-sva.c
130
ret = PTR_ERR(domain);
drivers/iommu/iommu-sva.c
134
ret = iommu_attach_device_pasid(domain, dev, iommu_mm->pasid,
drivers/iommu/iommu-sva.c
138
domain->users = 1;
drivers/iommu/iommu-sva.c
145
list_add(&domain->next, &iommu_mm->sva_domains);
drivers/iommu/iommu-sva.c
153
iommu_domain_free(domain);
drivers/iommu/iommu-sva.c
172
struct iommu_domain *domain = handle->handle.domain;
drivers/iommu/iommu-sva.c
173
struct iommu_mm_data *iommu_mm = domain->mm->iommu_mm;
drivers/iommu/iommu-sva.c
182
iommu_detach_device_pasid(domain, dev, iommu_mm->pasid);
drivers/iommu/iommu-sva.c
183
if (--domain->users == 0) {
drivers/iommu/iommu-sva.c
184
list_del(&domain->next);
drivers/iommu/iommu-sva.c
191
iommu_domain_free(domain);
drivers/iommu/iommu-sva.c
201
struct iommu_domain *domain = handle->handle.domain;
drivers/iommu/iommu-sva.c
203
return mm_get_enqcmd_pasid(domain->mm);
drivers/iommu/iommu-sva.c
291
group->attach_handle->domain->mm);
drivers/iommu/iommu-sva.c
313
struct iommu_domain *domain;
drivers/iommu/iommu-sva.c
318
domain = ops->domain_alloc_sva(dev, mm);
drivers/iommu/iommu-sva.c
319
if (IS_ERR(domain))
drivers/iommu/iommu-sva.c
320
return domain;
drivers/iommu/iommu-sva.c
322
domain->type = IOMMU_DOMAIN_SVA;
drivers/iommu/iommu-sva.c
323
domain->cookie_type = IOMMU_COOKIE_SVA;
drivers/iommu/iommu-sva.c
325
domain->mm = mm;
drivers/iommu/iommu-sva.c
326
domain->owner = ops;
drivers/iommu/iommu-sva.c
327
domain->iopf_handler = iommu_sva_iopf_handler;
drivers/iommu/iommu-sva.c
329
return domain;
drivers/iommu/iommu-sva.c
77
struct iommu_domain *domain;
drivers/iommu/iommu-sva.c
97
if (attach_handle->domain->mm != mm) {
drivers/iommu/iommu.c
107
static int __iommu_attach_device(struct iommu_domain *domain,
drivers/iommu/iommu.c
109
static int __iommu_attach_group(struct iommu_domain *domain,
drivers/iommu/iommu.c
1179
static int iommu_create_device_direct_mappings(struct iommu_domain *domain,
drivers/iommu/iommu.c
1187
pg_size = domain->pgsize_bitmap ? 1UL << __ffs(domain->pgsize_bitmap) : 0;
drivers/iommu/iommu.c
1189
if (WARN_ON_ONCE(iommu_is_dma_domain(domain) && !pg_size))
drivers/iommu/iommu.c
1204
!iommu_is_dma_domain(domain))
drivers/iommu/iommu.c
1220
phys_addr = iommu_iova_to_phys(domain, addr ? addr : 1);
drivers/iommu/iommu.c
1228
ret = iommu_map(domain, addr - map_size,
drivers/iommu/iommu.c
141
static int iommu_create_device_direct_mappings(struct iommu_domain *domain,
drivers/iommu/iommu.c
149
static void iommu_domain_init(struct iommu_domain *domain, unsigned int type,
drivers/iommu/iommu.c
1682
struct iommu_domain *domain;
drivers/iommu/iommu.c
1688
domain = ops->domain_alloc_identity(dev);
drivers/iommu/iommu.c
1689
if (IS_ERR(domain))
drivers/iommu/iommu.c
1690
return domain;
drivers/iommu/iommu.c
1695
iommu_domain_init(domain, IOMMU_DOMAIN_IDENTITY, ops);
drivers/iommu/iommu.c
1696
return domain;
drivers/iommu/iommu.c
2036
void iommu_set_fault_handler(struct iommu_domain *domain,
drivers/iommu/iommu.c
2040
if (WARN_ON(!domain || domain->cookie_type != IOMMU_COOKIE_NONE))
drivers/iommu/iommu.c
2043
domain->cookie_type = IOMMU_COOKIE_FAULT_HANDLER;
drivers/iommu/iommu.c
2044
domain->handler = handler;
drivers/iommu/iommu.c
2045
domain->handler_token = token;
drivers/iommu/iommu.c
2049
static void iommu_domain_init(struct iommu_domain *domain, unsigned int type,
drivers/iommu/iommu.c
2052
domain->type = type;
drivers/iommu/iommu.c
2053
domain->owner = ops;
drivers/iommu/iommu.c
2054
if (!domain->ops)
drivers/iommu/iommu.c
2055
domain->ops = ops->default_domain_ops;
drivers/iommu/iommu.c
2063
struct iommu_domain *domain;
drivers/iommu/iommu.c
2071
domain = ops->domain_alloc_paging(dev);
drivers/iommu/iommu.c
2073
domain = ops->domain_alloc_paging_flags(dev, flags, NULL);
drivers/iommu/iommu.c
2076
domain = ops->domain_alloc(IOMMU_DOMAIN_UNMANAGED);
drivers/iommu/iommu.c
2081
if (IS_ERR(domain))
drivers/iommu/iommu.c
2082
return domain;
drivers/iommu/iommu.c
2083
if (!domain)
drivers/iommu/iommu.c
2086
iommu_domain_init(domain, type, ops);
drivers/iommu/iommu.c
2087
return domain;
drivers/iommu/iommu.c
2106
void iommu_domain_free(struct iommu_domain *domain)
drivers/iommu/iommu.c
2108
switch (domain->cookie_type) {
drivers/iommu/iommu.c
2110
iommu_put_dma_cookie(domain);
drivers/iommu/iommu.c
2113
iommu_put_msi_cookie(domain);
drivers/iommu/iommu.c
2116
mmdrop(domain->mm);
drivers/iommu/iommu.c
2121
if (domain->ops->free)
drivers/iommu/iommu.c
2122
domain->ops->free(domain);
drivers/iommu/iommu.c
2142
static int __iommu_attach_device(struct iommu_domain *domain,
drivers/iommu/iommu.c
2147
if (unlikely(domain->ops->attach_dev == NULL))
drivers/iommu/iommu.c
2150
ret = domain->ops->attach_dev(domain, dev, old);
drivers/iommu/iommu.c
2170
int iommu_attach_device(struct iommu_domain *domain, struct device *dev)
drivers/iommu/iommu.c
2188
ret = __iommu_attach_group(domain, group);
drivers/iommu/iommu.c
2196
int iommu_deferred_attach(struct device *dev, struct iommu_domain *domain)
drivers/iommu/iommu.c
2217
return __iommu_attach_device(domain, dev, NULL);
drivers/iommu/iommu.c
2220
void iommu_detach_device(struct iommu_domain *domain, struct device *dev)
drivers/iommu/iommu.c
2229
if (WARN_ON(domain != group->domain) ||
drivers/iommu/iommu.c
2258
return group->domain;
drivers/iommu/iommu.c
2286
return group->domain;
drivers/iommu/iommu.c
2299
static void *iommu_make_pasid_array_entry(struct iommu_domain *domain,
drivers/iommu/iommu.c
2303
handle->domain = domain;
drivers/iommu/iommu.c
2307
return xa_tag_pointer(domain, IOMMU_PASID_ARRAY_DOMAIN);
drivers/iommu/iommu.c
2311
struct iommu_domain *domain)
drivers/iommu/iommu.c
2313
if (domain->owner == ops)
drivers/iommu/iommu.c
2317
if (domain == ops->blocked_domain || domain == ops->identity_domain)
drivers/iommu/iommu.c
2323
static int __iommu_attach_group(struct iommu_domain *domain,
drivers/iommu/iommu.c
2328
if (group->domain && group->domain != group->default_domain &&
drivers/iommu/iommu.c
2329
group->domain != group->blocking_domain)
drivers/iommu/iommu.c
2334
!domain_iommu_ops_compatible(dev_iommu_ops(dev), domain))
drivers/iommu/iommu.c
2337
return __iommu_group_set_domain(group, domain);
drivers/iommu/iommu.c
2352
int iommu_attach_group(struct iommu_domain *domain, struct iommu_group *group)
drivers/iommu/iommu.c
2357
ret = __iommu_attach_group(domain, group);
drivers/iommu/iommu.c
2436
if (group->domain == new_domain)
drivers/iommu/iommu.c
2458
group->domain, flags);
drivers/iommu/iommu.c
2473
group->domain = new_domain;
drivers/iommu/iommu.c
2491
if (group->domain)
drivers/iommu/iommu.c
2493
group, gdev->dev, group->domain, new_domain,
drivers/iommu/iommu.c
2499
void iommu_detach_group(struct iommu_domain *domain, struct iommu_group *group)
drivers/iommu/iommu.c
2507
phys_addr_t iommu_iova_to_phys(struct iommu_domain *domain, dma_addr_t iova)
drivers/iommu/iommu.c
2509
if (domain->type == IOMMU_DOMAIN_IDENTITY)
drivers/iommu/iommu.c
2512
if (domain->type == IOMMU_DOMAIN_BLOCKED)
drivers/iommu/iommu.c
2515
return domain->ops->iova_to_phys(domain, iova);
drivers/iommu/iommu.c
2519
static size_t iommu_pgsize(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/iommu.c
2529
pgsizes = domain->pgsize_bitmap & GENMASK(__fls(size), 0);
drivers/iommu/iommu.c
2545
pgsizes = domain->pgsize_bitmap & ~GENMASK(pgsize_idx, 0);
drivers/iommu/iommu.c
2575
int iommu_map_nosync(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/iommu.c
2578
const struct iommu_domain_ops *ops = domain->ops;
drivers/iommu/iommu.c
2587
if (unlikely(!(domain->type & __IOMMU_DOMAIN_PAGING)))
drivers/iommu/iommu.c
2590
if (WARN_ON(!ops->map_pages || domain->pgsize_bitmap == 0UL))
drivers/iommu/iommu.c
2599
min_pagesz = 1 << __ffs(domain->pgsize_bitmap);
drivers/iommu/iommu.c
2617
pgsize = iommu_pgsize(domain, iova, paddr, size, &count);
drivers/iommu/iommu.c
2621
ret = ops->map_pages(domain, iova, paddr, pgsize, count, prot,
drivers/iommu/iommu.c
2638
iommu_unmap(domain, orig_iova, orig_size - size);
drivers/iommu/iommu.c
2641
iommu_debug_map(domain, orig_paddr, orig_size);
drivers/iommu/iommu.c
2647
int iommu_sync_map(struct iommu_domain *domain, unsigned long iova, size_t size)
drivers/iommu/iommu.c
2649
const struct iommu_domain_ops *ops = domain->ops;
drivers/iommu/iommu.c
2653
return ops->iotlb_sync_map(domain, iova, size);
drivers/iommu/iommu.c
2656
int iommu_map(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/iommu.c
2661
ret = iommu_map_nosync(domain, iova, paddr, size, prot, gfp);
drivers/iommu/iommu.c
2665
ret = iommu_sync_map(domain, iova, size);
drivers/iommu/iommu.c
2667
iommu_unmap(domain, iova, size);
drivers/iommu/iommu.c
2673
static size_t __iommu_unmap(struct iommu_domain *domain,
drivers/iommu/iommu.c
2677
const struct iommu_domain_ops *ops = domain->ops;
drivers/iommu/iommu.c
2682
if (unlikely(!(domain->type & __IOMMU_DOMAIN_PAGING)))
drivers/iommu/iommu.c
2685
if (WARN_ON(!ops->unmap_pages || domain->pgsize_bitmap == 0UL))
drivers/iommu/iommu.c
2689
min_pagesz = 1 << __ffs(domain->pgsize_bitmap);
drivers/iommu/iommu.c
2704
iommu_debug_unmap_begin(domain, iova, size);
drivers/iommu/iommu.c
2713
pgsize = iommu_pgsize(domain, iova, iova, size - unmapped, &count);
drivers/iommu/iommu.c
2714
unmapped_page = ops->unmap_pages(domain, iova, pgsize, count, iotlb_gather);
drivers/iommu/iommu.c
2732
iommu_debug_unmap_end(domain, orig_iova, size, unmapped);
drivers/iommu/iommu.c
2750
size_t iommu_unmap(struct iommu_domain *domain,
drivers/iommu/iommu.c
2757
ret = __iommu_unmap(domain, iova, size, &iotlb_gather);
drivers/iommu/iommu.c
2758
iommu_iotlb_sync(domain, &iotlb_gather);
drivers/iommu/iommu.c
2783
size_t iommu_unmap_fast(struct iommu_domain *domain,
drivers/iommu/iommu.c
2787
return __iommu_unmap(domain, iova, size, iotlb_gather);
drivers/iommu/iommu.c
2791
ssize_t iommu_map_sg(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/iommu.c
2804
ret = iommu_map_nosync(domain, iova + mapped, start,
drivers/iommu/iommu.c
2828
ret = iommu_sync_map(domain, iova, mapped);
drivers/iommu/iommu.c
2836
iommu_unmap(domain, iova, mapped);
drivers/iommu/iommu.c
2866
int report_iommu_fault(struct iommu_domain *domain, struct device *dev,
drivers/iommu/iommu.c
2875
if (domain->cookie_type == IOMMU_COOKIE_FAULT_HANDLER &&
drivers/iommu/iommu.c
2876
domain->handler)
drivers/iommu/iommu.c
2877
ret = domain->handler(domain, dev, iova, flags,
drivers/iommu/iommu.c
2878
domain->handler_token);
drivers/iommu/iommu.c
2897
int iommu_set_pgtable_quirks(struct iommu_domain *domain,
drivers/iommu/iommu.c
2900
if (domain->type != IOMMU_DOMAIN_UNMANAGED)
drivers/iommu/iommu.c
2902
if (!domain->ops->set_pgtable_quirks)
drivers/iommu/iommu.c
2904
return domain->ops->set_pgtable_quirks(domain, quirk);
drivers/iommu/iommu.c
3129
if (!group->domain) {
drivers/iommu/iommu.c
3265
if (group->domain != group->default_domain || group->owner ||
drivers/iommu/iommu.c
3306
struct iommu_domain *domain;
drivers/iommu/iommu.c
3320
domain = iommu_paging_domain_alloc(dev);
drivers/iommu/iommu.c
3321
if (IS_ERR(domain))
drivers/iommu/iommu.c
3322
return PTR_ERR(domain);
drivers/iommu/iommu.c
3323
group->blocking_domain = domain;
drivers/iommu/iommu.c
3331
if ((group->domain && group->domain != group->default_domain) ||
drivers/iommu/iommu.c
3480
struct iommu_domain *domain)
drivers/iommu/iommu.c
3486
dev, pasid, domain));
drivers/iommu/iommu.c
3489
static int __iommu_set_group_pasid(struct iommu_domain *domain,
drivers/iommu/iommu.c
3498
ret = domain->ops->set_dev_pasid(domain, device->dev,
drivers/iommu/iommu.c
3521
pasid, domain)))
drivers/iommu/iommu.c
3522
iommu_remove_dev_pasid(device->dev, pasid, domain);
drivers/iommu/iommu.c
3530
struct iommu_domain *domain)
drivers/iommu/iommu.c
3536
iommu_remove_dev_pasid(device->dev, pasid, domain);
drivers/iommu/iommu.c
3552
int iommu_attach_device_pasid(struct iommu_domain *domain,
drivers/iommu/iommu.c
3568
if (!domain->ops->set_dev_pasid ||
drivers/iommu/iommu.c
3573
if (!domain_iommu_ops_compatible(ops, domain) ||
drivers/iommu/iommu.c
3601
entry = iommu_make_pasid_array_entry(domain, handle);
drivers/iommu/iommu.c
3611
ret = __iommu_set_group_pasid(domain, group, pasid, NULL);
drivers/iommu/iommu.c
3649
int iommu_replace_device_pasid(struct iommu_domain *domain,
drivers/iommu/iommu.c
3663
if (!domain->ops->set_dev_pasid)
drivers/iommu/iommu.c
3666
if (!domain_iommu_ops_compatible(dev_iommu_ops(dev), domain) ||
drivers/iommu/iommu.c
3681
entry = iommu_make_pasid_array_entry(domain, handle);
drivers/iommu/iommu.c
3713
if (curr_domain != domain) {
drivers/iommu/iommu.c
3714
ret = __iommu_set_group_pasid(domain, group,
drivers/iommu/iommu.c
3742
void iommu_detach_device_pasid(struct iommu_domain *domain, struct device *dev,
drivers/iommu/iommu.c
3749
__iommu_remove_group_pasid(group, pasid, domain);
drivers/iommu/iommu.c
3808
if (type && handle->domain->type != type)
drivers/iommu/iommu.c
3832
int iommu_attach_group_handle(struct iommu_domain *domain,
drivers/iommu/iommu.c
3843
entry = iommu_make_pasid_array_entry(domain, handle);
drivers/iommu/iommu.c
3849
ret = __iommu_attach_group(domain, group);
drivers/iommu/iommu.c
3878
void iommu_detach_group_handle(struct iommu_domain *domain,
drivers/iommu/iommu.c
3983
if (group->domain != group->blocking_domain) {
drivers/iommu/iommu.c
3985
group->domain);
drivers/iommu/iommu.c
4039
if (group->domain != group->blocking_domain) {
drivers/iommu/iommu.c
4040
WARN_ON(__iommu_attach_device(group->domain, &pdev->dev,
drivers/iommu/iommu.c
4082
if (group->domain && group->domain->type != IOMMU_DOMAIN_IDENTITY) {
drivers/iommu/iommu.c
4083
switch (group->domain->cookie_type) {
drivers/iommu/iommu.c
4086
ret = iommu_dma_sw_msi(group->domain, desc, msi_addr);
drivers/iommu/iommu.c
4089
ret = iommufd_sw_msi(group->domain, desc, msi_addr);
drivers/iommu/iommu.c
566
group->domain);
drivers/iommu/iommu.c
585
group->domain = NULL;
drivers/iommu/iommu.c
601
return ((struct iommu_attach_handle *)xa_untag_pointer(entry))->domain;
drivers/iommu/iommu.c
648
WARN_ON(group->default_domain && !group->domain);
drivers/iommu/iommu.c
651
if (group->domain) {
drivers/iommu/iommu.c
652
ret = __iommu_device_set_domain(group, dev, group->domain, NULL,
drivers/iommu/iommu.c
69
struct iommu_domain *domain;
drivers/iommu/iommu.c
723
group->domain != group->default_domain);
drivers/iommu/iommufd/device.c
517
rc = iommu_attach_group_handle(hwpt->domain, idev->igroup->group,
drivers/iommu/iommufd/device.c
520
rc = iommu_attach_device_pasid(hwpt->domain, idev->dev, pasid,
drivers/iommu/iommufd/device.c
553
iommu_detach_group_handle(hwpt->domain, idev->igroup->group);
drivers/iommu/iommufd/device.c
555
iommu_detach_device_pasid(hwpt->domain, idev->dev, pasid);
drivers/iommu/iommufd/device.c
585
hwpt->domain, &handle->handle);
drivers/iommu/iommufd/device.c
587
rc = iommu_replace_device_pasid(hwpt->domain, idev->dev,
drivers/iommu/iommufd/driver.c
229
rc = iommu_map(hwpt_paging->common.domain, iova,
drivers/iommu/iommufd/driver.c
247
int iommufd_sw_msi(struct iommu_domain *domain, struct msi_desc *desc,
drivers/iommu/iommufd/driver.c
270
hwpt_paging = find_hwpt_paging(domain->iommufd_hwpt);
drivers/iommu/iommufd/eventq.c
460
hwpt = group->attach_handle->domain->iommufd_hwpt;
drivers/iommu/iommufd/hw_pagetable.c
13
if (hwpt->domain)
drivers/iommu/iommufd/hw_pagetable.c
14
iommu_domain_free(hwpt->domain);
drivers/iommu/iommufd/hw_pagetable.c
149
hwpt->domain = ops->domain_alloc_paging_flags(idev->dev,
drivers/iommu/iommufd/hw_pagetable.c
151
if (IS_ERR(hwpt->domain)) {
drivers/iommu/iommufd/hw_pagetable.c
152
rc = PTR_ERR(hwpt->domain);
drivers/iommu/iommufd/hw_pagetable.c
153
hwpt->domain = NULL;
drivers/iommu/iommufd/hw_pagetable.c
156
hwpt->domain->owner = ops;
drivers/iommu/iommufd/hw_pagetable.c
158
hwpt->domain = iommu_paging_domain_alloc(idev->dev);
drivers/iommu/iommufd/hw_pagetable.c
159
if (IS_ERR(hwpt->domain)) {
drivers/iommu/iommufd/hw_pagetable.c
160
rc = PTR_ERR(hwpt->domain);
drivers/iommu/iommufd/hw_pagetable.c
161
hwpt->domain = NULL;
drivers/iommu/iommufd/hw_pagetable.c
165
hwpt->domain->iommufd_hwpt = hwpt;
drivers/iommu/iommufd/hw_pagetable.c
166
hwpt->domain->cookie_type = IOMMU_COOKIE_IOMMUFD;
drivers/iommu/iommufd/hw_pagetable.c
200
rc = iopt_table_add_domain(&ioas->iopt, hwpt->domain);
drivers/iommu/iommufd/hw_pagetable.c
241
parent->common.domain->owner != ops)
drivers/iommu/iommufd/hw_pagetable.c
254
hwpt->domain = ops->domain_alloc_nested(
drivers/iommu/iommufd/hw_pagetable.c
255
idev->dev, parent->common.domain,
drivers/iommu/iommufd/hw_pagetable.c
257
if (IS_ERR(hwpt->domain)) {
drivers/iommu/iommufd/hw_pagetable.c
258
rc = PTR_ERR(hwpt->domain);
drivers/iommu/iommufd/hw_pagetable.c
259
hwpt->domain = NULL;
drivers/iommu/iommufd/hw_pagetable.c
262
hwpt->domain->owner = ops;
drivers/iommu/iommufd/hw_pagetable.c
263
hwpt->domain->iommufd_hwpt = hwpt;
drivers/iommu/iommufd/hw_pagetable.c
264
hwpt->domain->cookie_type = IOMMU_COOKIE_IOMMUFD;
drivers/iommu/iommufd/hw_pagetable.c
266
if (WARN_ON_ONCE(hwpt->domain->type != IOMMU_DOMAIN_NESTED)) {
drivers/iommu/iommufd/hw_pagetable.c
31
hwpt_paging->common.domain);
drivers/iommu/iommufd/hw_pagetable.c
312
hwpt->domain = viommu->ops->alloc_domain_nested(
drivers/iommu/iommufd/hw_pagetable.c
314
if (IS_ERR(hwpt->domain)) {
drivers/iommu/iommufd/hw_pagetable.c
315
rc = PTR_ERR(hwpt->domain);
drivers/iommu/iommufd/hw_pagetable.c
316
hwpt->domain = NULL;
drivers/iommu/iommufd/hw_pagetable.c
319
hwpt->domain->iommufd_hwpt = hwpt;
drivers/iommu/iommufd/hw_pagetable.c
320
hwpt->domain->owner = viommu->iommu_dev->ops;
drivers/iommu/iommufd/hw_pagetable.c
321
hwpt->domain->cookie_type = IOMMU_COOKIE_IOMMUFD;
drivers/iommu/iommufd/hw_pagetable.c
323
if (WARN_ON_ONCE(hwpt->domain->type != IOMMU_DOMAIN_NESTED)) {
drivers/iommu/iommufd/hw_pagetable.c
420
hwpt->domain->iopf_handler = iommufd_fault_iopf_handler;
drivers/iommu/iommufd/hw_pagetable.c
462
rc = iopt_set_dirty_tracking(&ioas->iopt, hwpt_paging->common.domain,
drivers/iommu/iommufd/hw_pagetable.c
486
&ioas->iopt, hwpt_paging->common.domain, cmd->flags, cmd);
drivers/iommu/iommufd/hw_pagetable.c
49
hwpt_paging->common.domain);
drivers/iommu/iommufd/hw_pagetable.c
524
if (!hwpt->domain->ops ||
drivers/iommu/iommufd/hw_pagetable.c
525
!hwpt->domain->ops->cache_invalidate_user) {
drivers/iommu/iommufd/hw_pagetable.c
529
rc = hwpt->domain->ops->cache_invalidate_user(hwpt->domain,
drivers/iommu/iommufd/hw_pagetable.c
74
struct iommu_domain *paging_domain = hwpt_paging->common.domain;
drivers/iommu/iommufd/io_pagetable.c
1003
iopt_area_unmap_domain(area, domain);
drivers/iommu/iommufd/io_pagetable.c
1004
iopt_dmabuf_untrack_domain(pages, area, domain);
drivers/iommu/iommufd/io_pagetable.c
1009
iopt_area_unmap_domain(area, domain);
drivers/iommu/iommufd/io_pagetable.c
1023
WARN_ON(area->storage_domain != domain);
drivers/iommu/iommufd/io_pagetable.c
1025
iopt_area_unfill_domain(area, pages, domain);
drivers/iommu/iommufd/io_pagetable.c
1027
iopt_dmabuf_untrack_domain(pages, area, domain);
drivers/iommu/iommufd/io_pagetable.c
1041
struct iommu_domain *domain)
drivers/iommu/iommufd/io_pagetable.c
1059
rc = iopt_dmabuf_track_domain(pages, area, domain);
drivers/iommu/iommufd/io_pagetable.c
1063
rc = iopt_area_fill_domain(area, domain);
drivers/iommu/iommufd/io_pagetable.c
1066
iopt_dmabuf_untrack_domain(pages, area, domain);
drivers/iommu/iommufd/io_pagetable.c
1071
area->storage_domain = domain;
drivers/iommu/iommufd/io_pagetable.c
1094
iopt_area_unfill_domain(area, pages, domain);
drivers/iommu/iommufd/io_pagetable.c
1096
iopt_dmabuf_untrack_domain(pages, area, domain);
drivers/iommu/iommufd/io_pagetable.c
1132
struct iommu_domain *domain)
drivers/iommu/iommufd/io_pagetable.c
1134
const struct iommu_domain_geometry *geometry = &domain->geometry;
drivers/iommu/iommufd/io_pagetable.c
1144
if (WARN_ON(iter_domain == domain)) {
drivers/iommu/iommufd/io_pagetable.c
1159
1UL << __ffs(domain->pgsize_bitmap),
drivers/iommu/iommufd/io_pagetable.c
1174
domain);
drivers/iommu/iommufd/io_pagetable.c
1180
ULONG_MAX, domain);
drivers/iommu/iommufd/io_pagetable.c
1189
rc = iopt_fill_domain(iopt, domain);
drivers/iommu/iommufd/io_pagetable.c
1194
xa_store(&iopt->domains, iopt->next_domain_id, domain, GFP_KERNEL);
drivers/iommu/iommufd/io_pagetable.c
1202
__iopt_remove_reserved_iova(iopt, domain);
drivers/iommu/iommufd/io_pagetable.c
1213
struct iommu_domain *domain;
drivers/iommu/iommufd/io_pagetable.c
1225
xa_for_each(&iopt->domains, index, domain)
drivers/iommu/iommufd/io_pagetable.c
1227
1UL << __ffs(domain->pgsize_bitmap),
drivers/iommu/iommufd/io_pagetable.c
1246
struct iommu_domain *domain)
drivers/iommu/iommufd/io_pagetable.c
1255
if (iter_domain == domain)
drivers/iommu/iommufd/io_pagetable.c
1257
if (WARN_ON(iter_domain != domain) || index >= iopt->next_domain_id)
drivers/iommu/iommufd/io_pagetable.c
1269
iopt_unfill_domain(iopt, domain);
drivers/iommu/iommufd/io_pagetable.c
1270
__iopt_remove_reserved_iova(iopt, domain);
drivers/iommu/iommufd/io_pagetable.c
532
struct iommu_domain *domain;
drivers/iommu/iommufd/io_pagetable.c
543
struct iommu_domain *domain = arg->domain;
drivers/iommu/iommufd/io_pagetable.c
545
const struct iommu_dirty_ops *ops = domain->dirty_ops;
drivers/iommu/iommufd/io_pagetable.c
553
ret = ops->read_and_clear_dirty(domain, iter.cur_iova,
drivers/iommu/iommufd/io_pagetable.c
566
iommu_read_and_clear_dirty(struct iommu_domain *domain,
drivers/iommu/iommufd/io_pagetable.c
570
const struct iommu_dirty_ops *ops = domain->dirty_ops;
drivers/iommu/iommufd/io_pagetable.c
590
arg.domain = domain;
drivers/iommu/iommufd/io_pagetable.c
595
iommu_iotlb_sync(domain, &gather);
drivers/iommu/iommufd/io_pagetable.c
629
struct iommu_domain *domain,
drivers/iommu/iommufd/io_pagetable.c
640
ret = iommu_read_and_clear_dirty(domain, iopt, flags, bitmap);
drivers/iommu/iommufd/io_pagetable.c
647
struct iommu_domain *domain)
drivers/iommu/iommufd/io_pagetable.c
649
const struct iommu_dirty_ops *ops = domain->dirty_ops;
drivers/iommu/iommufd/io_pagetable.c
664
ret = ops->read_and_clear_dirty(domain, iopt_area_iova(area),
drivers/iommu/iommufd/io_pagetable.c
671
iommu_iotlb_sync(domain, &gather);
drivers/iommu/iommufd/io_pagetable.c
676
struct iommu_domain *domain, bool enable)
drivers/iommu/iommufd/io_pagetable.c
678
const struct iommu_dirty_ops *ops = domain->dirty_ops;
drivers/iommu/iommufd/io_pagetable.c
688
ret = iopt_clear_dirty_data(iopt, domain);
drivers/iommu/iommufd/io_pagetable.c
693
ret = ops->set_dirty_tracking(domain, enable);
drivers/iommu/iommufd/io_pagetable.c
973
struct iommu_domain *domain)
drivers/iommu/iommufd/io_pagetable.c
999
if (area->storage_domain == domain)
drivers/iommu/iommufd/io_pagetable.h
198
struct iommu_domain *domain;
drivers/iommu/iommufd/io_pagetable.h
67
int iopt_area_fill_domain(struct iopt_area *area, struct iommu_domain *domain);
drivers/iommu/iommufd/io_pagetable.h
69
struct iommu_domain *domain);
drivers/iommu/iommufd/io_pagetable.h
71
struct iommu_domain *domain);
drivers/iommu/iommufd/io_pagetable.h
74
struct iommu_domain *domain);
drivers/iommu/iommufd/io_pagetable.h
77
struct iommu_domain *domain);
drivers/iommu/iommufd/iommufd_private.h
123
struct iommu_domain *domain,
drivers/iommu/iommufd/iommufd_private.h
127
struct iommu_domain *domain, bool enable);
drivers/iommu/iommufd/iommufd_private.h
132
struct iommu_domain *domain);
drivers/iommu/iommufd/iommufd_private.h
134
struct iommu_domain *domain);
drivers/iommu/iommufd/iommufd_private.h
365
struct iommu_domain *domain;
drivers/iommu/iommufd/pages.c
1446
iopt_area_unmap_domain_range(area, track->domain,
drivers/iommu/iommufd/pages.c
1566
struct iommu_domain *domain)
drivers/iommu/iommufd/pages.c
1575
if (WARN_ON(track->domain == domain && track->area == area))
drivers/iommu/iommufd/pages.c
1581
track->domain = domain;
drivers/iommu/iommufd/pages.c
1590
struct iommu_domain *domain)
drivers/iommu/iommufd/pages.c
1598
if (track->domain == domain && track->area == area) {
drivers/iommu/iommufd/pages.c
1611
struct iommu_domain *domain;
drivers/iommu/iommufd/pages.c
1619
xa_for_each(&area->iopt->domains, index, domain) {
drivers/iommu/iommufd/pages.c
1620
rc = iopt_dmabuf_track_domain(pages, area, domain);
drivers/iommu/iommufd/pages.c
1671
struct iopt_pages *pages, struct iommu_domain *domain,
drivers/iommu/iommufd/pages.c
1688
batch_from_domain(batch, domain, area, start,
drivers/iommu/iommufd/pages.c
1709
batch_from_domain_continue(batch, domain, area,
drivers/iommu/iommufd/pages.c
1715
area, domain, *unmapped_end_index,
drivers/iommu/iommufd/pages.c
1732
struct iommu_domain *domain,
drivers/iommu/iommufd/pages.c
1746
iopt_area_unmap_domain_range(area, domain, start_index,
drivers/iommu/iommufd/pages.c
1774
iopt_area_unpin_domain(&batch, area, pages, domain,
drivers/iommu/iommufd/pages.c
1783
iopt_area_unmap_domain_range(area, domain, unmapped_end_index,
drivers/iommu/iommufd/pages.c
1792
struct iommu_domain *domain,
drivers/iommu/iommufd/pages.c
1796
__iopt_area_unfill_domain(area, pages, domain, end_index - 1);
drivers/iommu/iommufd/pages.c
1807
void iopt_area_unmap_domain(struct iopt_area *area, struct iommu_domain *domain)
drivers/iommu/iommufd/pages.c
1809
iommu_unmap_nofail(domain, iopt_area_iova(area),
drivers/iommu/iommufd/pages.c
1824
struct iommu_domain *domain)
drivers/iommu/iommufd/pages.c
1829
__iopt_area_unfill_domain(area, pages, domain,
drivers/iommu/iommufd/pages.c
1841
int iopt_area_fill_domain(struct iopt_area *area, struct iommu_domain *domain)
drivers/iommu/iommufd/pages.c
1859
rc = batch_to_domain(&pfns.batch, domain, area,
drivers/iommu/iommufd/pages.c
1877
iopt_area_unfill_partial_domain(area, area->pages, domain,
drivers/iommu/iommufd/pages.c
1899
struct iommu_domain *domain;
drivers/iommu/iommufd/pages.c
1926
xa_for_each(&area->iopt->domains, index, domain) {
drivers/iommu/iommufd/pages.c
1927
rc = batch_to_domain(&pfns.batch, domain, area,
drivers/iommu/iommufd/pages.c
1952
xa_for_each(&area->iopt->domains, unmap_index, domain) {
drivers/iommu/iommufd/pages.c
1968
area, domain, iopt_area_index(area),
drivers/iommu/iommufd/pages.c
1971
iopt_area_unfill_partial_domain(area, pages, domain,
drivers/iommu/iommufd/pages.c
1995
struct iommu_domain *domain;
drivers/iommu/iommufd/pages.c
2004
xa_for_each(&iopt->domains, index, domain) {
drivers/iommu/iommufd/pages.c
2005
if (domain == area->storage_domain)
drivers/iommu/iommufd/pages.c
2010
area, domain, iopt_area_index(area),
drivers/iommu/iommufd/pages.c
225
static void iommu_unmap_nofail(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/iommufd/pages.c
230
ret = iommu_unmap(domain, iova, size);
drivers/iommu/iommufd/pages.c
242
struct iommu_domain *domain,
drivers/iommu/iommufd/pages.c
248
iommu_unmap_nofail(domain, start_iova,
drivers/iommu/iommufd/pages.c
408
struct iommu_domain *domain,
drivers/iommu/iommufd/pages.c
425
phys = iommu_iova_to_phys(domain, iova) - page_offset;
drivers/iommu/iommufd/pages.c
434
static struct page **raw_pages_from_domain(struct iommu_domain *domain,
drivers/iommu/iommufd/pages.c
448
phys = iommu_iova_to_phys(domain, iova) - page_offset;
drivers/iommu/iommufd/pages.c
459
struct iommu_domain *domain,
drivers/iommu/iommufd/pages.c
467
batch_from_domain(batch, domain, area, start_index, last_index);
drivers/iommu/iommufd/pages.c
479
static int batch_iommu_map_small(struct iommu_domain *domain,
drivers/iommu/iommufd/pages.c
491
rc = iommu_map(domain, iova, paddr, PAGE_SIZE, prot,
drivers/iommu/iommufd/pages.c
503
iommu_unmap_nofail(domain, start_iova, iova - start_iova);
drivers/iommu/iommufd/pages.c
507
static int batch_to_domain(struct pfn_batch *batch, struct iommu_domain *domain,
drivers/iommu/iommufd/pages.c
536
domain, iova,
drivers/iommu/iommufd/pages.c
540
rc = iommu_map(domain, iova,
drivers/iommu/iommufd/pages.c
553
iommu_unmap_nofail(domain, start_iova, iova - start_iova);
drivers/iommu/iommufd/selftest.c
1000
*mock_nested = to_mock_nested(hwpt->domain);
drivers/iommu/iommufd/selftest.c
117
struct iommu_domain domain;
drivers/iommu/iommufd/selftest.c
1221
page_size = 1 << __ffs(mock->domain.pgsize_bitmap);
drivers/iommu/iommufd/selftest.c
123
PT_IOMMU_CHECK_DOMAIN(struct mock_iommu_domain, iommu, domain);
drivers/iommu/iommufd/selftest.c
124
PT_IOMMU_CHECK_DOMAIN(struct mock_iommu_domain, amdv1.iommu, domain);
drivers/iommu/iommufd/selftest.c
1248
io_phys = mock->domain.ops->iova_to_phys(&mock->domain, iova);
drivers/iommu/iommufd/selftest.c
127
to_mock_domain(struct iommu_domain *domain)
drivers/iommu/iommufd/selftest.c
129
return container_of(domain, struct mock_iommu_domain, domain);
drivers/iommu/iommufd/selftest.c
1310
mock_nested = to_mock_nested(hwpt->domain);
drivers/iommu/iommufd/selftest.c
133
struct iommu_domain domain;
drivers/iommu/iommufd/selftest.c
139
to_mock_nested(struct iommu_domain *domain)
drivers/iommu/iommufd/selftest.c
141
return container_of(domain, struct mock_iommu_domain_nested, domain);
drivers/iommu/iommufd/selftest.c
186
struct iommu_domain *domain;
drivers/iommu/iommufd/selftest.c
1861
attached_domain = handle->domain;
drivers/iommu/iommufd/selftest.c
1876
if (attached_domain != hwpt->domain)
drivers/iommu/iommufd/selftest.c
212
static int mock_domain_nop_attach(struct iommu_domain *domain,
drivers/iommu/iommufd/selftest.c
220
if (domain->dirty_ops && (mdev->flags & MOCK_FLAGS_DEVICE_NO_DIRTY))
drivers/iommu/iommufd/selftest.c
224
if (domain->type == IOMMU_DOMAIN_NESTED) {
drivers/iommu/iommufd/selftest.c
225
new_viommu = to_mock_nested(domain)->mock_viommu;
drivers/iommu/iommufd/selftest.c
240
rc = mock_dev_enable_iopf(dev, domain);
drivers/iommu/iommufd/selftest.c
244
mock_dev_disable_iopf(dev, mdev->domain);
drivers/iommu/iommufd/selftest.c
245
mdev->domain = domain;
drivers/iommu/iommufd/selftest.c
250
static int mock_domain_set_dev_pasid_nop(struct iommu_domain *domain,
drivers/iommu/iommufd/selftest.c
268
if (domain->type == IOMMU_DOMAIN_BLOCKED) {
drivers/iommu/iommufd/selftest.c
283
rc = mock_dev_enable_iopf(dev, domain);
drivers/iommu/iommufd/selftest.c
322
static int mock_domain_set_dirty_tracking(struct iommu_domain *domain,
drivers/iommu/iommufd/selftest.c
325
struct mock_iommu_domain *mock = to_mock_domain(domain);
drivers/iommu/iommufd/selftest.c
328
if (enable && !domain->dirty_ops)
drivers/iommu/iommufd/selftest.c
359
mock_nested->domain.ops = &domain_nested_ops;
drivers/iommu/iommufd/selftest.c
360
mock_nested->domain.type = IOMMU_DOMAIN_NESTED;
drivers/iommu/iommufd/selftest.c
385
return &mock_nested->domain;
drivers/iommu/iommufd/selftest.c
388
static void mock_domain_free(struct iommu_domain *domain)
drivers/iommu/iommufd/selftest.c
390
struct mock_iommu_domain *mock = to_mock_domain(domain);
drivers/iommu/iommufd/selftest.c
396
static void mock_iotlb_sync(struct iommu_domain *domain,
drivers/iommu/iommufd/selftest.c
447
mock->domain.type = IOMMU_DOMAIN_UNMANAGED;
drivers/iommu/iommufd/selftest.c
461
mock->domain.ops = &amdv1_mock_huge_ops;
drivers/iommu/iommufd/selftest.c
463
mock->domain.ops = &amdv1_mock_ops;
drivers/iommu/iommufd/selftest.c
474
mock->domain.pgsize_bitmap = MOCK_HUGE_PAGE_SIZE |
drivers/iommu/iommufd/selftest.c
477
mock->domain.dirty_ops = &amdv1_mock_dirty_ops;
drivers/iommu/iommufd/selftest.c
490
mock->domain.ops = &amdv1_ops;
drivers/iommu/iommufd/selftest.c
495
mock->domain.dirty_ops = &amdv1_dirty_ops;
drivers/iommu/iommufd/selftest.c
50
static int mock_dev_enable_iopf(struct device *dev, struct iommu_domain *domain);
drivers/iommu/iommufd/selftest.c
507
WARN_ON(mock->domain.geometry.aperture_start != 0);
drivers/iommu/iommufd/selftest.c
508
WARN_ON(mock->domain.geometry.aperture_end < MOCK_APERTURE_LAST);
drivers/iommu/iommufd/selftest.c
51
static void mock_dev_disable_iopf(struct device *dev, struct iommu_domain *domain);
drivers/iommu/iommufd/selftest.c
510
mock->domain.geometry.aperture_start = MOCK_APERTURE_START;
drivers/iommu/iommufd/selftest.c
511
mock->domain.geometry.aperture_end = MOCK_APERTURE_LAST;
drivers/iommu/iommufd/selftest.c
551
return &mock->domain;
drivers/iommu/iommufd/selftest.c
590
static int mock_dev_enable_iopf(struct device *dev, struct iommu_domain *domain)
drivers/iommu/iommufd/selftest.c
595
if (!domain || !domain->iopf_handler)
drivers/iommu/iommufd/selftest.c
615
static void mock_dev_disable_iopf(struct device *dev, struct iommu_domain *domain)
drivers/iommu/iommufd/selftest.c
619
if (!domain || !domain->iopf_handler)
drivers/iommu/iommufd/selftest.c
659
return &mock_nested->domain;
drivers/iommu/iommufd/selftest.c
784
if (base_addr_pa != iommu_iova_to_phys(&mock_viommu->s2_parent->domain,
drivers/iommu/iommufd/selftest.c
897
static void mock_domain_free_nested(struct iommu_domain *domain)
drivers/iommu/iommufd/selftest.c
899
kfree(to_mock_nested(domain));
drivers/iommu/iommufd/selftest.c
903
mock_domain_cache_invalidate_user(struct iommu_domain *domain,
drivers/iommu/iommufd/selftest.c
906
struct mock_iommu_domain_nested *mock_nested = to_mock_nested(domain);
drivers/iommu/iommufd/selftest.c
977
if (hwpt->domain->type != IOMMU_DOMAIN_UNMANAGED ||
drivers/iommu/iommufd/selftest.c
978
hwpt->domain->owner != &mock_ops) {
drivers/iommu/iommufd/selftest.c
982
*mock = to_mock_domain(hwpt->domain);
drivers/iommu/iommufd/selftest.c
995
if (hwpt->domain->type != IOMMU_DOMAIN_NESTED ||
drivers/iommu/iommufd/selftest.c
996
hwpt->domain->ops != &domain_nested_ops) {
drivers/iommu/iommufd/vfio_compat.c
355
struct iommu_domain *domain;
drivers/iommu/iommufd/vfio_compat.c
359
xa_for_each(&iopt->domains, index, domain)
drivers/iommu/iommufd/vfio_compat.c
360
pgsize_bitmap &= domain->pgsize_bitmap;
drivers/iommu/iommufd/viommu.c
92
rc = ops->viommu_init(viommu, hwpt_paging->common.domain,
drivers/iommu/ipmmu-vmsa.c
211
static u32 ipmmu_ctx_read_root(struct ipmmu_vmsa_domain *domain,
drivers/iommu/ipmmu-vmsa.c
214
return ipmmu_ctx_read(domain->mmu->root, domain->context_id, reg);
drivers/iommu/ipmmu-vmsa.c
217
static void ipmmu_ctx_write_root(struct ipmmu_vmsa_domain *domain,
drivers/iommu/ipmmu-vmsa.c
220
ipmmu_ctx_write(domain->mmu->root, domain->context_id, reg, data);
drivers/iommu/ipmmu-vmsa.c
223
static void ipmmu_ctx_write_all(struct ipmmu_vmsa_domain *domain,
drivers/iommu/ipmmu-vmsa.c
226
if (domain->mmu != domain->mmu->root)
drivers/iommu/ipmmu-vmsa.c
227
ipmmu_ctx_write(domain->mmu, domain->context_id, reg, data);
drivers/iommu/ipmmu-vmsa.c
229
ipmmu_ctx_write(domain->mmu->root, domain->context_id, reg, data);
drivers/iommu/ipmmu-vmsa.c
254
static void ipmmu_tlb_sync(struct ipmmu_vmsa_domain *domain)
drivers/iommu/ipmmu-vmsa.c
260
false, domain, IMCTR))
drivers/iommu/ipmmu-vmsa.c
261
dev_err_ratelimited(domain->mmu->dev,
drivers/iommu/ipmmu-vmsa.c
265
static void ipmmu_tlb_invalidate(struct ipmmu_vmsa_domain *domain)
drivers/iommu/ipmmu-vmsa.c
269
reg = ipmmu_ctx_read_root(domain, IMCTR);
drivers/iommu/ipmmu-vmsa.c
271
ipmmu_ctx_write_all(domain, IMCTR, reg);
drivers/iommu/ipmmu-vmsa.c
273
ipmmu_tlb_sync(domain);
drivers/iommu/ipmmu-vmsa.c
279
static void ipmmu_utlb_enable(struct ipmmu_vmsa_domain *domain,
drivers/iommu/ipmmu-vmsa.c
282
struct ipmmu_vmsa_device *mmu = domain->mmu;
drivers/iommu/ipmmu-vmsa.c
292
ipmmu_imuctr_write(mmu, utlb, IMUCTR_TTSEL_MMU(domain->context_id) |
drivers/iommu/ipmmu-vmsa.c
294
mmu->utlb_ctx[utlb] = domain->context_id;
drivers/iommu/ipmmu-vmsa.c
300
static void ipmmu_utlb_disable(struct ipmmu_vmsa_domain *domain,
drivers/iommu/ipmmu-vmsa.c
303
struct ipmmu_vmsa_device *mmu = domain->mmu;
drivers/iommu/ipmmu-vmsa.c
311
struct ipmmu_vmsa_domain *domain = cookie;
drivers/iommu/ipmmu-vmsa.c
313
ipmmu_tlb_invalidate(domain);
drivers/iommu/ipmmu-vmsa.c
332
struct ipmmu_vmsa_domain *domain)
drivers/iommu/ipmmu-vmsa.c
341
mmu->domains[ret] = domain;
drivers/iommu/ipmmu-vmsa.c
364
static void ipmmu_domain_setup_context(struct ipmmu_vmsa_domain *domain)
drivers/iommu/ipmmu-vmsa.c
370
ttbr = domain->cfg.arm_lpae_s1_cfg.ttbr;
drivers/iommu/ipmmu-vmsa.c
371
ipmmu_ctx_write_root(domain, IMTTLBR0, ttbr);
drivers/iommu/ipmmu-vmsa.c
372
ipmmu_ctx_write_root(domain, IMTTUBR0, ttbr >> 32);
drivers/iommu/ipmmu-vmsa.c
379
if (domain->mmu->features->twobit_imttbcr_sl0)
drivers/iommu/ipmmu-vmsa.c
384
if (domain->mmu->features->cache_snoop)
drivers/iommu/ipmmu-vmsa.c
388
ipmmu_ctx_write_root(domain, IMTTBCR, IMTTBCR_EAE | tmp);
drivers/iommu/ipmmu-vmsa.c
391
ipmmu_ctx_write_root(domain, IMMAIR0,
drivers/iommu/ipmmu-vmsa.c
392
domain->cfg.arm_lpae_s1_cfg.mair);
drivers/iommu/ipmmu-vmsa.c
395
if (domain->mmu->features->setup_imbuscr)
drivers/iommu/ipmmu-vmsa.c
396
ipmmu_ctx_write_root(domain, IMBUSCR,
drivers/iommu/ipmmu-vmsa.c
397
ipmmu_ctx_read_root(domain, IMBUSCR) &
drivers/iommu/ipmmu-vmsa.c
404
ipmmu_ctx_write_root(domain, IMSTR, ipmmu_ctx_read_root(domain, IMSTR));
drivers/iommu/ipmmu-vmsa.c
413
ipmmu_ctx_write_all(domain, IMCTR,
drivers/iommu/ipmmu-vmsa.c
417
static int ipmmu_domain_init_context(struct ipmmu_vmsa_domain *domain)
drivers/iommu/ipmmu-vmsa.c
432
domain->cfg.quirks = IO_PGTABLE_QUIRK_ARM_NS;
drivers/iommu/ipmmu-vmsa.c
433
domain->cfg.pgsize_bitmap = domain->io_domain.pgsize_bitmap;
drivers/iommu/ipmmu-vmsa.c
434
domain->cfg.ias = 32;
drivers/iommu/ipmmu-vmsa.c
435
domain->cfg.oas = 40;
drivers/iommu/ipmmu-vmsa.c
436
domain->cfg.tlb = &ipmmu_flush_ops;
drivers/iommu/ipmmu-vmsa.c
437
domain->io_domain.geometry.aperture_end = DMA_BIT_MASK(32);
drivers/iommu/ipmmu-vmsa.c
438
domain->io_domain.geometry.force_aperture = true;
drivers/iommu/ipmmu-vmsa.c
443
domain->cfg.coherent_walk = false;
drivers/iommu/ipmmu-vmsa.c
444
domain->cfg.iommu_dev = domain->mmu->root->dev;
drivers/iommu/ipmmu-vmsa.c
449
ret = ipmmu_domain_allocate_context(domain->mmu->root, domain);
drivers/iommu/ipmmu-vmsa.c
453
domain->context_id = ret;
drivers/iommu/ipmmu-vmsa.c
455
domain->iop = alloc_io_pgtable_ops(ARM_32_LPAE_S1, &domain->cfg,
drivers/iommu/ipmmu-vmsa.c
456
domain);
drivers/iommu/ipmmu-vmsa.c
457
if (!domain->iop) {
drivers/iommu/ipmmu-vmsa.c
458
ipmmu_domain_free_context(domain->mmu->root,
drivers/iommu/ipmmu-vmsa.c
459
domain->context_id);
drivers/iommu/ipmmu-vmsa.c
463
ipmmu_domain_setup_context(domain);
drivers/iommu/ipmmu-vmsa.c
467
static void ipmmu_domain_destroy_context(struct ipmmu_vmsa_domain *domain)
drivers/iommu/ipmmu-vmsa.c
469
if (!domain->mmu)
drivers/iommu/ipmmu-vmsa.c
478
ipmmu_ctx_write_all(domain, IMCTR, IMCTR_FLUSH);
drivers/iommu/ipmmu-vmsa.c
479
ipmmu_tlb_sync(domain);
drivers/iommu/ipmmu-vmsa.c
480
ipmmu_domain_free_context(domain->mmu->root, domain->context_id);
drivers/iommu/ipmmu-vmsa.c
487
static irqreturn_t ipmmu_domain_irq(struct ipmmu_vmsa_domain *domain)
drivers/iommu/ipmmu-vmsa.c
490
struct ipmmu_vmsa_device *mmu = domain->mmu;
drivers/iommu/ipmmu-vmsa.c
494
status = ipmmu_ctx_read_root(domain, IMSTR);
drivers/iommu/ipmmu-vmsa.c
498
iova = ipmmu_ctx_read_root(domain, IMELAR);
drivers/iommu/ipmmu-vmsa.c
500
iova |= (u64)ipmmu_ctx_read_root(domain, IMEUAR) << 32;
drivers/iommu/ipmmu-vmsa.c
508
ipmmu_ctx_write_root(domain, IMSTR, 0);
drivers/iommu/ipmmu-vmsa.c
527
if (!report_iommu_fault(&domain->io_domain, mmu->dev, iova, 0))
drivers/iommu/ipmmu-vmsa.c
567
struct ipmmu_vmsa_domain *domain;
drivers/iommu/ipmmu-vmsa.c
569
domain = kzalloc_obj(*domain);
drivers/iommu/ipmmu-vmsa.c
570
if (!domain)
drivers/iommu/ipmmu-vmsa.c
573
mutex_init(&domain->mutex);
drivers/iommu/ipmmu-vmsa.c
574
domain->io_domain.pgsize_bitmap = SZ_1G | SZ_2M | SZ_4K;
drivers/iommu/ipmmu-vmsa.c
576
return &domain->io_domain;
drivers/iommu/ipmmu-vmsa.c
581
struct ipmmu_vmsa_domain *domain = to_vmsa_domain(io_domain);
drivers/iommu/ipmmu-vmsa.c
587
ipmmu_domain_destroy_context(domain);
drivers/iommu/ipmmu-vmsa.c
588
free_io_pgtable_ops(domain->iop);
drivers/iommu/ipmmu-vmsa.c
589
kfree(domain);
drivers/iommu/ipmmu-vmsa.c
597
struct ipmmu_vmsa_domain *domain = to_vmsa_domain(io_domain);
drivers/iommu/ipmmu-vmsa.c
606
mutex_lock(&domain->mutex);
drivers/iommu/ipmmu-vmsa.c
608
if (!domain->mmu) {
drivers/iommu/ipmmu-vmsa.c
610
domain->mmu = mmu;
drivers/iommu/ipmmu-vmsa.c
611
ret = ipmmu_domain_init_context(domain);
drivers/iommu/ipmmu-vmsa.c
614
domain->mmu = NULL;
drivers/iommu/ipmmu-vmsa.c
617
domain->context_id);
drivers/iommu/ipmmu-vmsa.c
619
} else if (domain->mmu != mmu) {
drivers/iommu/ipmmu-vmsa.c
626
dev_info(dev, "Reusing IPMMU context %u\n", domain->context_id);
drivers/iommu/ipmmu-vmsa.c
628
mutex_unlock(&domain->mutex);
drivers/iommu/ipmmu-vmsa.c
634
ipmmu_utlb_enable(domain, fwspec->ids[i]);
drivers/iommu/ipmmu-vmsa.c
644
struct ipmmu_vmsa_domain *domain;
drivers/iommu/ipmmu-vmsa.c
650
domain = to_vmsa_domain(old);
drivers/iommu/ipmmu-vmsa.c
652
ipmmu_utlb_disable(domain, fwspec->ids[i]);
drivers/iommu/ipmmu-vmsa.c
673
struct ipmmu_vmsa_domain *domain = to_vmsa_domain(io_domain);
drivers/iommu/ipmmu-vmsa.c
675
return domain->iop->map_pages(domain->iop, iova, paddr, pgsize, pgcount,
drivers/iommu/ipmmu-vmsa.c
683
struct ipmmu_vmsa_domain *domain = to_vmsa_domain(io_domain);
drivers/iommu/ipmmu-vmsa.c
685
return domain->iop->unmap_pages(domain->iop, iova, pgsize, pgcount, gather);
drivers/iommu/ipmmu-vmsa.c
690
struct ipmmu_vmsa_domain *domain = to_vmsa_domain(io_domain);
drivers/iommu/ipmmu-vmsa.c
692
if (domain->mmu)
drivers/iommu/ipmmu-vmsa.c
693
ipmmu_tlb_flush_all(domain);
drivers/iommu/ipmmu-vmsa.c
705
struct ipmmu_vmsa_domain *domain = to_vmsa_domain(io_domain);
drivers/iommu/ipmmu-vmsa.c
709
return domain->iop->iova_to_phys(domain->iop, iova);
drivers/iommu/msm_iommu.c
315
priv->domain.pgsize_bitmap = MSM_IOMMU_PGSIZES;
drivers/iommu/msm_iommu.c
317
priv->domain.geometry.aperture_start = 0;
drivers/iommu/msm_iommu.c
318
priv->domain.geometry.aperture_end = (1ULL << 32) - 1;
drivers/iommu/msm_iommu.c
319
priv->domain.geometry.force_aperture = true;
drivers/iommu/msm_iommu.c
321
return &priv->domain;
drivers/iommu/msm_iommu.c
328
static void msm_iommu_domain_free(struct iommu_domain *domain)
drivers/iommu/msm_iommu.c
334
priv = to_msm_priv(domain);
drivers/iommu/msm_iommu.c
344
.pgsize_bitmap = priv->domain.pgsize_bitmap,
drivers/iommu/msm_iommu.c
394
static int msm_iommu_attach_dev(struct iommu_domain *domain, struct device *dev,
drivers/iommu/msm_iommu.c
400
struct msm_priv *priv = to_msm_priv(domain);
drivers/iommu/msm_iommu.c
42
struct iommu_domain domain;
drivers/iommu/msm_iommu.c
486
static int msm_iommu_map(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/msm_iommu.c
490
struct msm_priv *priv = to_msm_priv(domain);
drivers/iommu/msm_iommu.c
502
static int msm_iommu_sync_map(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/msm_iommu.c
505
struct msm_priv *priv = to_msm_priv(domain);
drivers/iommu/msm_iommu.c
51
return container_of(dom, struct msm_priv, domain);
drivers/iommu/msm_iommu.c
511
static size_t msm_iommu_unmap(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/msm_iommu.c
515
struct msm_priv *priv = to_msm_priv(domain);
drivers/iommu/msm_iommu.c
526
static phys_addr_t msm_iommu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/msm_iommu.c
538
priv = to_msm_priv(domain);
drivers/iommu/mtk_iommu.c
289
struct iommu_domain domain;
drivers/iommu/mtk_iommu.c
387
return container_of(dom, struct mtk_iommu_domain, domain);
drivers/iommu/mtk_iommu.c
522
if (!dom || report_iommu_fault(&dom->domain, bank->parent_dev, fault_iova,
drivers/iommu/mtk_iommu.c
665
dom->domain.pgsize_bitmap = share_dom->domain.pgsize_bitmap;
drivers/iommu/mtk_iommu.c
673
.pgsize_bitmap = dom->domain.pgsize_bitmap,
drivers/iommu/mtk_iommu.c
697
dom->domain.geometry.aperture_start = region->iova_base;
drivers/iommu/mtk_iommu.c
698
dom->domain.geometry.aperture_end = region->iova_base + region->size - 1;
drivers/iommu/mtk_iommu.c
699
dom->domain.geometry.force_aperture = true;
drivers/iommu/mtk_iommu.c
711
dom->domain.pgsize_bitmap = SZ_4K | SZ_64K | SZ_1M | SZ_16M;
drivers/iommu/mtk_iommu.c
713
return &dom->domain;
drivers/iommu/mtk_iommu.c
716
static void mtk_iommu_domain_free(struct iommu_domain *domain)
drivers/iommu/mtk_iommu.c
718
kfree(to_mtk_domain(domain));
drivers/iommu/mtk_iommu.c
721
static int mtk_iommu_attach_device(struct iommu_domain *domain,
drivers/iommu/mtk_iommu.c
725
struct mtk_iommu_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu.c
811
static int mtk_iommu_map(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/mtk_iommu.c
815
struct mtk_iommu_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu.c
825
static size_t mtk_iommu_unmap(struct iommu_domain *domain,
drivers/iommu/mtk_iommu.c
829
struct mtk_iommu_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu.c
835
static void mtk_iommu_flush_iotlb_all(struct iommu_domain *domain)
drivers/iommu/mtk_iommu.c
837
struct mtk_iommu_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu.c
843
static void mtk_iommu_iotlb_sync(struct iommu_domain *domain,
drivers/iommu/mtk_iommu.c
846
struct mtk_iommu_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu.c
852
static int mtk_iommu_sync_map(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/mtk_iommu.c
855
struct mtk_iommu_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu.c
861
static phys_addr_t mtk_iommu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/mtk_iommu.c
864
struct mtk_iommu_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu_v1.c
123
struct iommu_domain domain;
drivers/iommu/mtk_iommu_v1.c
145
return container_of(dom, struct mtk_iommu_v1_domain, domain);
drivers/iommu/mtk_iommu_v1.c
225
if (report_iommu_fault(&dom->domain, data->dev, fault_iova,
drivers/iommu/mtk_iommu_v1.c
291
dom->domain.pgsize_bitmap = MT2701_IOMMU_PAGE_SIZE;
drivers/iommu/mtk_iommu_v1.c
293
return &dom->domain;
drivers/iommu/mtk_iommu_v1.c
296
static void mtk_iommu_v1_domain_free(struct iommu_domain *domain)
drivers/iommu/mtk_iommu_v1.c
298
struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu_v1.c
303
kfree(to_mtk_domain(domain));
drivers/iommu/mtk_iommu_v1.c
306
static int mtk_iommu_v1_attach_device(struct iommu_domain *domain,
drivers/iommu/mtk_iommu_v1.c
311
struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu_v1.c
317
if (mtk_mapping->domain != domain)
drivers/iommu/mtk_iommu_v1.c
352
static int mtk_iommu_v1_map(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/mtk_iommu_v1.c
356
struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu_v1.c
378
static size_t mtk_iommu_v1_unmap(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/mtk_iommu_v1.c
382
struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu_v1.c
396
static phys_addr_t mtk_iommu_v1_iova_to_phys(struct iommu_domain *domain, dma_addr_t iova)
drivers/iommu/mtk_iommu_v1.c
398
struct mtk_iommu_v1_domain *dom = to_mtk_domain(domain);
drivers/iommu/mtk_iommu_v1.c
40
struct iommu_domain *domain;
drivers/iommu/omap-iommu-debug.c
27
return !obj->domain;
drivers/iommu/omap-iommu.c
1018
if (obj->domain && obj->iopgd)
drivers/iommu/omap-iommu.c
1079
if (obj->domain)
drivers/iommu/omap-iommu.c
1297
static int omap_iommu_map(struct iommu_domain *domain, unsigned long da,
drivers/iommu/omap-iommu.c
1301
struct omap_iommu_domain *omap_domain = to_omap_domain(domain);
drivers/iommu/omap-iommu.c
1344
static size_t omap_iommu_unmap(struct iommu_domain *domain, unsigned long da,
drivers/iommu/omap-iommu.c
1347
struct omap_iommu_domain *omap_domain = to_omap_domain(domain);
drivers/iommu/omap-iommu.c
1433
static int omap_iommu_attach_dev(struct iommu_domain *domain,
drivers/iommu/omap-iommu.c
1437
struct omap_iommu_domain *omap_domain = to_omap_domain(domain);
drivers/iommu/omap-iommu.c
1474
oiommu->domain = domain;
drivers/iommu/omap-iommu.c
1489
oiommu->domain = NULL;
drivers/iommu/omap-iommu.c
1529
oiommu->domain = NULL;
drivers/iommu/omap-iommu.c
1572
omap_domain->domain.pgsize_bitmap = OMAP_IOMMU_PGSIZES;
drivers/iommu/omap-iommu.c
1574
omap_domain->domain.geometry.aperture_start = 0;
drivers/iommu/omap-iommu.c
1575
omap_domain->domain.geometry.aperture_end = (1ULL << 32) - 1;
drivers/iommu/omap-iommu.c
1576
omap_domain->domain.geometry.force_aperture = true;
drivers/iommu/omap-iommu.c
1578
return &omap_domain->domain;
drivers/iommu/omap-iommu.c
1581
static void omap_iommu_domain_free(struct iommu_domain *domain)
drivers/iommu/omap-iommu.c
1583
struct omap_iommu_domain *omap_domain = to_omap_domain(domain);
drivers/iommu/omap-iommu.c
1595
static phys_addr_t omap_iommu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/omap-iommu.c
1598
struct omap_iommu_domain *omap_domain = to_omap_domain(domain);
drivers/iommu/omap-iommu.c
61
return container_of(dom, struct omap_iommu_domain, domain);
drivers/iommu/omap-iommu.c
809
struct iommu_domain *domain = obj->domain;
drivers/iommu/omap-iommu.c
810
struct omap_iommu_domain *omap_domain = to_omap_domain(domain);
drivers/iommu/omap-iommu.c
820
if (!report_iommu_fault(domain, obj->dev, da, 0))
drivers/iommu/omap-iommu.c
949
int omap_iommu_domain_deactivate(struct iommu_domain *domain)
drivers/iommu/omap-iommu.c
951
struct omap_iommu_domain *omap_domain = to_omap_domain(domain);
drivers/iommu/omap-iommu.c
978
int omap_iommu_domain_activate(struct iommu_domain *domain)
drivers/iommu/omap-iommu.c
980
struct omap_iommu_domain *omap_domain = to_omap_domain(domain);
drivers/iommu/omap-iommu.h
51
struct iommu_domain domain;
drivers/iommu/omap-iommu.h
59
struct iommu_domain *domain;
drivers/iommu/riscv/iommu.c
1070
struct riscv_iommu_domain *domain = iommu_domain_to_riscv(iommu_domain);
drivers/iommu/riscv/iommu.c
1072
riscv_iommu_iotlb_inval(domain, 0, ULONG_MAX);
drivers/iommu/riscv/iommu.c
1078
struct riscv_iommu_domain *domain = iommu_domain_to_riscv(iommu_domain);
drivers/iommu/riscv/iommu.c
1080
riscv_iommu_iotlb_inval(domain, gather->start, gather->end);
drivers/iommu/riscv/iommu.c
1090
static void riscv_iommu_pte_free(struct riscv_iommu_domain *domain,
drivers/iommu/riscv/iommu.c
1106
riscv_iommu_pte_free(domain, pte, freelist);
drivers/iommu/riscv/iommu.c
1115
static unsigned long *riscv_iommu_pte_alloc(struct riscv_iommu_domain *domain,
drivers/iommu/riscv/iommu.c
1119
unsigned long *ptr = domain->pgd_root;
drivers/iommu/riscv/iommu.c
1121
int level = domain->pgd_mode - RISCV_IOMMU_DC_FSC_IOSATP_MODE_SV39 + 2;
drivers/iommu/riscv/iommu.c
1149
addr = iommu_alloc_pages_node_sz(domain->numa_node, gfp,
drivers/iommu/riscv/iommu.c
1166
static unsigned long *riscv_iommu_pte_fetch(struct riscv_iommu_domain *domain,
drivers/iommu/riscv/iommu.c
1169
unsigned long *ptr = domain->pgd_root;
drivers/iommu/riscv/iommu.c
1171
int level = domain->pgd_mode - RISCV_IOMMU_DC_FSC_IOSATP_MODE_SV39 + 2;
drivers/iommu/riscv/iommu.c
1195
struct riscv_iommu_domain *domain = iommu_domain_to_riscv(iommu_domain);
drivers/iommu/riscv/iommu.c
1204
else if (domain->amo_enabled)
drivers/iommu/riscv/iommu.c
1210
ptr = riscv_iommu_pte_alloc(domain, iova, pgsize, gfp);
drivers/iommu/riscv/iommu.c
1221
riscv_iommu_pte_free(domain, old, &freelist);
drivers/iommu/riscv/iommu.c
1239
riscv_iommu_iotlb_inval(domain, 0, ULONG_MAX);
drivers/iommu/riscv/iommu.c
1251
struct riscv_iommu_domain *domain = iommu_domain_to_riscv(iommu_domain);
drivers/iommu/riscv/iommu.c
1258
ptr = riscv_iommu_pte_fetch(domain, iova, &pte_size);
drivers/iommu/riscv/iommu.c
1270
iommu_iotlb_gather_add_page(&domain->domain, gather, iova,
drivers/iommu/riscv/iommu.c
1283
struct riscv_iommu_domain *domain = iommu_domain_to_riscv(iommu_domain);
drivers/iommu/riscv/iommu.c
1287
ptr = riscv_iommu_pte_fetch(domain, iova, &pte_size);
drivers/iommu/riscv/iommu.c
1296
struct riscv_iommu_domain *domain = iommu_domain_to_riscv(iommu_domain);
drivers/iommu/riscv/iommu.c
1297
const unsigned long pfn = virt_to_pfn(domain->pgd_root);
drivers/iommu/riscv/iommu.c
1299
WARN_ON(!list_empty(&domain->bonds));
drivers/iommu/riscv/iommu.c
1301
if ((int)domain->pscid > 0)
drivers/iommu/riscv/iommu.c
1302
ida_free(&riscv_iommu_pscids, domain->pscid);
drivers/iommu/riscv/iommu.c
1304
riscv_iommu_pte_free(domain, _io_pte_entry(pfn, _PAGE_TABLE), NULL);
drivers/iommu/riscv/iommu.c
1305
kfree(domain);
drivers/iommu/riscv/iommu.c
1327
struct riscv_iommu_domain *domain = iommu_domain_to_riscv(iommu_domain);
drivers/iommu/riscv/iommu.c
1332
if (!riscv_iommu_pt_supported(iommu, domain->pgd_mode))
drivers/iommu/riscv/iommu.c
1335
fsc = FIELD_PREP(RISCV_IOMMU_PC_FSC_MODE, domain->pgd_mode) |
drivers/iommu/riscv/iommu.c
1336
FIELD_PREP(RISCV_IOMMU_PC_FSC_PPN, virt_to_pfn(domain->pgd_root));
drivers/iommu/riscv/iommu.c
1337
ta = FIELD_PREP(RISCV_IOMMU_PC_TA_PSCID, domain->pscid) |
drivers/iommu/riscv/iommu.c
1340
if (riscv_iommu_bond_link(domain, dev))
drivers/iommu/riscv/iommu.c
1344
riscv_iommu_bond_unlink(info->domain, dev);
drivers/iommu/riscv/iommu.c
1345
info->domain = domain;
drivers/iommu/riscv/iommu.c
1362
struct riscv_iommu_domain *domain;
drivers/iommu/riscv/iommu.c
1383
domain = kzalloc_obj(*domain);
drivers/iommu/riscv/iommu.c
1384
if (!domain)
drivers/iommu/riscv/iommu.c
1387
INIT_LIST_HEAD_RCU(&domain->bonds);
drivers/iommu/riscv/iommu.c
1388
spin_lock_init(&domain->lock);
drivers/iommu/riscv/iommu.c
1389
domain->numa_node = dev_to_node(iommu->dev);
drivers/iommu/riscv/iommu.c
1390
domain->amo_enabled = !!(iommu->caps & RISCV_IOMMU_CAPABILITIES_AMO_HWAD);
drivers/iommu/riscv/iommu.c
1391
domain->pgd_mode = pgd_mode;
drivers/iommu/riscv/iommu.c
1392
domain->pgd_root = iommu_alloc_pages_node_sz(domain->numa_node,
drivers/iommu/riscv/iommu.c
1394
if (!domain->pgd_root) {
drivers/iommu/riscv/iommu.c
1395
kfree(domain);
drivers/iommu/riscv/iommu.c
1399
domain->pscid = ida_alloc_range(&riscv_iommu_pscids, 1,
drivers/iommu/riscv/iommu.c
1401
if (domain->pscid < 0) {
drivers/iommu/riscv/iommu.c
1402
iommu_free_pages(domain->pgd_root);
drivers/iommu/riscv/iommu.c
1403
kfree(domain);
drivers/iommu/riscv/iommu.c
1419
domain->domain.geometry.aperture_start = 0;
drivers/iommu/riscv/iommu.c
1420
domain->domain.geometry.aperture_end = va_mask;
drivers/iommu/riscv/iommu.c
1421
domain->domain.geometry.force_aperture = true;
drivers/iommu/riscv/iommu.c
1422
domain->domain.pgsize_bitmap = va_mask & (SZ_4K | SZ_2M | SZ_1G | SZ_512G);
drivers/iommu/riscv/iommu.c
1424
domain->domain.ops = &riscv_iommu_paging_domain_ops;
drivers/iommu/riscv/iommu.c
1426
return &domain->domain;
drivers/iommu/riscv/iommu.c
1438
riscv_iommu_bond_unlink(info->domain, dev);
drivers/iommu/riscv/iommu.c
1439
info->domain = NULL;
drivers/iommu/riscv/iommu.c
1459
riscv_iommu_bond_unlink(info->domain, dev);
drivers/iommu/riscv/iommu.c
1460
info->domain = NULL;
drivers/iommu/riscv/iommu.c
809
struct iommu_domain domain;
drivers/iommu/riscv/iommu.c
820
container_of(iommu_domain, struct riscv_iommu_domain, domain)
drivers/iommu/riscv/iommu.c
824
struct riscv_iommu_domain *domain;
drivers/iommu/riscv/iommu.c
849
static int riscv_iommu_bond_link(struct riscv_iommu_domain *domain,
drivers/iommu/riscv/iommu.c
866
spin_lock(&domain->lock);
drivers/iommu/riscv/iommu.c
867
list_for_each(bonds, &domain->bonds)
drivers/iommu/riscv/iommu.c
871
spin_unlock(&domain->lock);
drivers/iommu/riscv/iommu.c
879
static void riscv_iommu_bond_unlink(struct riscv_iommu_domain *domain,
drivers/iommu/riscv/iommu.c
887
if (!domain)
drivers/iommu/riscv/iommu.c
890
spin_lock(&domain->lock);
drivers/iommu/riscv/iommu.c
891
list_for_each_entry(bond, &domain->bonds, list) {
drivers/iommu/riscv/iommu.c
901
spin_unlock(&domain->lock);
drivers/iommu/riscv/iommu.c
910
riscv_iommu_cmd_inval_set_pscid(&cmd, domain->pscid);
drivers/iommu/riscv/iommu.c
925
static void riscv_iommu_iotlb_inval(struct riscv_iommu_domain *domain,
drivers/iommu/riscv/iommu.c
961
list_for_each_entry_rcu(bond, &domain->bonds, list) {
drivers/iommu/riscv/iommu.c
974
riscv_iommu_cmd_inval_set_pscid(&cmd, domain->pscid);
drivers/iommu/riscv/iommu.c
987
list_for_each_entry_rcu(bond, &domain->bonds, list) {
drivers/iommu/rockchip-iommu.c
1008
static int rk_iommu_attach_device(struct iommu_domain *domain,
drivers/iommu/rockchip-iommu.c
1012
struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
drivers/iommu/rockchip-iommu.c
1027
if (iommu->domain == domain)
drivers/iommu/rockchip-iommu.c
1034
iommu->domain = domain;
drivers/iommu/rockchip-iommu.c
1054
iommu->domain));
drivers/iommu/rockchip-iommu.c
1094
rk_domain->domain.pgsize_bitmap = RK_IOMMU_PGSIZE_BITMAP;
drivers/iommu/rockchip-iommu.c
1096
rk_domain->domain.geometry.aperture_start = 0;
drivers/iommu/rockchip-iommu.c
1097
rk_domain->domain.geometry.aperture_end = DMA_BIT_MASK(32);
drivers/iommu/rockchip-iommu.c
1098
rk_domain->domain.geometry.force_aperture = true;
drivers/iommu/rockchip-iommu.c
1100
return &rk_domain->domain;
drivers/iommu/rockchip-iommu.c
1110
static void rk_iommu_domain_free(struct iommu_domain *domain)
drivers/iommu/rockchip-iommu.c
1112
struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
drivers/iommu/rockchip-iommu.c
119
struct iommu_domain *domain; /* domain to which iommu is attached */
drivers/iommu/rockchip-iommu.c
1208
iommu->domain = &rk_identity_domain;
drivers/iommu/rockchip-iommu.c
1326
if (iommu->domain == &rk_identity_domain)
drivers/iommu/rockchip-iommu.c
1337
if (iommu->domain == &rk_identity_domain)
drivers/iommu/rockchip-iommu.c
140
return container_of(dom, struct rk_iommu_domain, domain);
drivers/iommu/rockchip-iommu.c
624
if (iommu->domain != &rk_identity_domain)
drivers/iommu/rockchip-iommu.c
625
report_iommu_fault(iommu->domain, iommu->dev, iova,
drivers/iommu/rockchip-iommu.c
651
static phys_addr_t rk_iommu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/rockchip-iommu.c
654
struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
drivers/iommu/rockchip-iommu.c
822
static int rk_iommu_map(struct iommu_domain *domain, unsigned long _iova,
drivers/iommu/rockchip-iommu.c
826
struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
drivers/iommu/rockchip-iommu.c
863
static size_t rk_iommu_unmap(struct iommu_domain *domain, unsigned long _iova,
drivers/iommu/rockchip-iommu.c
866
struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
drivers/iommu/rockchip-iommu.c
93
struct iommu_domain domain;
drivers/iommu/rockchip-iommu.c
930
struct iommu_domain *domain = iommu->domain;
drivers/iommu/rockchip-iommu.c
931
struct rk_iommu_domain *rk_domain = to_rk_domain(domain);
drivers/iommu/rockchip-iommu.c
976
rk_domain = to_rk_domain(iommu->domain);
drivers/iommu/rockchip-iommu.c
980
if (iommu->domain == identity_domain)
drivers/iommu/rockchip-iommu.c
983
iommu->domain = identity_domain;
drivers/iommu/s390-iommu.c
1025
static size_t s390_iommu_unmap_pages(struct iommu_domain *domain,
drivers/iommu/s390-iommu.c
1030
struct s390_domain *s390_domain = to_s390_domain(domain);
drivers/iommu/s390-iommu.c
1034
if (WARN_ON(iova < s390_domain->domain.geometry.aperture_start ||
drivers/iommu/s390-iommu.c
1035
(iova + size - 1) > s390_domain->domain.geometry.aperture_end))
drivers/iommu/s390-iommu.c
1135
static int s390_attach_dev_identity(struct iommu_domain *domain,
drivers/iommu/s390-iommu.c
1146
cc = s390_iommu_domain_reg_ioat(zdev, domain, &status);
drivers/iommu/s390-iommu.c
1150
zdev_s390_domain_update(zdev, domain);
drivers/iommu/s390-iommu.c
275
static void dma_cleanup_tables(struct s390_domain *domain)
drivers/iommu/s390-iommu.c
279
if (!domain->dma_table)
drivers/iommu/s390-iommu.c
28
struct iommu_domain domain;
drivers/iommu/s390-iommu.c
282
switch (domain->origin_type) {
drivers/iommu/s390-iommu.c
285
if (reg_entry_isvalid(domain->dma_table[rfx]))
drivers/iommu/s390-iommu.c
286
dma_free_rs_table(domain->dma_table[rfx]);
drivers/iommu/s390-iommu.c
290
if (reg_entry_isvalid(domain->dma_table[rsx]))
drivers/iommu/s390-iommu.c
291
dma_free_rt_table(domain->dma_table[rsx]);
drivers/iommu/s390-iommu.c
295
if (reg_entry_isvalid(domain->dma_table[rtx]))
drivers/iommu/s390-iommu.c
296
dma_free_seg_table(domain->dma_table[rtx]);
drivers/iommu/s390-iommu.c
299
WARN_ONCE(1, "Invalid IOMMU table (%x)\n", domain->origin_type);
drivers/iommu/s390-iommu.c
303
dma_free_cpu_table(domain->dma_table);
drivers/iommu/s390-iommu.c
436
static unsigned long *dma_walk_region_tables(struct s390_domain *domain,
drivers/iommu/s390-iommu.c
439
switch (domain->origin_type) {
drivers/iommu/s390-iommu.c
441
return dma_walk_rf_table(domain->dma_table, dma_addr, gfp);
drivers/iommu/s390-iommu.c
443
return dma_walk_rs_table(domain->dma_table, dma_addr, gfp);
drivers/iommu/s390-iommu.c
445
return domain->dma_table;
drivers/iommu/s390-iommu.c
451
static unsigned long *dma_walk_cpu_trans(struct s390_domain *domain,
drivers/iommu/s390-iommu.c
457
rto = dma_walk_region_tables(domain, dma_addr, gfp);
drivers/iommu/s390-iommu.c
497
return container_of(dom, struct s390_domain, domain);
drivers/iommu/s390-iommu.c
514
static inline u64 max_tbl_size(struct s390_domain *domain)
drivers/iommu/s390-iommu.c
516
switch (domain->origin_type) {
drivers/iommu/s390-iommu.c
560
s390_domain->domain.pgsize_bitmap = SZ_4K;
drivers/iommu/s390-iommu.c
561
s390_domain->domain.geometry.force_aperture = true;
drivers/iommu/s390-iommu.c
562
s390_domain->domain.geometry.aperture_start = 0;
drivers/iommu/s390-iommu.c
563
s390_domain->domain.geometry.aperture_end = max_tbl_size(s390_domain);
drivers/iommu/s390-iommu.c
568
return &s390_domain->domain;
drivers/iommu/s390-iommu.c
579
static void s390_domain_free(struct iommu_domain *domain)
drivers/iommu/s390-iommu.c
581
struct s390_domain *s390_domain = to_s390_domain(domain);
drivers/iommu/s390-iommu.c
591
struct iommu_domain *domain)
drivers/iommu/s390-iommu.c
596
zdev->s390_domain = domain;
drivers/iommu/s390-iommu.c
600
static u64 get_iota_region_flag(struct s390_domain *domain)
drivers/iommu/s390-iommu.c
602
switch (domain->origin_type) {
drivers/iommu/s390-iommu.c
610
WARN_ONCE(1, "Invalid IOMMU table (%x)\n", domain->origin_type);
drivers/iommu/s390-iommu.c
633
struct iommu_domain *domain, u8 *status)
drivers/iommu/s390-iommu.c
639
switch (domain->type) {
drivers/iommu/s390-iommu.c
648
s390_domain = to_s390_domain(domain);
drivers/iommu/s390-iommu.c
672
static int blocking_domain_attach_device(struct iommu_domain *domain,
drivers/iommu/s390-iommu.c
692
zdev_s390_domain_update(zdev, domain);
drivers/iommu/s390-iommu.c
697
static int s390_iommu_attach_device(struct iommu_domain *domain,
drivers/iommu/s390-iommu.c
701
struct s390_domain *s390_domain = to_s390_domain(domain);
drivers/iommu/s390-iommu.c
710
if (WARN_ON(domain->geometry.aperture_start > zdev->end_dma ||
drivers/iommu/s390-iommu.c
711
domain->geometry.aperture_end < zdev->start_dma))
drivers/iommu/s390-iommu.c
717
cc = s390_iommu_domain_reg_ioat(zdev, domain, &status);
drivers/iommu/s390-iommu.c
721
zdev_s390_domain_update(zdev, domain);
drivers/iommu/s390-iommu.c
795
static void s390_iommu_flush_iotlb_all(struct iommu_domain *domain)
drivers/iommu/s390-iommu.c
797
struct s390_domain *s390_domain = to_s390_domain(domain);
drivers/iommu/s390-iommu.c
808
static void s390_iommu_iotlb_sync(struct iommu_domain *domain,
drivers/iommu/s390-iommu.c
811
struct s390_domain *s390_domain = to_s390_domain(domain);
drivers/iommu/s390-iommu.c
828
static int s390_iommu_iotlb_sync_map(struct iommu_domain *domain,
drivers/iommu/s390-iommu.c
831
struct s390_domain *s390_domain = to_s390_domain(domain);
drivers/iommu/s390-iommu.c
912
static int s390_iommu_map_pages(struct iommu_domain *domain,
drivers/iommu/s390-iommu.c
917
struct s390_domain *s390_domain = to_s390_domain(domain);
drivers/iommu/s390-iommu.c
924
if (iova < s390_domain->domain.geometry.aperture_start ||
drivers/iommu/s390-iommu.c
925
(iova + size - 1) > s390_domain->domain.geometry.aperture_end)
drivers/iommu/s390-iommu.c
944
static unsigned long *get_rso_from_iova(struct s390_domain *domain,
drivers/iommu/s390-iommu.c
951
switch (domain->origin_type) {
drivers/iommu/s390-iommu.c
953
rfo = domain->dma_table;
drivers/iommu/s390-iommu.c
960
return domain->dma_table;
drivers/iommu/s390-iommu.c
966
static unsigned long *get_rto_from_iova(struct s390_domain *domain,
drivers/iommu/s390-iommu.c
973
switch (domain->origin_type) {
drivers/iommu/s390-iommu.c
976
rso = get_rso_from_iova(domain, iova);
drivers/iommu/s390-iommu.c
983
return domain->dma_table;
drivers/iommu/s390-iommu.c
989
static phys_addr_t s390_iommu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/s390-iommu.c
992
struct s390_domain *s390_domain = to_s390_domain(domain);
drivers/iommu/s390-iommu.c
998
if (iova < domain->geometry.aperture_start ||
drivers/iommu/s390-iommu.c
999
iova > domain->geometry.aperture_end)
drivers/iommu/sprd-iommu.c
129
sprd_iommu_pgt_size(struct iommu_domain *domain)
drivers/iommu/sprd-iommu.c
131
return ((domain->geometry.aperture_end -
drivers/iommu/sprd-iommu.c
132
domain->geometry.aperture_start + 1) >>
drivers/iommu/sprd-iommu.c
146
dom->domain.pgsize_bitmap = SPRD_IOMMU_PAGE_SIZE;
drivers/iommu/sprd-iommu.c
148
dom->domain.geometry.aperture_start = 0;
drivers/iommu/sprd-iommu.c
149
dom->domain.geometry.aperture_end = SZ_256M - 1;
drivers/iommu/sprd-iommu.c
150
dom->domain.geometry.force_aperture = true;
drivers/iommu/sprd-iommu.c
152
return &dom->domain;
drivers/iommu/sprd-iommu.c
166
val = dom->domain.geometry.aperture_start >> SPRD_IOMMU_PAGE_SHIFT;
drivers/iommu/sprd-iommu.c
181
val = (dom->domain.geometry.aperture_end -
drivers/iommu/sprd-iommu.c
182
dom->domain.geometry.aperture_start) >> SPRD_IOMMU_PAGE_SHIFT;
drivers/iommu/sprd-iommu.c
235
pgt_size = sprd_iommu_pgt_size(&dom->domain);
drivers/iommu/sprd-iommu.c
241
static void sprd_iommu_domain_free(struct iommu_domain *domain)
drivers/iommu/sprd-iommu.c
243
struct sprd_iommu_domain *dom = to_sprd_domain(domain);
drivers/iommu/sprd-iommu.c
249
static int sprd_iommu_attach_device(struct iommu_domain *domain,
drivers/iommu/sprd-iommu.c
254
struct sprd_iommu_domain *dom = to_sprd_domain(domain);
drivers/iommu/sprd-iommu.c
255
size_t pgt_size = sprd_iommu_pgt_size(domain);
drivers/iommu/sprd-iommu.c
287
static int sprd_iommu_map(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/sprd-iommu.c
291
struct sprd_iommu_domain *dom = to_sprd_domain(domain);
drivers/iommu/sprd-iommu.c
297
unsigned long start = domain->geometry.aperture_start;
drivers/iommu/sprd-iommu.c
298
unsigned long end = domain->geometry.aperture_end;
drivers/iommu/sprd-iommu.c
324
static size_t sprd_iommu_unmap(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/sprd-iommu.c
328
struct sprd_iommu_domain *dom = to_sprd_domain(domain);
drivers/iommu/sprd-iommu.c
332
unsigned long start = domain->geometry.aperture_start;
drivers/iommu/sprd-iommu.c
333
unsigned long end = domain->geometry.aperture_end;
drivers/iommu/sprd-iommu.c
347
static int sprd_iommu_sync_map(struct iommu_domain *domain,
drivers/iommu/sprd-iommu.c
350
struct sprd_iommu_domain *dom = to_sprd_domain(domain);
drivers/iommu/sprd-iommu.c
363
static void sprd_iommu_sync(struct iommu_domain *domain,
drivers/iommu/sprd-iommu.c
366
sprd_iommu_sync_map(domain, 0, 0);
drivers/iommu/sprd-iommu.c
369
static phys_addr_t sprd_iommu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/sprd-iommu.c
372
struct sprd_iommu_domain *dom = to_sprd_domain(domain);
drivers/iommu/sprd-iommu.c
375
unsigned long start = domain->geometry.aperture_start;
drivers/iommu/sprd-iommu.c
376
unsigned long end = domain->geometry.aperture_end;
drivers/iommu/sprd-iommu.c
78
struct iommu_domain domain;
drivers/iommu/sprd-iommu.c
88
return container_of(dom, struct sprd_iommu_domain, domain);
drivers/iommu/sun50i-iommu.c
1008
iommu->domain = &sun50i_iommu_identity_domain;
drivers/iommu/sun50i-iommu.c
111
struct iommu_domain *domain;
drivers/iommu/sun50i-iommu.c
116
struct iommu_domain domain;
drivers/iommu/sun50i-iommu.c
128
static struct sun50i_iommu_domain *to_sun50i_domain(struct iommu_domain *domain)
drivers/iommu/sun50i-iommu.c
130
return container_of(domain, struct sun50i_iommu_domain, domain);
drivers/iommu/sun50i-iommu.c
384
static void sun50i_iommu_flush_iotlb_all(struct iommu_domain *domain)
drivers/iommu/sun50i-iommu.c
386
struct sun50i_iommu_domain *sun50i_domain = to_sun50i_domain(domain);
drivers/iommu/sun50i-iommu.c
406
static int sun50i_iommu_iotlb_sync_map(struct iommu_domain *domain,
drivers/iommu/sun50i-iommu.c
409
struct sun50i_iommu_domain *sun50i_domain = to_sun50i_domain(domain);
drivers/iommu/sun50i-iommu.c
420
static void sun50i_iommu_iotlb_sync(struct iommu_domain *domain,
drivers/iommu/sun50i-iommu.c
423
sun50i_iommu_flush_iotlb_all(domain);
drivers/iommu/sun50i-iommu.c
432
if (!iommu->domain)
drivers/iommu/sun50i-iommu.c
435
sun50i_domain = to_sun50i_domain(iommu->domain);
drivers/iommu/sun50i-iommu.c
595
static int sun50i_iommu_map(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/sun50i-iommu.c
599
struct sun50i_iommu_domain *sun50i_domain = to_sun50i_domain(domain);
drivers/iommu/sun50i-iommu.c
638
static size_t sun50i_iommu_unmap(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/sun50i-iommu.c
641
struct sun50i_iommu_domain *sun50i_domain = to_sun50i_domain(domain);
drivers/iommu/sun50i-iommu.c
662
static phys_addr_t sun50i_iommu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/sun50i-iommu.c
665
struct sun50i_iommu_domain *sun50i_domain = to_sun50i_domain(domain);
drivers/iommu/sun50i-iommu.c
700
sun50i_domain->domain.pgsize_bitmap = SZ_4K;
drivers/iommu/sun50i-iommu.c
702
sun50i_domain->domain.geometry.aperture_start = 0;
drivers/iommu/sun50i-iommu.c
703
sun50i_domain->domain.geometry.aperture_end = DMA_BIT_MASK(32);
drivers/iommu/sun50i-iommu.c
704
sun50i_domain->domain.geometry.force_aperture = true;
drivers/iommu/sun50i-iommu.c
706
return &sun50i_domain->domain;
drivers/iommu/sun50i-iommu.c
714
static void sun50i_iommu_domain_free(struct iommu_domain *domain)
drivers/iommu/sun50i-iommu.c
716
struct sun50i_iommu_domain *sun50i_domain = to_sun50i_domain(domain);
drivers/iommu/sun50i-iommu.c
727
iommu->domain = &sun50i_domain->domain;
drivers/iommu/sun50i-iommu.c
770
iommu->domain = NULL;
drivers/iommu/sun50i-iommu.c
782
if (iommu->domain == identity_domain)
drivers/iommu/sun50i-iommu.c
785
sun50i_domain = to_sun50i_domain(iommu->domain);
drivers/iommu/sun50i-iommu.c
800
static int sun50i_iommu_attach_device(struct iommu_domain *domain,
drivers/iommu/sun50i-iommu.c
804
struct sun50i_iommu_domain *sun50i_domain = to_sun50i_domain(domain);
drivers/iommu/sun50i-iommu.c
815
if (iommu->domain == domain)
drivers/iommu/sun50i-iommu.c
874
if (iommu->domain)
drivers/iommu/sun50i-iommu.c
875
report_iommu_fault(iommu->domain, iommu->dev, iova, prot);
drivers/iommu/tegra-smmu.c
321
as->domain.pgsize_bitmap = SZ_4K;
drivers/iommu/tegra-smmu.c
324
as->domain.geometry.aperture_start = 0;
drivers/iommu/tegra-smmu.c
325
as->domain.geometry.aperture_end = 0xffffffff;
drivers/iommu/tegra-smmu.c
326
as->domain.geometry.force_aperture = true;
drivers/iommu/tegra-smmu.c
328
return &as->domain;
drivers/iommu/tegra-smmu.c
331
static void tegra_smmu_domain_free(struct iommu_domain *domain)
drivers/iommu/tegra-smmu.c
333
struct tegra_smmu_as *as = to_smmu_as(domain);
drivers/iommu/tegra-smmu.c
492
static int tegra_smmu_attach_dev(struct iommu_domain *domain,
drivers/iommu/tegra-smmu.c
497
struct tegra_smmu_as *as = to_smmu_as(domain);
drivers/iommu/tegra-smmu.c
58
struct iommu_domain domain;
drivers/iommu/tegra-smmu.c
72
return container_of(dom, struct tegra_smmu_as, domain);
drivers/iommu/tegra-smmu.c
721
__tegra_smmu_map(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/tegra-smmu.c
725
struct tegra_smmu_as *as = to_smmu_as(domain);
drivers/iommu/tegra-smmu.c
758
__tegra_smmu_unmap(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/tegra-smmu.c
761
struct tegra_smmu_as *as = to_smmu_as(domain);
drivers/iommu/tegra-smmu.c
775
static int tegra_smmu_map(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/tegra-smmu.c
779
struct tegra_smmu_as *as = to_smmu_as(domain);
drivers/iommu/tegra-smmu.c
784
ret = __tegra_smmu_map(domain, iova, paddr, size, prot, gfp, &flags);
drivers/iommu/tegra-smmu.c
793
static size_t tegra_smmu_unmap(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/tegra-smmu.c
796
struct tegra_smmu_as *as = to_smmu_as(domain);
drivers/iommu/tegra-smmu.c
800
size = __tegra_smmu_unmap(domain, iova, size, gather);
drivers/iommu/tegra-smmu.c
806
static phys_addr_t tegra_smmu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/tegra-smmu.c
809
struct tegra_smmu_as *as = to_smmu_as(domain);
drivers/iommu/virtio-iommu.c
101
#define to_viommu_domain(domain) \
drivers/iommu/virtio-iommu.c
102
container_of(domain, struct viommu_domain, domain)
drivers/iommu/virtio-iommu.c
403
u64 iova = vdomain->domain.geometry.aperture_start;
drivers/iommu/virtio-iommu.c
404
u64 limit = vdomain->domain.geometry.aperture_end;
drivers/iommu/virtio-iommu.c
406
unsigned long granule = 1UL << __ffs(vdomain->domain.pgsize_bitmap);
drivers/iommu/virtio-iommu.c
464
.domain = cpu_to_le32(vdomain->id),
drivers/iommu/virtio-iommu.c
64
struct iommu_domain domain;
drivers/iommu/virtio-iommu.c
689
vdomain->domain.pgsize_bitmap = viommu->pgsize_bitmap;
drivers/iommu/virtio-iommu.c
690
vdomain->domain.geometry = viommu->geometry;
drivers/iommu/virtio-iommu.c
695
return &vdomain->domain;
drivers/iommu/virtio-iommu.c
698
static void viommu_domain_free(struct iommu_domain *domain)
drivers/iommu/virtio-iommu.c
700
struct viommu_domain *vdomain = to_viommu_domain(domain);
drivers/iommu/virtio-iommu.c
714
struct iommu_domain *domain;
drivers/iommu/virtio-iommu.c
719
return &viommu_identity_domain.domain;
drivers/iommu/virtio-iommu.c
721
domain = viommu_domain_alloc_paging(dev);
drivers/iommu/virtio-iommu.c
722
if (IS_ERR(domain))
drivers/iommu/virtio-iommu.c
723
return domain;
drivers/iommu/virtio-iommu.c
725
ret = viommu_domain_map_identity(vdev, to_viommu_domain(domain));
drivers/iommu/virtio-iommu.c
727
viommu_domain_free(domain);
drivers/iommu/virtio-iommu.c
730
return domain;
drivers/iommu/virtio-iommu.c
733
static int viommu_attach_dev(struct iommu_domain *domain, struct device *dev,
drivers/iommu/virtio-iommu.c
739
struct viommu_domain *vdomain = to_viommu_domain(domain);
drivers/iommu/virtio-iommu.c
761
.domain = cpu_to_le32(vdomain->id),
drivers/iommu/virtio-iommu.c
784
static int viommu_attach_identity_domain(struct iommu_domain *domain,
drivers/iommu/virtio-iommu.c
791
struct viommu_domain *vdomain = to_viommu_domain(domain);
drivers/iommu/virtio-iommu.c
795
.domain = cpu_to_le32(vdev->viommu->identity_domain_id),
drivers/iommu/virtio-iommu.c
811
.domain = {
drivers/iommu/virtio-iommu.c
831
.domain = cpu_to_le32(vdomain->id),
drivers/iommu/virtio-iommu.c
842
static int viommu_map_pages(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/virtio-iommu.c
851
struct viommu_domain *vdomain = to_viommu_domain(domain);
drivers/iommu/virtio-iommu.c
867
.domain = cpu_to_le32(vdomain->id),
drivers/iommu/virtio-iommu.c
886
static size_t viommu_unmap_pages(struct iommu_domain *domain, unsigned long iova,
drivers/iommu/virtio-iommu.c
893
struct viommu_domain *vdomain = to_viommu_domain(domain);
drivers/iommu/virtio-iommu.c
906
.domain = cpu_to_le32(vdomain->id),
drivers/iommu/virtio-iommu.c
915
static phys_addr_t viommu_iova_to_phys(struct iommu_domain *domain,
drivers/iommu/virtio-iommu.c
922
struct viommu_domain *vdomain = to_viommu_domain(domain);
drivers/iommu/virtio-iommu.c
935
static void viommu_iotlb_sync(struct iommu_domain *domain,
drivers/iommu/virtio-iommu.c
938
struct viommu_domain *vdomain = to_viommu_domain(domain);
drivers/iommu/virtio-iommu.c
943
static int viommu_iotlb_sync_map(struct iommu_domain *domain,
drivers/iommu/virtio-iommu.c
946
struct viommu_domain *vdomain = to_viommu_domain(domain);
drivers/iommu/virtio-iommu.c
957
static void viommu_flush_iotlb_all(struct iommu_domain *domain)
drivers/iommu/virtio-iommu.c
959
struct viommu_domain *vdomain = to_viommu_domain(domain);
drivers/irqchip/irq-aclint-sswi.c
105
struct irq_domain *domain;
drivers/irqchip/irq-aclint-sswi.c
128
domain = irq_find_matching_fwnode(riscv_get_intc_hwnode(), DOMAIN_BUS_ANY);
drivers/irqchip/irq-aclint-sswi.c
129
if (!domain) {
drivers/irqchip/irq-aclint-sswi.c
134
sswi_ipi_virq = irq_create_mapping(domain, RV_IRQ_SOFT);
drivers/irqchip/irq-al-fic.c
101
struct irq_domain *domain = fic->domain;
drivers/irqchip/irq-al-fic.c
103
struct irq_chip_generic *gc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-al-fic.c
113
generic_handle_domain_irq(domain, hwirq);
drivers/irqchip/irq-al-fic.c
134
fic->domain = irq_domain_create_linear(of_fwnode_handle(node),
drivers/irqchip/irq-al-fic.c
138
if (!fic->domain) {
drivers/irqchip/irq-al-fic.c
143
ret = irq_alloc_domain_generic_chips(fic->domain,
drivers/irqchip/irq-al-fic.c
153
gc = irq_get_domain_generic_chip(fic->domain, 0);
drivers/irqchip/irq-al-fic.c
171
irq_domain_remove(fic->domain);
drivers/irqchip/irq-al-fic.c
38
struct irq_domain *domain;
drivers/irqchip/irq-alpine-msi.c
100
d = irq_domain_get_irq_data(domain->parent, virq);
drivers/irqchip/irq-alpine-msi.c
105
static int alpine_msix_middle_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-alpine-msi.c
108
struct alpine_msix_data *priv = domain->host_data;
drivers/irqchip/irq-alpine-msi.c
116
err = alpine_msix_gic_domain_alloc(domain, virq + i, sgi + i);
drivers/irqchip/irq-alpine-msi.c
120
irq_domain_set_hwirq_and_chip(domain, virq + i, sgi + i,
drivers/irqchip/irq-alpine-msi.c
126
irq_domain_free_irqs_parent(domain, virq, i);
drivers/irqchip/irq-alpine-msi.c
131
static void alpine_msix_middle_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-alpine-msi.c
134
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-alpine-msi.c
137
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-alpine-msi.c
81
static int alpine_msix_gic_domain_alloc(struct irq_domain *domain, unsigned int virq, int sgi)
drivers/irqchip/irq-alpine-msi.c
87
if (!is_of_node(domain->parent->fwnode))
drivers/irqchip/irq-alpine-msi.c
90
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-alpine-msi.c
96
ret = irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-apple-aic.c
734
static int aic_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-apple-aic.c
742
ret = aic_irq_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-apple-aic.c
747
ret = aic_irq_domain_map(domain, virq + i, hwirq + i);
drivers/irqchip/irq-apple-aic.c
755
static void aic_irq_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-apple-aic.c
761
struct irq_data *d = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-armada-370-xp.c
174
struct irq_domain *domain;
drivers/irqchip/irq-armada-370-xp.c
269
static int mpic_msi_alloc(struct irq_domain *domain, unsigned int virq, unsigned int nr_irqs,
drivers/irqchip/irq-armada-370-xp.c
272
struct mpic *mpic = domain->host_data;
drivers/irqchip/irq-armada-370-xp.c
284
irq_domain_set_info(domain, virq + i, hwirq + i,
drivers/irqchip/irq-armada-370-xp.c
286
domain->host_data, handle_simple_irq,
drivers/irqchip/irq-armada-370-xp.c
293
static void mpic_msi_free(struct irq_domain *domain, unsigned int virq, unsigned int nr_irqs)
drivers/irqchip/irq-armada-370-xp.c
295
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-armada-370-xp.c
296
struct mpic *mpic = domain->host_data;
drivers/irqchip/irq-armada-370-xp.c
531
for (irq_hw_number_t i = 0; i < mpic->domain->hwirq_max; i++)
drivers/irqchip/irq-armada-370-xp.c
551
unsigned int virq = irq_find_mapping(mpic->domain, i);
drivers/irqchip/irq-armada-370-xp.c
604
static int mpic_irq_map(struct irq_domain *domain, unsigned int virq, irq_hw_number_t hwirq)
drivers/irqchip/irq-armada-370-xp.c
606
struct mpic *mpic = domain->host_data;
drivers/irqchip/irq-armada-370-xp.c
697
generic_handle_domain_irq(mpic->domain, i);
drivers/irqchip/irq-armada-370-xp.c
717
generic_handle_domain_irq(mpic->domain, i);
drivers/irqchip/irq-armada-370-xp.c
744
for (irq_hw_number_t i = 0; i < mpic->domain->hwirq_max; i++) {
drivers/irqchip/irq-armada-370-xp.c
745
unsigned int virq = irq_find_mapping(mpic->domain, i);
drivers/irqchip/irq-armada-370-xp.c
870
mpic->domain = irq_domain_create_linear(of_fwnode_handle(node), nr_irqs, &mpic_irq_ops, mpic);
drivers/irqchip/irq-armada-370-xp.c
871
if (!mpic->domain) {
drivers/irqchip/irq-armada-370-xp.c
876
irq_domain_update_bus_token(mpic->domain, DOMAIN_BUS_WIRED);
drivers/irqchip/irq-armada-370-xp.c
889
irq_set_default_domain(mpic->domain);
drivers/irqchip/irq-aspeed-i2c-ic.c
50
static int aspeed_i2c_ic_map_irq_domain(struct irq_domain *domain,
drivers/irqchip/irq-aspeed-i2c-ic.c
54
irq_set_chip_data(irq, domain->host_data);
drivers/irqchip/irq-aspeed-intc.c
73
static int aspeed_intc_ic_map_irq_domain(struct irq_domain *domain, unsigned int irq,
drivers/irqchip/irq-aspeed-intc.c
77
irq_set_chip_data(irq, domain->host_data);
drivers/irqchip/irq-aspeed-scu-ic.c
193
static int aspeed_scu_ic_map(struct irq_domain *domain, unsigned int irq,
drivers/irqchip/irq-aspeed-scu-ic.c
196
struct aspeed_scu_ic *scu_ic = domain->host_data;
drivers/irqchip/irq-aspeed-scu-ic.c
202
irq_set_chip_data(irq, domain->host_data);
drivers/irqchip/irq-ath79-misc.c
120
struct irq_domain *domain, int irq)
drivers/irqchip/irq-ath79-misc.c
122
void __iomem *base = domain->host_data;
drivers/irqchip/irq-ath79-misc.c
124
ath79_perfcount_irq = irq_create_mapping(domain, ATH79_MISC_PERF_IRQ);
drivers/irqchip/irq-ath79-misc.c
130
irq_set_chained_handler_and_data(irq, ath79_misc_irq_handler, domain);
drivers/irqchip/irq-ath79-misc.c
136
struct irq_domain *domain;
drivers/irqchip/irq-ath79-misc.c
152
domain = irq_domain_create_linear(of_fwnode_handle(node), ATH79_MISC_IRQ_COUNT,
drivers/irqchip/irq-ath79-misc.c
154
if (!domain) {
drivers/irqchip/irq-ath79-misc.c
159
ath79_misc_intc_domain_init(domain, irq);
drivers/irqchip/irq-ath79-misc.c
36
struct irq_domain *domain = irq_desc_get_handler_data(desc);
drivers/irqchip/irq-ath79-misc.c
38
void __iomem *base = domain->host_data;
drivers/irqchip/irq-ath79-misc.c
55
generic_handle_domain_irq(domain, bit);
drivers/irqchip/irq-atmel-aic-common.c
109
static void __init aic_common_ext_irq_of_init(struct irq_domain *domain)
drivers/irqchip/irq-atmel-aic-common.c
111
struct device_node *node = irq_domain_get_of_node(domain);
drivers/irqchip/irq-atmel-aic-common.c
116
gc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-atmel-aic-common.c
122
gc = irq_get_domain_generic_chip(domain, hwirq);
drivers/irqchip/irq-atmel-aic-common.c
125
hwirq, domain->revmap_size);
drivers/irqchip/irq-atmel-aic-common.c
203
struct irq_domain *domain;
drivers/irqchip/irq-atmel-aic-common.c
222
domain = irq_domain_create_linear(of_fwnode_handle(node), nchips * 32, ops, aic);
drivers/irqchip/irq-atmel-aic-common.c
223
if (!domain) {
drivers/irqchip/irq-atmel-aic-common.c
228
ret = irq_alloc_domain_generic_chips(domain, 32, 1, name,
drivers/irqchip/irq-atmel-aic-common.c
236
gc = irq_get_domain_generic_chip(domain, i * 32);
drivers/irqchip/irq-atmel-aic-common.c
249
aic_common_ext_irq_of_init(domain);
drivers/irqchip/irq-atmel-aic-common.c
252
return domain;
drivers/irqchip/irq-atmel-aic-common.c
255
irq_domain_remove(domain);
drivers/irqchip/irq-atmel-aic.c
136
static void __init aic_hw_init(struct irq_domain *domain)
drivers/irqchip/irq-atmel-aic.c
138
struct irq_chip_generic *gc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-atmel-aic.c
237
struct irq_domain *domain;
drivers/irqchip/irq-atmel-aic.c
242
domain = aic_common_of_init(node, &aic_irq_ops, "atmel-aic",
drivers/irqchip/irq-atmel-aic.c
244
if (IS_ERR(domain))
drivers/irqchip/irq-atmel-aic.c
245
return PTR_ERR(domain);
drivers/irqchip/irq-atmel-aic.c
247
aic_domain = domain;
drivers/irqchip/irq-atmel-aic.c
248
gc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-atmel-aic.c
261
aic_hw_init(domain);
drivers/irqchip/irq-atmel-aic5.c
103
struct irq_domain *domain = d->domain;
drivers/irqchip/irq-atmel-aic5.c
104
struct irq_chip_generic *bgc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-atmel-aic5.c
119
struct irq_domain *domain = d->domain;
drivers/irqchip/irq-atmel-aic5.c
120
struct irq_chip_generic *bgc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-atmel-aic5.c
131
struct irq_domain *domain = d->domain;
drivers/irqchip/irq-atmel-aic5.c
132
struct irq_chip_generic *bgc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-atmel-aic5.c
150
struct irq_domain *domain = d->domain;
drivers/irqchip/irq-atmel-aic5.c
151
struct irq_domain_chip_generic *dgc = domain->gc;
drivers/irqchip/irq-atmel-aic5.c
152
struct irq_chip_generic *bgc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-atmel-aic5.c
158
for (i = 0; i < domain->revmap_size; i++) {
drivers/irqchip/irq-atmel-aic5.c
179
struct irq_domain *domain = d->domain;
drivers/irqchip/irq-atmel-aic5.c
180
struct irq_domain_chip_generic *dgc = domain->gc;
drivers/irqchip/irq-atmel-aic5.c
181
struct irq_chip_generic *bgc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-atmel-aic5.c
190
for (i = 0; i < domain->revmap_size; i++) {
drivers/irqchip/irq-atmel-aic5.c
214
struct irq_domain *domain = d->domain;
drivers/irqchip/irq-atmel-aic5.c
215
struct irq_domain_chip_generic *dgc = domain->gc;
drivers/irqchip/irq-atmel-aic5.c
216
struct irq_chip_generic *bgc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-atmel-aic5.c
233
static void __init aic5_hw_init(struct irq_domain *domain)
drivers/irqchip/irq-atmel-aic5.c
235
struct irq_chip_generic *gc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-atmel-aic5.c
256
for (i = 0; i < domain->revmap_size; i++) {
drivers/irqchip/irq-atmel-aic5.c
319
struct irq_domain *domain;
drivers/irqchip/irq-atmel-aic5.c
329
domain = aic_common_of_init(node, &aic5_irq_ops, "atmel-aic5",
drivers/irqchip/irq-atmel-aic5.c
331
if (IS_ERR(domain))
drivers/irqchip/irq-atmel-aic5.c
332
return PTR_ERR(domain);
drivers/irqchip/irq-atmel-aic5.c
334
aic5_domain = domain;
drivers/irqchip/irq-atmel-aic5.c
337
gc = irq_get_domain_generic_chip(domain, i * 32);
drivers/irqchip/irq-atmel-aic5.c
349
aic5_hw_init(domain);
drivers/irqchip/irq-atmel-aic5.c
87
struct irq_domain *domain = d->domain;
drivers/irqchip/irq-atmel-aic5.c
88
struct irq_chip_generic *bgc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-bcm2712-mip.c
100
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-bcm2712-mip.c
106
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, &fwspec);
drivers/irqchip/irq-bcm2712-mip.c
111
irqd = irq_domain_get_irq_data(domain->parent, virq + i);
drivers/irqchip/irq-bcm2712-mip.c
114
ret = irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
drivers/irqchip/irq-bcm2712-mip.c
127
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-bcm2712-mip.c
133
static void mip_middle_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-bcm2712-mip.c
136
struct irq_data *irqd = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-bcm2712-mip.c
145
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-bcm2712-mip.c
85
static int mip_middle_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-bcm2712-mip.c
88
struct mip_priv *mip = domain->host_data;
drivers/irqchip/irq-bcm2835.c
147
intc.domain = irq_domain_create_linear(of_fwnode_handle(node), MAKE_HWIRQ(NR_BANKS, 0),
drivers/irqchip/irq-bcm2835.c
149
if (!intc.domain)
drivers/irqchip/irq-bcm2835.c
158
irq = irq_create_mapping(intc.domain, MAKE_HWIRQ(b, i));
drivers/irqchip/irq-bcm2835.c
251
generic_handle_domain_irq(intc.domain, hwirq);
drivers/irqchip/irq-bcm2835.c
259
generic_handle_domain_irq(intc.domain, hwirq);
drivers/irqchip/irq-bcm2835.c
84
struct irq_domain *domain;
drivers/irqchip/irq-bcm2836.c
149
generic_handle_domain_irq(intc.domain, hwirq);
drivers/irqchip/irq-bcm2836.c
19
struct irq_domain *domain;
drivers/irqchip/irq-bcm2836.c
253
.fwnode = intc.domain->fwnode,
drivers/irqchip/irq-bcm2836.c
265
ipi_domain = irq_domain_create_linear(intc.domain->fwnode,
drivers/irqchip/irq-bcm2836.c
328
intc.domain = irq_domain_create_linear(of_fwnode_handle(node), LAST_IRQ + 1,
drivers/irqchip/irq-bcm2836.c
331
if (!intc.domain)
drivers/irqchip/irq-bcm2836.c
334
irq_domain_update_bus_token(intc.domain, DOMAIN_BUS_WIRED);
drivers/irqchip/irq-bcm6345-l1.c
134
if (generic_handle_domain_irq(intc->domain, base + hwirq))
drivers/irqchip/irq-bcm6345-l1.c
318
intc->domain = irq_domain_create_linear(of_fwnode_handle(dn), IRQS_PER_WORD * intc->n_words,
drivers/irqchip/irq-bcm6345-l1.c
321
if (!intc->domain) {
drivers/irqchip/irq-bcm6345-l1.c
78
struct irq_domain *domain;
drivers/irqchip/irq-bcm7038-l1.c
139
generic_handle_domain_irq(intc->domain, base + hwirq);
drivers/irqchip/irq-bcm7038-l1.c
40
struct irq_domain *domain;
drivers/irqchip/irq-bcm7038-l1.c
415
intc->domain = irq_domain_create_linear(of_fwnode_handle(dn), IRQS_PER_WORD * intc->n_words,
drivers/irqchip/irq-bcm7038-l1.c
418
if (!intc->domain) {
drivers/irqchip/irq-bcm7120-l2.c
252
data->domain = irq_domain_create_linear(of_fwnode_handle(dn), IRQS_PER_WORD * data->n_words,
drivers/irqchip/irq-bcm7120-l2.c
254
if (!data->domain) {
drivers/irqchip/irq-bcm7120-l2.c
266
ret = irq_alloc_domain_generic_chips(data->domain, IRQS_PER_WORD, 1,
drivers/irqchip/irq-bcm7120-l2.c
276
gc = irq_get_domain_generic_chip(data->domain, irq);
drivers/irqchip/irq-bcm7120-l2.c
318
irq_domain_remove(data->domain);
drivers/irqchip/irq-bcm7120-l2.c
47
struct irq_domain *domain;
drivers/irqchip/irq-bcm7120-l2.c
70
gc = irq_get_domain_generic_chip(b->domain, base);
drivers/irqchip/irq-bcm7120-l2.c
77
generic_handle_domain_irq(b->domain, base + hwirq);
drivers/irqchip/irq-brcmstb-l2.c
180
data->domain = irq_domain_create_linear(of_fwnode_handle(np), 32,
drivers/irqchip/irq-brcmstb-l2.c
182
if (!data->domain) {
drivers/irqchip/irq-brcmstb-l2.c
198
ret = irq_alloc_domain_generic_chips(data->domain, 32, 1,
drivers/irqchip/irq-brcmstb-l2.c
209
data->gc = irq_get_domain_generic_chip(data->domain, 0);
drivers/irqchip/irq-brcmstb-l2.c
251
irq_domain_remove(data->domain);
drivers/irqchip/irq-brcmstb-l2.c
56
struct irq_domain *domain;
drivers/irqchip/irq-brcmstb-l2.c
86
generic_handle_domain_irq(b->domain, irq);
drivers/irqchip/irq-clps711x.c
186
clps711x_intc->domain =
drivers/irqchip/irq-clps711x.c
189
if (!clps711x_intc->domain) {
drivers/irqchip/irq-clps711x.c
194
irq_set_default_domain(clps711x_intc->domain);
drivers/irqchip/irq-clps711x.c
68
struct irq_domain *domain;
drivers/irqchip/irq-clps711x.c
80
generic_handle_domain_irq(clps711x_intc->domain,
drivers/irqchip/irq-clps711x.c
86
generic_handle_domain_irq(clps711x_intc->domain,
drivers/irqchip/irq-crossbar.c
102
err = irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-crossbar.c
152
static void crossbar_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-crossbar.c
159
struct irq_data *d = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-crossbar.c
336
struct irq_domain *parent_domain, *domain;
drivers/irqchip/irq-crossbar.c
354
domain = irq_domain_create_hierarchy(parent_domain, 0, cb->max_crossbar_sources,
drivers/irqchip/irq-crossbar.c
356
if (!domain) {
drivers/irqchip/irq-crossbar.c
74
static int allocate_gic_irq(struct irq_domain *domain, unsigned virq,
drivers/irqchip/irq-crossbar.c
81
if (!irq_domain_get_of_node(domain->parent))
drivers/irqchip/irq-crossbar.c
96
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-dw-apb-ictl.c
118
struct irq_domain *domain;
drivers/irqchip/irq-dw-apb-ictl.c
175
domain = irq_domain_create_linear(of_fwnode_handle(np), nrirqs, domain_ops, NULL);
drivers/irqchip/irq-dw-apb-ictl.c
176
if (!domain) {
drivers/irqchip/irq-dw-apb-ictl.c
182
ret = irq_alloc_domain_generic_chips(domain, 32, 1, np->name,
drivers/irqchip/irq-dw-apb-ictl.c
191
gc = irq_get_domain_generic_chip(domain, i * 32);
drivers/irqchip/irq-dw-apb-ictl.c
202
dw_apb_ictl_handle_irq_cascaded, domain);
drivers/irqchip/irq-dw-apb-ictl.c
204
dw_apb_ictl_irq_domain = domain;
drivers/irqchip/irq-dw-apb-ictl.c
74
static int dw_apb_ictl_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-dw-apb-ictl.c
82
ret = irq_domain_translate_onecell(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-dw-apb-ictl.c
87
irq_map_generic_chip(domain, virq + i, hwirq + i);
drivers/irqchip/irq-econet-en751221.c
134
struct irq_domain *domain;
drivers/irqchip/irq-econet-en751221.c
145
domain = irq_desc_get_handler_data(desc);
drivers/irqchip/irq-econet-en751221.c
146
econet_intc_handle_pending(domain, pending0, 0);
drivers/irqchip/irq-econet-en751221.c
147
econet_intc_handle_pending(domain, pending1, 32);
drivers/irqchip/irq-econet-en751221.c
254
struct irq_domain *domain;
drivers/irqchip/irq-econet-en751221.c
289
domain = irq_domain_create_linear(of_fwnode_handle(node), IRQ_COUNT,
drivers/irqchip/irq-econet-en751221.c
291
if (!domain) {
drivers/irqchip/irq-econet-en751221.c
297
irq_set_chained_handler_and_data(irq, econet_intc_from_parent, domain);
drivers/irqchip/irq-ftintc010.c
136
generic_handle_domain_irq(f->domain, irq);
drivers/irqchip/irq-ftintc010.c
183
f->domain = irq_domain_create_simple(of_fwnode_handle(node),
drivers/irqchip/irq-ftintc010.c
50
struct irq_domain *domain;
drivers/irqchip/irq-gic-its-msi-parent.c
106
msi_info = msi_get_domain_info(domain->parent);
drivers/irqchip/irq-gic-its-msi-parent.c
107
return msi_info->ops->msi_prepare(domain->parent, dev, nvec, info);
drivers/irqchip/irq-gic-its-msi-parent.c
110
static int its_v5_pci_msi_prepare(struct irq_domain *domain, struct device *dev,
drivers/irqchip/irq-gic-its-msi-parent.c
125
rid = pci_msi_map_rid_ctlr_node(domain->parent, pdev, &msi_node);
drivers/irqchip/irq-gic-its-msi-parent.c
143
msi_info = msi_get_domain_info(domain->parent);
drivers/irqchip/irq-gic-its-msi-parent.c
144
return msi_info->ops->msi_prepare(domain->parent, dev, nvec, info);
drivers/irqchip/irq-gic-its-msi-parent.c
151
static int of_pmsi_get_msi_info(struct irq_domain *domain, struct device *dev, u32 *dev_id,
drivers/irqchip/irq-gic-its-msi-parent.c
163
if (np == irq_domain_get_of_node(domain)) {
drivers/irqchip/irq-gic-its-msi-parent.c
185
static int its_pmsi_prepare(struct irq_domain *domain, struct device *dev,
drivers/irqchip/irq-gic-its-msi-parent.c
193
ret = of_pmsi_get_msi_info(domain->parent, dev, &dev_id, NULL);
drivers/irqchip/irq-gic-its-msi-parent.c
205
msi_info = msi_get_domain_info(domain->parent);
drivers/irqchip/irq-gic-its-msi-parent.c
206
return msi_info->ops->msi_prepare(domain->parent,
drivers/irqchip/irq-gic-its-msi-parent.c
210
static int its_v5_pmsi_prepare(struct irq_domain *domain, struct device *dev,
drivers/irqchip/irq-gic-its-msi-parent.c
219
ret = of_pmsi_get_msi_info(domain->parent, dev, &dev_id, &pa);
drivers/irqchip/irq-gic-its-msi-parent.c
233
msi_info = msi_get_domain_info(domain->parent);
drivers/irqchip/irq-gic-its-msi-parent.c
234
return msi_info->ops->msi_prepare(domain->parent, dev, nvec, info);
drivers/irqchip/irq-gic-its-msi-parent.c
237
static void its_msi_teardown(struct irq_domain *domain, msi_alloc_info_t *info)
drivers/irqchip/irq-gic-its-msi-parent.c
241
msi_info = msi_get_domain_info(domain->parent);
drivers/irqchip/irq-gic-its-msi-parent.c
242
msi_info->ops->msi_teardown(domain->parent, info);
drivers/irqchip/irq-gic-its-msi-parent.c
245
static bool its_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
drivers/irqchip/irq-gic-its-msi-parent.c
248
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
drivers/irqchip/irq-gic-its-msi-parent.c
288
static bool its_v5_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
drivers/irqchip/irq-gic-its-msi-parent.c
291
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
drivers/irqchip/irq-gic-its-msi-parent.c
66
static int its_pci_msi_prepare(struct irq_domain *domain, struct device *dev,
drivers/irqchip/irq-gic-its-msi-parent.c
92
info->scratchpad[0].ul = pci_msi_domain_get_msi_rid(domain->parent, pdev);
drivers/irqchip/irq-gic-v2m.c
109
static int gicv2m_irq_gic_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-gic-v2m.c
117
if (is_of_node(domain->parent->fwnode)) {
drivers/irqchip/irq-gic-v2m.c
118
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-gic-v2m.c
123
} else if (is_fwnode_irqchip(domain->parent->fwnode)) {
drivers/irqchip/irq-gic-v2m.c
124
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-gic-v2m.c
132
err = irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-gic-v2m.c
137
d = irq_domain_get_irq_data(domain->parent, virq);
drivers/irqchip/irq-gic-v2m.c
151
static int gicv2m_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v2m.c
185
err = gicv2m_irq_gic_domain_alloc(domain, virq + i, hwirq + i);
drivers/irqchip/irq-gic-v2m.c
189
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
drivers/irqchip/irq-gic-v2m.c
196
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-gic-v2m.c
201
static void gicv2m_irq_domain_free(struct irq_domain *domain,
drivers/irqchip/irq-gic-v2m.c
204
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-gic-v2m.c
208
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-gic-v3-its-fsl-mc-msi.c
26
static u32 fsl_mc_msi_domain_get_msi_id(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-its-fsl-mc-msi.c
32
of_node = irq_domain_get_of_node(domain);
drivers/irqchip/irq-gic-v3-its.c
3572
static int its_msi_prepare(struct irq_domain *domain, struct device *dev,
drivers/irqchip/irq-gic-v3-its.c
3589
msi_info = msi_get_domain_info(domain);
drivers/irqchip/irq-gic-v3-its.c
3631
static void its_msi_teardown(struct irq_domain *domain, msi_alloc_info_t *info)
drivers/irqchip/irq-gic-v3-its.c
3660
static int its_irq_gic_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-its.c
3666
if (irq_domain_get_of_node(domain->parent)) {
drivers/irqchip/irq-gic-v3-its.c
3667
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-gic-v3-its.c
3672
} else if (is_fwnode_irqchip(domain->parent->fwnode)) {
drivers/irqchip/irq-gic-v3-its.c
3673
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-gic-v3-its.c
3681
return irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-gic-v3-its.c
3684
static int its_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v3-its.c
3704
err = its_irq_gic_domain_alloc(domain, virq + i, hwirq + i);
drivers/irqchip/irq-gic-v3-its.c
3708
irq_domain_set_hwirq_and_chip(domain, virq + i,
drivers/irqchip/irq-gic-v3-its.c
3722
static int its_irq_domain_activate(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-its.c
3742
static void its_irq_domain_deactivate(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-its.c
3753
static void its_irq_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v3-its.c
3756
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-gic-v3-its.c
3761
its_get_event_id(irq_domain_get_irq_data(domain, virq)),
drivers/irqchip/irq-gic-v3-its.c
3765
struct irq_data *data = irq_domain_get_irq_data(domain,
drivers/irqchip/irq-gic-v3-its.c
3771
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-gic-v3-its.c
4487
static int its_sgi_irq_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-its.c
4502
irq_domain_set_hwirq_and_chip(domain, virq + i, i,
drivers/irqchip/irq-gic-v3-its.c
4510
static void its_sgi_irq_domain_free(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-its.c
4517
static int its_sgi_irq_domain_activate(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-its.c
4525
static void its_sgi_irq_domain_deactivate(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-its.c
4602
static void its_vpe_irq_domain_free(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-its.c
4606
struct its_vm *vm = domain->host_data;
drivers/irqchip/irq-gic-v3-its.c
4609
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-gic-v3-its.c
4612
struct irq_data *data = irq_domain_get_irq_data(domain,
drivers/irqchip/irq-gic-v3-its.c
4629
static int its_vpe_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v3-its.c
4667
err = its_irq_gic_domain_alloc(domain, virq + i,
drivers/irqchip/irq-gic-v3-its.c
4671
irq_domain_set_hwirq_and_chip(domain, virq + i, i,
drivers/irqchip/irq-gic-v3-its.c
4678
its_vpe_irq_domain_free(domain, virq, i);
drivers/irqchip/irq-gic-v3-its.c
4683
static int its_vpe_irq_domain_activate(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-its.c
4712
static void its_vpe_irq_domain_deactivate(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-mbi.c
116
err = mbi_irq_gic_domain_alloc(domain, virq + i, hwirq + i);
drivers/irqchip/irq-gic-v3-mbi.c
120
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
drivers/irqchip/irq-gic-v3-mbi.c
127
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-gic-v3-mbi.c
132
static void mbi_irq_domain_free(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-mbi.c
135
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-gic-v3-mbi.c
139
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-gic-v3-mbi.c
164
static bool mbi_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-mbi.c
167
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
drivers/irqchip/irq-gic-v3-mbi.c
43
static int mbi_irq_gic_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-gic-v3-mbi.c
55
if (!is_of_node(domain->parent->fwnode))
drivers/irqchip/irq-gic-v3-mbi.c
63
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-gic-v3-mbi.c
69
err = irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-gic-v3-mbi.c
73
d = irq_domain_get_irq_data(domain->parent, virq);
drivers/irqchip/irq-gic-v3-mbi.c
86
static int mbi_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v3.c
1421
base_sgi = irq_domain_alloc_irqs(gic_data.domain, 8, NUMA_NO_NODE, &sgi_fwspec);
drivers/irqchip/irq-gic-v3.c
1653
static int gic_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v3.c
1661
ret = gic_irq_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-gic-v3.c
1666
ret = gic_irq_domain_map(domain, virq + i, hwirq + i);
drivers/irqchip/irq-gic-v3.c
1674
static void gic_irq_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v3.c
1680
struct irq_data *d = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-gic-v3.c
1710
return d == gic_data.domain;
drivers/irqchip/irq-gic-v3.c
2011
gic_data.domain = irq_domain_create_tree(handle, &gic_irq_domain_ops,
drivers/irqchip/irq-gic-v3.c
2022
if (WARN_ON(!gic_data.domain) || WARN_ON(!gic_data.rdists.rdist)) {
drivers/irqchip/irq-gic-v3.c
2027
irq_domain_update_bus_token(gic_data.domain, DOMAIN_BUS_WIRED);
drivers/irqchip/irq-gic-v3.c
2032
err = mbi_init(handle, gic_data.domain);
drivers/irqchip/irq-gic-v3.c
2050
its_init(handle, &gic_data.rdists, gic_data.domain, dist_prio_irq);
drivers/irqchip/irq-gic-v3.c
2055
gicv2m_init(handle, gic_data.domain);
drivers/irqchip/irq-gic-v3.c
2061
if (gic_data.domain)
drivers/irqchip/irq-gic-v3.c
2062
irq_domain_remove(gic_data.domain);
drivers/irqchip/irq-gic-v3.c
62
struct irq_domain *domain;
drivers/irqchip/irq-gic-v3.c
825
if (generic_handle_domain_irq(gic_data.domain, irqnr)) {
drivers/irqchip/irq-gic-v3.c
838
if (generic_handle_domain_nmi(gic_data.domain, irqnr)) {
drivers/irqchip/irq-gic-v4.c
167
vm->domain = irq_domain_create_hierarchy(gic_domain, 0, vm->nr_vpes,
drivers/irqchip/irq-gic-v4.c
170
if (!vm->domain)
drivers/irqchip/irq-gic-v4.c
178
vpe_base_irq = irq_domain_alloc_irqs(vm->domain, vm->nr_vpes,
drivers/irqchip/irq-gic-v4.c
194
if (vm->domain)
drivers/irqchip/irq-gic-v4.c
195
irq_domain_remove(vm->domain);
drivers/irqchip/irq-gic-v4.c
225
irq_domain_remove(vm->domain);
drivers/irqchip/irq-gic-v4.c
376
int its_init_v4(struct irq_domain *domain,
drivers/irqchip/irq-gic-v4.c
380
if (domain) {
drivers/irqchip/irq-gic-v4.c
382
gic_domain = domain;
drivers/irqchip/irq-gic-v5-its.c
1014
d = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-gic-v5-its.c
1018
irq_domain_free_irqs_parent(domain, virq + i, 1);
drivers/irqchip/irq-gic-v5-its.c
1025
static int gicv5_its_irq_domain_activate(struct irq_domain *domain, struct irq_data *d,
drivers/irqchip/irq-gic-v5-its.c
1038
static void gicv5_its_irq_domain_deactivate(struct irq_domain *domain,
drivers/irqchip/irq-gic-v5-its.c
798
static int gicv5_its_msi_prepare(struct irq_domain *domain, struct device *dev,
drivers/irqchip/irq-gic-v5-its.c
806
msi_info = msi_get_domain_info(domain);
drivers/irqchip/irq-gic-v5-its.c
821
static void gicv5_its_msi_teardown(struct irq_domain *domain, msi_alloc_info_t *info)
drivers/irqchip/irq-gic-v5-its.c
827
msi_info = msi_get_domain_info(domain);
drivers/irqchip/irq-gic-v5-its.c
929
static int gicv5_its_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v5-its.c
959
ret = irq_domain_alloc_irqs_parent(domain, virq + i, 1, &lpi);
drivers/irqchip/irq-gic-v5-its.c
972
irq_domain_set_info(domain, virq + i, hwirq,
drivers/irqchip/irq-gic-v5-its.c
985
irqd = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-gic-v5-its.c
988
irq_domain_free_irqs_parent(domain, virq + i, 1);
drivers/irqchip/irq-gic-v5-its.c
995
static void gicv5_its_irq_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v5-its.c
998
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-gic-v5-iwb.c
207
if (WARN_ON_ONCE(!dev->msi.domain))
drivers/irqchip/irq-gic-v5.c
642
static int gicv5_irq_ppi_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v5.c
653
ret = gicv5_irq_ppi_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-gic-v5.c
661
irq_domain_set_info(domain, virq, hwirq, &gicv5_ppi_irq_chip, NULL,
drivers/irqchip/irq-gic-v5.c
667
static void gicv5_irq_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v5.c
675
d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-gic-v5.c
717
static int gicv5_irq_spi_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v5.c
730
ret = gicv5_irq_spi_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-gic-v5.c
737
irq_domain_set_info(domain, virq, hwirq, &gicv5_spi_irq_chip, chip_data,
drivers/irqchip/irq-gic-v5.c
791
static int gicv5_irq_lpi_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v5.c
804
irqd = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-gic-v5.c
806
irq_domain_set_info(domain, virq, hwirq, &gicv5_lpi_irq_chip, NULL,
drivers/irqchip/irq-gic-v5.c
839
static int gicv5_irq_ipi_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v5.c
853
ret = irq_domain_alloc_irqs_parent(domain, virq + i, 1, &lpi);
drivers/irqchip/irq-gic-v5.c
859
irqd = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-gic-v5.c
861
irq_domain_set_hwirq_and_chip(domain, virq + i, i,
drivers/irqchip/irq-gic-v5.c
872
static void gicv5_irq_ipi_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic-v5.c
879
d = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-gic-v5.c
888
irq_domain_free_irqs_parent(domain, virq + i, 1);
drivers/irqchip/irq-gic-v5.c
901
struct irq_domain *domain;
drivers/irqchip/irq-gic-v5.c
905
domain = gicv5_global_data.ppi_domain;
drivers/irqchip/irq-gic-v5.c
908
domain = gicv5_global_data.spi_domain;
drivers/irqchip/irq-gic-v5.c
911
domain = gicv5_global_data.lpi_domain;
drivers/irqchip/irq-gic-v5.c
918
if (generic_handle_domain_irq(domain, hwirq_id)) {
drivers/irqchip/irq-gic.c
1136
static int gic_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-gic.c
1144
ret = gic_irq_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-gic.c
1149
ret = gic_irq_domain_map(domain, virq + i, hwirq + i);
drivers/irqchip/irq-gic.c
1210
gic->domain = irq_domain_create_linear(handle, gic_irqs,
drivers/irqchip/irq-gic.c
1213
if (WARN_ON(!gic->domain)) {
drivers/irqchip/irq-gic.c
1243
if (WARN_ON(!gic || gic->domain))
drivers/irqchip/irq-gic.c
1436
irq_domain_set_pm_device((*gic)->domain, dev);
drivers/irqchip/irq-gic.c
1510
gicv2m_init(&node->fwnode, gic_data[gic_cnt].domain);
drivers/irqchip/irq-gic.c
1696
gicv2m_init(NULL, gic_data[0].domain);
drivers/irqchip/irq-gic.c
370
generic_handle_domain_irq(gic->domain, irqnr);
drivers/irqchip/irq-gic.c
391
ret = generic_handle_domain_irq(chip_data->domain, gic_irq);
drivers/irqchip/irq-gic.c
402
if (gic->domain->pm_dev)
drivers/irqchip/irq-gic.c
403
seq_puts(p, gic->domain->pm_dev->of_node->name);
drivers/irqchip/irq-gic.c
85
struct irq_domain *domain;
drivers/irqchip/irq-gic.c
859
.fwnode = gic_data[0].domain->fwnode,
drivers/irqchip/irq-gic.c
868
base_sgi = irq_domain_alloc_irqs(gic_data[0].domain, 8, NUMA_NO_NODE, &sgi_fwspec);
drivers/irqchip/irq-hip04.c
209
generic_handle_domain_irq(hip04_data.domain, irqnr);
drivers/irqchip/irq-hip04.c
389
hip04_data.domain = irq_domain_create_legacy(of_fwnode_handle(node), nr_irqs, irq_base, 0,
drivers/irqchip/irq-hip04.c
391
if (WARN_ON(!hip04_data.domain))
drivers/irqchip/irq-hip04.c
55
struct irq_domain *domain;
drivers/irqchip/irq-i8259.c
313
struct irq_domain *domain;
drivers/irqchip/irq-i8259.c
320
domain = irq_domain_create_legacy(of_fwnode_handle(node), 16, I8259A_IRQ_BASE, 0,
drivers/irqchip/irq-i8259.c
322
if (!domain)
drivers/irqchip/irq-i8259.c
328
return domain;
drivers/irqchip/irq-i8259.c
338
struct irq_domain *domain = irq_desc_get_handler_data(desc);
drivers/irqchip/irq-i8259.c
344
generic_handle_domain_irq(domain, hwirq);
drivers/irqchip/irq-i8259.c
349
struct irq_domain *domain;
drivers/irqchip/irq-i8259.c
352
domain = __init_i8259_irqs(node);
drivers/irqchip/irq-i8259.c
357
irq_domain_remove(domain);
drivers/irqchip/irq-i8259.c
362
domain);
drivers/irqchip/irq-idt3243x.c
110
irq_domain_remove(domain);
drivers/irqchip/irq-idt3243x.c
48
struct irq_domain *domain;
drivers/irqchip/irq-idt3243x.c
75
domain = irq_domain_create_linear(of_fwnode_handle(of_node), IDT_PIC_NR_IRQS,
drivers/irqchip/irq-idt3243x.c
77
if (!domain) {
drivers/irqchip/irq-idt3243x.c
82
idtpic->irq_domain = domain;
drivers/irqchip/irq-idt3243x.c
84
ret = irq_alloc_domain_generic_chips(domain, 32, 1, "IDTPIC",
drivers/irqchip/irq-idt3243x.c
90
gc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-imgpdc.c
122
return (struct pdc_intc_priv *)data->domain->host_data;
drivers/irqchip/irq-imgpdc.c
240
generic_handle_domain_irq(priv->domain, i);
drivers/irqchip/irq-imgpdc.c
260
generic_handle_domain_irq(priv->domain, syswake_to_hwirq(syswake));
drivers/irqchip/irq-imgpdc.c
375
priv->domain = irq_domain_create_linear(dev_fwnode(&pdev->dev), 16, &irq_generic_chip_ops,
drivers/irqchip/irq-imgpdc.c
377
if (unlikely(!priv->domain)) {
drivers/irqchip/irq-imgpdc.c
387
ret = irq_alloc_domain_generic_chips(priv->domain, 8, 2, "pdc",
drivers/irqchip/irq-imgpdc.c
395
gc = irq_get_domain_generic_chip(priv->domain, 0);
drivers/irqchip/irq-imgpdc.c
409
gc = irq_get_domain_generic_chip(priv->domain, 8);
drivers/irqchip/irq-imgpdc.c
460
irq_domain_remove(priv->domain);
drivers/irqchip/irq-imgpdc.c
468
irq_domain_remove(priv->domain);
drivers/irqchip/irq-imgpdc.c
82
struct irq_domain *domain;
drivers/irqchip/irq-imx-gpcv2.c
165
static int imx_gpcv2_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-imx-gpcv2.c
176
err = imx_gpcv2_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-imx-gpcv2.c
184
irq_domain_set_hwirq_and_chip(domain, irq + i, hwirq + i,
drivers/irqchip/irq-imx-gpcv2.c
185
&gpcv2_irqchip_data_chip, domain->host_data);
drivers/irqchip/irq-imx-gpcv2.c
189
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-imx-gpcv2.c
190
return irq_domain_alloc_irqs_parent(domain, irq, nr_irqs,
drivers/irqchip/irq-imx-gpcv2.c
209
struct irq_domain *parent_domain, *domain;
drivers/irqchip/irq-imx-gpcv2.c
247
domain = irq_domain_create_hierarchy(parent_domain, 0, GPC_MAX_IRQS,
drivers/irqchip/irq-imx-gpcv2.c
249
if (!domain) {
drivers/irqchip/irq-imx-gpcv2.c
254
irq_set_default_domain(domain);
drivers/irqchip/irq-imx-gpcv2.c
290
fwnode_dev_initialized(domain->fwnode, false);
drivers/irqchip/irq-imx-intmux.c
197
generic_handle_domain_irq(irqchip_data->domain, pos);
drivers/irqchip/irq-imx-intmux.c
205
struct irq_domain *domain;
drivers/irqchip/irq-imx-intmux.c
257
domain = irq_domain_create_linear(of_fwnode_handle(np), 32, &imx_intmux_domain_ops,
drivers/irqchip/irq-imx-intmux.c
259
if (!domain) {
drivers/irqchip/irq-imx-intmux.c
264
data->irqchip_data[i].domain = domain;
drivers/irqchip/irq-imx-intmux.c
265
irq_domain_set_pm_device(domain, &pdev->dev);
drivers/irqchip/irq-imx-intmux.c
301
irq_domain_remove(data->irqchip_data[i].domain);
drivers/irqchip/irq-imx-intmux.c
68
struct irq_domain *domain;
drivers/irqchip/irq-imx-irqsteer.c
153
generic_handle_domain_irq(data->domain, pos + hwirq);
drivers/irqchip/irq-imx-irqsteer.c
215
data->domain = irq_domain_create_linear(dev_fwnode(&pdev->dev), data->reg_num * 32,
drivers/irqchip/irq-imx-irqsteer.c
217
if (!data->domain) {
drivers/irqchip/irq-imx-irqsteer.c
222
irq_domain_set_pm_device(data->domain, &pdev->dev);
drivers/irqchip/irq-imx-irqsteer.c
263
irq_domain_remove(irqsteer_data->domain);
drivers/irqchip/irq-imx-irqsteer.c
37
struct irq_domain *domain;
drivers/irqchip/irq-imx-mu-msi.c
145
static int imx_mu_msi_domain_irq_alloc(struct irq_domain *domain,
drivers/irqchip/irq-imx-mu-msi.c
150
struct imx_mu_msi *msi_data = domain->host_data;
drivers/irqchip/irq-imx-mu-msi.c
167
irq_domain_set_info(domain, virq, pos,
drivers/irqchip/irq-imx-mu-msi.c
173
static void imx_mu_msi_domain_irq_free(struct irq_domain *domain,
drivers/irqchip/irq-imx-mu-msi.c
176
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-ingenic-tcu.c
114
tcu->domain = irq_domain_create_linear(of_fwnode_handle(np), 32, &irq_generic_chip_ops,
drivers/irqchip/irq-ingenic-tcu.c
116
if (!tcu->domain) {
drivers/irqchip/irq-ingenic-tcu.c
121
ret = irq_alloc_domain_generic_chips(tcu->domain, 32, 1, "TCU",
drivers/irqchip/irq-ingenic-tcu.c
129
gc = irq_get_domain_generic_chip(tcu->domain, 0);
drivers/irqchip/irq-ingenic-tcu.c
165
tcu->domain);
drivers/irqchip/irq-ingenic-tcu.c
174
irq_domain_remove(tcu->domain);
drivers/irqchip/irq-ingenic-tcu.c
19
struct irq_domain *domain;
drivers/irqchip/irq-ingenic-tcu.c
27
struct irq_domain *domain = irq_desc_get_handler_data(desc);
drivers/irqchip/irq-ingenic-tcu.c
28
struct irq_chip_generic *gc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-ingenic-tcu.c
43
generic_handle_domain_irq(domain, i);
drivers/irqchip/irq-ingenic.c
100
intc->domain = domain;
drivers/irqchip/irq-ingenic.c
102
err = irq_alloc_domain_generic_chips(domain, 32, 1, "INTC",
drivers/irqchip/irq-ingenic.c
109
gc = irq_get_domain_generic_chip(domain, i * 32);
drivers/irqchip/irq-ingenic.c
133
irq_domain_remove(domain);
drivers/irqchip/irq-ingenic.c
23
struct irq_domain *domain;
drivers/irqchip/irq-ingenic.c
37
struct irq_domain *domain = intc->domain;
drivers/irqchip/irq-ingenic.c
43
gc = irq_get_domain_generic_chip(domain, i * 32);
drivers/irqchip/irq-ingenic.c
52
generic_handle_domain_irq(domain, bit + (i * 32));
drivers/irqchip/irq-ingenic.c
66
struct irq_domain *domain;
drivers/irqchip/irq-ingenic.c
93
domain = irq_domain_create_linear(of_fwnode_handle(node), num_chips * 32,
drivers/irqchip/irq-ingenic.c
95
if (!domain) {
drivers/irqchip/irq-ixp4xx.c
116
generic_handle_domain_irq(ixi->domain, i);
drivers/irqchip/irq-ixp4xx.c
124
generic_handle_domain_irq(ixi->domain, i + 32);
drivers/irqchip/irq-ixp4xx.c
128
static int ixp4xx_irq_domain_translate(struct irq_domain *domain,
drivers/irqchip/irq-ixp4xx.c
237
ixi->domain = irq_domain_create_linear(fwnode, nr_irqs,
drivers/irqchip/irq-ixp4xx.c
240
if (!ixi->domain) {
drivers/irqchip/irq-ixp4xx.c
54
struct irq_domain *domain;
drivers/irqchip/irq-jcore-aic.c
110
domain = irq_domain_create_legacy(of_fwnode_handle(node), dom_sz - min_irq, min_irq,
drivers/irqchip/irq-jcore-aic.c
112
if (!domain)
drivers/irqchip/irq-jcore-aic.c
70
struct irq_domain *domain;
drivers/irqchip/irq-lan966x-oic.c
166
struct lan966x_oic_data *lan966x_oic = gc->domain->host_data;
drivers/irqchip/irq-lan966x-oic.c
234
struct irq_domain *domain;
drivers/irqchip/irq-lan966x-oic.c
250
domain = devm_irq_domain_instantiate(dev, &d_info);
drivers/irqchip/irq-lan966x-oic.c
251
if (IS_ERR(domain))
drivers/irqchip/irq-lan966x-oic.c
252
return dev_err_probe(dev, PTR_ERR(domain),
drivers/irqchip/irq-loongarch-avec.c
274
static int avecintc_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-loongarch-avec.c
278
struct irq_data *irqd = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-loongarch-avec.c
291
irq_domain_set_info(domain, virq + i, virq + i, &avec_irq_controller,
drivers/irqchip/irq-loongarch-avec.c
317
static void avecintc_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-loongarch-avec.c
321
struct irq_data *d = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-loongarch-avec.c
366
loongarch_avec.domain = irq_domain_create_tree(loongarch_avec.fwnode,
drivers/irqchip/irq-loongarch-avec.c
368
if (!loongarch_avec.domain) {
drivers/irqchip/irq-loongarch-avec.c
399
irq_domain_remove(loongarch_avec.domain);
drivers/irqchip/irq-loongarch-avec.c
413
return pch_msi_acpi_init_avec(loongarch_avec.domain);
drivers/irqchip/irq-loongarch-avec.c
43
struct irq_domain *domain;
drivers/irqchip/irq-loongson-eiointc.c
150
struct eiointc_priv *priv = d->domain->host_data;
drivers/irqchip/irq-loongson-eiointc.c
372
static int eiointc_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-loongson-eiointc.c
378
struct eiointc_priv *priv = domain->host_data;
drivers/irqchip/irq-loongson-eiointc.c
380
ret = irq_domain_translate_onecell(domain, arg, &hwirq, &type);
drivers/irqchip/irq-loongson-eiointc.c
385
irq_domain_set_info(domain, virq + i, hwirq + i, &eiointc_irq_chip,
drivers/irqchip/irq-loongson-eiointc.c
392
static void eiointc_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-loongson-eiointc.c
398
struct irq_data *d = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-loongson-htpic.c
108
htpic->domain = __init_i8259_irqs(node);
drivers/irqchip/irq-loongson-htpic.c
109
if (!htpic->domain) {
drivers/irqchip/irq-loongson-htpic.c
142
irq_domain_remove(htpic->domain);
drivers/irqchip/irq-loongson-htpic.c
24
struct irq_domain *domain;
drivers/irqchip/irq-loongson-htpic.c
51
generic_handle_domain_irq(priv->domain, bit);
drivers/irqchip/irq-loongson-htvec.c
112
static int htvec_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-loongson-htvec.c
118
struct htvec *priv = domain->host_data;
drivers/irqchip/irq-loongson-htvec.c
120
ret = irq_domain_translate_onecell(domain, arg, &hwirq, &type);
drivers/irqchip/irq-loongson-htvec.c
125
irq_domain_set_info(domain, virq + i, hwirq + i, &htvec_irq_chip,
drivers/irqchip/irq-loongson-htvec.c
132
static void htvec_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-loongson-htvec.c
138
struct irq_data *d = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-loongson-liointc.c
205
struct irq_domain *domain;
drivers/irqchip/irq-loongson-liointc.c
239
domain = irq_domain_create_linear(domain_handle, LIOINTC_CHIP_IRQ,
drivers/irqchip/irq-loongson-liointc.c
242
domain = irq_domain_create_linear(domain_handle, LIOINTC_CHIP_IRQ,
drivers/irqchip/irq-loongson-liointc.c
244
if (!domain) {
drivers/irqchip/irq-loongson-liointc.c
249
err = irq_alloc_domain_generic_chips(domain, LIOINTC_CHIP_IRQ, 1,
drivers/irqchip/irq-loongson-liointc.c
281
gc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-loongson-liointc.c
284
gc->domain = domain;
drivers/irqchip/irq-loongson-liointc.c
313
irq_domain_remove(domain);
drivers/irqchip/irq-loongson-liointc.c
96
generic_handle_domain_irq(gc->domain, bit);
drivers/irqchip/irq-loongson-pch-lpc.c
46
struct pch_lpc *priv = d->domain->host_data;
drivers/irqchip/irq-loongson-pch-lpc.c
56
struct pch_lpc *priv = d->domain->host_data;
drivers/irqchip/irq-loongson-pch-lpc.c
67
struct pch_lpc *priv = d->domain->host_data;
drivers/irqchip/irq-loongson-pch-lpc.c
79
struct pch_lpc *priv = d->domain->host_data;
drivers/irqchip/irq-loongson-pch-msi.c
104
err = pch_msi_parent_domain_alloc(domain, virq + i, hwirq + i);
drivers/irqchip/irq-loongson-pch-msi.c
108
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
drivers/irqchip/irq-loongson-pch-msi.c
116
irq_domain_free_irqs_parent(domain, virq, i);
drivers/irqchip/irq-loongson-pch-msi.c
121
static void pch_msi_middle_domain_free(struct irq_domain *domain,
drivers/irqchip/irq-loongson-pch-msi.c
125
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-loongson-pch-msi.c
128
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-loongson-pch-msi.c
80
static int pch_msi_parent_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-loongson-pch-msi.c
85
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-loongson-pch-msi.c
89
return irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-loongson-pch-msi.c
92
static int pch_msi_middle_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-loongson-pch-msi.c
96
struct pch_msi_data *priv = domain->host_data;
drivers/irqchip/irq-loongson-pch-pic.c
219
static int pch_pic_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-loongson-pch-pic.c
227
struct pch_pic *priv = domain->host_data;
drivers/irqchip/irq-loongson-pch-pic.c
229
err = pch_pic_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-loongson-pch-pic.c
236
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-loongson-pch-pic.c
240
err = irq_domain_alloc_irqs_parent(domain, virq, 1, &parent_fwspec);
drivers/irqchip/irq-loongson-pch-pic.c
244
irq_domain_set_info(domain, virq, hwirq,
drivers/irqchip/irq-lpc32xx.c
148
generic_handle_domain_irq(lpc32xx_mic_irqc->domain, irq);
drivers/irqchip/irq-lpc32xx.c
163
generic_handle_domain_irq(ic->domain, irq);
drivers/irqchip/irq-lpc32xx.c
213
irqc->domain = irq_domain_create_linear(of_fwnode_handle(node), NR_LPC32XX_IC_IRQS,
drivers/irqchip/irq-lpc32xx.c
215
if (!irqc->domain) {
drivers/irqchip/irq-lpc32xx.c
30
struct irq_domain *domain;
drivers/irqchip/irq-ls-extirq.c
102
struct ls_extirq_data *priv = domain->host_data;
drivers/irqchip/irq-ls-extirq.c
113
irq_domain_set_hwirq_and_chip(domain, virq, hwirq, &ls_extirq_chip,
drivers/irqchip/irq-ls-extirq.c
116
return irq_domain_alloc_irqs_parent(domain, virq, 1, &priv->map[hwirq]);
drivers/irqchip/irq-ls-extirq.c
173
struct irq_domain *domain, *parent_domain;
drivers/irqchip/irq-ls-extirq.c
207
domain = irq_domain_create_hierarchy(parent_domain, 0, priv->nirq, of_fwnode_handle(node),
drivers/irqchip/irq-ls-extirq.c
209
if (!domain)
drivers/irqchip/irq-ls-extirq.c
99
ls_extirq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-ls-scfg-msi.c
137
static int ls_scfg_msi_domain_irq_alloc(struct irq_domain *domain,
drivers/irqchip/irq-ls-scfg-msi.c
143
struct ls_scfg_msi *msi_data = domain->host_data;
drivers/irqchip/irq-ls-scfg-msi.c
163
irq_domain_set_info(domain, virq, pos,
drivers/irqchip/irq-ls-scfg-msi.c
170
static void ls_scfg_msi_domain_irq_free(struct irq_domain *domain,
drivers/irqchip/irq-ls-scfg-msi.c
173
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-ls1x.c
129
priv->domain = irq_domain_create_linear(of_fwnode_handle(node), 32, &irq_generic_chip_ops,
drivers/irqchip/irq-ls1x.c
131
if (!priv->domain) {
drivers/irqchip/irq-ls1x.c
137
err = irq_alloc_domain_generic_chips(priv->domain, 32, 2,
drivers/irqchip/irq-ls1x.c
155
gc = irq_get_domain_generic_chip(priv->domain, 0);
drivers/irqchip/irq-ls1x.c
184
irq_domain_remove(priv->domain);
drivers/irqchip/irq-ls1x.c
32
struct irq_domain *domain;
drivers/irqchip/irq-ls1x.c
53
generic_handle_domain_irq(priv->domain, bit);
drivers/irqchip/irq-mbigen.c
225
if (WARN_ON_ONCE(!dev->msi.domain))
drivers/irqchip/irq-mchp-eic.c
159
static int mchp_eic_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-mchp-eic.c
171
ret = irq_domain_translate_twocell(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-mchp-eic.c
189
irq_domain_set_hwirq_and_chip(domain, virq, hwirq, &mchp_eic_chip, eic);
drivers/irqchip/irq-mchp-eic.c
191
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-mchp-eic.c
197
return irq_domain_alloc_irqs_parent(domain, virq, 1, &parent_fwspec);
drivers/irqchip/irq-mchp-eic.c
256
eic->domain = irq_domain_create_hierarchy(parent_domain, 0, MCHP_EIC_NIRQ,
drivers/irqchip/irq-mchp-eic.c
259
if (!eic->domain) {
drivers/irqchip/irq-mchp-eic.c
39
struct irq_domain *domain;
drivers/irqchip/irq-meson-gpio.c
443
struct meson_gpio_irq_controller *ctl = data->domain->host_data;
drivers/irqchip/irq-meson-gpio.c
468
static int meson_gpio_irq_domain_translate(struct irq_domain *domain,
drivers/irqchip/irq-meson-gpio.c
482
static int meson_gpio_irq_allocate_gic_irq(struct irq_domain *domain,
drivers/irqchip/irq-meson-gpio.c
489
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-meson-gpio.c
495
return irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-meson-gpio.c
498
static int meson_gpio_irq_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-meson-gpio.c
504
struct meson_gpio_irq_controller *ctl = domain->host_data;
drivers/irqchip/irq-meson-gpio.c
513
ret = meson_gpio_irq_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-meson-gpio.c
521
ret = meson_gpio_irq_allocate_gic_irq(domain, virq,
drivers/irqchip/irq-meson-gpio.c
529
irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
drivers/irqchip/irq-meson-gpio.c
535
static void meson_gpio_irq_domain_free(struct irq_domain *domain,
drivers/irqchip/irq-meson-gpio.c
539
struct meson_gpio_irq_controller *ctl = domain->host_data;
drivers/irqchip/irq-meson-gpio.c
546
irq_domain_free_irqs_parent(domain, virq, 1);
drivers/irqchip/irq-meson-gpio.c
548
irq_data = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-meson-gpio.c
589
struct irq_domain *domain, *parent_domain;
drivers/irqchip/irq-meson-gpio.c
620
domain = irq_domain_create_hierarchy(parent_domain, 0,
drivers/irqchip/irq-meson-gpio.c
625
if (!domain) {
drivers/irqchip/irq-mips-cpu.c
183
static int mips_cpu_ipi_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-mips-cpu.c
186
struct cpu_ipi_domain_state *state = domain->host_data;
drivers/irqchip/irq-mips-cpu.c
196
ret = irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq,
drivers/irqchip/irq-mips-cpu.c
202
ret = irq_domain_set_hwirq_and_chip(domain->parent, virq + i, hwirq,
drivers/irqchip/irq-mmp.c
122
struct irq_domain *domain = d->domain;
drivers/irqchip/irq-mmp.c
123
struct icu_chip_data *data = (struct icu_chip_data *)domain->host_data;
drivers/irqchip/irq-mmp.c
150
struct irq_domain *domain;
drivers/irqchip/irq-mmp.c
159
domain = icu_data[i].domain;
drivers/irqchip/irq-mmp.c
160
data = (struct icu_chip_data *)domain->host_data;
drivers/irqchip/irq-mmp.c
233
generic_handle_domain_irq(icu_data[0].domain, hwirq);
drivers/irqchip/irq-mmp.c
244
generic_handle_domain_irq(icu_data[0].domain, hwirq);
drivers/irqchip/irq-mmp.c
264
icu_data[0].domain = irq_domain_create_linear(of_fwnode_handle(node), nr_irqs,
drivers/irqchip/irq-mmp.c
268
ret = irq_create_mapping(icu_data[0].domain, irq);
drivers/irqchip/irq-mmp.c
283
irq_domain_remove(icu_data[0].domain);
drivers/irqchip/irq-mmp.c
394
icu_data[i].domain = irq_domain_create_linear(of_fwnode_handle(node), nr_irqs,
drivers/irqchip/irq-mmp.c
398
ret = irq_create_mapping(icu_data[i].domain, irq);
drivers/irqchip/irq-mmp.c
421
irq_domain_remove(icu_data[i].domain);
drivers/irqchip/irq-mmp.c
50
struct irq_domain *domain;
drivers/irqchip/irq-mmp.c
69
struct irq_domain *domain = d->domain;
drivers/irqchip/irq-mmp.c
70
struct icu_chip_data *data = (struct icu_chip_data *)domain->host_data;
drivers/irqchip/irq-mmp.c
93
struct irq_domain *domain = d->domain;
drivers/irqchip/irq-mmp.c
94
struct icu_chip_data *data = (struct icu_chip_data *)domain->host_data;
drivers/irqchip/irq-mscc-ocelot.c
126
struct irq_domain *domain;
drivers/irqchip/irq-mscc-ocelot.c
134
domain = irq_domain_create_linear(of_fwnode_handle(node), p->n_irq,
drivers/irqchip/irq-mscc-ocelot.c
136
if (!domain) {
drivers/irqchip/irq-mscc-ocelot.c
141
ret = irq_alloc_domain_generic_chips(domain, p->n_irq, 1,
drivers/irqchip/irq-mscc-ocelot.c
149
gc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-mscc-ocelot.c
178
domain->host_data = p;
drivers/irqchip/irq-mscc-ocelot.c
180
domain);
drivers/irqchip/irq-mscc-ocelot.c
188
irq_domain_remove(domain);
drivers/irqchip/irq-mscc-ocelot.c
80
struct irq_domain *d = data->domain;
drivers/irqchip/irq-msi-lib.c
26
bool msi_lib_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
drivers/irqchip/irq-msi-lib.c
43
if (domain->bus_token == pops->bus_select_token) {
drivers/irqchip/irq-msi-lib.c
44
if (WARN_ON_ONCE(domain != real_parent))
drivers/irqchip/irq-mst-intc.c
206
static int mst_intc_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-mst-intc.c
212
struct mst_intc_chip_data *cd = domain->host_data;
drivers/irqchip/irq-mst-intc.c
224
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
drivers/irqchip/irq-mst-intc.c
226
domain->host_data);
drivers/irqchip/irq-mst-intc.c
229
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-mst-intc.c
240
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, &parent_fwspec);
drivers/irqchip/irq-mst-intc.c
252
struct irq_domain *domain, *domain_parent;
drivers/irqchip/irq-mst-intc.c
280
domain = irq_domain_create_hierarchy(domain_parent, 0, cd->nr_irqs, of_fwnode_handle(dn),
drivers/irqchip/irq-mst-intc.c
282
if (!domain) {
drivers/irqchip/irq-mtk-cirq.c
170
static int mtk_cirq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-mtk-cirq.c
179
ret = mtk_cirq_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-mtk-cirq.c
186
irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
drivers/irqchip/irq-mtk-cirq.c
188
domain->host_data);
drivers/irqchip/irq-mtk-cirq.c
190
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-mtk-cirq.c
191
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs,
drivers/irqchip/irq-mtk-cirq.c
230
irq = irq_find_mapping(cirq_data->domain, i);
drivers/irqchip/irq-mtk-cirq.c
303
struct irq_domain *domain, *domain_parent;
drivers/irqchip/irq-mtk-cirq.c
343
domain = irq_domain_create_hierarchy(domain_parent, 0, irq_num, of_fwnode_handle(node),
drivers/irqchip/irq-mtk-cirq.c
345
if (!domain) {
drivers/irqchip/irq-mtk-cirq.c
349
cirq_data->domain = domain;
drivers/irqchip/irq-mtk-cirq.c
63
struct irq_domain *domain;
drivers/irqchip/irq-mtk-sysirq.c
109
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
drivers/irqchip/irq-mtk-sysirq.c
111
domain->host_data);
drivers/irqchip/irq-mtk-sysirq.c
113
gic_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-mtk-sysirq.c
114
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, &gic_fwspec);
drivers/irqchip/irq-mtk-sysirq.c
126
struct irq_domain *domain, *domain_parent;
drivers/irqchip/irq-mtk-sysirq.c
209
domain = irq_domain_create_hierarchy(domain_parent, 0, intpol_num, of_fwnode_handle(node),
drivers/irqchip/irq-mtk-sysirq.c
211
if (!domain) {
drivers/irqchip/irq-mtk-sysirq.c
92
static int mtk_sysirq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-mvebu-gicp.c
109
ret = irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-mvebu-gicp.c
115
ret = irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
drivers/irqchip/irq-mvebu-gicp.c
123
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-mvebu-gicp.c
131
static void gicp_irq_domain_free(struct irq_domain *domain,
drivers/irqchip/irq-mvebu-gicp.c
134
struct mvebu_gicp *gicp = domain->host_data;
drivers/irqchip/irq-mvebu-gicp.c
135
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-mvebu-gicp.c
142
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-mvebu-gicp.c
82
static int gicp_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-mvebu-gicp.c
85
struct mvebu_gicp *gicp = domain->host_data;
drivers/irqchip/irq-mvebu-gicp.c
99
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-mvebu-icu.c
133
static int mvebu_icu_msi_init(struct irq_domain *domain, struct msi_domain_info *info,
drivers/irqchip/irq-mvebu-icu.c
136
irq_domain_set_hwirq_and_chip(domain, virq, hwirq, info->chip, info->chip_data);
drivers/irqchip/irq-mvebu-icu.c
279
dev->msi.domain = of_msi_get_domain(dev, dev->of_node, DOMAIN_BUS_PLATFORM_MSI);
drivers/irqchip/irq-mvebu-icu.c
280
if (!dev->msi.domain)
drivers/irqchip/irq-mvebu-icu.c
283
if (!irq_domain_get_of_node(dev->msi.domain))
drivers/irqchip/irq-mvebu-odmi.c
103
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-mvebu-odmi.c
109
ret = irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-mvebu-odmi.c
119
d = irq_domain_get_irq_data(domain->parent, virq);
drivers/irqchip/irq-mvebu-odmi.c
122
irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
drivers/irqchip/irq-mvebu-odmi.c
128
static void odmi_irq_domain_free(struct irq_domain *domain,
drivers/irqchip/irq-mvebu-odmi.c
131
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-mvebu-odmi.c
138
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-mvebu-odmi.c
81
static int odmi_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-mvebu-pic.c
113
generic_handle_domain_irq(pic->domain, irqn);
drivers/irqchip/irq-mvebu-pic.c
153
pic->domain = irq_domain_create_linear(dev_fwnode(&pdev->dev), PIC_MAX_IRQS,
drivers/irqchip/irq-mvebu-pic.c
155
if (!pic->domain) {
drivers/irqchip/irq-mvebu-pic.c
175
irq_domain_remove(pic->domain);
drivers/irqchip/irq-mvebu-pic.c
32
struct irq_domain *domain;
drivers/irqchip/irq-mvebu-pic.c
84
static int mvebu_pic_irq_map(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-mvebu-pic.c
87
struct mvebu_pic *pic = domain->host_data;
drivers/irqchip/irq-mvebu-sei.c
169
static int mvebu_sei_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-mvebu-sei.c
172
struct mvebu_sei *sei = domain->host_data;
drivers/irqchip/irq-mvebu-sei.c
176
irq_domain_set_hwirq_and_chip(domain, virq, fwspec->param[0],
drivers/irqchip/irq-mvebu-sei.c
182
static void mvebu_sei_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-mvebu-sei.c
188
struct irq_data *d = irq_domain_get_irq_data(domain, virq + i);
drivers/irqchip/irq-mvebu-sei.c
199
static int mvebu_sei_ap_translate(struct irq_domain *domain,
drivers/irqchip/irq-mvebu-sei.c
210
static int mvebu_sei_ap_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-mvebu-sei.c
213
struct mvebu_sei *sei = domain->host_data;
drivers/irqchip/irq-mvebu-sei.c
219
mvebu_sei_ap_translate(domain, arg, &hwirq, &type);
drivers/irqchip/irq-mvebu-sei.c
221
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-mvebu-sei.c
225
err = irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-mvebu-sei.c
229
irq_domain_set_info(domain, virq, hwirq,
drivers/irqchip/irq-mvebu-sei.c
250
static int mvebu_sei_cp_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-mvebu-sei.c
254
struct mvebu_sei *sei = domain->host_data;
drivers/irqchip/irq-mvebu-sei.c
273
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-mvebu-sei.c
277
ret = irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-mvebu-sei.c
281
irq_domain_set_info(domain, virq, hwirq,
drivers/irqchip/irq-mvebu-sei.c
292
static void mvebu_sei_cp_domain_free(struct irq_domain *domain,
drivers/irqchip/irq-mvebu-sei.c
295
struct mvebu_sei *sei = domain->host_data;
drivers/irqchip/irq-mvebu-sei.c
296
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-mvebu-sei.c
304
irq_domain_free_irqs_parent(domain, virq, 1);
drivers/irqchip/irq-nvic.c
48
static int nvic_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-nvic.c
56
ret = irq_domain_translate_onecell(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-nvic.c
61
irq_map_generic_chip(domain, virq + i, hwirq + i);
drivers/irqchip/irq-omap-intc.c
251
domain = irq_domain_create_linear(of_fwnode_handle(node), omap_nr_irqs,
drivers/irqchip/irq-omap-intc.c
256
ret = omap_alloc_gc_of(domain, omap_irq_base);
drivers/irqchip/irq-omap-intc.c
258
irq_domain_remove(domain);
drivers/irqchip/irq-omap-intc.c
277
domain = irq_domain_create_legacy(of_fwnode_handle(node), omap_nr_irqs, irq_base, 0,
drivers/irqchip/irq-omap-intc.c
359
generic_handle_domain_irq(domain, irqnr);
drivers/irqchip/irq-omap-intc.c
67
static struct irq_domain *domain;
drivers/irqchip/irq-or1k-pic.c
133
struct or1k_pic_dev *pic = data->domain->host_data;
drivers/irqchip/irq-orion.c
142
struct irq_domain *domain;
drivers/irqchip/irq-orion.c
149
domain = irq_domain_create_linear(of_fwnode_handle(np), nrirqs,
drivers/irqchip/irq-orion.c
151
if (!domain) {
drivers/irqchip/irq-orion.c
156
ret = irq_alloc_domain_generic_chips(domain, nrirqs, 1, np->name,
drivers/irqchip/irq-orion.c
181
gc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-orion.c
200
domain);
drivers/irqchip/irq-owl-sirq.c
228
static int owl_sirq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-owl-sirq.c
231
struct owl_sirq_chip_data *chip_data = domain->host_data;
drivers/irqchip/irq-owl-sirq.c
241
ret = owl_sirq_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-owl-sirq.c
259
irq_domain_set_hwirq_and_chip(domain, virq, hwirq, &owl_sirq_chip,
drivers/irqchip/irq-owl-sirq.c
262
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-owl-sirq.c
268
return irq_domain_alloc_irqs_parent(domain, virq, 1, &parent_fwspec);
drivers/irqchip/irq-owl-sirq.c
281
struct irq_domain *domain, *parent_domain;
drivers/irqchip/irq-owl-sirq.c
326
domain = irq_domain_create_hierarchy(parent_domain, 0, NUM_SIRQ, of_fwnode_handle(node),
drivers/irqchip/irq-owl-sirq.c
328
if (!domain) {
drivers/irqchip/irq-pic32-evic.c
189
static void __init pic32_ext_irq_of_init(struct irq_domain *domain)
drivers/irqchip/irq-pic32-evic.c
191
struct device_node *node = irq_domain_get_of_node(domain);
drivers/irqchip/irq-pic32-evic.c
192
struct evic_chip_data *priv = domain->host_data;
drivers/irqchip/irq-pic32-evic.c
53
return (struct evic_chip_data *)data->domain->host_data;
drivers/irqchip/irq-pruss-intc.c
110
struct irq_domain *domain;
drivers/irqchip/irq-pruss-intc.c
499
err = generic_handle_domain_irq(intc->domain, hwirq);
drivers/irqchip/irq-pruss-intc.c
558
intc->domain = irq_domain_create_linear(dev_fwnode(dev), max_system_events,
drivers/irqchip/irq-pruss-intc.c
560
if (!intc->domain)
drivers/irqchip/irq-pruss-intc.c
596
irq_domain_remove(intc->domain);
drivers/irqchip/irq-pruss-intc.c
615
irq_dispose_mapping(irq_find_mapping(intc->domain, hwirq));
drivers/irqchip/irq-pruss-intc.c
617
irq_domain_remove(intc->domain);
drivers/irqchip/irq-qcom-mpm.c
215
static int qcom_mpm_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-qcom-mpm.c
218
struct qcom_mpm_priv *priv = domain->host_data;
drivers/irqchip/irq-qcom-mpm.c
226
ret = irq_domain_translate_twocell(domain, fwspec, &pin, &type);
drivers/irqchip/irq-qcom-mpm.c
231
return irq_domain_disconnect_hierarchy(domain, virq);
drivers/irqchip/irq-qcom-mpm.c
233
ret = irq_domain_set_hwirq_and_chip(domain, virq, pin,
drivers/irqchip/irq-qcom-mpm.c
240
return irq_domain_disconnect_hierarchy(domain->parent, virq);
drivers/irqchip/irq-qcom-mpm.c
248
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-qcom-mpm.c
254
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs,
drivers/irqchip/irq-qcom-mpm.c
282
struct irq_desc *desc = irq_resolve_mapping(priv->domain, pin);
drivers/irqchip/irq-qcom-mpm.c
454
priv->domain = irq_domain_create_hierarchy(parent_domain,
drivers/irqchip/irq-qcom-mpm.c
457
if (!priv->domain) {
drivers/irqchip/irq-qcom-mpm.c
463
irq_domain_update_bus_token(priv->domain, DOMAIN_BUS_WAKEUP);
drivers/irqchip/irq-qcom-mpm.c
475
irq_domain_remove(priv->domain);
drivers/irqchip/irq-qcom-mpm.c
86
struct irq_domain *domain;
drivers/irqchip/irq-realtek-rtl.c
106
struct irq_domain *domain;
drivers/irqchip/irq-realtek-rtl.c
118
domain = irq_desc_get_handler_data(desc);
drivers/irqchip/irq-realtek-rtl.c
120
generic_handle_domain_irq(domain, soc_int);
drivers/irqchip/irq-realtek-rtl.c
129
struct irq_domain *domain;
drivers/irqchip/irq-realtek-rtl.c
165
domain = irq_domain_create_linear(of_fwnode_handle(node), RTL_ICTL_NUM_INPUTS, &irq_domain_ops, NULL);
drivers/irqchip/irq-realtek-rtl.c
166
if (!domain)
drivers/irqchip/irq-realtek-rtl.c
169
irq_set_chained_handler_and_data(parent_irq, realtek_irq_dispatch, domain);
drivers/irqchip/irq-renesas-irqc.c
58
return data->domain->host_data;
drivers/irqchip/irq-renesas-rza1.c
102
static int rza1_irqc_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-renesas-rza1.c
105
struct rza1_irqc_priv *priv = domain->host_data;
drivers/irqchip/irq-renesas-rza1.c
112
ret = irq_domain_set_hwirq_and_chip(domain, virq, hwirq, &priv->chip,
drivers/irqchip/irq-renesas-rza1.c
122
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, &spec);
drivers/irqchip/irq-renesas-rza1.c
125
static int rza1_irqc_translate(struct irq_domain *domain,
drivers/irqchip/irq-renesas-rza1.c
51
return data->domain->host_data;
drivers/irqchip/irq-renesas-rzg2l.c
471
static int rzg2l_irqc_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-renesas-rzg2l.c
474
struct rzg2l_irqc_priv *priv = domain->host_data;
drivers/irqchip/irq-renesas-rzg2l.c
480
ret = irq_domain_translate_twocell(domain, arg, &hwirq, &type);
drivers/irqchip/irq-renesas-rzg2l.c
502
ret = irq_domain_set_hwirq_and_chip(domain, virq, hwirq, priv->irqchip,
drivers/irqchip/irq-renesas-rzg2l.c
507
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, &priv->fwspec[hwirq]);
drivers/irqchip/irq-renesas-rzg2l.c
89
return data->domain->host_data;
drivers/irqchip/irq-renesas-rzt2h.c
183
static int rzt2h_icu_alloc(struct irq_domain *domain, unsigned int virq, unsigned int nr_irqs,
drivers/irqchip/irq-renesas-rzt2h.c
186
struct rzt2h_icu_priv *priv = domain->host_data;
drivers/irqchip/irq-renesas-rzt2h.c
191
ret = irq_domain_translate_twocell(domain, arg, &hwirq, &type);
drivers/irqchip/irq-renesas-rzt2h.c
195
ret = irq_domain_set_hwirq_and_chip(domain, virq, hwirq, &rzt2h_icu_chip, NULL);
drivers/irqchip/irq-renesas-rzt2h.c
199
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, &priv->fwspec[hwirq]);
drivers/irqchip/irq-renesas-rzt2h.c
83
return data->domain->host_data;
drivers/irqchip/irq-renesas-rzv2h.c
167
return data->domain->host_data;
drivers/irqchip/irq-renesas-rzv2h.c
492
static int rzv2h_icu_alloc(struct irq_domain *domain, unsigned int virq, unsigned int nr_irqs,
drivers/irqchip/irq-renesas-rzv2h.c
495
struct rzv2h_icu_priv *priv = domain->host_data;
drivers/irqchip/irq-renesas-rzv2h.c
501
ret = irq_domain_translate_twocell(domain, arg, &hwirq, &type);
drivers/irqchip/irq-renesas-rzv2h.c
522
ret = irq_domain_set_hwirq_and_chip(domain, virq, hwirq, &rzv2h_icu_chip,
drivers/irqchip/irq-renesas-rzv2h.c
527
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, &priv->fwspec[hwirq]);
drivers/irqchip/irq-riscv-aplic-direct.c
103
static int aplic_direct_irqdomain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-riscv-aplic-direct.c
106
struct aplic_priv *priv = domain->host_data;
drivers/irqchip/irq-riscv-aplic-direct.c
118
irq_domain_set_info(domain, virq + i, hwirq + i, &aplic_direct_chip,
drivers/irqchip/irq-riscv-aplic-direct.c
236
struct irq_domain *domain;
drivers/irqchip/irq-riscv-aplic-direct.c
304
domain = irq_find_matching_fwnode(riscv_get_intc_hwnode(),
drivers/irqchip/irq-riscv-aplic-direct.c
306
if (!aplic_direct_parent_irq && domain) {
drivers/irqchip/irq-riscv-aplic-direct.c
307
aplic_direct_parent_irq = irq_create_mapping(domain, RV_IRQ_EXT);
drivers/irqchip/irq-riscv-imsic-early.c
185
struct irq_domain *domain;
drivers/irqchip/irq-riscv-imsic-early.c
189
domain = irq_find_matching_fwnode(riscv_get_intc_hwnode(), DOMAIN_BUS_ANY);
drivers/irqchip/irq-riscv-imsic-early.c
190
if (!domain) {
drivers/irqchip/irq-riscv-imsic-early.c
194
imsic_parent_irq = irq_create_mapping(domain, RV_IRQ_EXT);
drivers/irqchip/irq-riscv-imsic-platform.c
225
static int imsic_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-riscv-imsic-platform.c
238
irq_domain_set_info(domain, virq, virq, &imsic_irq_base_chip, vec,
drivers/irqchip/irq-riscv-imsic-platform.c
247
static void imsic_irq_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-riscv-imsic-platform.c
250
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-riscv-imsic-platform.c
253
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-riscv-imsic-platform.c
278
static bool imsic_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
drivers/irqchip/irq-riscv-imsic-platform.c
281
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
drivers/irqchip/irq-riscv-intc.c
135
static int riscv_intc_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-riscv-intc.c
144
ret = irq_domain_translate_onecell(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-riscv-intc.c
158
ret = riscv_intc_domain_map(domain, virq + i, hwirq + i);
drivers/irqchip/irq-sg2042-msi.c
132
static int sg204x_msi_parent_domain_alloc(struct irq_domain *domain, unsigned int virq, int hwirq)
drivers/irqchip/irq-sg2042-msi.c
134
struct sg204x_msi_chipdata *data = domain->host_data;
drivers/irqchip/irq-sg2042-msi.c
139
fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-sg2042-msi.c
144
ret = irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-sg2042-msi.c
148
d = irq_domain_get_irq_data(domain->parent, virq);
drivers/irqchip/irq-sg2042-msi.c
152
static int sg204x_msi_middle_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-sg2042-msi.c
155
struct sg204x_msi_chipdata *data = domain->host_data;
drivers/irqchip/irq-sg2042-msi.c
163
err = sg204x_msi_parent_domain_alloc(domain, virq + i, hwirq + i);
drivers/irqchip/irq-sg2042-msi.c
167
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
drivers/irqchip/irq-sg2042-msi.c
174
irq_domain_free_irqs_parent(domain, virq, i);
drivers/irqchip/irq-sg2042-msi.c
178
static void sg204x_msi_middle_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-sg2042-msi.c
181
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-sg2042-msi.c
184
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/irqchip/irq-sifive-plic.c
349
static int plic_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-sifive-plic.c
357
ret = plic_irq_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-sifive-plic.c
362
ret = plic_irqdomain_map(domain, virq + i, hwirq + i);
drivers/irqchip/irq-sifive-plic.c
762
struct irq_domain *domain;
drivers/irqchip/irq-sifive-plic.c
780
domain = irq_find_matching_fwnode(riscv_get_intc_hwnode(), DOMAIN_BUS_ANY);
drivers/irqchip/irq-sifive-plic.c
781
if (domain)
drivers/irqchip/irq-sifive-plic.c
782
plic_parent_irq = irq_create_mapping(domain, RV_IRQ_EXT);
drivers/irqchip/irq-sni-exiu.c
138
static int exiu_domain_translate(struct irq_domain *domain,
drivers/irqchip/irq-sni-exiu.c
143
struct exiu_irq_data *info = domain->host_data;
drivers/irqchip/irq-sni-exiu.c
232
struct irq_domain *parent_domain, *domain;
drivers/irqchip/irq-sni-exiu.c
256
domain = irq_domain_create_hierarchy(parent_domain, 0, NUM_IRQS, of_fwnode_handle(node),
drivers/irqchip/irq-sni-exiu.c
258
if (!domain) {
drivers/irqchip/irq-sni-exiu.c
278
struct irq_domain *domain;
drivers/irqchip/irq-sni-exiu.c
292
domain = acpi_irq_create_hierarchy(0, NUM_IRQS, dev_fwnode(&pdev->dev),
drivers/irqchip/irq-sni-exiu.c
294
if (!domain) {
drivers/irqchip/irq-sp7021-intc.c
180
generic_handle_domain_irq(sp_intc.domain, hwirq);
drivers/irqchip/irq-sp7021-intc.c
195
static int sp_intc_irq_domain_map(struct irq_domain *domain,
drivers/irqchip/irq-sp7021-intc.c
259
sp_intc.domain = irq_domain_create_linear(of_fwnode_handle(node), SP_INTC_NR_IRQS,
drivers/irqchip/irq-sp7021-intc.c
261
if (!sp_intc.domain) {
drivers/irqchip/irq-sp7021-intc.c
69
struct irq_domain *domain;
drivers/irqchip/irq-starfive-jh8100-intc.c
106
generic_handle_domain_irq(irqc->domain, hwirq);
drivers/irqchip/irq-starfive-jh8100-intc.c
161
irqc->domain = irq_domain_create_linear(of_fwnode_handle(intc), STARFIVE_INTC_SRC_IRQ_NUM,
drivers/irqchip/irq-starfive-jh8100-intc.c
163
if (!irqc->domain) {
drivers/irqchip/irq-starfive-jh8100-intc.c
185
irq_domain_remove(irqc->domain);
drivers/irqchip/irq-starfive-jh8100-intc.c
31
struct irq_domain *domain;
drivers/irqchip/irq-stm32-exti.c
118
struct irq_domain *domain = irq_desc_get_handler_data(desc);
drivers/irqchip/irq-stm32-exti.c
120
unsigned int nbanks = domain->gc->num_chips;
drivers/irqchip/irq-stm32-exti.c
128
gc = irq_get_domain_generic_chip(domain, irq_base);
drivers/irqchip/irq-stm32-exti.c
132
generic_handle_domain_irq(domain, irq_base + n);
drivers/irqchip/irq-stm32-exti.c
331
struct irq_domain *domain;
drivers/irqchip/irq-stm32-exti.c
337
domain = irq_domain_create_linear(of_fwnode_handle(node), drv_data->bank_nr * IRQS_PER_BANK,
drivers/irqchip/irq-stm32-exti.c
339
if (!domain) {
drivers/irqchip/irq-stm32-exti.c
346
ret = irq_alloc_domain_generic_chips(domain, IRQS_PER_BANK, 1, "exti",
drivers/irqchip/irq-stm32-exti.c
361
gc = irq_get_domain_generic_chip(domain, i * IRQS_PER_BANK);
drivers/irqchip/irq-stm32-exti.c
382
irq_set_handler_data(irq, domain);
drivers/irqchip/irq-stm32-exti.c
389
irq_domain_remove(domain);
drivers/irqchip/irq-stm32mp-exti.c
617
struct irq_domain *domain = data;
drivers/irqchip/irq-stm32mp-exti.c
619
irq_domain_remove(domain);
drivers/irqchip/irq-stm32mp-exti.c
625
struct irq_domain *parent_domain, *domain;
drivers/irqchip/irq-stm32mp-exti.c
685
domain = irq_domain_create_hierarchy(parent_domain, 0, drv_data->bank_nr * IRQS_PER_BANK,
drivers/irqchip/irq-stm32mp-exti.c
687
if (!domain) {
drivers/irqchip/irq-stm32mp-exti.c
692
ret = devm_add_action_or_reset(dev, stm32mp_exti_remove_irq, domain);
drivers/irqchip/irq-sun6i-r.c
198
static int sun6i_r_intc_domain_translate(struct irq_domain *domain,
drivers/irqchip/irq-sun6i-r.c
222
static int sun6i_r_intc_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-sun6i-r.c
232
ret = sun6i_r_intc_domain_translate(domain, fwspec, &hwirq, &type);
drivers/irqchip/irq-sun6i-r.c
240
.fwnode = domain->parent->fwnode,
drivers/irqchip/irq-sun6i-r.c
245
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, &gic_fwspec);
drivers/irqchip/irq-sun6i-r.c
251
irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
drivers/irqchip/irq-sun6i-r.c
256
irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
drivers/irqchip/irq-sun6i-r.c
316
struct irq_domain *domain, *parent_domain;
drivers/irqchip/irq-sun6i-r.c
345
domain = irq_domain_create_hierarchy(parent_domain, 0, 0, of_fwnode_handle(node),
drivers/irqchip/irq-sun6i-r.c
347
if (!domain) {
drivers/irqchip/irq-sunxi-nmi.c
101
generic_handle_domain_irq(domain, 0);
drivers/irqchip/irq-sunxi-nmi.c
154
struct irq_domain *domain;
drivers/irqchip/irq-sunxi-nmi.c
157
domain = irq_domain_create_linear(of_fwnode_handle(node), 1, &irq_generic_chip_ops, NULL);
drivers/irqchip/irq-sunxi-nmi.c
158
if (!domain) {
drivers/irqchip/irq-sunxi-nmi.c
163
ret = irq_alloc_domain_generic_chips(domain, 1, 2, DRV_NAME,
drivers/irqchip/irq-sunxi-nmi.c
178
gc = irq_get_domain_generic_chip(domain, 0);
drivers/irqchip/irq-sunxi-nmi.c
214
irq_set_chained_handler_and_data(irq, sunxi_sc_nmi_handle_irq, domain);
drivers/irqchip/irq-sunxi-nmi.c
219
irq_domain_remove(domain);
drivers/irqchip/irq-sunxi-nmi.c
97
struct irq_domain *domain = irq_desc_get_handler_data(desc);
drivers/irqchip/irq-tb10x.c
117
domain = irq_domain_create_linear(of_fwnode_handle(ictl), AB_IRQCTL_MAXIRQ,
drivers/irqchip/irq-tb10x.c
119
if (!domain) {
drivers/irqchip/irq-tb10x.c
126
ret = irq_alloc_domain_generic_chips(domain, AB_IRQCTL_MAXIRQ,
drivers/irqchip/irq-tb10x.c
136
gc = domain->gc->gc[0];
drivers/irqchip/irq-tb10x.c
158
domain);
drivers/irqchip/irq-tb10x.c
169
irq_domain_remove(domain);
drivers/irqchip/irq-tb10x.c
83
struct irq_domain *domain = irq_desc_get_handler_data(desc);
drivers/irqchip/irq-tb10x.c
86
generic_handle_domain_irq(domain, irq);
drivers/irqchip/irq-tb10x.c
95
struct irq_domain *domain;
drivers/irqchip/irq-tegra.c
240
static int tegra_ictlr_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-tegra.c
246
struct tegra_ictlr_info *info = domain->host_data;
drivers/irqchip/irq-tegra.c
262
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
drivers/irqchip/irq-tegra.c
268
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-tegra.c
269
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs,
drivers/irqchip/irq-tegra.c
282
struct irq_domain *parent_domain, *domain;
drivers/irqchip/irq-tegra.c
337
domain = irq_domain_create_hierarchy(parent_domain, 0, num_ictlrs * 32,
drivers/irqchip/irq-tegra.c
339
if (!domain) {
drivers/irqchip/irq-ti-sci-inta.c
149
struct irq_domain *domain;
drivers/irqchip/irq-ti-sci-inta.c
154
domain = vint_desc->domain;
drivers/irqchip/irq-ti-sci-inta.c
155
inta = domain->host_data;
drivers/irqchip/irq-ti-sci-inta.c
163
generic_handle_domain_irq(domain, vint_desc->events[bit].hwirq);
drivers/irqchip/irq-ti-sci-inta.c
205
static struct ti_sci_inta_vint_desc *ti_sci_inta_alloc_parent_irq(struct irq_domain *domain)
drivers/irqchip/irq-ti-sci-inta.c
207
struct ti_sci_inta_irq_domain *inta = domain->host_data;
drivers/irqchip/irq-ti-sci-inta.c
231
vint_desc->domain = domain;
drivers/irqchip/irq-ti-sci-inta.c
283
struct ti_sci_inta_irq_domain *inta = vint_desc->domain->host_data;
drivers/irqchip/irq-ti-sci-inta.c
324
static struct ti_sci_inta_event_desc *ti_sci_inta_alloc_irq(struct irq_domain *domain,
drivers/irqchip/irq-ti-sci-inta.c
327
struct ti_sci_inta_irq_domain *inta = domain->host_data;
drivers/irqchip/irq-ti-sci-inta.c
343
vint_desc = ti_sci_inta_alloc_parent_irq(domain);
drivers/irqchip/irq-ti-sci-inta.c
392
inta = vint_desc->domain->host_data;
drivers/irqchip/irq-ti-sci-inta.c
427
event_desc = ti_sci_inta_alloc_irq(data->domain, data->hwirq);
drivers/irqchip/irq-ti-sci-inta.c
464
inta = data->domain->host_data;
drivers/irqchip/irq-ti-sci-inta.c
551
static void ti_sci_inta_irq_domain_free(struct irq_domain *domain,
drivers/irqchip/irq-ti-sci-inta.c
554
struct irq_data *data = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-ti-sci-inta.c
570
static int ti_sci_inta_irq_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-ti-sci-inta.c
576
irq_domain_set_info(domain, virq, arg->hwirq, &ti_sci_inta_irq_chip,
drivers/irqchip/irq-ti-sci-inta.c
649
struct irq_domain *parent_domain, *domain, *msi_domain;
drivers/irqchip/irq-ti-sci-inta.c
69
struct irq_domain *domain;
drivers/irqchip/irq-ti-sci-inta.c
704
domain = irq_domain_create_linear(dev_fwnode(dev), ti_sci_get_num_resources(inta->vint),
drivers/irqchip/irq-ti-sci-inta.c
706
if (!domain) {
drivers/irqchip/irq-ti-sci-inta.c
713
domain);
drivers/irqchip/irq-ti-sci-inta.c
715
irq_domain_remove(domain);
drivers/irqchip/irq-ti-sci-intr.c
117
static void ti_sci_intr_irq_domain_free(struct irq_domain *domain,
drivers/irqchip/irq-ti-sci-intr.c
120
struct ti_sci_intr_irq_domain *intr = domain->host_data;
drivers/irqchip/irq-ti-sci-intr.c
124
data = irq_domain_get_irq_data(domain, virq);
drivers/irqchip/irq-ti-sci-intr.c
131
irq_domain_free_irqs_parent(domain, virq, 1);
drivers/irqchip/irq-ti-sci-intr.c
144
static int ti_sci_intr_alloc_parent_irq(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-ti-sci-intr.c
147
struct ti_sci_intr_irq_domain *intr = domain->host_data;
drivers/irqchip/irq-ti-sci-intr.c
187
err = irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/irqchip/irq-ti-sci-intr.c
200
irq_domain_free_irqs_parent(domain, virq, 1);
drivers/irqchip/irq-ti-sci-intr.c
215
static int ti_sci_intr_irq_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-ti-sci-intr.c
224
err = ti_sci_intr_irq_domain_translate(domain, fwspec, &hwirq, &hwirq_type);
drivers/irqchip/irq-ti-sci-intr.c
228
out_irq = ti_sci_intr_alloc_parent_irq(domain, virq, hwirq, hwirq_type);
drivers/irqchip/irq-ti-sci-intr.c
232
irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
drivers/irqchip/irq-ti-sci-intr.c
247
struct irq_domain *parent_domain, *domain;
drivers/irqchip/irq-ti-sci-intr.c
295
domain = irq_domain_create_hierarchy(parent_domain, 0, 0, dev_fwnode(dev),
drivers/irqchip/irq-ti-sci-intr.c
297
if (!domain) {
drivers/irqchip/irq-ti-sci-intr.c
57
static int ti_sci_intr_irq_domain_translate(struct irq_domain *domain,
drivers/irqchip/irq-ti-sci-intr.c
62
struct ti_sci_intr_irq_domain *intr = domain->host_data;
drivers/irqchip/irq-ts4800.c
128
data->domain = irq_domain_create_linear(dev_fwnode(&pdev->dev), 8, &ts4800_ic_ops, data);
drivers/irqchip/irq-ts4800.c
129
if (!data->domain) {
drivers/irqchip/irq-ts4800.c
146
irq_domain_remove(data->domain);
drivers/irqchip/irq-ts4800.c
30
struct irq_domain *domain;
drivers/irqchip/irq-ts4800.c
97
generic_handle_domain_irq(data->domain, bit);
drivers/irqchip/irq-uniphier-aidet.c
107
static int uniphier_aidet_domain_alloc(struct irq_domain *domain,
drivers/irqchip/irq-uniphier-aidet.c
119
ret = uniphier_aidet_domain_translate(domain, arg, &hwirq, &type);
drivers/irqchip/irq-uniphier-aidet.c
140
ret = irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
drivers/irqchip/irq-uniphier-aidet.c
142
domain->host_data);
drivers/irqchip/irq-uniphier-aidet.c
147
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-uniphier-aidet.c
153
return irq_domain_alloc_irqs_parent(domain, virq, 1, &parent_fwspec);
drivers/irqchip/irq-uniphier-aidet.c
188
priv->domain = irq_domain_create_hierarchy(
drivers/irqchip/irq-uniphier-aidet.c
193
if (!priv->domain)
drivers/irqchip/irq-uniphier-aidet.c
24
struct irq_domain *domain;
drivers/irqchip/irq-uniphier-aidet.c
93
static int uniphier_aidet_domain_translate(struct irq_domain *domain,
drivers/irqchip/irq-versatile-fpga.c
100
generic_handle_domain_irq(f->domain, irq);
drivers/irqchip/irq-versatile-fpga.c
120
generic_handle_domain_irq(f->domain, irq);
drivers/irqchip/irq-versatile-fpga.c
179
f->domain = irq_domain_create_linear(of_fwnode_handle(node), fls(valid),
drivers/irqchip/irq-versatile-fpga.c
186
irq_create_mapping(f->domain, i);
drivers/irqchip/irq-versatile-fpga.c
44
struct irq_domain *domain;
drivers/irqchip/irq-versatile-fpga.c
72
seq_puts(p, irq_domain_get_of_node(f->domain)->name);
drivers/irqchip/irq-vf610-mscm-ir.c
124
static int vf610_mscm_ir_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/irq-vf610-mscm-ir.c
132
if (!irq_domain_get_of_node(domain->parent))
drivers/irqchip/irq-vf610-mscm-ir.c
140
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
drivers/irqchip/irq-vf610-mscm-ir.c
142
domain->host_data);
drivers/irqchip/irq-vf610-mscm-ir.c
144
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/irq-vf610-mscm-ir.c
156
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs,
drivers/irqchip/irq-vf610-mscm-ir.c
181
struct irq_domain *domain, *domain_parent;
drivers/irqchip/irq-vf610-mscm-ir.c
212
domain = irq_domain_create_hierarchy(domain_parent, 0, MSCM_IRSPRC_NUM,
drivers/irqchip/irq-vf610-mscm-ir.c
215
if (!domain) {
drivers/irqchip/irq-vf610-mscm-ir.c
220
if (of_device_is_compatible(irq_domain_get_of_node(domain->parent),
drivers/irqchip/irq-vic.c
214
generic_handle_domain_irq(vic->domain, irq);
drivers/irqchip/irq-vic.c
231
generic_handle_domain_irq(vic->domain, hwirq);
drivers/irqchip/irq-vic.c
296
v->domain = irq_domain_create_simple(of_fwnode_handle(node),
drivers/irqchip/irq-vic.c
302
irq_create_mapping(v->domain, i);
drivers/irqchip/irq-vic.c
307
v->irq = irq_find_mapping(v->domain, 0);
drivers/irqchip/irq-vic.c
71
struct irq_domain *domain;
drivers/irqchip/irq-vt8500.c
104
struct vt8500_irq_data *priv = d->domain->host_data;
drivers/irqchip/irq-vt8500.c
181
generic_handle_domain_irq(intc->domain, irqnr);
drivers/irqchip/irq-vt8500.c
217
intc->domain = irq_domain_create_linear(of_fwnode_handle(node), 64,
drivers/irqchip/irq-vt8500.c
219
if (!intc->domain) {
drivers/irqchip/irq-vt8500.c
64
struct irq_domain *domain; /* Domain for this controller */
drivers/irqchip/irq-vt8500.c
72
struct vt8500_irq_data *priv = d->domain->host_data;
drivers/irqchip/irq-vt8500.c
82
struct vt8500_irq_data *priv = d->domain->host_data;
drivers/irqchip/irq-vt8500.c
93
struct vt8500_irq_data *priv = d->domain->host_data;
drivers/irqchip/irq-wpcm450-aic.c
157
aic->domain = irq_domain_create_linear(of_fwnode_handle(node), AIC_NUM_IRQS, &wpcm450_aic_ops, aic);
drivers/irqchip/irq-wpcm450-aic.c
38
struct irq_domain *domain;
drivers/irqchip/irq-wpcm450-aic.c
72
generic_handle_domain_irq(aic->domain, hwirq);
drivers/irqchip/qcom-irq-combiner.c
101
static int combiner_irq_map(struct irq_domain *domain, unsigned int irq,
drivers/irqchip/qcom-irq-combiner.c
105
irq_set_chip_data(irq, domain->host_data);
drivers/irqchip/qcom-irq-combiner.c
110
static void combiner_irq_unmap(struct irq_domain *domain, unsigned int irq)
drivers/irqchip/qcom-irq-combiner.c
250
combiner->domain = irq_domain_create_linear(pdev->dev.fwnode, combiner->nirqs,
drivers/irqchip/qcom-irq-combiner.c
252
if (!combiner->domain)
drivers/irqchip/qcom-irq-combiner.c
32
struct irq_domain *domain;
drivers/irqchip/qcom-irq-combiner.c
72
generic_handle_domain_irq(combiner->domain, hwirq);
drivers/irqchip/qcom-pdc.c
264
static int qcom_pdc_alloc(struct irq_domain *domain, unsigned int virq,
drivers/irqchip/qcom-pdc.c
274
ret = irq_domain_translate_twocell(domain, fwspec, &hwirq, &type);
drivers/irqchip/qcom-pdc.c
279
return irq_domain_disconnect_hierarchy(domain, virq);
drivers/irqchip/qcom-pdc.c
281
ret = irq_domain_set_hwirq_and_chip(domain, virq, hwirq,
drivers/irqchip/qcom-pdc.c
288
return irq_domain_disconnect_hierarchy(domain->parent, virq);
drivers/irqchip/qcom-pdc.c
296
parent_fwspec.fwnode = domain->parent->fwnode;
drivers/irqchip/qcom-pdc.c
302
return irq_domain_alloc_irqs_parent(domain, virq, nr_irqs,
drivers/mailbox/zynqmp-ipi-mailbox.c
823
struct irq_domain *domain;
drivers/mailbox/zynqmp-ipi-mailbox.c
836
domain = irq_find_host(interrupt_parent);
drivers/mailbox/zynqmp-ipi-mailbox.c
840
sgi_fwspec.fwnode = domain->fwnode;
drivers/media/platform/nvidia/tegra-vde/dmabuf-cache.c
112
if (!vde->domain && sgt->nents > 1) {
drivers/media/platform/nvidia/tegra-vde/dmabuf-cache.c
124
if (vde->domain) {
drivers/media/platform/nvidia/tegra-vde/dmabuf-cache.c
38
if (entry->vde->domain)
drivers/media/platform/nvidia/tegra-vde/dmabuf-cache.c
90
if (vde->domain)
drivers/media/platform/nvidia/tegra-vde/iommu.c
133
iommu_detach_group(vde->domain, vde->group);
drivers/media/platform/nvidia/tegra-vde/iommu.c
138
iommu_domain_free(vde->domain);
drivers/media/platform/nvidia/tegra-vde/iommu.c
147
if (vde->domain) {
drivers/media/platform/nvidia/tegra-vde/iommu.c
150
iommu_detach_group(vde->domain, vde->group);
drivers/media/platform/nvidia/tegra-vde/iommu.c
153
iommu_domain_free(vde->domain);
drivers/media/platform/nvidia/tegra-vde/iommu.c
156
vde->domain = NULL;
drivers/media/platform/nvidia/tegra-vde/iommu.c
29
end = vde->domain->geometry.aperture_end;
drivers/media/platform/nvidia/tegra-vde/iommu.c
39
size = iommu_map_sgtable(vde->domain, addr, sgt,
drivers/media/platform/nvidia/tegra-vde/iommu.c
57
iommu_unmap(vde->domain, addr, size);
drivers/media/platform/nvidia/tegra-vde/iommu.c
81
vde->domain = iommu_paging_domain_alloc(dev);
drivers/media/platform/nvidia/tegra-vde/iommu.c
82
if (IS_ERR(vde->domain)) {
drivers/media/platform/nvidia/tegra-vde/iommu.c
83
err = PTR_ERR(vde->domain);
drivers/media/platform/nvidia/tegra-vde/iommu.c
84
vde->domain = NULL;
drivers/media/platform/nvidia/tegra-vde/iommu.c
92
order = __ffs(vde->domain->pgsize_bitmap);
drivers/media/platform/nvidia/tegra-vde/iommu.c
95
err = iommu_attach_group(vde->domain, vde->group);
drivers/media/platform/nvidia/tegra-vde/v4l2.c
190
if (vde->domain) {
drivers/media/platform/nvidia/tegra-vde/v4l2.c
350
if (vde->domain)
drivers/media/platform/nvidia/tegra-vde/vde.c
133
if (vde->domain)
drivers/media/platform/nvidia/tegra-vde/vde.c
74
if (!vde->domain)
drivers/media/platform/nvidia/tegra-vde/vde.c
99
if (vde->domain) {
drivers/media/platform/nvidia/tegra-vde/vde.h
113
struct iommu_domain *domain;
drivers/media/platform/qcom/iris/iris_buffer.c
178
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_buffer.c
264
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_buffer.c
308
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_buffer.c
375
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_buffer.c
452
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_buffer.c
511
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_buffer.c
547
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_buffer.c
601
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_buffer.c
68
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_common.c
67
if (inst->domain == DECODER &&
drivers/media/platform/qcom/iris/iris_ctrls.c
311
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_ctrls.c
385
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_ctrls.c
435
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen1_command.c
1035
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen1_command.c
117
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_hfi_gen1_command.c
196
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen1_command.c
279
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen1_command.c
455
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen1_command.c
467
if (inst->domain == ENCODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen1_command.c
747
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_hfi_gen1_command.c
775
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen1_command.c
845
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen1_response.c
409
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen1_response.c
440
if (iris_split_mode_enabled(inst) && inst->domain == DECODER &&
drivers/media/platform/qcom/iris/iris_hfi_gen1_response.c
482
if (inst->domain == DECODER && uncom_pkt->stream_id == 1 &&
drivers/media/platform/qcom/iris/iris_hfi_gen1_response.c
488
} else if (inst->domain == ENCODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
1171
static u32 iris_hfi_gen2_buf_type_from_driver(u32 domain, enum iris_buffer_type buffer_type)
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
1175
if (domain == DECODER)
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
118
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
1180
if (domain == DECODER)
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
1231
static void iris_hfi_gen2_get_buffer(u32 domain, struct iris_buffer *buffer,
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
1235
buf->type = iris_hfi_gen2_buf_type_from_driver(domain, buffer->type);
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
1241
if (domain == DECODER && buffer->type == BUF_INPUT)
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
1258
iris_hfi_gen2_get_buffer(inst->domain, buffer, &hfi_buffer);
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
1285
iris_hfi_gen2_get_buffer(inst->domain, buffer, &hfi_buffer);
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
200
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
236
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
482
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
507
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
630
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
685
if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
691
if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
766
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
841
if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
94
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen2_command.c
995
if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_hfi_gen2_response.c
488
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_hfi_gen2_response.c
51
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_instance.h
89
enum domain_type domain;
drivers/media/platform/qcom/iris/iris_vb2.c
180
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vb2.c
185
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vb2.c
195
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_vb2.c
334
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
164
inst->domain = session_type;
drivers/media/platform/qcom/iris/iris_vidc.c
199
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
201
else if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_vidc.c
27
if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_vidc.c
271
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
281
count, inst->domain == DECODER ? BUF_PERSIST : BUF_ARP);
drivers/media/platform/qcom/iris/iris_vidc.c
29
else if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
292
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
294
else if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_vidc.c
316
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
318
else if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_vidc.c
331
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
333
else if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_vidc.c
348
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
350
else if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_vidc.c
386
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
417
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
458
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
471
inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
475
inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_vidc.c
478
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_vidc.c
495
} else if (inst->domain == ENCODER) {
drivers/media/platform/qcom/iris/iris_vidc.c
520
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
522
else if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_vidc.c
532
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vidc.c
534
else if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_vidc.c
548
if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_vidc.c
562
if (inst->domain == ENCODER)
drivers/media/platform/qcom/iris/iris_vpu_buffer.c
2048
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_vpu_buffer.c
2051
} else if (inst->domain == ENCODER) {
drivers/media/platform/qcom/iris/iris_vpu_buffer.c
2081
if (inst->domain == DECODER)
drivers/media/platform/qcom/iris/iris_vpu_buffer.c
2120
if (inst->domain == DECODER) {
drivers/media/platform/qcom/iris/iris_vpu_buffer.c
2123
} else if (inst->domain == ENCODER) {
drivers/media/platform/qcom/iris/iris_vpu_buffer.c
2173
if (inst->domain == ENCODER)
drivers/media/platform/qcom/venus/core.h
555
venus_caps_by_codec(struct venus_core *core, u32 codec, u32 domain)
drivers/media/platform/qcom/venus/core.h
561
core->caps[c].domain == domain)
drivers/media/platform/qcom/venus/hfi_parser.c
104
parse_profile_level(struct venus_core *core, u32 codecs, u32 domain, void *data)
drivers/media/platform/qcom/venus/hfi_parser.c
115
for_each_codec(core->caps, ARRAY_SIZE(core->caps), codecs, domain,
drivers/media/platform/qcom/venus/hfi_parser.c
134
parse_caps(struct venus_core *core, u32 codecs, u32 domain, void *data)
drivers/media/platform/qcom/venus/hfi_parser.c
146
for_each_codec(core->caps, ARRAY_SIZE(core->caps), codecs, domain,
drivers/media/platform/qcom/venus/hfi_parser.c
165
parse_raw_formats(struct venus_core *core, u32 codecs, u32 domain, void *data)
drivers/media/platform/qcom/venus/hfi_parser.c
194
for_each_codec(core->caps, ARRAY_SIZE(core->caps), codecs, domain,
drivers/media/platform/qcom/venus/hfi_parser.c
227
static int parse_codecs_mask(u32 *codecs, u32 *domain, void *data)
drivers/media/platform/qcom/venus/hfi_parser.c
232
*domain = mask->video_domains;
drivers/media/platform/qcom/venus/hfi_parser.c
237
static void parser_init(struct venus_inst *inst, u32 *codecs, u32 *domain)
drivers/media/platform/qcom/venus/hfi_parser.c
243
*domain = inst->session_type;
drivers/media/platform/qcom/venus/hfi_parser.c
246
static void parser_fini(struct venus_inst *inst, u32 codecs, u32 domain)
drivers/media/platform/qcom/venus/hfi_parser.c
260
if (cap->codec & codecs && cap->domain == dom)
drivers/media/platform/qcom/venus/hfi_parser.c
30
cap->domain = VIDC_SESSION_TYPE_DEC;
drivers/media/platform/qcom/venus/hfi_parser.c
303
u32 *words = buf, *payload, codecs = 0, domain = 0;
drivers/media/platform/qcom/venus/hfi_parser.c
315
parser_init(inst, &codecs, &domain);
drivers/media/platform/qcom/venus/hfi_parser.c
346
ret = parse_codecs_mask(&codecs, &domain, payload);
drivers/media/platform/qcom/venus/hfi_parser.c
352
ret = parse_raw_formats(core, codecs, domain, payload);
drivers/media/platform/qcom/venus/hfi_parser.c
358
ret = parse_caps(core, codecs, domain, payload);
drivers/media/platform/qcom/venus/hfi_parser.c
364
ret = parse_profile_level(core, codecs, domain, payload);
drivers/media/platform/qcom/venus/hfi_parser.c
37
cap->domain = VIDC_SESSION_TYPE_ENC;
drivers/media/platform/qcom/venus/hfi_parser.c
370
ret = parse_alloc_mode(core, codecs, domain, payload);
drivers/media/platform/qcom/venus/hfi_parser.c
387
parser_fini(inst, codecs, domain);
drivers/media/platform/qcom/venus/hfi_parser.c
43
u32 codecs, u32 domain, func cb, void *data,
drivers/media/platform/qcom/venus/hfi_parser.c
51
if (cap->valid && cap->domain == domain)
drivers/media/platform/qcom/venus/hfi_parser.c
53
if (cap->codec & codecs && cap->domain == domain)
drivers/media/platform/qcom/venus/hfi_parser.c
68
parse_alloc_mode(struct venus_core *core, u32 codecs, u32 domain, void *data)
drivers/media/platform/qcom/venus/hfi_parser.c
83
codecs, domain, fill_buf_mode, type, 1);
drivers/media/platform/qcom/venus/hfi_platform.h
30
u32 domain;
drivers/media/platform/qcom/venus/hfi_platform_v4.c
11
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
114
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
137
.domain = VIDC_SESSION_TYPE_ENC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
174
.domain = VIDC_SESSION_TYPE_ENC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
211
.domain = VIDC_SESSION_TYPE_ENC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
252
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
274
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
293
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
312
.domain = VIDC_SESSION_TYPE_ENC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
340
.domain = VIDC_SESSION_TYPE_ENC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
37
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
386
if (caps->domain == VIDC_SESSION_TYPE_ENC)
drivers/media/platform/qcom/venus/hfi_platform_v4.c
63
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v4.c
88
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v6.c
11
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v6.c
114
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v6.c
137
.domain = VIDC_SESSION_TYPE_ENC,
drivers/media/platform/qcom/venus/hfi_platform_v6.c
174
.domain = VIDC_SESSION_TYPE_ENC,
drivers/media/platform/qcom/venus/hfi_platform_v6.c
211
.domain = VIDC_SESSION_TYPE_ENC,
drivers/media/platform/qcom/venus/hfi_platform_v6.c
37
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v6.c
63
.domain = VIDC_SESSION_TYPE_DEC,
drivers/media/platform/qcom/venus/hfi_platform_v6.c
88
.domain = VIDC_SESSION_TYPE_DEC,
drivers/mfd/ab8500-core.c
1199
0, ab8500->domain);
drivers/mfd/ab8500-core.c
1203
0, ab8500->domain);
drivers/mfd/ab8500-core.c
1210
0, ab8500->domain);
drivers/mfd/ab8500-core.c
1214
0, ab8500->domain);
drivers/mfd/ab8500-core.c
1218
0, ab8500->domain);
drivers/mfd/ab8500-core.c
1222
0, ab8500->domain);
drivers/mfd/ab8500-core.c
1229
0, ab8500->domain);
drivers/mfd/ab8500-core.c
487
handle_nested_irq(irq_find_mapping(ab8500->domain, line));
drivers/mfd/ab8500-core.c
583
ab8500->domain = irq_domain_create_simple(dev_fwnode(ab8500->dev), num_irqs, 0,
drivers/mfd/ab8500-core.c
586
if (!ab8500->domain) {
drivers/mfd/fsl-imx25-tsadc.c
37
generic_handle_domain_irq(tsadc->domain, 1);
drivers/mfd/fsl-imx25-tsadc.c
40
generic_handle_domain_irq(tsadc->domain, 0);
drivers/mfd/fsl-imx25-tsadc.c
73
tsadc->domain = irq_domain_create_simple(dev_fwnode(dev), 2, 0, &mx25_tsadc_domain_ops,
drivers/mfd/fsl-imx25-tsadc.c
75
if (!tsadc->domain) {
drivers/mfd/fsl-imx25-tsadc.c
92
irq_domain_remove(tsadc->domain);
drivers/mfd/intel_soc_pmic_bxtwc.c
460
struct irq_domain *domain;
drivers/mfd/intel_soc_pmic_bxtwc.c
467
domain = regmap_irq_get_domain(*data);
drivers/mfd/intel_soc_pmic_bxtwc.c
469
return devm_mfd_add_devices(dev, PLATFORM_DEVID_NONE, cells, n_devs, NULL, 0, domain);
drivers/mfd/ioc3.c
104
struct irq_domain *domain = irq_desc_get_handler_data(desc);
drivers/mfd/ioc3.c
105
struct ioc3_priv_data *ipd = domain->host_data;
drivers/mfd/ioc3.c
114
generic_handle_domain_irq(domain, __ffs(pending));
drivers/mfd/ioc3.c
134
struct irq_domain *domain;
drivers/mfd/ioc3.c
141
domain = irq_domain_create_linear(fn, 24, &ioc3_irq_domain_ops, ipd);
drivers/mfd/ioc3.c
142
if (!domain) {
drivers/mfd/ioc3.c
147
ipd->domain = domain;
drivers/mfd/ioc3.c
149
irq_set_chained_handler_and_data(irq, ioc3_irq_handler, domain);
drivers/mfd/ioc3.c
204
&ipd->pdev->resource[0], 0, ipd->domain);
drivers/mfd/ioc3.c
233
&ipd->pdev->resource[0], 0, ipd->domain);
drivers/mfd/ioc3.c
312
&ipd->pdev->resource[0], 0, ipd->domain);
drivers/mfd/ioc3.c
381
&ipd->pdev->resource[0], 0, ipd->domain);
drivers/mfd/ioc3.c
41
struct irq_domain *domain;
drivers/mfd/ioc3.c
615
if (ipd->domain) {
drivers/mfd/ioc3.c
616
struct fwnode_handle *fn = ipd->domain->fwnode;
drivers/mfd/ioc3.c
618
irq_domain_remove(ipd->domain);
drivers/mfd/ioc3.c
645
if (ipd->domain) {
drivers/mfd/ioc3.c
646
struct fwnode_handle *fn = ipd->domain->fwnode;
drivers/mfd/ioc3.c
648
irq_domain_remove(ipd->domain);
drivers/mfd/lp8788-irq.c
123
handle_nested_irq(irq_find_mapping(irqd->domain, i));
drivers/mfd/lp8788-irq.c
164
irqd->domain = irq_domain_create_linear(dev_fwnode(lp->dev), LP8788_INT_MAX,
drivers/mfd/lp8788-irq.c
166
if (!irqd->domain) {
drivers/mfd/lp8788-irq.c
171
lp->irqdm = irqd->domain;
drivers/mfd/lp8788-irq.c
38
struct irq_domain *domain;
drivers/mfd/max77650.c
155
struct irq_domain *domain;
drivers/mfd/max77650.c
207
domain = regmap_irq_get_domain(irq_data);
drivers/mfd/max77650.c
211
NULL, 0, domain);
drivers/mfd/max77705.c
122
domain = regmap_irq_get_domain(irq_data);
drivers/mfd/max77705.c
126
NULL, 0, domain);
drivers/mfd/max77705.c
86
struct irq_domain *domain;
drivers/mfd/max8997-irq.c
294
struct irq_domain *domain;
drivers/mfd/max8997-irq.c
330
domain = irq_domain_create_linear(NULL, MAX8997_IRQ_NR,
drivers/mfd/max8997-irq.c
332
if (!domain) {
drivers/mfd/max8997-irq.c
336
max8997->irq_domain = domain;
drivers/mfd/max8998-irq.c
213
struct irq_domain *domain;
drivers/mfd/max8998-irq.c
233
domain = irq_domain_create_simple(NULL, MAX8998_IRQ_NR,
drivers/mfd/max8998-irq.c
235
if (!domain) {
drivers/mfd/max8998-irq.c
239
max8998->irq_domain = domain;
drivers/mfd/mfd-core.c
144
int irq_base, struct irq_domain *domain)
drivers/mfd/mfd-core.c
242
if (domain) {
drivers/mfd/mfd-core.c
247
domain, cell->resources[r].start);
drivers/mfd/mfd-core.c
323
int irq_base, struct irq_domain *domain)
drivers/mfd/mfd-core.c
330
irq_base, domain);
drivers/mfd/mfd-core.c
420
int irq_base, struct irq_domain *domain)
drivers/mfd/mfd-core.c
430
irq_base, domain);
drivers/mfd/nct6694.c
215
generic_handle_irq_safe(irq_find_mapping(nct6694->domain, irq));
drivers/mfd/nct6694.c
299
nct6694->domain = irq_domain_create_simple(NULL, NCT6694_NR_IRQS, 0,
drivers/mfd/nct6694.c
302
if (!nct6694->domain) {
drivers/mfd/nct6694.c
352
irq_domain_remove(nct6694->domain);
drivers/mfd/nct6694.c
368
irq_domain_remove(nct6694->domain);
drivers/mfd/pf1550.c
204
struct irq_domain *domain;
drivers/mfd/pf1550.c
268
domain = regmap_irq_get_domain(pf1550->irq_data_regulator);
drivers/mfd/pf1550.c
270
ret = devm_mfd_add_devices(pf1550->dev, PLATFORM_DEVID_NONE, regulator, 1, NULL, 0, domain);
drivers/mfd/pf1550.c
290
domain = regmap_irq_get_domain(pf1550->irq_data_onkey);
drivers/mfd/pf1550.c
292
ret = devm_mfd_add_devices(pf1550->dev, PLATFORM_DEVID_NONE, onkey, 1, NULL, 0, domain);
drivers/mfd/pf1550.c
312
domain = regmap_irq_get_domain(pf1550->irq_data_charger);
drivers/mfd/pf1550.c
314
return devm_mfd_add_devices(pf1550->dev, PLATFORM_DEVID_NONE, charger, 1, NULL, 0, domain);
drivers/mfd/qcom-pm8xxx.c
366
struct irq_domain *domain, unsigned int irq,
drivers/mfd/qcom-pm8xxx.c
369
irq_domain_set_info(domain, irq, hwirq, chip->pm_irq_data->irq_chip,
drivers/mfd/qcom-pm8xxx.c
374
static int pm8xxx_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/mfd/qcom-pm8xxx.c
377
struct pm_irq_chip *chip = domain->host_data;
drivers/mfd/qcom-pm8xxx.c
383
ret = irq_domain_translate_twocell(domain, fwspec, &hwirq, &type);
drivers/mfd/qcom-pm8xxx.c
388
pm8xxx_irq_domain_map(chip, domain, virq + i, hwirq + i, type);
drivers/mfd/rohm-bd9576.c
151
domain = regmap_irq_get_domain(irq_data);
drivers/mfd/rohm-bd9576.c
158
domain = NULL;
drivers/mfd/rohm-bd9576.c
162
num_cells, NULL, 0, domain);
drivers/mfd/rohm-bd9576.c
98
struct irq_domain *domain;
drivers/mfd/stmpe.c
1092
int base = irq_find_mapping(stmpe->domain, 0);
drivers/mfd/stmpe.c
1120
int nestedirq = irq_find_mapping(stmpe->domain, line);
drivers/mfd/stmpe.c
1222
stmpe->domain = irq_domain_create_simple(of_fwnode_handle(np), num_irqs,
drivers/mfd/stmpe.c
1224
if (!stmpe->domain) {
drivers/mfd/stmpe.c
1298
NULL, 0, stmpe->domain);
drivers/mfd/stmpe.c
1489
if (stmpe->domain)
drivers/mfd/stmpe.c
1490
irq_domain_remove(stmpe->domain);
drivers/mfd/tc3589x.c
190
int virq = irq_find_mapping(tc3589x->domain, bit);
drivers/mfd/tc3589x.c
237
tc3589x->domain = irq_domain_create_simple(of_fwnode_handle(np),
drivers/mfd/tc3589x.c
241
if (!tc3589x->domain) {
drivers/mfd/tc3589x.c
292
0, tc3589x->domain);
drivers/mfd/tc3589x.c
303
0, tc3589x->domain);
drivers/mfd/wm831x-irq.c
563
struct irq_domain *domain;
drivers/mfd/wm831x-irq.c
590
domain = irq_domain_create_legacy(dev_fwnode(wm831x->dev), ARRAY_SIZE(wm831x_irqs),
drivers/mfd/wm831x-irq.c
593
domain = irq_domain_create_linear(dev_fwnode(wm831x->dev), ARRAY_SIZE(wm831x_irqs),
drivers/mfd/wm831x-irq.c
596
if (!domain) {
drivers/mfd/wm831x-irq.c
610
wm831x->irq_domain = domain;
drivers/misc/cs5535-mfgpt.c
146
struct cs5535_mfgpt_timer *cs5535_mfgpt_alloc_timer(int timer_nr, int domain)
drivers/misc/cs5535-mfgpt.c
157
if (domain == MFGPT_DOMAIN_WORKING)
drivers/misc/fastrpc.c
2281
bool is_secured, const char *domain)
drivers/misc/fastrpc.c
2295
domain, is_secured ? "-secure" : "");
drivers/misc/fastrpc.c
2310
static int fastrpc_get_domain_id(const char *domain)
drivers/misc/fastrpc.c
2312
if (!strncmp(domain, "adsp", 4))
drivers/misc/fastrpc.c
2314
else if (!strncmp(domain, "cdsp", 4))
drivers/misc/fastrpc.c
2316
else if (!strncmp(domain, "mdsp", 4))
drivers/misc/fastrpc.c
2318
else if (!strncmp(domain, "sdsp", 4))
drivers/misc/fastrpc.c
2320
else if (!strncmp(domain, "gdsp", 4))
drivers/misc/fastrpc.c
2343
const char *domain;
drivers/misc/fastrpc.c
2350
err = of_property_read_string(rdev->of_node, "label", &domain);
drivers/misc/fastrpc.c
2356
domain_id = fastrpc_get_domain_id(domain);
drivers/misc/fastrpc.c
2359
dev_info(rdev, "FastRPC Domain %s not supported\n", domain);
drivers/misc/fastrpc.c
2411
err = fastrpc_device_register(rdev, data, secure_dsp, domain);
drivers/misc/fastrpc.c
2419
err = fastrpc_device_register(rdev, data, true, domain);
drivers/misc/fastrpc.c
2423
err = fastrpc_device_register(rdev, data, false, domain);
drivers/misc/hi6421v600-irq.c
21
struct irq_domain *domain;
drivers/misc/hi6421v600-irq.c
256
priv->domain = irq_domain_create_simple(dev_fwnode(pmic_dev), PMIC_IRQ_LIST_MAX, 0,
drivers/misc/hi6421v600-irq.c
258
if (!priv->domain) {
drivers/misc/hi6421v600-irq.c
264
virq = irq_create_mapping(priv->domain, i);
drivers/misc/mchp_pci1xxxx/mchp_pci1xxxx_gpio.c
321
irq = irq_find_mapping(gc->irq.domain, (bit + (gpiobank * 32)));
drivers/misc/ocxl/link.c
292
link->domain, link->bus, link->dev);
drivers/misc/ocxl/link.c
364
pr_debug("Allocated SPA for %x:%x:%x at %p\n", link->domain, link->bus,
drivers/misc/ocxl/link.c
375
pr_debug("Freeing SPA for %x:%x:%x\n", link->domain, link->bus,
drivers/misc/ocxl/link.c
395
link->domain = pci_domain_nr(dev->bus);
drivers/misc/ocxl/link.c
449
if (link->domain == pci_domain_nr(dev->bus) &&
drivers/misc/ocxl/link.c
89
int domain;
drivers/misc/rp1/rp1_pci.c
109
virq = irq_find_mapping(rp1->domain, hwirq);
drivers/misc/rp1/rp1_pci.c
168
if (rp1->domain) {
drivers/misc/rp1/rp1_pci.c
170
irq = irq_find_mapping(rp1->domain, i);
drivers/misc/rp1/rp1_pci.c
174
irq_domain_remove(rp1->domain);
drivers/misc/rp1/rp1_pci.c
240
rp1->domain = irq_domain_add_linear(rp1_node, RP1_INT_END,
drivers/misc/rp1/rp1_pci.c
242
if (!rp1->domain) {
drivers/misc/rp1/rp1_pci.c
249
unsigned int irq = irq_create_mapping(rp1->domain, i);
drivers/misc/rp1/rp1_pci.c
39
struct irq_domain *domain;
drivers/misc/rp1/rp1_pci.c
57
struct rp1_dev *rp1 = irqd->domain->host_data;
drivers/misc/rp1/rp1_pci.c
65
struct rp1_dev *rp1 = irqd->domain->host_data;
drivers/misc/rp1/rp1_pci.c
73
struct rp1_dev *rp1 = irqd->domain->host_data;
drivers/net/can/usb/nct6694_canfd.c
732
irq = irq_create_mapping(nct6694->domain,
drivers/net/dsa/microchip/ksz_common.c
2600
irq = irq_find_mapping(dev->ports[port].pirq.domain,
drivers/net/dsa/microchip/ksz_common.c
2880
virq = irq_find_mapping(kirq->domain, irq);
drivers/net/dsa/microchip/ksz_common.c
2884
irq_domain_remove(kirq->domain);
drivers/net/dsa/microchip/ksz_common.c
2906
sub_irq = irq_find_mapping(kirq->domain, n);
drivers/net/dsa/microchip/ksz_common.c
2921
kirq->domain = irq_domain_create_simple(dev_fwnode(dev->dev), kirq->nirqs, 0,
drivers/net/dsa/microchip/ksz_common.c
2923
if (!kirq->domain)
drivers/net/dsa/microchip/ksz_common.c
2927
irq_create_mapping(kirq->domain, n);
drivers/net/dsa/microchip/ksz_common.c
2967
pirq->irq_num = irq_find_mapping(dev->girq.domain, p);
drivers/net/dsa/microchip/ksz_common.h
106
struct irq_domain *domain;
drivers/net/dsa/microchip/ksz_ptp.c
1025
sub_irq = irq_find_mapping(ptpirq->domain, n);
drivers/net/dsa/microchip/ksz_ptp.c
1114
ptpmsg_irq->num = irq_create_mapping(ptpirq->domain, n);
drivers/net/dsa/microchip/ksz_ptp.c
1153
ptpirq->domain = irq_domain_create_linear(dev_fwnode(dev->dev), ptpirq->nirqs,
drivers/net/dsa/microchip/ksz_ptp.c
1155
if (!ptpirq->domain)
drivers/net/dsa/microchip/ksz_ptp.c
1158
ptpirq->irq_num = irq_find_mapping(port->pirq.domain, PORT_SRC_PTP_INT);
drivers/net/dsa/microchip/ksz_ptp.c
1184
irq_domain_remove(ptpirq->domain);
drivers/net/dsa/microchip/ksz_ptp.c
1202
irq_domain_remove(ptpirq->domain);
drivers/net/dsa/mv88e6xxx/chip.c
178
sub_irq = irq_find_mapping(chip->g1_irq.domain,
drivers/net/dsa/mv88e6xxx/chip.c
274
virq = irq_find_mapping(chip->g1_irq.domain, irq);
drivers/net/dsa/mv88e6xxx/chip.c
278
irq_domain_remove(chip->g1_irq.domain);
drivers/net/dsa/mv88e6xxx/chip.c
300
chip->g1_irq.domain = irq_domain_create_simple(
drivers/net/dsa/mv88e6xxx/chip.c
303
if (!chip->g1_irq.domain)
drivers/net/dsa/mv88e6xxx/chip.c
307
irq_create_mapping(chip->g1_irq.domain, irq);
drivers/net/dsa/mv88e6xxx/chip.c
335
virq = irq_find_mapping(chip->g1_irq.domain, irq);
drivers/net/dsa/mv88e6xxx/chip.c
339
irq_domain_remove(chip->g1_irq.domain);
drivers/net/dsa/mv88e6xxx/chip.h
217
struct irq_domain *domain;
drivers/net/dsa/mv88e6xxx/global1_atu.c
479
chip->atu_prob_irq = irq_find_mapping(chip->g1_irq.domain,
drivers/net/dsa/mv88e6xxx/global1_vtu.c
661
chip->vtu_prob_irq = irq_find_mapping(chip->g1_irq.domain,
drivers/net/dsa/mv88e6xxx/global2.c
1075
sub_irq = irq_find_mapping(chip->g2_irq.domain, n);
drivers/net/dsa/mv88e6xxx/global2.c
1139
virq = irq_find_mapping(chip->g2_irq.domain, irq);
drivers/net/dsa/mv88e6xxx/global2.c
1143
irq_domain_remove(chip->g2_irq.domain);
drivers/net/dsa/mv88e6xxx/global2.c
1157
chip->g2_irq.domain = irq_domain_create_simple(dev_fwnode(chip->dev), 16, 0,
drivers/net/dsa/mv88e6xxx/global2.c
1159
if (!chip->g2_irq.domain)
drivers/net/dsa/mv88e6xxx/global2.c
1163
irq_create_mapping(chip->g2_irq.domain, irq);
drivers/net/dsa/mv88e6xxx/global2.c
1167
chip->device_irq = irq_find_mapping(chip->g1_irq.domain,
drivers/net/dsa/mv88e6xxx/global2.c
1187
virq = irq_find_mapping(chip->g2_irq.domain, irq);
drivers/net/dsa/mv88e6xxx/global2.c
1191
irq_domain_remove(chip->g2_irq.domain);
drivers/net/dsa/mv88e6xxx/global2.c
1205
irq = irq_find_mapping(chip->g2_irq.domain, phy);
drivers/net/dsa/mv88e6xxx/global2.c
994
chip->watchdog_irq = irq_find_mapping(chip->g2_irq.domain,
drivers/net/dsa/mv88e6xxx/serdes.c
198
return irq_find_mapping(chip->g2_irq.domain, MV88E6352_SERDES_IRQ);
drivers/net/dsa/mv88e6xxx/serdes.c
443
return irq_find_mapping(chip->g2_irq.domain, port);
drivers/net/dsa/qca/ar9331.c
780
static int ar9331_sw_irq_map(struct irq_domain *domain, unsigned int irq,
drivers/net/dsa/qca/ar9331.c
783
irq_set_chip_data(irq, domain->host_data);
drivers/net/dsa/realtek/rtl8365mb.c
1652
static int rtl8365mb_irq_map(struct irq_domain *domain, unsigned int irq,
drivers/net/dsa/realtek/rtl8365mb.c
1655
irq_set_chip_data(irq, domain->host_data);
drivers/net/dsa/realtek/rtl8366rb.c
470
static int rtl8366rb_irq_map(struct irq_domain *domain, unsigned int irq,
drivers/net/dsa/realtek/rtl8366rb.c
473
irq_set_chip_data(irq, domain->host_data);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c
1609
vf->domain = bnx2x_vf_domain(bp, vfid);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.c
804
dev = pci_get_domain_bus_and_slot(vf->domain, vf->bus, vf->devfn);
drivers/net/ethernet/broadcom/bnx2x/bnx2x_sriov.h
188
unsigned int domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
1085
u32 if_id, u32 *pmac_id, u32 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
1104
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
1147
req->hdr.domain = dom;
drivers/net/ethernet/emulex/benet/be_cmds.c
1536
u32 *if_handle, u32 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
1546
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
1565
int be_cmd_if_destroy(struct be_adapter *adapter, int interface_id, u32 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
1579
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
1720
req->hdr.domain = dom;
drivers/net/ethernet/emulex/benet/be_cmds.c
1946
u32 num, u32 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
1964
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
3480
static int be_cmd_set_qos(struct be_adapter *adapter, u32 bps, u32 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
3499
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
3612
u32 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
3632
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
3656
u32 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
3674
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
3692
u8 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
3727
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
3785
u8 *mac, u32 if_handle, bool active, u32 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
3789
if_handle, domain);
drivers/net/ethernet/emulex/benet/be_cmds.c
3797
if_handle, domain);
drivers/net/ethernet/emulex/benet/be_cmds.c
3825
u8 mac_count, u32 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
3852
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
3886
u32 domain, u16 intf_id, u16 hsw_mode, u8 spoofchk)
drivers/net/ethernet/emulex/benet/be_cmds.c
3912
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
3944
u32 domain, u16 intf_id, u8 *mode, bool *spoofchk)
drivers/net/ethernet/emulex/benet/be_cmds.c
3967
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
4442
u8 profile_type, u8 query, u8 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
4470
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
4535
int size, int count, u8 version, u8 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
4554
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
4603
u8 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
4610
return be_cmd_set_qos(adapter, max_rate / 10, domain);
drivers/net/ethernet/emulex/benet/be_cmds.c
4614
nic_desc.vf_num = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
4633
1, version, domain);
drivers/net/ethernet/emulex/benet/be_cmds.c
4755
req->hdr.domain = vf_num + 1;
drivers/net/ethernet/emulex/benet/be_cmds.c
4849
int be_cmd_enable_vf(struct be_adapter *adapter, u8 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
4872
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
4942
int link_state, int version, u8 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
4964
req->hdr.domain = domain;
drivers/net/ethernet/emulex/benet/be_cmds.c
4982
int link_state, u8 domain)
drivers/net/ethernet/emulex/benet/be_cmds.c
4990
2, domain);
drivers/net/ethernet/emulex/benet/be_cmds.c
4997
1, domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2388
u32 *pmac_id, u32 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2390
u32 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2392
u32 *if_handle, u32 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2393
int be_cmd_if_destroy(struct be_adapter *adapter, int if_handle, u32 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2414
u32 num, u32 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2453
u16 link_speed, u8 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2461
u32 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2466
u32 if_handle, u8 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2468
u32 if_handle, bool active, u32 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2471
u32 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2473
int be_cmd_set_hsw_config(struct be_adapter *adapter, u16 pvid, u32 domain,
drivers/net/ethernet/emulex/benet/be_cmds.h
2475
int be_cmd_get_hsw_config(struct be_adapter *adapter, u16 *pvid, u32 domain,
drivers/net/ethernet/emulex/benet/be_cmds.h
2495
u8 profile_type, u8 query, u8 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2499
int be_cmd_enable_vf(struct be_adapter *adapter, u8 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
2502
int link_state, u8 domain);
drivers/net/ethernet/emulex/benet/be_cmds.h
332
u8 domain; /* dword 0 */
drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c
107
void *dpaa2_iova_to_virt(struct iommu_domain *domain,
drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c
112
phys_addr = domain ? iommu_iova_to_phys(domain, iova_addr) : iova_addr;
drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.h
836
void *dpaa2_iova_to_virt(struct iommu_domain *domain, dma_addr_t iova_addr);
drivers/net/ethernet/freescale/dpaa2/dpaa2-switch.c
181
static void *dpaa2_iova_to_virt(struct iommu_domain *domain,
drivers/net/ethernet/freescale/dpaa2/dpaa2-switch.c
186
phys_addr = domain ? iommu_iova_to_phys(domain, iova_addr) : iova_addr;
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4925
struct iommu_domain *domain = iommu_get_domain_for_dev(priv->dev);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c
4932
if (!(domain && iommu_is_dma_domain(domain)))
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
2971
u16 index, domain;
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
2983
domain = 2;
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
2985
pdev = pci_get_domain_bus_and_slot(domain, pf + 1, 0);
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
826
int pf, domain, blkid;
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
830
domain = 2;
drivers/net/ethernet/marvell/octeontx2/af/rvu_debugfs.c
841
pdev = pci_get_domain_bus_and_slot(domain, pf + 1, 0);
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2430
int prestera_hw_flood_domain_create(struct prestera_flood_domain *domain)
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2436
err = prestera_cmd_ret(domain->sw,
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2442
domain->idx = __le32_to_cpu(resp.flood_domain_idx);
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2447
int prestera_hw_flood_domain_destroy(struct prestera_flood_domain *domain)
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2450
.flood_domain_idx = __cpu_to_le32(domain->idx),
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2453
return prestera_cmd(domain->sw, PRESTERA_CMD_TYPE_FLOOD_DOMAIN_DESTROY,
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2457
int prestera_hw_flood_domain_ports_set(struct prestera_flood_domain *domain)
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2461
struct prestera_switch *sw = domain->sw;
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2469
list_for_each_entry(flood_domain_port, &domain->flood_domain_port_list,
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2481
req->flood_domain_idx = __cpu_to_le32(domain->idx);
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2484
list_for_each_entry(flood_domain_port, &domain->flood_domain_port_list,
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2517
int prestera_hw_flood_domain_ports_reset(struct prestera_flood_domain *domain)
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2520
.flood_domain_idx = __cpu_to_le32(domain->idx),
drivers/net/ethernet/marvell/prestera/prestera_hw.c
2523
return prestera_cmd(domain->sw,
drivers/net/ethernet/marvell/prestera/prestera_hw.h
322
int prestera_hw_flood_domain_create(struct prestera_flood_domain *domain);
drivers/net/ethernet/marvell/prestera/prestera_hw.h
323
int prestera_hw_flood_domain_destroy(struct prestera_flood_domain *domain);
drivers/net/ethernet/marvell/prestera/prestera_hw.h
324
int prestera_hw_flood_domain_ports_set(struct prestera_flood_domain *domain);
drivers/net/ethernet/marvell/prestera/prestera_hw.h
325
int prestera_hw_flood_domain_ports_reset(struct prestera_flood_domain *domain);
drivers/net/ethernet/marvell/prestera/prestera_main.c
1248
struct prestera_flood_domain *domain;
drivers/net/ethernet/marvell/prestera/prestera_main.c
1250
domain = kzalloc_obj(*domain);
drivers/net/ethernet/marvell/prestera/prestera_main.c
1251
if (!domain)
drivers/net/ethernet/marvell/prestera/prestera_main.c
1254
domain->sw = sw;
drivers/net/ethernet/marvell/prestera/prestera_main.c
1256
if (prestera_hw_flood_domain_create(domain)) {
drivers/net/ethernet/marvell/prestera/prestera_main.c
1257
kfree(domain);
drivers/net/ethernet/marvell/prestera/prestera_main.c
1261
INIT_LIST_HEAD(&domain->flood_domain_port_list);
drivers/net/ethernet/marvell/prestera/prestera_main.c
1263
return domain;
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
1110
list_add_tail(&sched_node->entry, &esw->qos.domain->nodes);
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
114
list_add_tail(&node->entry, &node->esw->qos.domain->nodes);
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
1666
if (esw->qos.domain)
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
1674
if (esw->qos.domain)
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
24
mutex_lock(&esw->qos.domain->lock);
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
29
mutex_unlock(&esw->qos.domain->lock);
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
327
struct list_head *nodes = parent ? &parent->children : &esw->qos.domain->nodes;
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
34
lockdep_assert_held(&esw->qos.domain->lock);
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
375
struct list_head *nodes = parent ? &parent->children : &esw->qos.domain->nodes;
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
53
esw->qos.domain = esw_qos_domain_alloc();
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
55
return esw->qos.domain ? 0 : -ENOMEM;
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
60
kfree(esw->qos.domain);
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
61
esw->qos.domain = NULL;
drivers/net/ethernet/mellanox/mlx5/core/esw/qos.c
718
list_add_tail(&node->entry, &esw->qos.domain->nodes);
drivers/net/ethernet/mellanox/mlx5/core/eswitch.h
394
struct mlx5_qos_domain *domain;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_action.c
559
dr_action_get_action_domain(enum mlx5dr_domain_type domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_action.c
562
switch (domain) {
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_rule.c
1084
static bool dr_rule_skip(enum mlx5dr_domain_type domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/dr_rule.c
1092
if (domain != MLX5DR_DOMAIN_TYPE_FDB)
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
180
static struct mlx5dr_action *create_vport_action(struct mlx5dr_domain *domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
185
return mlx5dr_action_create_dest_vport(domain, dest_attr->vport.num,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
191
static struct mlx5dr_action *create_uplink_action(struct mlx5dr_domain *domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
196
return mlx5dr_action_create_dest_vport(domain, MLX5_VPORT_UPLINK, 1,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
200
static struct mlx5dr_action *create_ft_action(struct mlx5dr_domain *domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
207
return mlx5dr_action_create_dest_flow_fw_table(domain, dest_ft);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
217
static struct mlx5dr_action *create_range_action(struct mlx5dr_domain *domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
220
return mlx5dr_action_create_dest_match_range(domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
228
static struct mlx5dr_action *create_action_push_vlan(struct mlx5dr_domain *domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
237
return mlx5dr_action_create_push_vlan(domain, htonl(vlan_hdr));
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
257
struct mlx5dr_domain *domain = ns->fs_dr_domain.dr_domain;
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
319
tmp_action = mlx5dr_action_create_packet_reformat(domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
337
mlx5dr_err(domain, "FW-owned reformat can't be used in SW rule\n");
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
380
tmp_action = create_action_push_vlan(domain, &fte->act_dests.action.vlan[0]);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
390
tmp_action = create_action_push_vlan(domain, &fte->act_dests.action.vlan[1]);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
441
tmp_action = create_ft_action(domain, dst);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
452
create_vport_action(domain, dst) :
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
453
create_uplink_action(domain, dst);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
472
tmp_action = mlx5dr_action_create_dest_table_num(domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
483
tmp_action = mlx5dr_action_create_flow_sampler(domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
493
tmp_action = create_range_action(domain, dst);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
544
mlx5dr_action_create_aso(domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/fs_dr.c
584
tmp_action = mlx5dr_action_create_mult_dest_tbl(domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/mlx5dr.h
124
mlx5dr_action_create_modify_header(struct mlx5dr_domain *domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/mlx5dr.h
132
mlx5dr_action_create_push_vlan(struct mlx5dr_domain *domain, __be32 vlan_hdr);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/mlx5dr.h
46
int mlx5dr_domain_destroy(struct mlx5dr_domain *domain);
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/mlx5dr.h
53
mlx5dr_table_create(struct mlx5dr_domain *domain, u32 level, u32 flags,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/mlx5dr.h
90
mlx5dr_action_create_dest_flow_fw_table(struct mlx5dr_domain *domain,
drivers/net/ethernet/mellanox/mlx5/core/steering/sws/mlx5dr.h
94
mlx5dr_action_create_dest_vport(struct mlx5dr_domain *domain,
drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c
649
cpp_params.island = sym->domain;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nffw.h
60
int domain;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c
244
sym->domain);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c
251
sym->domain);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c
75
sw->domain = nfp_meid(fw->island, fw->menum);
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c
77
sw->domain = fw->island;
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c
79
sw->domain = -1;
drivers/net/ethernet/realtek/r8169_main.c
1018
int domain;
drivers/net/ethernet/realtek/r8169_main.c
1020
domain = pci_domain_nr(pdev->bus);
drivers/net/ethernet/realtek/r8169_main.c
1021
if (domain)
drivers/net/ethernet/realtek/r8169_main.c
1022
snprintf(pdom, sizeof(pdom), "P%d", domain);
drivers/net/ethernet/sfc/falcon/rx.c
721
struct iommu_domain __maybe_unused *domain;
drivers/net/ethernet/sfc/falcon/rx.c
727
domain = iommu_get_domain_for_dev(&efx->pci_dev->dev);
drivers/net/ethernet/sfc/falcon/rx.c
728
if (domain && domain->type != IOMMU_DOMAIN_IDENTITY)
drivers/net/ethernet/sun/niu.c
9820
parent_id.pci.domain = pci_domain_nr(pdev->bus);
drivers/net/ethernet/sun/niu.h
3061
int domain;
drivers/net/ethernet/wangxun/txgbe/txgbe_irq.c
172
sub_irq = irq_find_mapping(txgbe->misc.domain, TXGBE_IRQ_LINK);
drivers/net/ethernet/wangxun/txgbe/txgbe_irq.c
177
sub_irq = irq_find_mapping(txgbe->misc.domain, TXGBE_IRQ_GPIO);
drivers/net/ethernet/wangxun/txgbe/txgbe_irq.c
195
virq = irq_find_mapping(txgbe->misc.domain, hwirq);
drivers/net/ethernet/wangxun/txgbe/txgbe_irq.c
199
irq_domain_remove(txgbe->misc.domain);
drivers/net/ethernet/wangxun/txgbe/txgbe_irq.c
220
txgbe->misc.domain = irq_domain_create_simple(NULL, txgbe->misc.nirqs, 0,
drivers/net/ethernet/wangxun/txgbe/txgbe_irq.c
222
if (!txgbe->misc.domain)
drivers/net/ethernet/wangxun/txgbe/txgbe_irq.c
226
irq_create_mapping(txgbe->misc.domain, hwirq);
drivers/net/ethernet/wangxun/txgbe/txgbe_irq.c
86
txgbe->link_irq = irq_find_mapping(txgbe->misc.domain, TXGBE_IRQ_LINK);
drivers/net/ethernet/wangxun/txgbe/txgbe_irq.c
94
txgbe->gpio_irq = irq_find_mapping(txgbe->misc.domain, TXGBE_IRQ_GPIO);
drivers/net/ethernet/wangxun/txgbe/txgbe_type.h
451
struct irq_domain *domain;
drivers/net/ipa/ipa_mem.c
457
struct iommu_domain *domain;
drivers/net/ipa/ipa_mem.c
465
domain = iommu_get_domain_for_dev(dev);
drivers/net/ipa/ipa_mem.c
466
if (!domain) {
drivers/net/ipa/ipa_mem.c
476
ret = iommu_map(domain, iova, phys, size, IOMMU_READ | IOMMU_WRITE,
drivers/net/ipa/ipa_mem.c
490
struct iommu_domain *domain;
drivers/net/ipa/ipa_mem.c
495
domain = iommu_get_domain_for_dev(dev);
drivers/net/ipa/ipa_mem.c
496
if (domain) {
drivers/net/ipa/ipa_mem.c
499
size = iommu_unmap(domain, ipa->imem_iova, ipa->imem_size);
drivers/net/ipa/ipa_mem.c
530
struct iommu_domain *domain;
drivers/net/ipa/ipa_mem.c
571
domain = iommu_get_domain_for_dev(dev);
drivers/net/ipa/ipa_mem.c
572
if (!domain) {
drivers/net/ipa/ipa_mem.c
583
ret = iommu_map(domain, iova, phys, size, IOMMU_READ | IOMMU_WRITE,
drivers/net/ipa/ipa_mem.c
597
struct iommu_domain *domain;
drivers/net/ipa/ipa_mem.c
599
domain = iommu_get_domain_for_dev(dev);
drivers/net/ipa/ipa_mem.c
600
if (domain) {
drivers/net/ipa/ipa_mem.c
603
size = iommu_unmap(domain, ipa->smem_iova, ipa->smem_size);
drivers/net/phy/mscc/mscc_ptp.c
417
sig[2] = ptphdr->domain;
drivers/net/phy/mscc/mscc_ptp.h
444
u8 domain;
drivers/net/wireless/atmel/at76c50x-usb.c
2337
priv->domain = at76_get_reg_domain(priv->regulatory_domain);
drivers/net/wireless/atmel/at76c50x-usb.c
2389
priv->regulatory_domain, priv->domain->name);
drivers/net/wireless/atmel/at76c50x-usb.h
424
struct reg_domain const *domain; /* reg domain description */
drivers/net/wireless/intel/iwlwifi/fw/acpi.c
298
union acpi_object *domain;
drivers/net/wireless/intel/iwlwifi/fw/acpi.c
308
domain = &wifi_pkg->package.elements[0];
drivers/net/wireless/intel/iwlwifi/fw/acpi.c
309
if (domain->type == ACPI_TYPE_INTEGER &&
drivers/net/wireless/intel/iwlwifi/fw/acpi.c
310
domain->integer.value == ACPI_WIFI_DOMAIN)
drivers/net/wireless/intel/iwlwifi/fw/api/dbg-tlv.h
41
__le32 domain;
drivers/net/wireless/intel/iwlwifi/iwl-dbg-tlv.c
331
u32 domain;
drivers/net/wireless/intel/iwlwifi/iwl-dbg-tlv.c
339
domain = le32_to_cpu(hdr->domain);
drivers/net/wireless/intel/iwlwifi/iwl-dbg-tlv.c
341
if (domain != IWL_FW_INI_DOMAIN_ALWAYS_ON &&
drivers/net/wireless/intel/iwlwifi/iwl-dbg-tlv.c
342
!(domain & trans->dbg.domains_bitmap)) {
drivers/net/wireless/intel/iwlwifi/iwl-dbg-tlv.c
345
domain, trans->dbg.domains_bitmap);
drivers/net/wireless/marvell/libertas/cmd.c
636
struct mrvl_ie_domain_param_set *domain = &cmd.domain;
drivers/net/wireless/marvell/libertas/cmd.c
656
domain->header.type = cpu_to_le16(TLV_TYPE_DOMAIN);
drivers/net/wireless/marvell/libertas/cmd.c
659
domain->country_code[0] = priv->country_code[0];
drivers/net/wireless/marvell/libertas/cmd.c
660
domain->country_code[1] = priv->country_code[1];
drivers/net/wireless/marvell/libertas/cmd.c
661
domain->country_code[2] = ' ';
drivers/net/wireless/marvell/libertas/cmd.c
702
t = &domain->triplet[num_triplet];
drivers/net/wireless/marvell/libertas/cmd.c
715
t = &domain->triplet[num_triplet];
drivers/net/wireless/marvell/libertas/cmd.c
727
domain->header.len = cpu_to_le16(sizeof(domain->country_code) +
drivers/net/wireless/marvell/libertas/cmd.c
731
(u8 *) &cmd.domain.country_code,
drivers/net/wireless/marvell/libertas/cmd.c
732
le16_to_cpu(domain->header.len));
drivers/net/wireless/marvell/libertas/cmd.c
736
sizeof(cmd.domain.header) +
drivers/net/wireless/marvell/libertas/cmd.c
737
sizeof(cmd.domain.country_code) +
drivers/net/wireless/marvell/libertas/host.h
401
struct mrvl_ie_domain_param_set domain;
drivers/net/wireless/marvell/mwifiex/fw.h
1691
struct mwifiex_ietypes_domain_param_set domain;
drivers/net/wireless/marvell/mwifiex/fw.h
1696
struct mwifiex_ietypes_domain_param_set domain;
drivers/net/wireless/marvell/mwifiex/sta_cmd.c
1008
struct mwifiex_ietypes_domain_param_set *domain =
drivers/net/wireless/marvell/mwifiex/sta_cmd.c
1009
&domain_info->domain;
drivers/net/wireless/marvell/mwifiex/sta_cmd.c
1023
domain->header.type = cpu_to_le16(WLAN_EID_COUNTRY);
drivers/net/wireless/marvell/mwifiex/sta_cmd.c
1024
memcpy(domain->country_code, adapter->domain_reg.country_code,
drivers/net/wireless/marvell/mwifiex/sta_cmd.c
1025
sizeof(domain->country_code));
drivers/net/wireless/marvell/mwifiex/sta_cmd.c
1027
domain->header.len =
drivers/net/wireless/marvell/mwifiex/sta_cmd.c
1030
+ sizeof(domain->country_code));
drivers/net/wireless/marvell/mwifiex/sta_cmd.c
1033
memcpy(domain->triplet, adapter->domain_reg.triplet,
drivers/net/wireless/marvell/mwifiex/sta_cmd.c
1038
le16_to_cpu(domain->header.len) +
drivers/net/wireless/marvell/mwifiex/sta_cmdresp.c
654
struct mwifiex_ietypes_domain_param_set *domain = &domain_info->domain;
drivers/net/wireless/marvell/mwifiex/sta_cmdresp.c
658
no_of_triplet = (u8) ((le16_to_cpu(domain->header.len)
drivers/net/wireless/realtek/rtw88/coex.c
3677
switch (reg->domain) {
drivers/net/wireless/realtek/rtw88/coex.c
3720
switch (reg->domain) {
drivers/net/wireless/realtek/rtw88/coex.c
3761
if (reg->domain == RTW_REG_DOMAIN_NL) {
drivers/net/wireless/realtek/rtw88/main.h
565
u8 domain;
drivers/net/wireless/realtek/rtw89/acpi.h
119
__le32 domain;
drivers/net/wireless/realtek/rtw89/phy.c
2160
u32 domain;
drivers/net/wireless/realtek/rtw89/phy.c
2182
domain = get_unaligned_le32(&res.domain);
drivers/net/wireless/realtek/rtw89/phy.c
2185
if (!(domain & BIT(i)))
drivers/nvme/host/core.c
4402
ASYNC_DOMAIN(domain);
drivers/nvme/host/core.c
4433
&domain);
drivers/nvme/host/core.c
4437
async_synchronize_full_domain(&domain);
drivers/nvme/host/core.c
4442
async_synchronize_full_domain(&domain);
drivers/nvme/host/rdma.c
1350
struct nvme_command *cmd, struct ib_sig_domain *domain,
drivers/nvme/host/rdma.c
1353
domain->sig_type = IB_SIG_TYPE_T10_DIF;
drivers/nvme/host/rdma.c
1354
domain->sig.dif.bg_type = IB_T10DIF_CRC;
drivers/nvme/host/rdma.c
1355
domain->sig.dif.pi_interval = 1 << bi->interval_exp;
drivers/nvme/host/rdma.c
1356
domain->sig.dif.ref_tag = le32_to_cpu(cmd->rw.reftag);
drivers/nvme/host/rdma.c
1358
domain->sig.dif.ref_remap = true;
drivers/nvme/host/rdma.c
1360
domain->sig.dif.app_tag = le16_to_cpu(cmd->rw.lbat);
drivers/nvme/host/rdma.c
1361
domain->sig.dif.apptag_check_mask = le16_to_cpu(cmd->rw.lbatm);
drivers/nvme/host/rdma.c
1362
domain->sig.dif.app_escape = true;
drivers/nvme/host/rdma.c
1364
domain->sig.dif.ref_escape = true;
drivers/nvme/target/rdma.c
570
struct nvme_command *cmd, struct ib_sig_domain *domain,
drivers/nvme/target/rdma.c
573
domain->sig_type = IB_SIG_TYPE_T10_DIF;
drivers/nvme/target/rdma.c
574
domain->sig.dif.bg_type = IB_T10DIF_CRC;
drivers/nvme/target/rdma.c
575
domain->sig.dif.pi_interval = 1 << bi->interval_exp;
drivers/nvme/target/rdma.c
576
domain->sig.dif.ref_tag = le32_to_cpu(cmd->rw.reftag);
drivers/nvme/target/rdma.c
578
domain->sig.dif.ref_remap = true;
drivers/nvme/target/rdma.c
580
domain->sig.dif.app_tag = le16_to_cpu(cmd->rw.lbat);
drivers/nvme/target/rdma.c
581
domain->sig.dif.apptag_check_mask = le16_to_cpu(cmd->rw.lbatm);
drivers/nvme/target/rdma.c
582
domain->sig.dif.app_escape = true;
drivers/nvme/target/rdma.c
584
domain->sig.dif.ref_escape = true;
drivers/of/irq.c
532
struct irq_domain *domain;
drivers/of/irq.c
538
domain = irq_find_host(oirq.np);
drivers/of/irq.c
539
if (!domain) {
drivers/pci/controller/dwc/pci-dra7xx.c
191
static int dra7xx_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/dwc/pci-dra7xx.c
195
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/dwc/pci-imx6.c
1652
int i, ret, domain;
drivers/pci/controller/dwc/pci-imx6.c
1731
domain = of_get_pci_domain_nr(node);
drivers/pci/controller/dwc/pci-imx6.c
1732
if (domain < 0 || domain > 1)
drivers/pci/controller/dwc/pci-imx6.c
1735
imx_pcie->controller_id = domain;
drivers/pci/controller/dwc/pcie-amd-mdb.c
129
static int amd_mdb_pcie_intx_map(struct irq_domain *domain,
drivers/pci/controller/dwc/pcie-amd-mdb.c
134
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/dwc/pcie-amd-mdb.c
210
static int amd_mdb_pcie_event_map(struct irq_domain *domain,
drivers/pci/controller/dwc/pcie-amd-mdb.c
215
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/dwc/pcie-designware-host.c
172
static int dw_pcie_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/dwc/pcie-designware-host.c
175
struct dw_pcie_rp *pp = domain->host_data;
drivers/pci/controller/dwc/pcie-designware-host.c
187
irq_domain_set_info(domain, virq + i, bit + i, pp->msi_irq_chip,
drivers/pci/controller/dwc/pcie-designware-host.c
193
static void dw_pcie_irq_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/dwc/pcie-designware-host.c
196
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/dwc/pcie-designware-host.c
197
struct dw_pcie_rp *pp = domain->host_data;
drivers/pci/controller/dwc/pcie-designware-host.c
33
static bool dw_pcie_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
drivers/pci/controller/dwc/pcie-designware-host.c
36
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
drivers/pci/controller/dwc/pcie-dw-rockchip.c
156
static int rockchip_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/dwc/pcie-dw-rockchip.c
160
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/dwc/pcie-sophgo.c
103
static int sophgo_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/dwc/pcie-sophgo.c
107
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/dwc/pcie-uniphier.c
212
static int uniphier_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/dwc/pcie-uniphier.c
217
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/mobiveil/pcie-mobiveil-host.c
343
static int mobiveil_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/mobiveil/pcie-mobiveil-host.c
347
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/mobiveil/pcie-mobiveil-host.c
390
static int mobiveil_irq_msi_domain_alloc(struct irq_domain *domain,
drivers/pci/controller/mobiveil/pcie-mobiveil-host.c
394
struct mobiveil_pcie *pcie = domain->host_data;
drivers/pci/controller/mobiveil/pcie-mobiveil-host.c
411
irq_domain_set_info(domain, virq, bit, &mobiveil_msi_bottom_irq_chip,
drivers/pci/controller/mobiveil/pcie-mobiveil-host.c
412
domain->host_data, handle_level_irq, NULL, NULL);
drivers/pci/controller/mobiveil/pcie-mobiveil-host.c
416
static void mobiveil_irq_msi_domain_free(struct irq_domain *domain,
drivers/pci/controller/mobiveil/pcie-mobiveil-host.c
420
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pci-aardvark.c
1309
struct advk_pcie *pcie = d->domain->host_data;
drivers/pci/controller/pci-aardvark.c
1323
struct advk_pcie *pcie = d->domain->host_data;
drivers/pci/controller/pci-aardvark.c
1342
static int advk_msi_irq_domain_alloc(struct irq_domain *domain,
drivers/pci/controller/pci-aardvark.c
1346
struct advk_pcie *pcie = domain->host_data;
drivers/pci/controller/pci-aardvark.c
1357
irq_domain_set_info(domain, virq + i, hwirq + i,
drivers/pci/controller/pci-aardvark.c
1359
domain->host_data, handle_simple_irq,
drivers/pci/controller/pci-aardvark.c
1365
static void advk_msi_irq_domain_free(struct irq_domain *domain,
drivers/pci/controller/pci-aardvark.c
1368
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pci-aardvark.c
1369
struct advk_pcie *pcie = domain->host_data;
drivers/pci/controller/pci-aardvark.c
1383
struct advk_pcie *pcie = d->domain->host_data;
drivers/pci/controller/pci-aardvark.c
1397
struct advk_pcie *pcie = d->domain->host_data;
drivers/pci/controller/pci-ftpci100.c
316
static int faraday_pci_irq_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/pci-ftpci100.c
320
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pci-hyperv.c
1689
static void hv_msi_free(struct irq_domain *domain, unsigned int irq)
drivers/pci/controller/pci-hyperv.c
1695
struct irq_data *irq_data = irq_domain_get_irq_data(domain, irq);
drivers/pci/controller/pci-hyperv.c
1699
hbus = domain->host_data;
drivers/pci/controller/pci-hyperv.c
2111
static bool hv_pcie_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
drivers/pci/controller/pci-hyperv.c
2116
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
drivers/pci/controller/pci-hyperv.c
3766
hbus->sysdata.domain = dom;
drivers/pci/controller/pci-hyperv.c
769
static void hv_pci_vec_irq_free(struct irq_domain *domain,
drivers/pci/controller/pci-hyperv.c
774
struct hv_pci_chip_data *chip_data = domain->host_data;
drivers/pci/controller/pci-hyperv.c
775
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pci-hyperv.c
786
d = irq_domain_get_irq_data(domain, virq + i);
drivers/pci/controller/pci-hyperv.c
790
irq_domain_free_irqs_parent(domain, virq, nr_dom_irqs);
drivers/pci/controller/pci-hyperv.c
793
static void hv_pci_vec_irq_domain_free(struct irq_domain *domain,
drivers/pci/controller/pci-hyperv.c
797
hv_pci_vec_irq_free(domain, virq, nr_irqs, nr_irqs);
drivers/pci/controller/pci-hyperv.c
800
static int hv_pci_vec_alloc_device_irq(struct irq_domain *domain,
drivers/pci/controller/pci-hyperv.c
804
struct hv_pci_chip_data *chip_data = domain->host_data;
drivers/pci/controller/pci-hyperv.c
821
static int hv_pci_vec_irq_gic_domain_alloc(struct irq_domain *domain,
drivers/pci/controller/pci-hyperv.c
829
fwspec.fwnode = domain->parent->fwnode;
drivers/pci/controller/pci-hyperv.c
842
ret = irq_domain_alloc_irqs_parent(domain, virq, 1, &fwspec);
drivers/pci/controller/pci-hyperv.c
851
d = irq_domain_get_irq_data(domain->parent, virq);
drivers/pci/controller/pci-hyperv.c
856
static int hv_pci_vec_irq_domain_alloc(struct irq_domain *domain,
drivers/pci/controller/pci-hyperv.c
864
ret = hv_pci_vec_alloc_device_irq(domain, nr_irqs, &hwirq);
drivers/pci/controller/pci-hyperv.c
869
ret = hv_pci_vec_irq_gic_domain_alloc(domain, virq + i,
drivers/pci/controller/pci-hyperv.c
872
hv_pci_vec_irq_free(domain, virq, nr_irqs, i);
drivers/pci/controller/pci-hyperv.c
876
irq_domain_set_hwirq_and_chip(domain, virq + i,
drivers/pci/controller/pci-hyperv.c
879
domain->host_data);
drivers/pci/controller/pci-hyperv.c
892
static int hv_pci_vec_irq_domain_activate(struct irq_domain *domain,
drivers/pci/controller/pci-hyperv.c
913
struct irq_domain *domain;
drivers/pci/controller/pci-hyperv.c
918
domain = irq_find_host(parent);
drivers/pci/controller/pci-hyperv.c
921
return domain;
drivers/pci/controller/pci-mvebu.c
1019
struct mvebu_pcie_port *port = d->domain->host_data;
drivers/pci/controller/pci-mvebu.c
1033
struct mvebu_pcie_port *port = d->domain->host_data;
drivers/pci/controller/pci-tegra.c
1552
ret = generic_handle_domain_irq(msi->domain, index);
drivers/pci/controller/pci-tegra.c
1625
static int tegra_msi_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pci-tegra.c
1628
struct tegra_msi *msi = domain->host_data;
drivers/pci/controller/pci-tegra.c
1642
irq_domain_set_info(domain, virq + i, hwirq + i,
drivers/pci/controller/pci-tegra.c
1643
&tegra_msi_bottom_chip, domain->host_data,
drivers/pci/controller/pci-tegra.c
1651
static void tegra_msi_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pci-tegra.c
1654
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pci-tegra.c
1655
struct tegra_msi *msi = domain->host_data;
drivers/pci/controller/pci-tegra.c
1692
msi->domain = msi_create_parent_irq_domain(&info, &tegra_msi_parent_ops);
drivers/pci/controller/pci-tegra.c
1693
if (!msi->domain) {
drivers/pci/controller/pci-tegra.c
1702
irq_domain_remove(msi->domain);
drivers/pci/controller/pci-tegra.c
1791
irq = irq_find_mapping(msi->domain, i);
drivers/pci/controller/pci-tegra.c
272
struct irq_domain *domain;
drivers/pci/controller/pci-xgene-msi.c
166
static int xgene_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pci-xgene-msi.c
169
struct xgene_msi *msi = domain->host_data;
drivers/pci/controller/pci-xgene-msi.c
183
irq_domain_set_info(domain, virq, hwirq,
drivers/pci/controller/pci-xgene-msi.c
184
&xgene_msi_bottom_irq_chip, domain->host_data,
drivers/pci/controller/pci-xgene-msi.c
191
static void xgene_irq_domain_free(struct irq_domain *domain,
drivers/pci/controller/pci-xgene-msi.c
194
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pci-xgene-msi.c
203
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/pci/controller/pcie-altera-msi.c
109
static int altera_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-altera-msi.c
112
struct altera_msi *msi = domain->host_data;
drivers/pci/controller/pcie-altera-msi.c
129
irq_domain_set_info(domain, virq, bit, &altera_msi_bottom_irq_chip,
drivers/pci/controller/pcie-altera-msi.c
130
domain->host_data, handle_simple_irq,
drivers/pci/controller/pcie-altera-msi.c
140
static void altera_irq_domain_free(struct irq_domain *domain,
drivers/pci/controller/pcie-altera-msi.c
143
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-altera.c
786
static int altera_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/pcie-altera.c
790
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-apple.c
201
struct irq_domain *domain;
drivers/pci/controller/pcie-apple.c
235
static int apple_msi_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-apple.c
238
struct apple_pcie *pcie = domain->host_data;
drivers/pci/controller/pcie-apple.c
255
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, &fwspec);
drivers/pci/controller/pcie-apple.c
260
irq_domain_set_hwirq_and_chip(domain, virq + i, hwirq + i,
drivers/pci/controller/pcie-apple.c
267
static void apple_msi_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-apple.c
270
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-apple.c
271
struct apple_pcie *pcie = domain->host_data;
drivers/pci/controller/pcie-apple.c
336
static int apple_port_irq_domain_alloc(struct irq_domain *domain,
drivers/pci/controller/pcie-apple.c
340
struct apple_pcie_port *port = domain->host_data;
drivers/pci/controller/pcie-apple.c
353
irq_domain_set_info(domain, virq + i, fwspec->param[0] + i,
drivers/pci/controller/pcie-apple.c
363
static void apple_port_irq_domain_free(struct irq_domain *domain,
drivers/pci/controller/pcie-apple.c
369
struct irq_data *d = irq_domain_get_irq_data(domain, virq + i);
drivers/pci/controller/pcie-apple.c
394
generic_handle_domain_irq(port->domain, i);
drivers/pci/controller/pcie-apple.c
412
port->domain = irq_domain_create_linear(fwnode, 32,
drivers/pci/controller/pcie-apple.c
415
if (!port->domain)
drivers/pci/controller/pcie-apple.c
450
unsigned int hwirq = irq_domain_get_irq_data(port->domain, irq)->hwirq;
drivers/pci/controller/pcie-apple.c
491
irq = irq_domain_alloc_irqs(port->domain, 1, NUMA_NO_NODE,
drivers/pci/controller/pcie-aspeed.c
270
static int aspeed_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/pcie-aspeed.c
274
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-aspeed.c
622
static int aspeed_irq_msi_domain_alloc(struct irq_domain *domain,
drivers/pci/controller/pcie-aspeed.c
626
struct aspeed_pcie *pcie = domain->host_data;
drivers/pci/controller/pcie-aspeed.c
639
irq_domain_set_info(domain, virq + i, bit + i,
drivers/pci/controller/pcie-aspeed.c
641
domain->host_data, handle_simple_irq, NULL,
drivers/pci/controller/pcie-aspeed.c
648
static void aspeed_irq_msi_domain_free(struct irq_domain *domain,
drivers/pci/controller/pcie-aspeed.c
651
struct irq_data *data = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-brcmstb.c
616
static int brcm_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-brcmstb.c
619
struct brcm_msi *msi = domain->host_data;
drivers/pci/controller/pcie-brcmstb.c
628
irq_domain_set_info(domain, virq + i, (irq_hw_number_t)hwirq + i,
drivers/pci/controller/pcie-brcmstb.c
629
&brcm_msi_bottom_irq_chip, domain->host_data,
drivers/pci/controller/pcie-brcmstb.c
634
static void brcm_irq_domain_free(struct irq_domain *domain,
drivers/pci/controller/pcie-brcmstb.c
637
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-hisi-error.c
151
u32 domain, busnr, devfn;
drivers/pci/controller/pcie-hisi-error.c
160
domain = pci_root->segment;
drivers/pci/controller/pcie-hisi-error.c
164
pdev = pci_get_domain_bus_and_slot(domain, busnr, devfn);
drivers/pci/controller/pcie-hisi-error.c
167
domain, busnr, PCI_SLOT(devfn), PCI_FUNC(devfn));
drivers/pci/controller/pcie-iproc-msi.c
247
static int iproc_msi_irq_domain_alloc(struct irq_domain *domain,
drivers/pci/controller/pcie-iproc-msi.c
251
struct iproc_msi *msi = domain->host_data;
drivers/pci/controller/pcie-iproc-msi.c
272
irq_domain_set_info(domain, virq + i, hwirq + i,
drivers/pci/controller/pcie-iproc-msi.c
274
domain->host_data, handle_simple_irq,
drivers/pci/controller/pcie-iproc-msi.c
281
static void iproc_msi_irq_domain_free(struct irq_domain *domain,
drivers/pci/controller/pcie-iproc-msi.c
284
struct irq_data *data = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-iproc-msi.c
296
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/pci/controller/pcie-mediatek-gen3.c
562
struct mtk_gen3_pcie *pcie = data->domain->host_data;
drivers/pci/controller/pcie-mediatek-gen3.c
587
struct mtk_gen3_pcie *pcie = data->domain->host_data;
drivers/pci/controller/pcie-mediatek-gen3.c
603
struct mtk_gen3_pcie *pcie = data->domain->host_data;
drivers/pci/controller/pcie-mediatek-gen3.c
624
static int mtk_msi_bottom_domain_alloc(struct irq_domain *domain,
drivers/pci/controller/pcie-mediatek-gen3.c
628
struct mtk_gen3_pcie *pcie = domain->host_data;
drivers/pci/controller/pcie-mediatek-gen3.c
646
irq_domain_set_info(domain, virq + i, hwirq + i,
drivers/pci/controller/pcie-mediatek-gen3.c
653
static void mtk_msi_bottom_domain_free(struct irq_domain *domain,
drivers/pci/controller/pcie-mediatek-gen3.c
656
struct mtk_gen3_pcie *pcie = domain->host_data;
drivers/pci/controller/pcie-mediatek-gen3.c
657
struct irq_data *data = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-mediatek-gen3.c
666
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/pci/controller/pcie-mediatek-gen3.c
724
static int mtk_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/pcie-mediatek-gen3.c
727
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-mediatek.c
432
static int mtk_pcie_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-mediatek.c
435
struct mtk_pcie_port *port = domain->host_data;
drivers/pci/controller/pcie-mediatek.c
451
irq_domain_set_info(domain, virq, bit, &mtk_msi_bottom_irq_chip,
drivers/pci/controller/pcie-mediatek.c
452
domain->host_data, handle_edge_irq,
drivers/pci/controller/pcie-mediatek.c
458
static void mtk_pcie_irq_domain_free(struct irq_domain *domain,
drivers/pci/controller/pcie-mediatek.c
461
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-mediatek.c
474
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
drivers/pci/controller/pcie-mediatek.c
551
static int mtk_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/pcie-mediatek.c
555
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-rcar-host.c
40
struct irq_domain *domain;
drivers/pci/controller/pcie-rcar-host.c
579
ret = generic_handle_domain_irq(msi->domain, index);
drivers/pci/controller/pcie-rcar-host.c
646
static int rcar_msi_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-rcar-host.c
649
struct rcar_msi *msi = domain->host_data;
drivers/pci/controller/pcie-rcar-host.c
663
irq_domain_set_info(domain, virq + i, hwirq + i,
drivers/pci/controller/pcie-rcar-host.c
664
&rcar_msi_bottom_chip, domain->host_data,
drivers/pci/controller/pcie-rcar-host.c
670
static void rcar_msi_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-rcar-host.c
673
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-rcar-host.c
674
struct rcar_msi *msi = domain->host_data;
drivers/pci/controller/pcie-rcar-host.c
715
msi->domain = msi_create_parent_irq_domain(&info, &rcar_msi_parent_ops);
drivers/pci/controller/pcie-rcar-host.c
716
if (!msi->domain) {
drivers/pci/controller/pcie-rcar-host.c
726
irq_domain_remove(msi->domain);
drivers/pci/controller/pcie-rockchip-host.c
669
static int rockchip_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/pcie-rockchip-host.c
673
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-rzg3s-host.c
188
struct irq_domain *domain;
drivers/pci/controller/pcie-rzg3s-host.c
492
ret = generic_handle_domain_irq(msi->domain, bit);
drivers/pci/controller/pcie-rzg3s-host.c
570
static int rzg3s_pcie_msi_domain_alloc(struct irq_domain *domain,
drivers/pci/controller/pcie-rzg3s-host.c
574
struct rzg3s_pcie_msi *msi = domain->host_data;
drivers/pci/controller/pcie-rzg3s-host.c
586
irq_domain_set_info(domain, virq + i, hwirq + i,
drivers/pci/controller/pcie-rzg3s-host.c
588
domain->host_data, handle_edge_irq, NULL,
drivers/pci/controller/pcie-rzg3s-host.c
595
static void rzg3s_pcie_msi_domain_free(struct irq_domain *domain,
drivers/pci/controller/pcie-rzg3s-host.c
598
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-rzg3s-host.c
599
struct rzg3s_pcie_msi *msi = domain->host_data;
drivers/pci/controller/pcie-rzg3s-host.c
639
msi->domain = msi_create_parent_irq_domain(&info,
drivers/pci/controller/pcie-rzg3s-host.c
641
if (!msi->domain)
drivers/pci/controller/pcie-rzg3s-host.c
784
irq_domain_remove(msi->domain);
drivers/pci/controller/pcie-rzg3s-host.c
836
irq_domain_remove(msi->domain);
drivers/pci/controller/pcie-rzg3s-host.c
877
static int rzg3s_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/pcie-rzg3s-host.c
882
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-xilinx-cpm.c
201
static int xilinx_cpm_pcie_intx_map(struct irq_domain *domain,
drivers/pci/controller/pcie-xilinx-cpm.c
206
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-xilinx-cpm.c
265
static int xilinx_cpm_pcie_event_map(struct irq_domain *domain,
drivers/pci/controller/pcie-xilinx-cpm.c
270
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-xilinx-dma-pl.c
255
static int xilinx_pl_dma_pcie_intx_map(struct irq_domain *domain,
drivers/pci/controller/pcie-xilinx-dma-pl.c
259
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-xilinx-dma-pl.c
405
static int xilinx_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-xilinx-dma-pl.c
408
struct pl_dma_pcie *pcie = domain->host_data;
drivers/pci/controller/pcie-xilinx-dma-pl.c
421
irq_domain_set_info(domain, virq + i, bit + i, &xilinx_irq_chip,
drivers/pci/controller/pcie-xilinx-dma-pl.c
422
domain->host_data, handle_simple_irq,
drivers/pci/controller/pcie-xilinx-dma-pl.c
430
static void xilinx_irq_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-xilinx-dma-pl.c
433
struct irq_data *data = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-xilinx-dma-pl.c
546
static int xilinx_pl_dma_pcie_event_map(struct irq_domain *domain,
drivers/pci/controller/pcie-xilinx-dma-pl.c
551
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-xilinx-nwl.c
405
static int nwl_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/pcie-xilinx-nwl.c
409
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-xilinx-nwl.c
454
static int nwl_irq_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-xilinx-nwl.c
457
struct nwl_pcie *pcie = domain->host_data;
drivers/pci/controller/pcie-xilinx-nwl.c
471
irq_domain_set_info(domain, virq + i, bit + i, &nwl_irq_chip,
drivers/pci/controller/pcie-xilinx-nwl.c
472
domain->host_data, handle_simple_irq,
drivers/pci/controller/pcie-xilinx-nwl.c
479
static void nwl_irq_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-xilinx-nwl.c
482
struct irq_data *data = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-xilinx.c
222
static int xilinx_msi_domain_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-xilinx.c
225
struct xilinx_pcie *pcie = domain->host_data;
drivers/pci/controller/pcie-xilinx.c
238
irq_domain_set_info(domain, virq + i, hwirq + i,
drivers/pci/controller/pcie-xilinx.c
239
&xilinx_msi_bottom_chip, domain->host_data,
drivers/pci/controller/pcie-xilinx.c
245
static void xilinx_msi_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/pcie-xilinx.c
248
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/pcie-xilinx.c
249
struct xilinx_pcie *pcie = domain->host_data;
drivers/pci/controller/pcie-xilinx.c
263
static bool xilinx_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
drivers/pci/controller/pcie-xilinx.c
268
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
drivers/pci/controller/pcie-xilinx.c
321
static int xilinx_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/pcie-xilinx.c
325
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/pcie-xilinx.c
390
struct irq_domain *domain;
drivers/pci/controller/pcie-xilinx.c
404
domain = pcie->msi_domain;
drivers/pci/controller/pcie-xilinx.c
408
domain = pcie->leg_domain;
drivers/pci/controller/pcie-xilinx.c
415
generic_handle_domain_irq(domain, val);
drivers/pci/controller/plda/pcie-plda-host.c
107
irq_domain_set_info(domain, virq, bit, &plda_msi_bottom_irq_chip,
drivers/pci/controller/plda/pcie-plda-host.c
108
domain->host_data, handle_edge_irq, NULL, NULL);
drivers/pci/controller/plda/pcie-plda-host.c
115
static void plda_irq_msi_domain_free(struct irq_domain *domain,
drivers/pci/controller/plda/pcie-plda-host.c
119
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pci/controller/plda/pcie-plda-host.c
249
static int plda_pcie_intx_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/plda/pcie-plda-host.c
253
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/plda/pcie-plda-host.c
367
static int plda_pcie_event_map(struct irq_domain *domain, unsigned int irq,
drivers/pci/controller/plda/pcie-plda-host.c
370
struct plda_pcie_rp *port = (void *)domain->host_data;
drivers/pci/controller/plda/pcie-plda-host.c
373
irq_set_chip_data(irq, domain->host_data);
drivers/pci/controller/plda/pcie-plda-host.c
89
static int plda_irq_msi_domain_alloc(struct irq_domain *domain,
drivers/pci/controller/plda/pcie-plda-host.c
94
struct plda_pcie_rp *port = domain->host_data;
drivers/pci/controller/vmd.c
1000
vmd->sysdata.domain = PCI_DOMAIN_NR_NOT_SET;
drivers/pci/controller/vmd.c
1039
vmd->sysdata.domain);
drivers/pci/controller/vmd.c
1066
pci_bus_release_emul_domain_nr(vmd->sysdata.domain);
drivers/pci/controller/vmd.c
262
static void vmd_msi_free(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/vmd.c
265
static int vmd_msi_alloc(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/vmd.c
269
struct vmd_dev *vmd = domain->host_data;
drivers/pci/controller/vmd.c
275
vmd_msi_free(domain, virq, i);
drivers/pci/controller/vmd.c
283
irq_domain_set_info(domain, virq + i, vmdirq->irq->virq,
drivers/pci/controller/vmd.c
291
static void vmd_msi_free(struct irq_domain *domain, unsigned int virq,
drivers/pci/controller/vmd.c
298
irq_data = irq_domain_get_irq_data(domain, virq + i);
drivers/pci/controller/vmd.c
316
static bool vmd_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
drivers/pci/controller/vmd.c
320
if (!msi_lib_init_dev_msi_info(dev, domain, real_parent, info))
drivers/pci/controller/vmd.c
351
vmd->sysdata.domain);
drivers/pci/controller/vmd.c
898
sd->domain = pci_bus_find_emul_domain_nr(0, 0x10000, INT_MAX);
drivers/pci/controller/vmd.c
899
if (sd->domain < 0)
drivers/pci/controller/vmd.c
900
return sd->domain;
drivers/pci/controller/vmd.c
907
pci_bus_release_emul_domain_nr(sd->domain);
drivers/pci/endpoint/pci-ep-msi.c
42
struct irq_domain *domain;
drivers/pci/endpoint/pci-ep-msi.c
53
domain = of_msi_map_get_device_domain(epc->dev.parent, 0,
drivers/pci/endpoint/pci-ep-msi.c
55
if (!domain) {
drivers/pci/endpoint/pci-ep-msi.c
60
if (!irq_domain_is_msi_parent(domain))
drivers/pci/endpoint/pci-ep-msi.c
63
if (!irq_domain_is_msi_immutable(domain)) {
drivers/pci/endpoint/pci-ep-msi.c
68
dev_set_msi_domain(epc->dev.parent, domain);
drivers/pci/msi/irqdomain.c
13
struct irq_domain *domain;
drivers/pci/msi/irqdomain.c
15
domain = dev_get_msi_domain(&dev->dev);
drivers/pci/msi/irqdomain.c
16
if (domain && irq_domain_is_hierarchy(domain))
drivers/pci/msi/irqdomain.c
170
void pci_msix_prepare_desc(struct irq_domain *domain, msi_alloc_info_t *arg,
drivers/pci/msi/irqdomain.c
210
struct irq_domain *domain = dev_get_msi_domain(&pdev->dev);
drivers/pci/msi/irqdomain.c
212
if (!domain || !irq_domain_is_msi_parent(domain))
drivers/pci/msi/irqdomain.c
24
struct irq_domain *domain;
drivers/pci/msi/irqdomain.c
26
domain = dev_get_msi_domain(&dev->dev);
drivers/pci/msi/irqdomain.c
27
if (domain && irq_domain_is_hierarchy(domain)) {
drivers/pci/msi/irqdomain.c
297
struct irq_domain *domain;
drivers/pci/msi/irqdomain.c
300
domain = dev_get_msi_domain(&pdev->dev);
drivers/pci/msi/irqdomain.c
302
if (!domain || !irq_domain_is_hierarchy(domain)) {
drivers/pci/msi/irqdomain.c
308
if (!irq_domain_is_msi_parent(domain)) {
drivers/pci/msi/irqdomain.c
314
info = domain->host_data;
drivers/pci/msi/irqdomain.c
324
supported = domain->msi_parent_ops->supported_flags;
drivers/pci/msi/irqdomain.c
364
u32 pci_msi_domain_get_msi_rid(struct irq_domain *domain, struct pci_dev *pdev)
drivers/pci/msi/irqdomain.c
371
of_node = irq_domain_get_of_node(domain);
drivers/pci/msi/irqdomain.c
390
u32 pci_msi_map_rid_ctlr_node(struct irq_domain *domain, struct pci_dev *pdev,
drivers/pci/msi/irqdomain.c
398
if (irq_domain_get_of_node(domain)) {
drivers/pci/msi/irqdomain.c
63
struct msi_domain_info *info = data->domain->host_data;
drivers/pci/msi/irqdomain.c
73
struct msi_domain_info *info = data->domain->host_data;
drivers/pci/of.c
231
u32 domain;
drivers/pci/of.c
234
error = of_property_read_u32(node, "linux,pci-domain", &domain);
drivers/pci/of.c
238
return (u16)domain;
drivers/pci/pcie/aer.c
1212
u16 domain;
drivers/pci/pcie/aer.c
1226
pdev = pci_get_domain_bus_and_slot(entry.domain, entry.bus,
drivers/pci/pcie/aer.c
1230
entry.domain, entry.bus,
drivers/pci/pcie/aer.c
1265
void aer_recover_queue(int domain, unsigned int bus, unsigned int devfn,
drivers/pci/pcie/aer.c
1271
.domain = domain,
drivers/pci/pcie/aer.c
1281
domain, bus, PCI_SLOT(devfn), PCI_FUNC(devfn));
drivers/pci/pcie/aer_inject.c
106
int domain = pci_domain_nr(dev->bus);
drivers/pci/pcie/aer_inject.c
107
if (domain < 0)
drivers/pci/pcie/aer_inject.c
109
return __find_aer_error(domain, dev->bus->number, dev->devfn);
drivers/pci/pcie/aer_inject.c
223
int domain;
drivers/pci/pcie/aer_inject.c
229
domain = pci_domain_nr(bus);
drivers/pci/pcie/aer_inject.c
230
if (domain < 0)
drivers/pci/pcie/aer_inject.c
232
err = __find_aer_error(domain, bus->number, devfn);
drivers/pci/pcie/aer_inject.c
255
int domain;
drivers/pci/pcie/aer_inject.c
261
domain = pci_domain_nr(bus);
drivers/pci/pcie/aer_inject.c
262
if (domain < 0)
drivers/pci/pcie/aer_inject.c
264
err = __find_aer_error(domain, bus->number, devfn);
drivers/pci/pcie/aer_inject.c
332
dev = pci_get_domain_bus_and_slot(einj->domain, einj->bus, devfn);
drivers/pci/pcie/aer_inject.c
392
aer_error_init(err, einj->domain, einj->bus, devfn,
drivers/pci/pcie/aer_inject.c
44
u32 domain;
drivers/pci/pcie/aer_inject.c
49
u32 domain;
drivers/pci/pcie/aer_inject.c
494
if (usize < offsetof(struct aer_error_inj, domain) ||
drivers/pci/pcie/aer_inject.c
77
static void aer_error_init(struct aer_error *err, u32 domain,
drivers/pci/pcie/aer_inject.c
82
err->domain = domain;
drivers/pci/pcie/aer_inject.c
89
static struct aer_error *__find_aer_error(u32 domain, unsigned int bus,
drivers/pci/pcie/aer_inject.c
95
if (domain == err->domain &&
drivers/pci/search.c
143
struct pci_bus *pci_find_bus(int domain, int busnr)
drivers/pci/search.c
149
if (pci_domain_nr(bus) != domain)
drivers/pci/search.c
230
struct pci_dev *pci_get_domain_bus_and_slot(int domain, unsigned int bus,
drivers/pci/search.c
236
if (pci_domain_nr(dev->bus) == domain &&
drivers/pci/vgaarb.c
1046
static int vga_pci_str_to_vars(char *buf, int count, unsigned int *domain,
drivers/pci/vgaarb.c
1052
n = sscanf(buf, "PCI:%x:%x:%x.%x", domain, bus, &slot, &func);
drivers/pci/vgaarb.c
1281
unsigned int domain, bus, devfn;
drivers/pci/vgaarb.c
1292
&domain, &bus, &devfn)) {
drivers/pci/vgaarb.c
1296
pdev = pci_get_domain_bus_and_slot(domain, bus, devfn);
drivers/pci/vgaarb.c
1299
domain, bus, PCI_SLOT(devfn),
drivers/pci/vgaarb.c
1306
domain, bus, PCI_SLOT(devfn), PCI_FUNC(devfn),
drivers/pci/xen-pcifront.c
181
.domain = pci_domain_nr(bus),
drivers/pci/xen-pcifront.c
217
.domain = pci_domain_nr(bus),
drivers/pci/xen-pcifront.c
248
.domain = pci_domain_nr(dev->bus),
drivers/pci/xen-pcifront.c
301
.domain = pci_domain_nr(dev->bus),
drivers/pci/xen-pcifront.c
320
.domain = pci_domain_nr(dev->bus),
drivers/pci/xen-pcifront.c
349
.domain = pci_domain_nr(dev->bus),
drivers/pci/xen-pcifront.c
408
unsigned int domain, unsigned int bus,
drivers/pci/xen-pcifront.c
430
"%04x:%02x:%02x.%d found.\n", domain, bus,
drivers/pci/xen-pcifront.c
438
unsigned int domain, unsigned int bus)
drivers/pci/xen-pcifront.c
452
if (domain != 0) {
drivers/pci/xen-pcifront.c
454
"PCI Root in non-zero PCI Domain! domain=%d\n", domain);
drivers/pci/xen-pcifront.c
463
domain, bus);
drivers/pci/xen-pcifront.c
474
pcifront_init_sd(sd, domain, bus, pdev);
drivers/pci/xen-pcifront.c
497
err = pcifront_scan_bus(pdev, domain, bus, b);
drivers/pci/xen-pcifront.c
516
unsigned int domain, unsigned int bus)
drivers/pci/xen-pcifront.c
521
b = pci_find_bus(domain, bus);
drivers/pci/xen-pcifront.c
524
return pcifront_scan_root(pdev, domain, bus);
drivers/pci/xen-pcifront.c
527
domain, bus);
drivers/pci/xen-pcifront.c
529
err = pcifront_scan_bus(pdev, domain, bus, b);
drivers/pci/xen-pcifront.c
581
int domain = pdev->sh_info->aer_op.domain;
drivers/pci/xen-pcifront.c
588
pcidev = pci_get_domain_bus_and_slot(domain, bus, devfn);
drivers/pci/xen-pcifront.c
68
unsigned int domain, unsigned int bus,
drivers/pci/xen-pcifront.c
73
sd->sd.domain = domain;
drivers/pci/xen-pcifront.c
809
unsigned int domain, bus;
drivers/pci/xen-pcifront.c
835
"%x:%x", &domain, &bus);
drivers/pci/xen-pcifront.c
842
err = pcifront_rescan_root(pdev, domain, bus);
drivers/pci/xen-pcifront.c
846
domain, bus);
drivers/pci/xen-pcifront.c
908
unsigned int domain, bus, slot, func;
drivers/pci/xen-pcifront.c
960
"%x:%x:%x.%x", &domain, &bus, &slot, &func);
drivers/pci/xen-pcifront.c
969
pci_dev = pci_get_domain_bus_and_slot(domain, bus,
drivers/pci/xen-pcifront.c
974
domain, bus, slot, func);
drivers/pci/xen-pcifront.c
984
domain, bus, slot, func);
drivers/perf/riscv_pmu_sbi.c
1196
struct irq_domain *domain = NULL;
drivers/perf/riscv_pmu_sbi.c
1219
domain = irq_find_matching_fwnode(riscv_get_intc_hwnode(),
drivers/perf/riscv_pmu_sbi.c
1221
if (!domain) {
drivers/perf/riscv_pmu_sbi.c
1226
riscv_pmu_irq = irq_create_mapping(domain, riscv_pmu_irq_num);
drivers/pinctrl/actions/pinctrl-owl.c
851
struct irq_domain *domain = pctrl->chip.irq.domain;
drivers/pinctrl/actions/pinctrl-owl.c
871
generic_handle_domain_irq(domain, offset + pin);
drivers/pinctrl/bcm/pinctrl-bcm2835.c
437
generic_handle_domain_irq(pc->gpio_chip.irq.domain,
drivers/pinctrl/bcm/pinctrl-bcm2835.c
756
int irq = irq_find_mapping(chip->irq.domain, offset);
drivers/pinctrl/bcm/pinctrl-iproc-gpio.c
189
generic_handle_domain_irq(gc->irq.domain, pin);
drivers/pinctrl/bcm/pinctrl-nsp-gpio.c
147
generic_handle_domain_irq(gc->irq.domain, bit);
drivers/pinctrl/intel/pinctrl-baytrail.c
1392
generic_handle_domain_irq(vg->chip.irq.domain, base + pin);
drivers/pinctrl/intel/pinctrl-cherryview.c
1417
generic_handle_domain_irq(gc->irq.domain, offset);
drivers/pinctrl/intel/pinctrl-cherryview.c
1556
irq_domain_associate_many(chip->irq.domain, irq_base,
drivers/pinctrl/intel/pinctrl-intel.c
1308
generic_handle_domain_irq(gc->irq.domain, padgrp->gpio_base + gpp_offset);
drivers/pinctrl/intel/pinctrl-lynxpoint.c
573
generic_handle_domain_irq(lg->chip.irq.domain, base + pin);
drivers/pinctrl/mediatek/mtk-eint.c
402
generic_handle_domain_irq(eint->domain, eint_num);
drivers/pinctrl/mediatek/mtk-eint.c
455
virq = irq_find_mapping(eint->domain, eint_num);
drivers/pinctrl/mediatek/mtk-eint.c
504
irq = irq_find_mapping(eint->domain, eint_n);
drivers/pinctrl/mediatek/mtk-eint.c
577
eint->domain = irq_domain_create_linear(dev_fwnode(eint->dev), eint->hw->ap_num,
drivers/pinctrl/mediatek/mtk-eint.c
579
if (!eint->domain)
drivers/pinctrl/mediatek/mtk-eint.c
595
virq = irq_create_mapping(eint->domain, i);
drivers/pinctrl/mediatek/mtk-eint.h
72
struct irq_domain *domain;
drivers/pinctrl/mediatek/pinctrl-airoha.c
2391
generic_handle_irq(irq_find_mapping(girq->domain,
drivers/pinctrl/mvebu/pinctrl-armada-37xx.c
1062
d = gc->irq.domain;
drivers/pinctrl/mvebu/pinctrl-armada-37xx.c
674
struct irq_domain *d = gc->irq.domain;
drivers/pinctrl/nomadik/pinctrl-abx500.c
236
return irq_create_mapping(pct->parent->domain, hwirq);
drivers/pinctrl/nomadik/pinctrl-nomadik.c
311
int irq = irq_find_mapping(nmk_chip->chip.irq.domain, offset);
drivers/pinctrl/nuvoton/pinctrl-ma35.c
490
struct irq_domain *irqdomain = bank->chip.irq.domain;
drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c
1642
virq = irq_find_mapping(npcm->domain, offset);
drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c
223
generic_handle_domain_irq(gc->irq.domain, bit);
drivers/pinctrl/nuvoton/pinctrl-npcm7xx.c
96
struct irq_domain *domain;
drivers/pinctrl/nuvoton/pinctrl-npcm8xx.c
112
struct irq_domain *domain;
drivers/pinctrl/nuvoton/pinctrl-npcm8xx.c
2042
virq = irq_find_mapping(npcm->domain, offset);
drivers/pinctrl/nuvoton/pinctrl-npcm8xx.c
221
generic_handle_domain_irq(gc->irq.domain, bit);
drivers/pinctrl/nuvoton/pinctrl-wpcm450.c
334
generic_handle_domain_irq(gpio->chip.gc.irq.domain, offset);
drivers/pinctrl/nuvoton/pinctrl-wpcm450.c
59
struct irq_domain *domain;
drivers/pinctrl/pinctrl-amd.c
651
generic_handle_domain_irq_safe(gc->irq.domain, irqnr + i);
drivers/pinctrl/pinctrl-apple-gpio.c
358
generic_handle_domain_irq(gc->irq.domain, pinh + pinl);
drivers/pinctrl/pinctrl-at91.c
1717
generic_handle_domain_irq(gpio_chip->irq.domain, n);
drivers/pinctrl/pinctrl-aw9523.c
481
tmp = irq_find_mapping(awi->gpio.irq.domain, n);
drivers/pinctrl/pinctrl-cy8c95x0.c
1110
nested_irq = irq_find_mapping(gc->irq.domain, level);
drivers/pinctrl/pinctrl-equilibrium.c
169
generic_handle_domain_irq(gc->irq.domain, offset);
drivers/pinctrl/pinctrl-ingenic.c
3796
generic_handle_domain_irq(gc->irq.domain, i);
drivers/pinctrl/pinctrl-keembay.c
1277
kmb_irq = irq_find_mapping(gc->irq.domain, pin);
drivers/pinctrl/pinctrl-mcp23s08.c
458
child_irq = irq_find_mapping(mcp->chip.irq.domain, i);
drivers/pinctrl/pinctrl-microchip-sgpio.c
776
generic_handle_domain_irq(chip->irq.domain, gpio);
drivers/pinctrl/pinctrl-ocelot.c
2207
generic_handle_domain_irq(chip->irq.domain, gpio);
drivers/pinctrl/pinctrl-ocelot.c
2350
generic_handle_domain_irq(chip->irq.domain, irq + 32 * i);
drivers/pinctrl/pinctrl-pic32.c
2107
generic_handle_domain_irq(gc->irq.domain, pin);
drivers/pinctrl/pinctrl-pistachio.c
1317
generic_handle_domain_irq(gc->irq.domain, pin);
drivers/pinctrl/pinctrl-rockchip.h
339
struct irq_domain *domain;
drivers/pinctrl/pinctrl-rp1.c
1084
int irq = irq_find_mapping(chip->irq.domain, offset);
drivers/pinctrl/pinctrl-rp1.c
885
generic_handle_irq(irq_find_mapping(pc->gpio_chip.irq.domain,
drivers/pinctrl/pinctrl-single.c
1321
if (pcs->domain)
drivers/pinctrl/pinctrl-single.c
1322
irq_domain_remove(pcs->domain);
drivers/pinctrl/pinctrl-single.c
1498
generic_handle_domain_irq(pcs->domain,
drivers/pinctrl/pinctrl-single.c
1625
pcs->domain = irq_domain_create_simple(of_fwnode_handle(np),
drivers/pinctrl/pinctrl-single.c
1629
if (!pcs->domain) {
drivers/pinctrl/pinctrl-single.c
199
struct irq_domain *domain;
drivers/pinctrl/pinctrl-st.c
1433
generic_handle_domain_irq(bank->gpio_chip.irq.domain, n);
drivers/pinctrl/pinctrl-stmfx.c
590
handle_nested_irq(irq_find_mapping(gc->irq.domain, n));
drivers/pinctrl/pinctrl-sx150x.c
559
handle_nested_irq(irq_find_mapping(pctl->gpio.irq.domain, n));
drivers/pinctrl/qcom/pinctrl-msm.c
1323
generic_handle_domain_irq(gc->irq.domain, i);
drivers/pinctrl/qcom/pinctrl-msm.c
160
unsigned int irq = irq_find_mapping(gc->irq.domain, group);
drivers/pinctrl/qcom/pinctrl-spmi-gpio.c
988
static int pmic_gpio_domain_translate(struct irq_domain *domain,
drivers/pinctrl/qcom/pinctrl-spmi-gpio.c
993
struct pmic_gpio_state *state = container_of(domain->host_data,
drivers/pinctrl/qcom/pinctrl-spmi-mpp.c
789
static int pmic_mpp_domain_translate(struct irq_domain *domain,
drivers/pinctrl/qcom/pinctrl-spmi-mpp.c
794
struct pmic_mpp_state *state = container_of(domain->host_data,
drivers/pinctrl/qcom/pinctrl-ssbi-gpio.c
683
static int pm8xxx_domain_translate(struct irq_domain *domain,
drivers/pinctrl/qcom/pinctrl-ssbi-gpio.c
688
struct pm8xxx_gpio *pctrl = container_of(domain->host_data,
drivers/pinctrl/qcom/pinctrl-ssbi-mpp.c
730
static int pm8xxx_mpp_domain_translate(struct irq_domain *domain,
drivers/pinctrl/qcom/pinctrl-ssbi-mpp.c
735
struct pm8xxx_mpp *pctrl = container_of(domain->host_data,
drivers/pinctrl/renesas/pinctrl-rzg2l.c
1840
virq = irq_find_mapping(chip->irq.domain, offset);
drivers/pinctrl/renesas/pinctrl-rzg2l.c
2623
struct irq_domain *domain = pctrl->gpio_chip.irq.domain;
drivers/pinctrl/renesas/pinctrl-rzg2l.c
2634
virq = irq_find_mapping(domain, pctrl->hwirq[i]);
drivers/pinctrl/renesas/pinctrl-rzg2l.c
2641
data = irq_domain_get_irq_data(domain, virq);
drivers/pinctrl/renesas/pinctrl-rzg2l.c
2662
static void rzg2l_gpio_irq_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/pinctrl/renesas/pinctrl-rzg2l.c
2667
d = irq_domain_get_irq_data(domain, virq);
drivers/pinctrl/renesas/pinctrl-rzg2l.c
2687
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/pinctrl/renesas/pinctrl-rzt2h.c
782
static void rzt2h_gpio_irq_domain_free(struct irq_domain *domain, unsigned int virq,
drivers/pinctrl/renesas/pinctrl-rzt2h.c
785
struct irq_data *d = irq_domain_get_irq_data(domain, virq);
drivers/pinctrl/renesas/pinctrl-rzt2h.c
795
irq_domain_free_irqs_common(domain, virq, nr_irqs);
drivers/pinctrl/samsung/pinctrl-exynos.c
710
struct irq_domain *domain)
drivers/pinctrl/samsung/pinctrl-exynos.c
716
generic_handle_domain_irq(domain, irq);
drivers/pinctrl/sophgo/pinctrl-cv18xx.c
48
u8 domain, u32 cfg)
drivers/pinctrl/sophgo/pinctrl-cv18xx.c
52
if (domain >= pctrl->data->npds)
drivers/pinctrl/sophgo/pinctrl-cv18xx.c
55
if (priv->power_cfg[domain] && priv->power_cfg[domain] != cfg)
drivers/pinctrl/sophgo/pinctrl-cv18xx.c
58
priv->power_cfg[domain] = cfg;
drivers/pinctrl/sophgo/pinctrl-cv18xx.c
64
u8 domain)
drivers/pinctrl/sophgo/pinctrl-cv18xx.c
68
return priv->power_cfg[domain];
drivers/pinctrl/sophgo/pinctrl-cv18xx.c
82
u8 domain)
drivers/pinctrl/sophgo/pinctrl-cv18xx.c
84
return pctrl->data->pdnames[domain];
drivers/pinctrl/spear/pinctrl-plgpio.c
417
generic_handle_domain_irq(gc->irq.domain, pin);
drivers/pinctrl/starfive/pinctrl-starfive-jh7100.c
1186
generic_handle_domain_irq(sfp->gc.irq.domain, pin);
drivers/pinctrl/starfive/pinctrl-starfive-jh7100.c
1190
generic_handle_domain_irq(sfp->gc.irq.domain, pin + 32);
drivers/pinctrl/starfive/pinctrl-starfive-jh7100.c
1331
irq_domain_set_pm_device(sfp->gc.irq.domain, dev);
drivers/pinctrl/starfive/pinctrl-starfive-jh7110-aon.c
106
generic_handle_domain_irq(sfp->gc.irq.domain, pin);
drivers/pinctrl/starfive/pinctrl-starfive-jh7110-sys.c
370
generic_handle_domain_irq(sfp->gc.irq.domain, pin);
drivers/pinctrl/starfive/pinctrl-starfive-jh7110-sys.c
374
generic_handle_domain_irq(sfp->gc.irq.domain, pin + 32);
drivers/pinctrl/stm32/pinctrl-stm32.c
152
struct irq_domain *domain;
drivers/pinctrl/stm32/pinctrl-stm32.c
1699
if (pctl->domain) {
drivers/pinctrl/stm32/pinctrl-stm32.c
1703
bank->domain = irq_domain_create_hierarchy(pctl->domain, 0, STM32_GPIO_IRQ_LINE,
drivers/pinctrl/stm32/pinctrl-stm32.c
1707
if (!bank->domain)
drivers/pinctrl/stm32/pinctrl-stm32.c
173
struct irq_domain *domain;
drivers/pinctrl/stm32/pinctrl-stm32.c
1742
struct irq_domain *domain;
drivers/pinctrl/stm32/pinctrl-stm32.c
1751
domain = irq_find_host(parent);
drivers/pinctrl/stm32/pinctrl-stm32.c
1753
if (!domain)
drivers/pinctrl/stm32/pinctrl-stm32.c
1757
return domain;
drivers/pinctrl/stm32/pinctrl-stm32.c
1877
pctl->domain = stm32_pctrl_get_irq_domain(pdev);
drivers/pinctrl/stm32/pinctrl-stm32.c
1878
if (IS_ERR(pctl->domain))
drivers/pinctrl/stm32/pinctrl-stm32.c
1879
return PTR_ERR(pctl->domain);
drivers/pinctrl/stm32/pinctrl-stm32.c
1880
if (!pctl->domain)
drivers/pinctrl/stm32/pinctrl-stm32.c
1916
if (pctl->domain) {
drivers/pinctrl/stm32/pinctrl-stm32.c
497
struct stm32_gpio_bank *bank = d->domain->host_data;
drivers/pinctrl/stm32/pinctrl-stm32.c
519
struct stm32_gpio_bank *bank = d->domain->host_data;
drivers/pinctrl/stm32/pinctrl-stm32.c
545
struct stm32_gpio_bank *bank = irq_data->domain->host_data;
drivers/pinctrl/stm32/pinctrl-stm32.c
565
struct stm32_gpio_bank *bank = irq_data->domain->host_data;
drivers/pinctrl/sunxi/pinctrl-sunxi.c
1088
return irq_find_mapping(pctl->domain, irqnum);
drivers/pinctrl/sunxi/pinctrl-sunxi.c
1328
generic_handle_domain_irq(pctl->domain,
drivers/pinctrl/sunxi/pinctrl-sunxi.c
1726
pctl->domain = irq_domain_create_linear(dev_fwnode(&pdev->dev),
drivers/pinctrl/sunxi/pinctrl-sunxi.c
1729
if (!pctl->domain) {
drivers/pinctrl/sunxi/pinctrl-sunxi.c
1736
int irqno = irq_create_mapping(pctl->domain, i);
drivers/pinctrl/sunxi/pinctrl-sunxi.h
170
struct irq_domain *domain;
drivers/platform/cznic/turris-omnia-mcu-gpio.c
940
struct irq_domain *domain;
drivers/platform/cznic/turris-omnia-mcu-gpio.c
947
domain = mcu->gc.irq.domain;
drivers/platform/cznic/turris-omnia-mcu-gpio.c
952
nested_irq = irq_find_mapping(domain, omnia_int_to_gpio_idx[i]);
drivers/platform/surface/aggregator/bus.c
192
if (id->domain != uid.domain || id->category != uid.category)
drivers/platform/surface/aggregator/bus.c
220
id->domain == 0 &&
drivers/platform/surface/aggregator/bus.c
28
sdev->uid.domain, sdev->uid.category, sdev->uid.target,
drivers/platform/surface/aggregator/bus.c
404
uid->domain = d;
drivers/platform/surface/aggregator/bus.c
46
sdev->uid.domain, sdev->uid.category,
drivers/platform/surface/aggregator/bus.c
98
sdev->uid.domain, sdev->uid.category, sdev->uid.target,
drivers/platform/x86/intel/int0002_vgpio.c
137
generic_handle_domain_irq_safe(chip->irq.domain, GPE0A_PME_B0_VIRT_GPIO_PIN);
drivers/platform/x86/x86-android-tablets/asus.c
123
.domain = DOMAIN_BUS_WAKEUP,
drivers/platform/x86/x86-android-tablets/asus.c
261
.domain = DOMAIN_BUS_WAKEUP,
drivers/platform/x86/x86-android-tablets/core.c
135
domain = irq_find_matching_fwspec(&fwspec, data->domain);
drivers/platform/x86/x86-android-tablets/core.c
136
if (!domain) {
drivers/platform/x86/x86-android-tablets/core.c
141
return irq_create_mapping(domain, data->index);
drivers/platform/x86/x86-android-tablets/core.c
79
struct irq_domain *domain;
drivers/platform/x86/x86-android-tablets/x86-android-tablets.h
42
enum irq_domain_bus_token domain;
drivers/pmdomain/actions/owl-sps.c
59
static int owl_sps_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/actions/owl-sps.c
61
struct owl_sps_domain *pd = to_owl_pd(domain);
drivers/pmdomain/actions/owl-sps.c
68
static int owl_sps_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/actions/owl-sps.c
70
struct owl_sps_domain *pd = to_owl_pd(domain);
drivers/pmdomain/amlogic/meson-ee-pwrc.c
359
static int meson_ee_pwrc_off(struct generic_pm_domain *domain)
drivers/pmdomain/amlogic/meson-ee-pwrc.c
362
container_of(domain, struct meson_ee_pwrc_domain, base);
drivers/pmdomain/amlogic/meson-ee-pwrc.c
395
static int meson_ee_pwrc_on(struct generic_pm_domain *domain)
drivers/pmdomain/amlogic/meson-ee-pwrc.c
398
container_of(domain, struct meson_ee_pwrc_domain, base);
drivers/pmdomain/amlogic/meson-secure-pwrc.c
67
static int meson_secure_pwrc_off(struct generic_pm_domain *domain)
drivers/pmdomain/amlogic/meson-secure-pwrc.c
71
container_of(domain, struct meson_secure_pwrc_domain, base);
drivers/pmdomain/amlogic/meson-secure-pwrc.c
82
static int meson_secure_pwrc_on(struct generic_pm_domain *domain)
drivers/pmdomain/amlogic/meson-secure-pwrc.c
86
container_of(domain, struct meson_secure_pwrc_domain, base);
drivers/pmdomain/arm/scmi_pm_domain.c
20
u32 domain;
drivers/pmdomain/arm/scmi_pm_domain.c
25
static int scmi_pd_power(struct generic_pm_domain *domain, u32 state)
drivers/pmdomain/arm/scmi_pm_domain.c
27
struct scmi_pm_domain *pd = to_scmi_pd(domain);
drivers/pmdomain/arm/scmi_pm_domain.c
29
return power_ops->state_set(pd->ph, pd->domain, state);
drivers/pmdomain/arm/scmi_pm_domain.c
32
static int scmi_pd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/arm/scmi_pm_domain.c
34
return scmi_pd_power(domain, SCMI_POWER_STATE_GENERIC_ON);
drivers/pmdomain/arm/scmi_pm_domain.c
37
static int scmi_pd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/arm/scmi_pm_domain.c
39
return scmi_pd_power(domain, SCMI_POWER_STATE_GENERIC_OFF);
drivers/pmdomain/arm/scmi_pm_domain.c
94
scmi_pd->domain = i;
drivers/pmdomain/arm/scpi_pm_domain.c
111
scpi_pd->domain = i;
drivers/pmdomain/arm/scpi_pm_domain.c
19
u32 domain;
drivers/pmdomain/arm/scpi_pm_domain.c
44
ret = pd->ops->device_set_power_state(pd->domain, state);
drivers/pmdomain/arm/scpi_pm_domain.c
48
return !(state == pd->ops->device_get_power_state(pd->domain));
drivers/pmdomain/arm/scpi_pm_domain.c
51
static int scpi_pd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/arm/scpi_pm_domain.c
53
struct scpi_pm_domain *pd = to_scpi_pd(domain);
drivers/pmdomain/arm/scpi_pm_domain.c
58
static int scpi_pd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/arm/scpi_pm_domain.c
60
struct scpi_pm_domain *pd = to_scpi_pd(domain);
drivers/pmdomain/bcm/bcm2835-power.c
136
u32 domain;
drivers/pmdomain/bcm/bcm2835-power.c
370
static int bcm2835_power_pd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/bcm/bcm2835-power.c
373
container_of(domain, struct bcm2835_power_domain, base);
drivers/pmdomain/bcm/bcm2835-power.c
376
switch (pd->domain) {
drivers/pmdomain/bcm/bcm2835-power.c
434
dev_err(power->dev, "Invalid domain %d\n", pd->domain);
drivers/pmdomain/bcm/bcm2835-power.c
439
static int bcm2835_power_pd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/bcm/bcm2835-power.c
442
container_of(domain, struct bcm2835_power_domain, base);
drivers/pmdomain/bcm/bcm2835-power.c
445
switch (pd->domain) {
drivers/pmdomain/bcm/bcm2835-power.c
500
dev_err(power->dev, "Invalid domain %d\n", pd->domain);
drivers/pmdomain/bcm/bcm2835-power.c
522
dom->domain = pd_xlate_index;
drivers/pmdomain/bcm/raspberrypi-power.c
120
dom->domain = xlate_index + 1;
drivers/pmdomain/bcm/raspberrypi-power.c
126
int xlate_index, int domain,
drivers/pmdomain/bcm/raspberrypi-power.c
132
dom->domain = domain;
drivers/pmdomain/bcm/raspberrypi-power.c
151
packet.domain = RPI_POWER_DOMAIN_ARM;
drivers/pmdomain/bcm/raspberrypi-power.c
24
u32 domain;
drivers/pmdomain/bcm/raspberrypi-power.c
43
u32 domain;
drivers/pmdomain/bcm/raspberrypi-power.c
51
static int rpi_firmware_set_power(struct generic_pm_domain *domain, bool on)
drivers/pmdomain/bcm/raspberrypi-power.c
54
container_of(domain, struct rpi_power_domain, base);
drivers/pmdomain/bcm/raspberrypi-power.c
59
packet.domain = rpi_domain->domain;
drivers/pmdomain/bcm/raspberrypi-power.c
67
dev_err(&domain->dev, "Failed to set %s to %u (%d)\n",
drivers/pmdomain/bcm/raspberrypi-power.c
70
dev_dbg(&domain->dev, "Set %s to %u\n",
drivers/pmdomain/bcm/raspberrypi-power.c
76
static int rpi_domain_off(struct generic_pm_domain *domain)
drivers/pmdomain/bcm/raspberrypi-power.c
78
return rpi_firmware_set_power(domain, false);
drivers/pmdomain/bcm/raspberrypi-power.c
81
static int rpi_domain_on(struct generic_pm_domain *domain)
drivers/pmdomain/bcm/raspberrypi-power.c
83
return rpi_firmware_set_power(domain, true);
drivers/pmdomain/core.c
1006
if (!genpd->gov->power_down_ok(&genpd->domain))
drivers/pmdomain/core.c
1429
if (!genpd->gov->system_power_down_ok(&genpd->domain))
drivers/pmdomain/core.c
1955
dev_pm_domain_set(dev, &genpd->domain);
drivers/pmdomain/core.c
2419
genpd->domain.ops.runtime_suspend = genpd_runtime_suspend;
drivers/pmdomain/core.c
2420
genpd->domain.ops.runtime_resume = genpd_runtime_resume;
drivers/pmdomain/core.c
2421
genpd->domain.ops.prepare = genpd_prepare;
drivers/pmdomain/core.c
2422
genpd->domain.ops.suspend_noirq = genpd_suspend_noirq;
drivers/pmdomain/core.c
2423
genpd->domain.ops.resume_noirq = genpd_resume_noirq;
drivers/pmdomain/core.c
2424
genpd->domain.ops.freeze_noirq = genpd_freeze_noirq;
drivers/pmdomain/core.c
2425
genpd->domain.ops.thaw_noirq = genpd_thaw_noirq;
drivers/pmdomain/core.c
2426
genpd->domain.ops.poweroff_noirq = genpd_poweroff_noirq;
drivers/pmdomain/core.c
2427
genpd->domain.ops.restore_noirq = genpd_restore_noirq;
drivers/pmdomain/core.c
2428
genpd->domain.ops.complete = genpd_complete;
drivers/pmdomain/core.c
2429
genpd->domain.start = genpd_dev_pm_start;
drivers/pmdomain/core.c
2430
genpd->domain.set_performance_state = genpd_dev_pm_set_performance_state;
drivers/pmdomain/imx/gpc.c
131
static int imx_pgc_get_clocks(struct device *dev, struct imx_pm_domain *domain)
drivers/pmdomain/imx/gpc.c
144
domain->clk[i] = clk;
drivers/pmdomain/imx/gpc.c
146
domain->num_clks = i;
drivers/pmdomain/imx/gpc.c
152
clk_put(domain->clk[i]);
drivers/pmdomain/imx/gpc.c
157
static void imx_pgc_put_clocks(struct imx_pm_domain *domain)
drivers/pmdomain/imx/gpc.c
161
for (i = domain->num_clks - 1; i >= 0; i--)
drivers/pmdomain/imx/gpc.c
162
clk_put(domain->clk[i]);
drivers/pmdomain/imx/gpc.c
165
static int imx_pgc_parse_dt(struct device *dev, struct imx_pm_domain *domain)
drivers/pmdomain/imx/gpc.c
168
domain->supply = devm_regulator_get_optional(dev, "power");
drivers/pmdomain/imx/gpc.c
169
if (IS_ERR(domain->supply)) {
drivers/pmdomain/imx/gpc.c
170
if (PTR_ERR(domain->supply) == -ENODEV)
drivers/pmdomain/imx/gpc.c
171
domain->supply = NULL;
drivers/pmdomain/imx/gpc.c
173
return PTR_ERR(domain->supply);
drivers/pmdomain/imx/gpc.c
177
return imx_pgc_get_clocks(dev, domain);
drivers/pmdomain/imx/gpc.c
182
struct imx_pm_domain *domain = pdev->dev.platform_data;
drivers/pmdomain/imx/gpc.c
188
ret = imx_pgc_parse_dt(dev, domain);
drivers/pmdomain/imx/gpc.c
194
if (domain->base.power_on)
drivers/pmdomain/imx/gpc.c
195
domain->base.power_on(&domain->base);
drivers/pmdomain/imx/gpc.c
198
pm_genpd_init(&domain->base, NULL, false);
drivers/pmdomain/imx/gpc.c
199
ret = of_genpd_add_provider_simple(dev->of_node, &domain->base);
drivers/pmdomain/imx/gpc.c
209
pm_genpd_remove(&domain->base);
drivers/pmdomain/imx/gpc.c
210
imx_pgc_put_clocks(domain);
drivers/pmdomain/imx/gpc.c
217
struct imx_pm_domain *domain = pdev->dev.platform_data;
drivers/pmdomain/imx/gpc.c
221
pm_genpd_remove(&domain->base);
drivers/pmdomain/imx/gpc.c
222
imx_pgc_put_clocks(domain);
drivers/pmdomain/imx/gpc.c
361
struct imx_pm_domain *domain;
drivers/pmdomain/imx/gpc.c
365
domain = &imx_gpc_domains[i];
drivers/pmdomain/imx/gpc.c
366
domain->regmap = regmap;
drivers/pmdomain/imx/gpc.c
367
domain->ipg_rate_mhz = 66;
drivers/pmdomain/imx/gpc.c
370
domain->supply = devm_regulator_get(dev, "pu");
drivers/pmdomain/imx/gpc.c
371
if (IS_ERR(domain->supply))
drivers/pmdomain/imx/gpc.c
372
return PTR_ERR(domain->supply);
drivers/pmdomain/imx/gpc.c
374
ret = imx_pgc_get_clocks(dev, domain);
drivers/pmdomain/imx/gpc.c
378
domain->base.power_on(&domain->base);
drivers/pmdomain/imx/gpc.c
454
struct imx_pm_domain *domain;
drivers/pmdomain/imx/gpc.c
485
domain = pd_pdev->dev.platform_data;
drivers/pmdomain/imx/gpc.c
486
domain->regmap = regmap;
drivers/pmdomain/imx/gpc.c
487
domain->ipg_rate_mhz = ipg_rate_mhz;
drivers/pmdomain/imx/gpcv2.c
1322
struct imx_pgc_domain *domain = pdev->dev.platform_data;
drivers/pmdomain/imx/gpcv2.c
1325
domain->dev = &pdev->dev;
drivers/pmdomain/imx/gpcv2.c
1327
domain->regulator = devm_regulator_get_optional(domain->dev, "power");
drivers/pmdomain/imx/gpcv2.c
1328
if (IS_ERR(domain->regulator)) {
drivers/pmdomain/imx/gpcv2.c
1329
if (PTR_ERR(domain->regulator) != -ENODEV)
drivers/pmdomain/imx/gpcv2.c
1330
return dev_err_probe(domain->dev, PTR_ERR(domain->regulator),
drivers/pmdomain/imx/gpcv2.c
1332
} else if (domain->voltage) {
drivers/pmdomain/imx/gpcv2.c
1333
regulator_set_voltage(domain->regulator,
drivers/pmdomain/imx/gpcv2.c
1334
domain->voltage, domain->voltage);
drivers/pmdomain/imx/gpcv2.c
1337
domain->num_clks = devm_clk_bulk_get_all(domain->dev, &domain->clks);
drivers/pmdomain/imx/gpcv2.c
1338
if (domain->num_clks < 0)
drivers/pmdomain/imx/gpcv2.c
1339
return dev_err_probe(domain->dev, domain->num_clks,
drivers/pmdomain/imx/gpcv2.c
1342
domain->reset = devm_reset_control_array_get_optional_exclusive(domain->dev);
drivers/pmdomain/imx/gpcv2.c
1343
if (IS_ERR(domain->reset))
drivers/pmdomain/imx/gpcv2.c
1344
return dev_err_probe(domain->dev, PTR_ERR(domain->reset),
drivers/pmdomain/imx/gpcv2.c
1347
pm_runtime_enable(domain->dev);
drivers/pmdomain/imx/gpcv2.c
1349
if (domain->bits.map)
drivers/pmdomain/imx/gpcv2.c
1350
regmap_update_bits(domain->regmap, domain->regs->map,
drivers/pmdomain/imx/gpcv2.c
1351
domain->bits.map, domain->bits.map);
drivers/pmdomain/imx/gpcv2.c
1353
ret = pm_genpd_init(&domain->genpd, NULL, true);
drivers/pmdomain/imx/gpcv2.c
1355
dev_err_probe(domain->dev, ret, "Failed to init power domain\n");
drivers/pmdomain/imx/gpcv2.c
1360
of_property_present(domain->dev->of_node, "power-domains"))
drivers/pmdomain/imx/gpcv2.c
1361
lockdep_set_subclass(&domain->genpd.mlock, 1);
drivers/pmdomain/imx/gpcv2.c
1363
ret = of_genpd_add_provider_simple(domain->dev->of_node,
drivers/pmdomain/imx/gpcv2.c
1364
&domain->genpd);
drivers/pmdomain/imx/gpcv2.c
1366
dev_err_probe(domain->dev, ret, "Failed to add genpd provider\n");
drivers/pmdomain/imx/gpcv2.c
1373
pm_genpd_remove(&domain->genpd);
drivers/pmdomain/imx/gpcv2.c
1375
if (domain->bits.map)
drivers/pmdomain/imx/gpcv2.c
1376
regmap_update_bits(domain->regmap, domain->regs->map,
drivers/pmdomain/imx/gpcv2.c
1377
domain->bits.map, 0);
drivers/pmdomain/imx/gpcv2.c
1378
pm_runtime_disable(domain->dev);
drivers/pmdomain/imx/gpcv2.c
1385
struct imx_pgc_domain *domain = pdev->dev.platform_data;
drivers/pmdomain/imx/gpcv2.c
1387
of_genpd_del_provider(domain->dev->of_node);
drivers/pmdomain/imx/gpcv2.c
1388
pm_genpd_remove(&domain->genpd);
drivers/pmdomain/imx/gpcv2.c
1390
if (domain->bits.map)
drivers/pmdomain/imx/gpcv2.c
1391
regmap_update_bits(domain->regmap, domain->regs->map,
drivers/pmdomain/imx/gpcv2.c
1392
domain->bits.map, 0);
drivers/pmdomain/imx/gpcv2.c
1394
pm_runtime_disable(domain->dev);
drivers/pmdomain/imx/gpcv2.c
1484
struct imx_pgc_domain *domain;
drivers/pmdomain/imx/gpcv2.c
1518
domain = pd_pdev->dev.platform_data;
drivers/pmdomain/imx/gpcv2.c
1519
domain->regmap = regmap;
drivers/pmdomain/imx/gpcv2.c
1520
domain->regs = domain_data->pgc_regs;
drivers/pmdomain/imx/gpcv2.c
1522
domain->genpd.power_on = imx_pgc_power_up;
drivers/pmdomain/imx/gpcv2.c
1523
domain->genpd.power_off = imx_pgc_power_down;
drivers/pmdomain/imx/gpcv2.c
316
struct imx_pgc_domain *domain = to_imx_pgc_domain(genpd);
drivers/pmdomain/imx/gpcv2.c
320
ret = pm_runtime_get_sync(domain->dev);
drivers/pmdomain/imx/gpcv2.c
322
pm_runtime_put_noidle(domain->dev);
drivers/pmdomain/imx/gpcv2.c
326
if (!IS_ERR(domain->regulator)) {
drivers/pmdomain/imx/gpcv2.c
327
ret = regulator_enable(domain->regulator);
drivers/pmdomain/imx/gpcv2.c
329
dev_err(domain->dev,
drivers/pmdomain/imx/gpcv2.c
336
reset_control_assert(domain->reset);
drivers/pmdomain/imx/gpcv2.c
339
ret = clk_bulk_prepare_enable(domain->num_clks, domain->clks);
drivers/pmdomain/imx/gpcv2.c
341
dev_err(domain->dev, "failed to enable reset clocks\n");
drivers/pmdomain/imx/gpcv2.c
348
if (domain->bits.pxx) {
drivers/pmdomain/imx/gpcv2.c
350
regmap_update_bits(domain->regmap, domain->regs->pup,
drivers/pmdomain/imx/gpcv2.c
351
domain->bits.pxx, domain->bits.pxx);
drivers/pmdomain/imx/gpcv2.c
356
ret = regmap_read_poll_timeout(domain->regmap,
drivers/pmdomain/imx/gpcv2.c
357
domain->regs->pup, reg_val,
drivers/pmdomain/imx/gpcv2.c
358
!(reg_val & domain->bits.pxx),
drivers/pmdomain/imx/gpcv2.c
361
dev_err(domain->dev, "failed to command PGC\n");
drivers/pmdomain/imx/gpcv2.c
366
for_each_set_bit(pgc, &domain->pgc, 32) {
drivers/pmdomain/imx/gpcv2.c
367
regmap_clear_bits(domain->regmap, GPC_PGC_CTRL(pgc),
drivers/pmdomain/imx/gpcv2.c
375
reset_control_deassert(domain->reset);
drivers/pmdomain/imx/gpcv2.c
378
if (domain->bits.hskreq) {
drivers/pmdomain/imx/gpcv2.c
379
regmap_update_bits(domain->regmap, domain->regs->hsk,
drivers/pmdomain/imx/gpcv2.c
380
domain->bits.hskreq, domain->bits.hskreq);
drivers/pmdomain/imx/gpcv2.c
403
regmap_read_bypassed(domain->regmap, domain->regs->hsk, &reg_val);
drivers/pmdomain/imx/gpcv2.c
408
if (!domain->keep_clocks)
drivers/pmdomain/imx/gpcv2.c
409
clk_bulk_disable_unprepare(domain->num_clks, domain->clks);
drivers/pmdomain/imx/gpcv2.c
414
clk_bulk_disable_unprepare(domain->num_clks, domain->clks);
drivers/pmdomain/imx/gpcv2.c
416
if (!IS_ERR(domain->regulator))
drivers/pmdomain/imx/gpcv2.c
417
regulator_disable(domain->regulator);
drivers/pmdomain/imx/gpcv2.c
419
pm_runtime_put(domain->dev);
drivers/pmdomain/imx/gpcv2.c
426
struct imx_pgc_domain *domain = to_imx_pgc_domain(genpd);
drivers/pmdomain/imx/gpcv2.c
431
if (!domain->keep_clocks) {
drivers/pmdomain/imx/gpcv2.c
432
ret = clk_bulk_prepare_enable(domain->num_clks, domain->clks);
drivers/pmdomain/imx/gpcv2.c
434
dev_err(domain->dev, "failed to enable reset clocks\n");
drivers/pmdomain/imx/gpcv2.c
440
if (domain->bits.hskreq) {
drivers/pmdomain/imx/gpcv2.c
441
regmap_clear_bits(domain->regmap, domain->regs->hsk,
drivers/pmdomain/imx/gpcv2.c
442
domain->bits.hskreq);
drivers/pmdomain/imx/gpcv2.c
444
ret = regmap_read_poll_timeout(domain->regmap, domain->regs->hsk,
drivers/pmdomain/imx/gpcv2.c
446
!(reg_val & domain->bits.hskack),
drivers/pmdomain/imx/gpcv2.c
449
dev_err(domain->dev, "failed to power down ADB400\n");
drivers/pmdomain/imx/gpcv2.c
454
if (domain->bits.pxx) {
drivers/pmdomain/imx/gpcv2.c
456
for_each_set_bit(pgc, &domain->pgc, 32) {
drivers/pmdomain/imx/gpcv2.c
457
regmap_update_bits(domain->regmap, GPC_PGC_CTRL(pgc),
drivers/pmdomain/imx/gpcv2.c
462
regmap_update_bits(domain->regmap, domain->regs->pdn,
drivers/pmdomain/imx/gpcv2.c
463
domain->bits.pxx, domain->bits.pxx);
drivers/pmdomain/imx/gpcv2.c
468
ret = regmap_read_poll_timeout(domain->regmap,
drivers/pmdomain/imx/gpcv2.c
469
domain->regs->pdn, reg_val,
drivers/pmdomain/imx/gpcv2.c
470
!(reg_val & domain->bits.pxx),
drivers/pmdomain/imx/gpcv2.c
473
dev_err(domain->dev, "failed to command PGC\n");
drivers/pmdomain/imx/gpcv2.c
479
clk_bulk_disable_unprepare(domain->num_clks, domain->clks);
drivers/pmdomain/imx/gpcv2.c
481
if (!IS_ERR(domain->regulator)) {
drivers/pmdomain/imx/gpcv2.c
482
ret = regulator_disable(domain->regulator);
drivers/pmdomain/imx/gpcv2.c
484
dev_err(domain->dev,
drivers/pmdomain/imx/gpcv2.c
491
pm_runtime_put_sync_suspend(domain->dev);
drivers/pmdomain/imx/gpcv2.c
496
if (!domain->keep_clocks)
drivers/pmdomain/imx/gpcv2.c
497
clk_bulk_disable_unprepare(domain->num_clks, domain->clks);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
106
ret = clk_bulk_prepare_enable(data->num_clks, domain->clks);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
114
ret = pm_runtime_get_sync(domain->power_dev);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
128
ret = icc_bulk_set_bw(domain->num_paths, domain->paths);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
133
clk_bulk_disable_unprepare(data->num_clks, domain->clks);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
138
clk_bulk_disable_unprepare(data->num_clks, domain->clks);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
147
struct imx8m_blk_ctrl_domain *domain = to_imx8m_blk_ctrl_domain(genpd);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
148
const struct imx8m_blk_ctrl_domain_data *data = domain->data;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
149
struct imx8m_blk_ctrl *bc = domain->bc;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
159
pm_runtime_put(domain->power_dev);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
226
struct imx8m_blk_ctrl_domain *domain = &bc->domains[i];
drivers/pmdomain/imx/imx8m-blk-ctrl.c
229
domain->data = data;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
230
domain->num_paths = data->num_paths;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
233
domain->clks[j].id = data->clk_names[j];
drivers/pmdomain/imx/imx8m-blk-ctrl.c
236
domain->paths[j].name = data->path_names[j];
drivers/pmdomain/imx/imx8m-blk-ctrl.c
238
domain->paths[j].avg_bw = 1;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
239
domain->paths[j].peak_bw = 1;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
242
ret = devm_of_icc_bulk_get(dev, data->num_paths, domain->paths);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
246
domain->num_paths = 0;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
253
ret = devm_clk_bulk_get(dev, data->num_clks, domain->clks);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
259
domain->power_dev =
drivers/pmdomain/imx/imx8m-blk-ctrl.c
261
if (IS_ERR_OR_NULL(domain->power_dev)) {
drivers/pmdomain/imx/imx8m-blk-ctrl.c
262
if (!domain->power_dev)
drivers/pmdomain/imx/imx8m-blk-ctrl.c
265
ret = PTR_ERR(domain->power_dev);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
272
domain->genpd.name = data->name;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
273
domain->genpd.power_on = imx8m_blk_ctrl_power_on;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
274
domain->genpd.power_off = imx8m_blk_ctrl_power_off;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
275
domain->bc = bc;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
277
ret = pm_genpd_init(&domain->genpd, NULL, true);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
282
dev_pm_domain_detach(domain->power_dev, true);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
296
lockdep_set_class(&domain->genpd.mlock,
drivers/pmdomain/imx/imx8m-blk-ctrl.c
299
bc->onecell_data.domains[i] = &domain->genpd;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
344
struct imx8m_blk_ctrl_domain *domain = &bc->domains[i];
drivers/pmdomain/imx/imx8m-blk-ctrl.c
346
pm_genpd_remove(&domain->genpd);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
347
dev_pm_domain_detach(domain->power_dev, true);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
376
struct imx8m_blk_ctrl_domain *domain = &bc->domains[i];
drivers/pmdomain/imx/imx8m-blk-ctrl.c
378
ret = pm_runtime_get_sync(domain->power_dev);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
380
pm_runtime_put_noidle(domain->power_dev);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
87
struct imx8m_blk_ctrl_domain *domain = to_imx8m_blk_ctrl_domain(genpd);
drivers/pmdomain/imx/imx8m-blk-ctrl.c
88
const struct imx8m_blk_ctrl_domain_data *data = domain->data;
drivers/pmdomain/imx/imx8m-blk-ctrl.c
89
struct imx8m_blk_ctrl *bc = domain->bc;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
177
struct imx8mp_blk_ctrl_domain *domain)
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
179
switch (domain->id) {
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
196
struct imx8mp_blk_ctrl_domain *domain)
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
198
switch (domain->id) {
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
310
struct imx8mp_blk_ctrl_domain *domain)
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
312
switch (domain->id) {
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
365
struct imx8mp_blk_ctrl_domain *domain)
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
367
switch (domain->id) {
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
45
void (*power_off) (struct imx8mp_blk_ctrl *bc, struct imx8mp_blk_ctrl_domain *domain);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
46
void (*power_on) (struct imx8mp_blk_ctrl *bc, struct imx8mp_blk_ctrl_domain *domain);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
523
struct imx8mp_blk_ctrl_domain *domain = to_imx8mp_blk_ctrl_domain(genpd);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
524
const struct imx8mp_blk_ctrl_domain_data *data = domain->data;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
525
struct imx8mp_blk_ctrl *bc = domain->bc;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
536
ret = clk_bulk_prepare_enable(data->num_clks, domain->clks);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
543
bc->power_on(bc, domain);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
546
ret = pm_runtime_resume_and_get(domain->power_dev);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
552
ret = icc_bulk_set_bw(domain->num_paths, domain->paths);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
556
clk_bulk_disable_unprepare(data->num_clks, domain->clks);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
561
clk_bulk_disable_unprepare(data->num_clks, domain->clks);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
570
struct imx8mp_blk_ctrl_domain *domain = to_imx8mp_blk_ctrl_domain(genpd);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
571
const struct imx8mp_blk_ctrl_domain_data *data = domain->data;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
572
struct imx8mp_blk_ctrl *bc = domain->bc;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
575
ret = clk_bulk_prepare_enable(data->num_clks, domain->clks);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
582
bc->power_off(bc, domain);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
584
clk_bulk_disable_unprepare(data->num_clks, domain->clks);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
587
pm_runtime_put(domain->power_dev);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
598
struct imx8mp_blk_ctrl_domain *domain =
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
602
if (domain->genpd.status == GENPD_STATE_ON)
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
667
struct imx8mp_blk_ctrl_domain *domain = &bc->domains[i];
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
670
domain->data = data;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
671
domain->num_paths = data->num_paths;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
674
domain->clks[j].id = data->clk_names[j];
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
677
domain->paths[j].name = data->path_names[j];
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
679
domain->paths[j].avg_bw = 1;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
680
domain->paths[j].peak_bw = 1;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
683
ret = devm_of_icc_bulk_get(dev, data->num_paths, domain->paths);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
687
domain->num_paths = 0;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
694
ret = devm_clk_bulk_get(dev, data->num_clks, domain->clks);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
700
domain->power_dev =
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
702
if (IS_ERR_OR_NULL(domain->power_dev)) {
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
703
if (!domain->power_dev)
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
706
ret = PTR_ERR(domain->power_dev);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
713
domain->power_nb.notifier_call = imx8mp_blk_ctrl_gpc_notifier;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
714
ret = dev_pm_genpd_add_notifier(domain->power_dev, &domain->power_nb);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
717
dev_pm_domain_detach(domain->power_dev, true);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
721
domain->genpd.name = data->name;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
722
domain->genpd.power_on = imx8mp_blk_ctrl_power_on;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
723
domain->genpd.power_off = imx8mp_blk_ctrl_power_off;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
724
domain->genpd.flags = data->flags;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
725
domain->bc = bc;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
726
domain->id = i;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
728
ret = pm_genpd_init(&domain->genpd, NULL, true);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
731
dev_pm_genpd_remove_notifier(domain->power_dev);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
732
dev_pm_domain_detach(domain->power_dev, true);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
746
lockdep_set_class(&domain->genpd.mlock,
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
749
bc->onecell_data.domains[i] = &domain->genpd;
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
78
void (*power_off) (struct imx8mp_blk_ctrl *bc, struct imx8mp_blk_ctrl_domain *domain);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
79
void (*power_on) (struct imx8mp_blk_ctrl *bc, struct imx8mp_blk_ctrl_domain *domain);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
797
struct imx8mp_blk_ctrl_domain *domain = &bc->domains[i];
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
799
pm_genpd_remove(&domain->genpd);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
800
dev_pm_genpd_remove_notifier(domain->power_dev);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
801
dev_pm_domain_detach(domain->power_dev, true);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
830
struct imx8mp_blk_ctrl_domain *domain = &bc->domains[i];
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
832
ret = pm_runtime_get_sync(domain->power_dev);
drivers/pmdomain/imx/imx8mp-blk-ctrl.c
834
pm_runtime_put_noidle(domain->power_dev);
drivers/pmdomain/imx/imx93-blk-ctrl.c
102
static int imx93_blk_ctrl_set_qos(struct imx93_blk_ctrl_domain *domain)
drivers/pmdomain/imx/imx93-blk-ctrl.c
104
const struct imx93_blk_ctrl_domain_data *data = domain->data;
drivers/pmdomain/imx/imx93-blk-ctrl.c
105
struct imx93_blk_ctrl *bc = domain->bc;
drivers/pmdomain/imx/imx93-blk-ctrl.c
128
struct imx93_blk_ctrl_domain *domain = to_imx93_blk_ctrl_domain(genpd);
drivers/pmdomain/imx/imx93-blk-ctrl.c
129
const struct imx93_blk_ctrl_domain_data *data = domain->data;
drivers/pmdomain/imx/imx93-blk-ctrl.c
130
struct imx93_blk_ctrl *bc = domain->bc;
drivers/pmdomain/imx/imx93-blk-ctrl.c
139
ret = clk_bulk_prepare_enable(data->num_clks, domain->clks);
drivers/pmdomain/imx/imx93-blk-ctrl.c
161
return imx93_blk_ctrl_set_qos(domain);
drivers/pmdomain/imx/imx93-blk-ctrl.c
164
clk_bulk_disable_unprepare(data->num_clks, domain->clks);
drivers/pmdomain/imx/imx93-blk-ctrl.c
173
struct imx93_blk_ctrl_domain *domain = to_imx93_blk_ctrl_domain(genpd);
drivers/pmdomain/imx/imx93-blk-ctrl.c
174
const struct imx93_blk_ctrl_domain_data *data = domain->data;
drivers/pmdomain/imx/imx93-blk-ctrl.c
175
struct imx93_blk_ctrl *bc = domain->bc;
drivers/pmdomain/imx/imx93-blk-ctrl.c
184
clk_bulk_disable_unprepare(data->num_clks, domain->clks);
drivers/pmdomain/imx/imx93-blk-ctrl.c
250
struct imx93_blk_ctrl_domain *domain = &bc->domains[i];
drivers/pmdomain/imx/imx93-blk-ctrl.c
253
domain->data = data;
drivers/pmdomain/imx/imx93-blk-ctrl.c
258
domain->clks[j].id = data->clk_names[j];
drivers/pmdomain/imx/imx93-blk-ctrl.c
260
ret = devm_clk_bulk_get(dev, data->num_clks, domain->clks);
drivers/pmdomain/imx/imx93-blk-ctrl.c
266
domain->genpd.name = data->name;
drivers/pmdomain/imx/imx93-blk-ctrl.c
267
domain->genpd.power_on = imx93_blk_ctrl_power_on;
drivers/pmdomain/imx/imx93-blk-ctrl.c
268
domain->genpd.power_off = imx93_blk_ctrl_power_off;
drivers/pmdomain/imx/imx93-blk-ctrl.c
269
domain->bc = bc;
drivers/pmdomain/imx/imx93-blk-ctrl.c
271
ret = pm_genpd_init(&domain->genpd, NULL, true);
drivers/pmdomain/imx/imx93-blk-ctrl.c
287
lockdep_set_class(&domain->genpd.mlock,
drivers/pmdomain/imx/imx93-blk-ctrl.c
290
bc->onecell_data.domains[i] = &domain->genpd;
drivers/pmdomain/imx/imx93-blk-ctrl.c
322
struct imx93_blk_ctrl_domain *domain = &bc->domains[i];
drivers/pmdomain/imx/imx93-blk-ctrl.c
324
pm_genpd_remove(&domain->genpd);
drivers/pmdomain/imx/imx93-pd.c
100
struct imx93_power_domain *domain;
drivers/pmdomain/imx/imx93-pd.c
104
domain = devm_kzalloc(dev, sizeof(*domain), GFP_KERNEL);
drivers/pmdomain/imx/imx93-pd.c
105
if (!domain)
drivers/pmdomain/imx/imx93-pd.c
108
domain->addr = devm_platform_ioremap_resource(pdev, 0);
drivers/pmdomain/imx/imx93-pd.c
109
if (IS_ERR(domain->addr))
drivers/pmdomain/imx/imx93-pd.c
110
return PTR_ERR(domain->addr);
drivers/pmdomain/imx/imx93-pd.c
112
domain->num_clks = devm_clk_bulk_get_all(dev, &domain->clks);
drivers/pmdomain/imx/imx93-pd.c
113
if (domain->num_clks < 0)
drivers/pmdomain/imx/imx93-pd.c
114
return dev_err_probe(dev, domain->num_clks, "Failed to get domain's clocks\n");
drivers/pmdomain/imx/imx93-pd.c
116
domain->genpd.name = dev_name(dev);
drivers/pmdomain/imx/imx93-pd.c
117
domain->genpd.power_off = imx93_pd_off;
drivers/pmdomain/imx/imx93-pd.c
118
domain->genpd.power_on = imx93_pd_on;
drivers/pmdomain/imx/imx93-pd.c
119
domain->dev = dev;
drivers/pmdomain/imx/imx93-pd.c
121
init_off = readl(domain->addr + MIX_FUNC_STAT_OFF) & FUNC_STAT_ISO_STAT_MASK;
drivers/pmdomain/imx/imx93-pd.c
124
ret = clk_bulk_prepare_enable(domain->num_clks, domain->clks);
drivers/pmdomain/imx/imx93-pd.c
126
return dev_err_probe(domain->dev, ret,
drivers/pmdomain/imx/imx93-pd.c
128
domain->genpd.name);
drivers/pmdomain/imx/imx93-pd.c
131
ret = pm_genpd_init(&domain->genpd, NULL, init_off);
drivers/pmdomain/imx/imx93-pd.c
135
platform_set_drvdata(pdev, domain);
drivers/pmdomain/imx/imx93-pd.c
137
ret = of_genpd_add_provider_simple(np, &domain->genpd);
drivers/pmdomain/imx/imx93-pd.c
144
pm_genpd_remove(&domain->genpd);
drivers/pmdomain/imx/imx93-pd.c
148
clk_bulk_disable_unprepare(domain->num_clks, domain->clks);
drivers/pmdomain/imx/imx93-pd.c
37
struct imx93_power_domain *domain = to_imx93_pd(genpd);
drivers/pmdomain/imx/imx93-pd.c
38
void __iomem *addr = domain->addr;
drivers/pmdomain/imx/imx93-pd.c
42
ret = clk_bulk_prepare_enable(domain->num_clks, domain->clks);
drivers/pmdomain/imx/imx93-pd.c
44
dev_err(domain->dev, "failed to enable clocks for domain: %s\n", genpd->name);
drivers/pmdomain/imx/imx93-pd.c
55
dev_err(domain->dev, "pd_on timeout: name: %s, stat: %x\n", genpd->name, val);
drivers/pmdomain/imx/imx93-pd.c
64
struct imx93_power_domain *domain = to_imx93_pd(genpd);
drivers/pmdomain/imx/imx93-pd.c
65
void __iomem *addr = domain->addr;
drivers/pmdomain/imx/imx93-pd.c
77
dev_err(domain->dev, "pd_off timeout: name: %s, stat: %x\n", genpd->name, val);
drivers/pmdomain/imx/imx93-pd.c
81
clk_bulk_disable_unprepare(domain->num_clks, domain->clks);
drivers/pmdomain/imx/imx93-pd.c
88
struct imx93_power_domain *domain = platform_get_drvdata(pdev);
drivers/pmdomain/imx/imx93-pd.c
93
pm_genpd_remove(&domain->genpd);
drivers/pmdomain/imx/scu-pd.c
352
static int imx_sc_pd_power(struct generic_pm_domain *domain, bool power_on)
drivers/pmdomain/imx/scu-pd.c
359
pd = to_imx_sc_pd(domain);
drivers/pmdomain/imx/scu-pd.c
375
dev_err(&domain->dev, "failed to power %s resource %d ret %d\n",
drivers/pmdomain/imx/scu-pd.c
381
static int imx_sc_pd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/imx/scu-pd.c
383
return imx_sc_pd_power(domain, true);
drivers/pmdomain/imx/scu-pd.c
386
static int imx_sc_pd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/imx/scu-pd.c
388
return imx_sc_pd_power(domain, false);
drivers/pmdomain/imx/scu-pd.c
394
struct generic_pm_domain *domain = ERR_PTR(-ENOENT);
drivers/pmdomain/imx/scu-pd.c
403
domain = &sc_pd->pd;
drivers/pmdomain/imx/scu-pd.c
408
return domain;
drivers/pmdomain/mediatek/airoha-cpu-pmdomain.c
54
static int airoha_cpu_pmdomain_set_performance_state(struct generic_pm_domain *domain,
drivers/pmdomain/mediatek/mtk-pm-domains.c
1230
struct generic_pm_domain *domain;
drivers/pmdomain/mediatek/mtk-pm-domains.c
1232
domain = scpsys_add_one_domain(scpsys, node);
drivers/pmdomain/mediatek/mtk-pm-domains.c
1233
if (IS_ERR(domain)) {
drivers/pmdomain/mediatek/mtk-pm-domains.c
1234
ret = PTR_ERR(domain);
drivers/pmdomain/qcom/cpr.c
1417
static int cpr_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/qcom/cpr.c
1419
struct cpr_drv *drv = container_of(domain, struct cpr_drv, pd);
drivers/pmdomain/qcom/cpr.c
1424
static int cpr_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/qcom/cpr.c
1426
struct cpr_drv *drv = container_of(domain, struct cpr_drv, pd);
drivers/pmdomain/qcom/cpr.c
1431
static int cpr_pd_attach_dev(struct generic_pm_domain *domain,
drivers/pmdomain/qcom/cpr.c
1434
struct cpr_drv *drv = container_of(domain, struct cpr_drv, pd);
drivers/pmdomain/qcom/cpr.c
745
static int cpr_set_performance_state(struct generic_pm_domain *domain,
drivers/pmdomain/qcom/cpr.c
748
struct cpr_drv *drv = container_of(domain, struct cpr_drv, pd);
drivers/pmdomain/qcom/rpmhpd.c
20
#define domain_to_rpmhpd(domain) container_of(domain, struct rpmhpd, pd)
drivers/pmdomain/qcom/rpmhpd.c
919
static int rpmhpd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/qcom/rpmhpd.c
921
struct rpmhpd *pd = domain_to_rpmhpd(domain);
drivers/pmdomain/qcom/rpmhpd.c
937
static int rpmhpd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/qcom/rpmhpd.c
939
struct rpmhpd *pd = domain_to_rpmhpd(domain);
drivers/pmdomain/qcom/rpmhpd.c
953
static int rpmhpd_set_performance_state(struct generic_pm_domain *domain,
drivers/pmdomain/qcom/rpmhpd.c
956
struct rpmhpd *pd = domain_to_rpmhpd(domain);
drivers/pmdomain/qcom/rpmpd.c
1023
static int rpmpd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/qcom/rpmpd.c
1026
struct rpmpd *pd = domain_to_rpmpd(domain);
drivers/pmdomain/qcom/rpmpd.c
1042
static int rpmpd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/qcom/rpmpd.c
1045
struct rpmpd *pd = domain_to_rpmpd(domain);
drivers/pmdomain/qcom/rpmpd.c
1058
static int rpmpd_set_performance(struct generic_pm_domain *domain,
drivers/pmdomain/qcom/rpmpd.c
1061
struct rpmpd *pd = domain_to_rpmpd(domain);
drivers/pmdomain/qcom/rpmpd.c
18
#define domain_to_rpmpd(domain) container_of(domain, struct rpmpd, pd)
drivers/pmdomain/rockchip/pm-domains.c
702
static int rockchip_pd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/rockchip/pm-domains.c
704
struct rockchip_pm_domain *pd = to_rockchip_pd(domain);
drivers/pmdomain/rockchip/pm-domains.c
720
static int rockchip_pd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/rockchip/pm-domains.c
722
struct rockchip_pm_domain *pd = to_rockchip_pd(domain);
drivers/pmdomain/samsung/exynos-pm-domains.c
36
static int exynos_pd_power(struct generic_pm_domain *domain, bool power_on)
drivers/pmdomain/samsung/exynos-pm-domains.c
43
pd = container_of(domain, struct exynos_pm_domain, pd);
drivers/pmdomain/samsung/exynos-pm-domains.c
55
pr_err("Power domain %s %s failed\n", domain->name, op);
drivers/pmdomain/samsung/exynos-pm-domains.c
66
static int exynos_pd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/samsung/exynos-pm-domains.c
68
return exynos_pd_power(domain, true);
drivers/pmdomain/samsung/exynos-pm-domains.c
71
static int exynos_pd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/samsung/exynos-pm-domains.c
73
return exynos_pd_power(domain, false);
drivers/pmdomain/st/ste-ux500-pm-domain.c
20
static int pd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/st/ste-ux500-pm-domain.c
32
static int pd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/sunxi/sun55i-pck600.c
104
static int sunxi_pck600_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/sunxi/sun55i-pck600.c
106
struct sunxi_pck600_pd *pd = to_sunxi_pd(domain);
drivers/pmdomain/sunxi/sun55i-pck600.c
97
static int sunxi_pck600_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/sunxi/sun55i-pck600.c
99
struct sunxi_pck600_pd *pd = to_sunxi_pd(domain);
drivers/pmdomain/tegra/powergate-bpmp.c
149
static int tegra_powergate_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/tegra/powergate-bpmp.c
151
struct tegra_powergate *powergate = to_tegra_powergate(domain);
drivers/pmdomain/tegra/powergate-bpmp.c
158
static int tegra_powergate_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/tegra/powergate-bpmp.c
160
struct tegra_powergate *powergate = to_tegra_powergate(domain);
drivers/pmdomain/tegra/powergate-bpmp.c
311
struct generic_pm_domain *domain = ERR_PTR(-ENOENT);
drivers/pmdomain/tegra/powergate-bpmp.c
320
domain = &powergate->genpd;
drivers/pmdomain/tegra/powergate-bpmp.c
325
return domain;
drivers/pmdomain/thead/th1520-pm-domains.c
51
static int th1520_pd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/thead/th1520-pm-domains.c
53
struct th1520_power_domain *pd = to_th1520_power_domain(domain);
drivers/pmdomain/thead/th1520-pm-domains.c
58
static int th1520_pd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/thead/th1520-pm-domains.c
60
struct th1520_power_domain *pd = to_th1520_power_domain(domain);
drivers/pmdomain/thead/th1520-pm-domains.c
68
struct generic_pm_domain *domain = ERR_PTR(-ENOENT);
drivers/pmdomain/thead/th1520-pm-domains.c
80
domain = &pd->genpd;
drivers/pmdomain/thead/th1520-pm-domains.c
85
return domain;
drivers/pmdomain/ti/omap_prm.c
522
static int omap_prm_domain_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/ti/omap_prm.c
528
prmd = genpd_to_prm_domain(domain);
drivers/pmdomain/ti/omap_prm.c
566
static int omap_prm_domain_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/ti/omap_prm.c
572
prmd = genpd_to_prm_domain(domain);
drivers/pmdomain/ti/omap_prm.c
638
static int omap_prm_domain_attach_dev(struct generic_pm_domain *domain,
drivers/pmdomain/ti/omap_prm.c
647
prmd = genpd_to_prm_domain(domain);
drivers/pmdomain/ti/omap_prm.c
669
static void omap_prm_domain_detach_dev(struct generic_pm_domain *domain,
drivers/pmdomain/ti/omap_prm.c
675
prmd = genpd_to_prm_domain(domain);
drivers/pmdomain/ti/ti_sci_pm_domains.c
110
static int ti_sci_pd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/ti/ti_sci_pm_domains.c
112
struct ti_sci_pm_domain *pd = genpd_to_ti_sci_pd(domain);
drivers/pmdomain/ti/ti_sci_pm_domains.c
122
static int ti_sci_pd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/ti/ti_sci_pm_domains.c
124
struct ti_sci_pm_domain *pd = genpd_to_ti_sci_pd(domain);
drivers/pmdomain/ti/ti_sci_pm_domains.c
284
pd->pd.domain.ops.suspend = ti_sci_pd_suspend;
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
104
static int zynqmp_gpd_power_off(struct generic_pm_domain *domain)
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
106
struct zynqmp_pm_domain *pd = to_zynqmp_pm_domain(domain);
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
114
dev_dbg(&domain->dev, "PM node id %d is already released\n",
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
119
list_for_each_entry_safe(pdd, tmp, &domain->dev_list, list_node) {
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
124
domain->name);
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
133
dev_err(&domain->dev,
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
139
dev_dbg(&domain->dev, "set requirement to 0x%x for PM node id %d\n",
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
152
static int zynqmp_gpd_attach_dev(struct generic_pm_domain *domain,
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
155
struct zynqmp_pm_domain *pd = to_zynqmp_pm_domain(domain);
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
159
if (domain->device_count)
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
165
dev_err(&domain->dev, "%s request failed for node %d: %d\n",
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
166
domain->name, pd->node_id, ret);
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
172
dev_dbg(&domain->dev, "%s requested PM node id %d\n",
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
183
static void zynqmp_gpd_detach_dev(struct generic_pm_domain *domain,
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
186
struct zynqmp_pm_domain *pd = to_zynqmp_pm_domain(domain);
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
190
if (domain->device_count)
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
195
dev_err(&domain->dev, "failed to release PM node id %d: %d\n",
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
202
dev_dbg(&domain->dev, "%s released PM node id %d\n",
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
73
static int zynqmp_gpd_power_on(struct generic_pm_domain *domain)
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
75
struct zynqmp_pm_domain *pd = to_zynqmp_pm_domain(domain);
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
83
dev_err(&domain->dev,
drivers/pmdomain/xilinx/zynqmp-pm-domains.c
89
dev_dbg(&domain->dev, "set requirement to 0x%x for PM node id %d\n",
drivers/powercap/intel_rapl_common.c
1412
static int rapl_check_domain(int domain, struct rapl_package *rp)
drivers/powercap/intel_rapl_common.c
1416
switch (domain) {
drivers/powercap/intel_rapl_common.c
1422
ra.reg = rp->priv->regs[domain][RAPL_DOMAIN_REG_STATUS];
drivers/powercap/intel_rapl_common.c
1425
pr_err("invalid domain id %d\n", domain);
drivers/powercap/intel_rapl_common.c
1794
int domain, idx;
drivers/powercap/intel_rapl_common.c
1818
domain = event_to_domain[cfg];
drivers/powercap/intel_rapl_common.c
1822
event->hw.flags = domain; /* Which domain */
drivers/powercap/intel_rapl_common.c
1827
if (rp->domains[idx].id == domain) {
drivers/powercap/intel_rapl_common.c
1958
#define RAPL_EVENT_GROUP(_name, domain) \
drivers/powercap/intel_rapl_common.c
1967
return rapl_pmu.domain_map & BIT(domain) ? attr->mode : 0; \
drivers/powercap/intel_rapl_common.c
2039
int domain = rd->id;
drivers/powercap/intel_rapl_common.c
2042
if (!test_bit(domain, &rp->domain_map))
drivers/powercap/intel_rapl_common.c
2051
data->scale[domain] = val;
drivers/powercap/intel_rapl_common.c
2075
pr_debug("Domain %s: hw unit %lld * 2^-32 Joules\n", rd->name, data->scale[domain]);
drivers/regulator/mt6363-regulator.c
861
struct irq_domain *domain;
drivers/regulator/mt6363-regulator.c
884
domain = irq_find_host(interrupt_parent);
drivers/regulator/mt6363-regulator.c
886
fwspec.fwnode = domain->fwnode;
drivers/remoteproc/omap_remoteproc.c
836
ret = omap_iommu_domain_deactivate(rproc->domain);
drivers/remoteproc/omap_remoteproc.c
866
ret = omap_iommu_domain_activate(rproc->domain);
drivers/remoteproc/omap_remoteproc.c
900
omap_iommu_domain_deactivate(rproc->domain);
drivers/remoteproc/qcom_q6v5_adsp.c
336
iommu_unmap(rproc->domain, adsp->mem_phys, adsp->mem_size);
drivers/remoteproc/qcom_q6v5_adsp.c
350
if (!rproc->domain)
drivers/remoteproc/qcom_q6v5_adsp.c
362
ret = iommu_map(rproc->domain, iova, adsp->mem_phys,
drivers/remoteproc/qcom_q6v5_pas.c
262
iommu_unmap(rproc->domain, mem_phys, size);
drivers/remoteproc/qcom_q6v5_pas.c
270
ret = iommu_map(rproc->domain, mem_phys, mem_phys, size,
drivers/remoteproc/remoteproc_core.c
108
domain = iommu_paging_domain_alloc(dev);
drivers/remoteproc/remoteproc_core.c
109
if (IS_ERR(domain)) {
drivers/remoteproc/remoteproc_core.c
111
return PTR_ERR(domain);
drivers/remoteproc/remoteproc_core.c
114
iommu_set_fault_handler(domain, rproc_iommu_fault, rproc);
drivers/remoteproc/remoteproc_core.c
116
ret = iommu_attach_device(domain, dev);
drivers/remoteproc/remoteproc_core.c
122
rproc->domain = domain;
drivers/remoteproc/remoteproc_core.c
1235
unmapped = iommu_unmap(rproc->domain, entry->da, entry->len);
drivers/remoteproc/remoteproc_core.c
127
iommu_domain_free(domain);
drivers/remoteproc/remoteproc_core.c
133
struct iommu_domain *domain = rproc->domain;
drivers/remoteproc/remoteproc_core.c
136
if (!domain)
drivers/remoteproc/remoteproc_core.c
139
iommu_detach_device(domain, dev);
drivers/remoteproc/remoteproc_core.c
140
iommu_domain_free(domain);
drivers/remoteproc/remoteproc_core.c
624
if (!rproc->domain)
drivers/remoteproc/remoteproc_core.c
642
ret = iommu_map(rproc->domain, rsc->da, rsc->pa, rsc->len, rsc->flags,
drivers/remoteproc/remoteproc_core.c
700
if (mem->da != FW_RSC_ADDR_ANY && !rproc->domain) {
drivers/remoteproc/remoteproc_core.c
729
if (mem->da != FW_RSC_ADDR_ANY && rproc->domain) {
drivers/remoteproc/remoteproc_core.c
736
ret = iommu_map(rproc->domain, mem->da, dma, mem->len,
drivers/remoteproc/remoteproc_core.c
81
static int rproc_iommu_fault(struct iommu_domain *domain, struct device *dev,
drivers/remoteproc/remoteproc_core.c
99
struct iommu_domain *domain;
drivers/reset/reset-eyeq.c
180
u32 domain, u32 offset, bool assert)
drivers/reset/reset-eyeq.c
182
void __iomem *base = priv->base + priv->data->domains[domain].offset;
drivers/reset/reset-eyeq.c
183
enum eqr_domain_type domain_type = priv->data->domains[domain].type;
drivers/reset/reset-eyeq.c
190
lockdep_assert_held(&priv->mutexes[domain]);
drivers/reset/reset-eyeq.c
239
dev_dbg(dev, "%u-%u: timeout\n", domain, offset);
drivers/reset/reset-eyeq.c
243
static void eqr_assert_locked(struct eqr_private *priv, u32 domain, u32 offset)
drivers/reset/reset-eyeq.c
245
enum eqr_domain_type domain_type = priv->data->domains[domain].type;
drivers/reset/reset-eyeq.c
249
lockdep_assert_held(&priv->mutexes[domain]);
drivers/reset/reset-eyeq.c
251
base = priv->base + priv->data->domains[domain].offset;
drivers/reset/reset-eyeq.c
285
u32 domain = FIELD_GET(ID_DOMAIN_MASK, id);
drivers/reset/reset-eyeq.c
288
dev_dbg(rcdev->dev, "%u-%u: assert request\n", domain, offset);
drivers/reset/reset-eyeq.c
290
guard(mutex)(&priv->mutexes[domain]);
drivers/reset/reset-eyeq.c
292
eqr_assert_locked(priv, domain, offset);
drivers/reset/reset-eyeq.c
293
return eqr_busy_wait_locked(priv, rcdev->dev, domain, offset, true);
drivers/reset/reset-eyeq.c
296
static void eqr_deassert_locked(struct eqr_private *priv, u32 domain,
drivers/reset/reset-eyeq.c
299
enum eqr_domain_type domain_type = priv->data->domains[domain].type;
drivers/reset/reset-eyeq.c
303
lockdep_assert_held(&priv->mutexes[domain]);
drivers/reset/reset-eyeq.c
305
base = priv->base + priv->data->domains[domain].offset;
drivers/reset/reset-eyeq.c
339
u32 domain = FIELD_GET(ID_DOMAIN_MASK, id);
drivers/reset/reset-eyeq.c
342
dev_dbg(rcdev->dev, "%u-%u: deassert request\n", domain, offset);
drivers/reset/reset-eyeq.c
344
guard(mutex)(&priv->mutexes[domain]);
drivers/reset/reset-eyeq.c
346
eqr_deassert_locked(priv, domain, offset);
drivers/reset/reset-eyeq.c
347
return eqr_busy_wait_locked(priv, rcdev->dev, domain, offset, false);
drivers/reset/reset-eyeq.c
352
u32 domain = FIELD_GET(ID_DOMAIN_MASK, id);
drivers/reset/reset-eyeq.c
355
enum eqr_domain_type domain_type = priv->data->domains[domain].type;
drivers/reset/reset-eyeq.c
358
dev_dbg(rcdev->dev, "%u-%u: status request\n", domain, offset);
drivers/reset/reset-eyeq.c
360
guard(mutex)(&priv->mutexes[domain]);
drivers/reset/reset-eyeq.c
362
base = priv->base + priv->data->domains[domain].offset;
drivers/reset/reset-eyeq.c
388
u32 domain, u32 offset)
drivers/reset/reset-eyeq.c
392
if (domain >= priv->data->domain_count || offset > 31 ||
drivers/reset/reset-eyeq.c
393
!(priv->data->domains[domain].valid_mask & BIT(offset))) {
drivers/reset/reset-eyeq.c
394
dev_err(rcdev->dev, "%u-%u: invalid reset\n", domain, offset);
drivers/reset/reset-eyeq.c
398
return FIELD_PREP(ID_DOMAIN_MASK, domain) | FIELD_PREP(ID_OFFSET_MASK, offset);
drivers/rtc/rtc-nct6694.c
252
data->irq = irq_create_mapping(nct6694->domain, NCT6694_IRQ_RTC);
drivers/s390/crypto/ap_bus.c
1330
int domain;
drivers/s390/crypto/ap_bus.c
1332
if (sscanf(buf, "%i\n", &domain) != 1 ||
drivers/s390/crypto/ap_bus.c
1333
domain < 0 || domain > ap_max_domain_id ||
drivers/s390/crypto/ap_bus.c
1334
!test_bit_inv(domain, ap_perms.aqm))
drivers/s390/crypto/ap_bus.c
1338
ap_domain_index = domain;
drivers/s390/crypto/ap_bus.c
1342
__func__, domain);
drivers/s390/crypto/ap_bus.c
327
int ap_test_config_usage_domain(unsigned int domain)
drivers/s390/crypto/ap_bus.c
329
if (domain > ap_max_domain_id)
drivers/s390/crypto/ap_bus.c
332
return ap_test_config(ap_qci_info->aqm, domain);
drivers/s390/crypto/ap_bus.c
345
int ap_test_config_ctrl_domain(unsigned int domain)
drivers/s390/crypto/ap_bus.c
347
if (!ap_qci_info || domain > ap_max_domain_id)
drivers/s390/crypto/ap_bus.c
349
return ap_test_config(ap_qci_info->adm, domain);
drivers/s390/crypto/ap_bus.c
56
module_param_named(domain, ap_domain_index, int, 0444);
drivers/s390/crypto/ap_bus.c
57
MODULE_PARM_DESC(domain, "domain index for ap devices");
drivers/s390/crypto/pkey_api.c
103
apqn.domain = kgs.domain;
drivers/s390/crypto/pkey_api.c
127
apqn.domain = kcs.domain;
drivers/s390/crypto/pkey_api.c
152
apqn.domain = ksp.domain;
drivers/s390/crypto/pkey_api.c
245
kfc.domain = apqns[0].domain;
drivers/s390/crypto/pkey_api.c
286
kvk.domain = 0xFFFF;
drivers/s390/crypto/pkey_api.c
289
&kvk.cardnr, &kvk.domain,
drivers/s390/crypto/pkey_api.c
518
&kvk.cardnr, &kvk.domain,
drivers/s390/crypto/pkey_cca.c
262
apqns[0].card == 0xFFFF && apqns[0].domain == 0xFFFF)) {
drivers/s390/crypto/pkey_cca.c
273
rc = cca_sec2protkey(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_cca.c
278
rc = cca_cipher2protkey(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_cca.c
282
rc = cca_ecc2protkey(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_cca.c
347
apqns[0].card == 0xFFFF && apqns[0].domain == 0xFFFF)) {
drivers/s390/crypto/pkey_cca.c
358
rc = cca_gencipherkey(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_cca.c
363
rc = cca_genseckey(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_cca.c
436
apqns[0].card == 0xFFFF && apqns[0].domain == 0xFFFF)) {
drivers/s390/crypto/pkey_cca.c
447
rc = cca_clr2cipherkey(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_cca.c
452
rc = cca_clr2seckey(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_cca.c
505
*dom = ((struct pkey_apqn *)apqns)->domain;
drivers/s390/crypto/pkey_cca.c
539
*dom = ((struct pkey_apqn *)apqns)->domain;
drivers/s390/crypto/pkey_ep11.c
233
apqns[0].card == 0xFFFF && apqns[0].domain == 0xFFFF)) {
drivers/s390/crypto/pkey_ep11.c
245
rc = ep11_kblob2protkey(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_ep11.c
251
rc = ep11_kblob2protkey(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_ep11.c
257
rc = ep11_kblob2protkey(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_ep11.c
322
apqns[0].card == 0xFFFF && apqns[0].domain == 0xFFFF)) {
drivers/s390/crypto/pkey_ep11.c
332
rc = ep11_genaeskey(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_ep11.c
404
apqns[0].card == 0xFFFF && apqns[0].domain == 0xFFFF)) {
drivers/s390/crypto/pkey_ep11.c
414
rc = ep11_clr2keyblob(apqns[i].card, apqns[i].domain,
drivers/s390/crypto/pkey_ep11.c
461
*dom = ((struct pkey_apqn *)apqns)->domain;
drivers/s390/crypto/pkey_ep11.c
485
*dom = ((struct pkey_apqn *)apqns)->domain;
drivers/s390/crypto/zcrypt_api.c
1050
unsigned int func_code = 0, domain;
drivers/s390/crypto/zcrypt_api.c
1081
rc = prep_ep11_ap_msg(userspace, xcrb, &ap_msg, &func_code, &domain);
drivers/s390/crypto/zcrypt_api.c
1087
if (perms != &ap_perms && domain < AUTOSEL_DOM) {
drivers/s390/crypto/zcrypt_api.c
1089
if (!test_bit_inv(domain, perms->adm)) {
drivers/s390/crypto/zcrypt_api.c
1223
unsigned int domain;
drivers/s390/crypto/zcrypt_api.c
1232
rc = prep_rng_ap_msg(&ap_msg, &func_code, &domain);
drivers/s390/crypto/zcrypt_api.c
862
unsigned short *domain, tdom;
drivers/s390/crypto/zcrypt_api.c
875
rc = prep_cca_ap_msg(userspace, xcrb, &ap_msg, &func_code, &domain);
drivers/s390/crypto/zcrypt_api.c
881
tdom = *domain;
drivers/s390/crypto/zcrypt_api.c
956
xcrb->user_defined, *domain);
drivers/s390/crypto/zcrypt_api.c
963
if (*domain == AUTOSEL_DOM)
drivers/s390/crypto/zcrypt_api.c
964
*domain = AP_QID_QUEUE(qid);
drivers/s390/crypto/zcrypt_ccamisc.c
1031
preqcblk->domain = domain;
drivers/s390/crypto/zcrypt_ccamisc.c
1078
__func__, (int)cardnr, (int)domain, rc);
drivers/s390/crypto/zcrypt_ccamisc.c
1211
int cca_cipher2protkey(u16 cardnr, u16 domain, const u8 *ckey,
drivers/s390/crypto/zcrypt_ccamisc.c
1271
preqcblk->domain = domain;
drivers/s390/crypto/zcrypt_ccamisc.c
1299
__func__, (int)cardnr, (int)domain, rc);
drivers/s390/crypto/zcrypt_ccamisc.c
1378
int cca_ecc2protkey(u16 cardnr, u16 domain, const u8 *key,
drivers/s390/crypto/zcrypt_ccamisc.c
1435
preqcblk->domain = domain;
drivers/s390/crypto/zcrypt_ccamisc.c
1463
__func__, (int)cardnr, (int)domain, rc);
drivers/s390/crypto/zcrypt_ccamisc.c
1526
int cca_query_crypto_facility(u16 cardnr, u16 domain,
drivers/s390/crypto/zcrypt_ccamisc.c
1560
preqcblk->domain = domain;
drivers/s390/crypto/zcrypt_ccamisc.c
1579
__func__, (int)cardnr, (int)domain, rc);
drivers/s390/crypto/zcrypt_ccamisc.c
1632
int cca_get_info(u16 cardnr, u16 domain, struct cca_info *ci, u32 xflags)
drivers/s390/crypto/zcrypt_ccamisc.c
1643
if (domain != AUTOSEL_DOM) {
drivers/s390/crypto/zcrypt_ccamisc.c
1644
rc = zcrypt_device_status_ext(cardnr, domain, &devstat);
drivers/s390/crypto/zcrypt_ccamisc.c
1664
rc = cca_query_crypto_facility(cardnr, domain, "STATICSA",
drivers/s390/crypto/zcrypt_ccamisc.c
1691
rc = cca_query_crypto_facility(cardnr, domain, "STATICSB",
drivers/s390/crypto/zcrypt_ccamisc.c
1712
int cca_findcard2(u32 *apqns, u32 *nr_apqns, u16 cardnr, u16 domain,
drivers/s390/crypto/zcrypt_ccamisc.c
1745
if (domain != 0xFFFF && dom != domain)
drivers/s390/crypto/zcrypt_ccamisc.c
320
int cca_genseckey(u16 cardnr, u16 domain,
drivers/s390/crypto/zcrypt_ccamisc.c
369
preqcblk->domain = domain;
drivers/s390/crypto/zcrypt_ccamisc.c
414
__func__, (int)cardnr, (int)domain, rc);
drivers/s390/crypto/zcrypt_ccamisc.c
464
int cca_clr2seckey(u16 cardnr, u16 domain, u32 keybitsize,
drivers/s390/crypto/zcrypt_ccamisc.c
511
preqcblk->domain = domain;
drivers/s390/crypto/zcrypt_ccamisc.c
553
__func__, (int)cardnr, (int)domain, rc);
drivers/s390/crypto/zcrypt_ccamisc.c
604
int cca_sec2protkey(u16 cardnr, u16 domain,
drivers/s390/crypto/zcrypt_ccamisc.c
658
preqcblk->domain = domain;
drivers/s390/crypto/zcrypt_ccamisc.c
681
__func__, (int)cardnr, (int)domain, rc);
drivers/s390/crypto/zcrypt_ccamisc.c
770
int cca_gencipherkey(u16 cardnr, u16 domain, u32 keybitsize, u32 keygenflags,
drivers/s390/crypto/zcrypt_ccamisc.c
853
preqcblk->domain = domain;
drivers/s390/crypto/zcrypt_ccamisc.c
910
__func__, (int)cardnr, (int)domain, rc);
drivers/s390/crypto/zcrypt_ccamisc.c
966
static int _ip_cprb_helper(u16 cardnr, u16 domain,
drivers/s390/crypto/zcrypt_ccamisc.h
163
int cca_genseckey(u16 cardnr, u16 domain, u32 keybitsize, u8 *seckey,
drivers/s390/crypto/zcrypt_ccamisc.h
169
int cca_clr2seckey(u16 cardnr, u16 domain, u32 keybitsize,
drivers/s390/crypto/zcrypt_ccamisc.h
175
int cca_sec2protkey(u16 cardnr, u16 domain,
drivers/s390/crypto/zcrypt_ccamisc.h
182
int cca_gencipherkey(u16 cardnr, u16 domain, u32 keybitsize, u32 keygenflags,
drivers/s390/crypto/zcrypt_ccamisc.h
188
int cca_cipher2protkey(u16 cardnr, u16 domain, const u8 *ckey,
drivers/s390/crypto/zcrypt_ccamisc.h
195
int cca_clr2cipherkey(u16 cardnr, u16 domain, u32 keybitsize, u32 keygenflags,
drivers/s390/crypto/zcrypt_ccamisc.h
202
int cca_ecc2protkey(u16 cardnr, u16 domain, const u8 *key,
drivers/s390/crypto/zcrypt_ccamisc.h
208
int cca_query_crypto_facility(u16 cardnr, u16 domain,
drivers/s390/crypto/zcrypt_ccamisc.h
229
int cca_findcard2(u32 *apqns, u32 *nr_apqns, u16 cardnr, u16 domain,
drivers/s390/crypto/zcrypt_ep11misc.c
1004
__func__, (int)card, (int)domain, rc);
drivers/s390/crypto/zcrypt_ep11misc.c
1052
static int _ep11_unwrapkey(u16 card, u16 domain,
drivers/s390/crypto/zcrypt_ep11misc.c
1155
target.dom_id = domain;
drivers/s390/crypto/zcrypt_ep11misc.c
1163
__func__, (int)card, (int)domain, rc);
drivers/s390/crypto/zcrypt_ep11misc.c
1198
static int ep11_unwrapkey(u16 card, u16 domain,
drivers/s390/crypto/zcrypt_ep11misc.c
1216
rc = _ep11_unwrapkey(card, domain, kek, keksize, enckey, enckeysize,
drivers/s390/crypto/zcrypt_ep11misc.c
1234
static int _ep11_wrapkey(u16 card, u16 domain,
drivers/s390/crypto/zcrypt_ep11misc.c
1311
target.dom_id = domain;
drivers/s390/crypto/zcrypt_ep11misc.c
1319
__func__, (int)card, (int)domain, rc);
drivers/s390/crypto/zcrypt_ep11misc.c
1354
int ep11_clr2keyblob(u16 card, u16 domain, u32 keybitsize, u32 keygenflags,
drivers/s390/crypto/zcrypt_ep11misc.c
1386
rc = _ep11_genaeskey(card, domain, 256,
drivers/s390/crypto/zcrypt_ep11misc.c
1396
rc = ep11_cryptsingle(card, domain, 0, 0, def_iv, kek, keklen,
drivers/s390/crypto/zcrypt_ep11misc.c
1405
rc = ep11_unwrapkey(card, domain, kek, keklen,
drivers/s390/crypto/zcrypt_ep11misc.c
1545
int ep11_findcard2(u32 *apqns, u32 *nr_apqns, u16 cardnr, u16 domain,
drivers/s390/crypto/zcrypt_ep11misc.c
1578
if (domain != 0xFFFF && dom != domain)
drivers/s390/crypto/zcrypt_ep11misc.c
551
static int ep11_query_info(u16 cardnr, u16 domain, u32 query_type,
drivers/s390/crypto/zcrypt_ep11misc.c
597
target.dom_id = domain;
drivers/s390/crypto/zcrypt_ep11misc.c
605
__func__, (int)cardnr, (int)domain, rc);
drivers/s390/crypto/zcrypt_ep11misc.c
698
int ep11_get_domain_info(u16 card, u16 domain,
drivers/s390/crypto/zcrypt_ep11misc.c
710
rc = ep11_query_info(card, domain, 0x03 /* domain info query */,
drivers/s390/crypto/zcrypt_ep11misc.c
744
static int _ep11_genaeskey(u16 card, u16 domain,
drivers/s390/crypto/zcrypt_ep11misc.c
842
target.dom_id = domain;
drivers/s390/crypto/zcrypt_ep11misc.c
850
__func__, (int)card, (int)domain, rc);
drivers/s390/crypto/zcrypt_ep11misc.c
885
int ep11_genaeskey(u16 card, u16 domain, u32 keybitsize, u32 keygenflags,
drivers/s390/crypto/zcrypt_ep11misc.c
906
rc = _ep11_genaeskey(card, domain, keybitsize, keygenflags,
drivers/s390/crypto/zcrypt_ep11misc.c
923
static int ep11_cryptsingle(u16 card, u16 domain,
drivers/s390/crypto/zcrypt_ep11misc.c
996
target.dom_id = domain;
drivers/s390/crypto/zcrypt_ep11misc.h
112
int ep11_get_domain_info(u16 card, u16 domain,
drivers/s390/crypto/zcrypt_ep11misc.h
118
int ep11_genaeskey(u16 card, u16 domain, u32 keybitsize, u32 keygenflags,
drivers/s390/crypto/zcrypt_ep11misc.h
124
int ep11_clr2keyblob(u16 cardnr, u16 domain, u32 keybitsize, u32 keygenflags,
drivers/s390/crypto/zcrypt_ep11misc.h
145
int ep11_findcard2(u32 *apqns, u32 *nr_apqns, u16 cardnr, u16 domain,
drivers/s390/crypto/zcrypt_msgtype6.c
1145
unsigned int *func_code, unsigned int *domain)
drivers/s390/crypto/zcrypt_msgtype6.c
1154
func_code, domain);
drivers/s390/crypto/zcrypt_msgtype6.c
1257
unsigned int *domain)
drivers/s390/crypto/zcrypt_msgtype6.c
1269
rng_type6cprb_msgx(ap_msg, ZCRYPT_RNG_BUFFER_SIZE, domain);
drivers/s390/crypto/zcrypt_msgtype6.c
1297
msg->cprbx.domain = AP_QID_QUEUE(zq->queue->qid);
drivers/s390/crypto/zcrypt_msgtype6.c
234
msg->cprbx.domain = AP_QID_QUEUE(zq->queue->qid);
drivers/s390/crypto/zcrypt_msgtype6.c
304
msg->cprbx.domain = AP_QID_QUEUE(zq->queue->qid);
drivers/s390/crypto/zcrypt_msgtype6.c
415
*dom = (unsigned short *)&msg->cprbx.domain;
drivers/s390/crypto/zcrypt_msgtype6.c
450
unsigned int *domain)
drivers/s390/crypto/zcrypt_msgtype6.c
532
*domain = msg->cprbx.target_id;
drivers/s390/crypto/zcrypt_msgtype6.h
121
unsigned int *domain)
drivers/s390/crypto/zcrypt_msgtype6.h
159
*domain = (unsigned short)msg->cprbx.domain;
drivers/scsi/be2iscsi/be_cmds.h
257
u8 domain; /* dword 0 */
drivers/scsi/elx/efct/efct_hw.c
216
d = efct->efcport->domain;
drivers/scsi/elx/efct/efct_lio.c
125
if (efc->domain && efc->domain->nport)
drivers/scsi/elx/efct/efct_lio.c
126
efct_scsi_tgt_del_nport(efc, efc->domain->nport);
drivers/scsi/elx/efct/efct_lio.c
173
if (efc->domain) {
drivers/scsi/elx/efct/efct_lio.c
176
ret = efc_nport_vport_new(efc->domain,
drivers/scsi/elx/efct/efct_lio.c
200
if (efc->domain) {
drivers/scsi/elx/efct/efct_lio.c
201
efc_nport_vport_del(efct->efcport, efc->domain,
drivers/scsi/elx/efct/efct_xport.c
565
struct efc_domain *domain = efct->efcport->domain;
drivers/scsi/elx/efct/efct_xport.c
567
if (domain)
drivers/scsi/elx/efct/efct_xport.c
569
domain);
drivers/scsi/elx/efct/efct_xport.c
685
if (efc->domain && efc->domain->nport) {
drivers/scsi/elx/efct/efct_xport.c
686
nport = efc->domain->nport;
drivers/scsi/elx/efct/efct_xport.c
699
if (efc->domain && efc->domain->nport) {
drivers/scsi/elx/efct/efct_xport.c
700
if (efc->domain->is_loop) {
drivers/scsi/elx/efct/efct_xport.c
703
struct efc_nport *nport = efc->domain->nport;
drivers/scsi/elx/efct/efct_xport.c
749
if (!efc->domain || !efc->domain->nport) {
drivers/scsi/elx/efct/efct_xport.c
797
if (efc->domain) {
drivers/scsi/elx/efct/efct_xport.c
800
efc->domain->flogi_service_params;
drivers/scsi/elx/libefc/efc.h
36
#define domain_sm_trace(domain) \
drivers/scsi/elx/libefc/efc.h
37
efc_log_debug(domain->efc, "[domain:%s] %-20s %-20s\n", \
drivers/scsi/elx/libefc/efc.h
38
domain->display_name, __func__, efc_sm_event_name(evt)) \
drivers/scsi/elx/libefc/efc.h
40
#define domain_trace(domain, fmt, ...) \
drivers/scsi/elx/libefc/efc.h
41
efc_log_debug(domain->efc, \
drivers/scsi/elx/libefc/efc.h
42
"[%s]" fmt, domain->display_name, ##__VA_ARGS__) \
drivers/scsi/elx/libefc/efc_cmds.c
132
nport->indicator, nport->domain->indicator);
drivers/scsi/elx/libefc/efc_cmds.c
207
struct efc_domain *domain, u8 *wwpn)
drivers/scsi/elx/libefc/efc_cmds.c
227
if (domain) {
drivers/scsi/elx/libefc/efc_cmds.c
278
nport->domain->indicator, false);
drivers/scsi/elx/libefc/efc_cmds.c
317
efc_domain_get_mbox_status(struct efc_domain *domain, u8 *mqe, int status)
drivers/scsi/elx/libefc/efc_cmds.c
319
struct efc *efc = domain->efc;
drivers/scsi/elx/libefc/efc_cmds.c
325
domain->indicator, status,
drivers/scsi/elx/libefc/efc_cmds.c
334
efc_domain_free_resources(struct efc_domain *domain, int evt, void *data)
drivers/scsi/elx/libefc/efc_cmds.c
336
struct efc *efc = domain->efc;
drivers/scsi/elx/libefc/efc_cmds.c
339
if (domain->dma.virt) {
drivers/scsi/elx/libefc/efc_cmds.c
341
domain->dma.size, domain->dma.virt,
drivers/scsi/elx/libefc/efc_cmds.c
342
domain->dma.phys);
drivers/scsi/elx/libefc/efc_cmds.c
343
memset(&domain->dma, 0, sizeof(struct efc_dma));
drivers/scsi/elx/libefc/efc_cmds.c
347
sli_resource_free(efc->sli, SLI4_RSRC_VFI, domain->indicator);
drivers/scsi/elx/libefc/efc_cmds.c
349
efc_domain_cb(efc, evt, domain);
drivers/scsi/elx/libefc/efc_cmds.c
353
efc_domain_send_nport_evt(struct efc_domain *domain,
drivers/scsi/elx/libefc/efc_cmds.c
356
struct efc *efc = domain->efc;
drivers/scsi/elx/libefc/efc_cmds.c
359
efc_nport_send_evt(domain->nport, port_evt, NULL);
drivers/scsi/elx/libefc/efc_cmds.c
362
efc_domain_cb(efc, domain_evt, domain);
drivers/scsi/elx/libefc/efc_cmds.c
369
struct efc_domain *domain = arg;
drivers/scsi/elx/libefc/efc_cmds.c
371
if (efc_domain_get_mbox_status(domain, mqe, status)) {
drivers/scsi/elx/libefc/efc_cmds.c
372
efc_domain_free_resources(domain,
drivers/scsi/elx/libefc/efc_cmds.c
377
efc_domain_send_nport_evt(domain, EFC_EVT_NPORT_ALLOC_OK,
drivers/scsi/elx/libefc/efc_cmds.c
383
efc_domain_alloc_read_sparm64(struct efc_domain *domain)
drivers/scsi/elx/libefc/efc_cmds.c
385
struct efc *efc = domain->efc;
drivers/scsi/elx/libefc/efc_cmds.c
389
rc = sli_cmd_read_sparm64(efc->sli, data, &domain->dma, 0);
drivers/scsi/elx/libefc/efc_cmds.c
392
efc_domain_free_resources(domain,
drivers/scsi/elx/libefc/efc_cmds.c
398
efc_domain_alloc_read_sparm64_cb, domain);
drivers/scsi/elx/libefc/efc_cmds.c
401
efc_domain_free_resources(domain,
drivers/scsi/elx/libefc/efc_cmds.c
410
struct efc_domain *domain = arg;
drivers/scsi/elx/libefc/efc_cmds.c
412
if (efc_domain_get_mbox_status(domain, mqe, status)) {
drivers/scsi/elx/libefc/efc_cmds.c
413
efc_domain_free_resources(domain,
drivers/scsi/elx/libefc/efc_cmds.c
418
efc_domain_alloc_read_sparm64(domain);
drivers/scsi/elx/libefc/efc_cmds.c
423
efc_domain_alloc_init_vfi(struct efc_domain *domain)
drivers/scsi/elx/libefc/efc_cmds.c
425
struct efc *efc = domain->efc;
drivers/scsi/elx/libefc/efc_cmds.c
426
struct efc_nport *nport = domain->nport;
drivers/scsi/elx/libefc/efc_cmds.c
434
domain->fcf_indicator = efc->fcfi;
drivers/scsi/elx/libefc/efc_cmds.c
435
rc = sli_cmd_init_vfi(efc->sli, data, domain->indicator,
drivers/scsi/elx/libefc/efc_cmds.c
436
domain->fcf_indicator, nport->indicator);
drivers/scsi/elx/libefc/efc_cmds.c
439
efc_domain_free_resources(domain,
drivers/scsi/elx/libefc/efc_cmds.c
446
efc_domain_alloc_init_vfi_cb, domain);
drivers/scsi/elx/libefc/efc_cmds.c
449
efc_domain_free_resources(domain,
drivers/scsi/elx/libefc/efc_cmds.c
455
efc_cmd_domain_alloc(struct efc *efc, struct efc_domain *domain, u32 fcf)
drivers/scsi/elx/libefc/efc_cmds.c
459
if (!domain || !domain->nport) {
drivers/scsi/elx/libefc/efc_cmds.c
461
domain, domain ? domain->nport : NULL);
drivers/scsi/elx/libefc/efc_cmds.c
466
domain->dma.size = EFC_SPARAM_DMA_SZ;
drivers/scsi/elx/libefc/efc_cmds.c
467
domain->dma.virt = dma_alloc_coherent(&efc->pci->dev,
drivers/scsi/elx/libefc/efc_cmds.c
468
domain->dma.size,
drivers/scsi/elx/libefc/efc_cmds.c
469
&domain->dma.phys, GFP_KERNEL);
drivers/scsi/elx/libefc/efc_cmds.c
470
if (!domain->dma.virt) {
drivers/scsi/elx/libefc/efc_cmds.c
475
domain->fcf = fcf;
drivers/scsi/elx/libefc/efc_cmds.c
476
domain->fcf_indicator = U32_MAX;
drivers/scsi/elx/libefc/efc_cmds.c
477
domain->indicator = U32_MAX;
drivers/scsi/elx/libefc/efc_cmds.c
479
if (sli_resource_alloc(efc->sli, SLI4_RSRC_VFI, &domain->indicator,
drivers/scsi/elx/libefc/efc_cmds.c
484
domain->dma.size, domain->dma.virt,
drivers/scsi/elx/libefc/efc_cmds.c
485
domain->dma.phys);
drivers/scsi/elx/libefc/efc_cmds.c
486
memset(&domain->dma, 0, sizeof(struct efc_dma));
drivers/scsi/elx/libefc/efc_cmds.c
491
efc_domain_alloc_init_vfi(domain);
drivers/scsi/elx/libefc/efc_cmds.c
499
struct efc_domain *domain = arg;
drivers/scsi/elx/libefc/efc_cmds.c
501
if (efc_domain_get_mbox_status(domain, mqe, status)) {
drivers/scsi/elx/libefc/efc_cmds.c
502
efc_domain_free_resources(domain,
drivers/scsi/elx/libefc/efc_cmds.c
507
efc_domain_send_nport_evt(domain, EFC_EVT_NPORT_ATTACH_OK,
drivers/scsi/elx/libefc/efc_cmds.c
513
efc_cmd_domain_attach(struct efc *efc, struct efc_domain *domain, u32 fc_id)
drivers/scsi/elx/libefc/efc_cmds.c
518
if (!domain) {
drivers/scsi/elx/libefc/efc_cmds.c
519
efc_log_err(efc, "bad param(s) domain=%p\n", domain);
drivers/scsi/elx/libefc/efc_cmds.c
523
domain->nport->fc_id = fc_id;
drivers/scsi/elx/libefc/efc_cmds.c
525
rc = sli_cmd_reg_vfi(efc->sli, buf, SLI4_BMBX_SIZE, domain->indicator,
drivers/scsi/elx/libefc/efc_cmds.c
526
domain->fcf_indicator, domain->dma,
drivers/scsi/elx/libefc/efc_cmds.c
527
domain->nport->indicator, domain->nport->sli_wwpn,
drivers/scsi/elx/libefc/efc_cmds.c
528
domain->nport->fc_id);
drivers/scsi/elx/libefc/efc_cmds.c
535
efc_domain_attach_reg_vfi_cb, domain);
drivers/scsi/elx/libefc/efc_cmds.c
544
efc_domain_free_resources(domain, EFC_HW_DOMAIN_ATTACH_FAIL, buf);
drivers/scsi/elx/libefc/efc_cmds.c
552
struct efc_domain *domain = arg;
drivers/scsi/elx/libefc/efc_cmds.c
556
rc = efc_domain_get_mbox_status(domain, mqe, status);
drivers/scsi/elx/libefc/efc_cmds.c
562
efc_domain_free_resources(domain, evt, mqe);
drivers/scsi/elx/libefc/efc_cmds.c
567
efc_domain_free_unreg_vfi(struct efc_domain *domain)
drivers/scsi/elx/libefc/efc_cmds.c
569
struct efc *efc = domain->efc;
drivers/scsi/elx/libefc/efc_cmds.c
573
rc = sli_cmd_unreg_vfi(efc->sli, data, domain->indicator,
drivers/scsi/elx/libefc/efc_cmds.c
581
efc_domain_free_unreg_vfi_cb, domain);
drivers/scsi/elx/libefc/efc_cmds.c
590
efc_domain_free_resources(domain, EFC_HW_DOMAIN_FREE_FAIL, data);
drivers/scsi/elx/libefc/efc_cmds.c
594
efc_cmd_domain_free(struct efc *efc, struct efc_domain *domain)
drivers/scsi/elx/libefc/efc_cmds.c
596
if (!domain) {
drivers/scsi/elx/libefc/efc_cmds.c
597
efc_log_err(efc, "bad parameter(s) domain=%p\n", domain);
drivers/scsi/elx/libefc/efc_cmds.c
601
efc_domain_free_unreg_vfi(domain);
drivers/scsi/elx/libefc/efc_cmds.h
13
struct efc_domain *domain, u8 *wwpn);
drivers/scsi/elx/libefc/efc_cmds.h
19
efc_cmd_domain_alloc(struct efc *efc, struct efc_domain *domain, u32 fcf);
drivers/scsi/elx/libefc/efc_cmds.h
21
efc_cmd_domain_attach(struct efc *efc, struct efc_domain *domain, u32 fc_id);
drivers/scsi/elx/libefc/efc_cmds.h
23
efc_cmd_domain_free(struct efc *efc, struct efc_domain *domain);
drivers/scsi/elx/libefc/efc_device.c
470
efc_domain_attach(node->nport->domain, d_id);
drivers/scsi/elx/libefc/efc_device.c
526
node->nport->domain->attached) {
drivers/scsi/elx/libefc/efc_device.c
534
node->nport->domain->attached);
drivers/scsi/elx/libefc/efc_device.c
548
if (!node->nport->domain->attached) {
drivers/scsi/elx/libefc/efc_device.c
572
memcpy(node->nport->domain->flogi_service_params,
drivers/scsi/elx/libefc/efc_device.c
594
if (!node->nport->domain->attached) {
drivers/scsi/elx/libefc/efc_device.c
619
if (!node->nport->domain->attached) {
drivers/scsi/elx/libefc/efc_device.c
645
if (!node->nport->domain->attached) {
drivers/scsi/elx/libefc/efc_device.c
895
WARN_ON(!node->nport->domain->attached);
drivers/scsi/elx/libefc/efc_device.c
933
WARN_ON(node->nport->domain->attached);
drivers/scsi/elx/libefc/efc_device.c
949
efc_domain_attach(node->nport->domain,
drivers/scsi/elx/libefc/efc_device.c
961
WARN_ON(!node->nport->domain->attached);
drivers/scsi/elx/libefc/efc_domain.c
106
struct efc_domain *domain = container_of(arg, struct efc_domain, ref);
drivers/scsi/elx/libefc/efc_domain.c
107
struct efc *efc = domain->efc;
drivers/scsi/elx/libefc/efc_domain.c
112
kfree(domain);
drivers/scsi/elx/libefc/efc_domain.c
116
efc_domain_free(struct efc_domain *domain)
drivers/scsi/elx/libefc/efc_domain.c
120
efc = domain->efc;
drivers/scsi/elx/libefc/efc_domain.c
125
efc_log_debug(efc, "Domain free: wwn %016llX\n", domain->fcf_wwn);
drivers/scsi/elx/libefc/efc_domain.c
127
xa_destroy(&domain->lookup);
drivers/scsi/elx/libefc/efc_domain.c
128
efc->domain = NULL;
drivers/scsi/elx/libefc/efc_domain.c
129
kref_put(&domain->ref, domain->release);
drivers/scsi/elx/libefc/efc_domain.c
135
struct efc_domain *domain;
drivers/scsi/elx/libefc/efc_domain.c
137
domain = kzalloc_obj(*domain, GFP_ATOMIC);
drivers/scsi/elx/libefc/efc_domain.c
138
if (!domain)
drivers/scsi/elx/libefc/efc_domain.c
141
domain->efc = efc;
drivers/scsi/elx/libefc/efc_domain.c
142
domain->drvsm.app = domain;
drivers/scsi/elx/libefc/efc_domain.c
145
kref_init(&domain->ref);
drivers/scsi/elx/libefc/efc_domain.c
146
domain->release = _efc_domain_free;
drivers/scsi/elx/libefc/efc_domain.c
148
xa_init(&domain->lookup);
drivers/scsi/elx/libefc/efc_domain.c
150
INIT_LIST_HEAD(&domain->nport_list);
drivers/scsi/elx/libefc/efc_domain.c
151
efc->domain = domain;
drivers/scsi/elx/libefc/efc_domain.c
152
domain->fcf_wwn = fcf_wwn;
drivers/scsi/elx/libefc/efc_domain.c
153
efc_log_debug(efc, "Domain allocated: wwn %016llX\n", domain->fcf_wwn);
drivers/scsi/elx/libefc/efc_domain.c
155
return domain;
drivers/scsi/elx/libefc/efc_domain.c
166
if (!efc->domain && callback)
drivers/scsi/elx/libefc/efc_domain.c
17
struct efc_domain *domain = NULL;
drivers/scsi/elx/libefc/efc_domain.c
174
struct efc_domain *domain = ctx->app;
drivers/scsi/elx/libefc/efc_domain.c
187
efc_log_warn(domain->efc, "%-20s %-20s not handled\n",
drivers/scsi/elx/libefc/efc_domain.c
196
struct efc_domain *domain = ctx->app;
drivers/scsi/elx/libefc/efc_domain.c
205
memcpy(&domain->pending_drec, arg,
drivers/scsi/elx/libefc/efc_domain.c
206
sizeof(domain->pending_drec));
drivers/scsi/elx/libefc/efc_domain.c
207
domain->domain_found_pending = true;
drivers/scsi/elx/libefc/efc_domain.c
211
domain->domain_found_pending = false;
drivers/scsi/elx/libefc/efc_domain.c
215
efc_log_warn(domain->efc, "%-20s %-20s not handled\n",
drivers/scsi/elx/libefc/efc_domain.c
22
domain = data;
drivers/scsi/elx/libefc/efc_domain.c
221
struct efc_domain *domain = NULL;\
drivers/scsi/elx/libefc/efc_domain.c
225
domain = ctx->app;\
drivers/scsi/elx/libefc/efc_domain.c
226
WARN_ON(!domain->efc);\
drivers/scsi/elx/libefc/efc_domain.c
227
efc = domain->efc
drivers/scsi/elx/libefc/efc_domain.c
235
domain_sm_trace(domain);
drivers/scsi/elx/libefc/efc_domain.c
239
domain->attached = false;
drivers/scsi/elx/libefc/efc_domain.c
261
nport = efc_nport_alloc(domain, my_wwpn, my_wwnn, U32_MAX,
drivers/scsi/elx/libefc/efc_domain.c
281
domain->is_loop = drec->is_loop;
drivers/scsi/elx/libefc/efc_domain.c
291
domain->is_nlport = drec->map.loop[1] == 0x00;
drivers/scsi/elx/libefc/efc_domain.c
293
if (!domain->is_loop) {
drivers/scsi/elx/libefc/efc_domain.c
295
if (efc_cmd_domain_alloc(efc, domain, drec->index)) {
drivers/scsi/elx/libefc/efc_domain.c
306
(domain->is_nlport ?
drivers/scsi/elx/libefc/efc_domain.c
343
if (efc_cmd_domain_alloc(efc, domain, drec->index)) {
drivers/scsi/elx/libefc/efc_domain.c
362
domain_sm_trace(domain);
drivers/scsi/elx/libefc/efc_domain.c
369
nport = domain->nport;
drivers/scsi/elx/libefc/efc_domain.c
37
domain = efc->domain;
drivers/scsi/elx/libefc/efc_domain.c
376
memcpy(domain->service_params + 4, domain->dma.virt,
drivers/scsi/elx/libefc/efc_domain.c
378
memcpy(nport->service_params + 4, domain->dma.virt,
drivers/scsi/elx/libefc/efc_domain.c
38
if (!domain) {
drivers/scsi/elx/libefc/efc_domain.c
39
domain = efc_domain_alloc(efc, fcf_wwn);
drivers/scsi/elx/libefc/efc_domain.c
392
if (domain->is_loop && !domain->is_nlport) {
drivers/scsi/elx/libefc/efc_domain.c
40
if (!domain) {
drivers/scsi/elx/libefc/efc_domain.c
405
__efc_domain_attach_internal(domain, nport->fc_id);
drivers/scsi/elx/libefc/efc_domain.c
428
domain->req_accept_frames = true;
drivers/scsi/elx/libefc/efc_domain.c
439
domain->req_domain_free = true;
drivers/scsi/elx/libefc/efc_domain.c
45
efc_sm_transition(&domain->drvsm, __efc_domain_init,
drivers/scsi/elx/libefc/efc_domain.c
464
domain_sm_trace(domain);
drivers/scsi/elx/libefc/efc_domain.c
478
rc = xa_err(xa_store(&domain->lookup, fc_id, domain->nport,
drivers/scsi/elx/libefc/efc_domain.c
48
efc_domain_post_event(domain, EFC_EVT_DOMAIN_FOUND, drec);
drivers/scsi/elx/libefc/efc_domain.c
486
efc_node_fcid_display(fc_id, domain->nport->display_name,
drivers/scsi/elx/libefc/efc_domain.c
487
sizeof(domain->nport->display_name));
drivers/scsi/elx/libefc/efc_domain.c
490
rc = efc_cmd_domain_attach(efc, domain, fc_id);
drivers/scsi/elx/libefc/efc_domain.c
511
if (!list_empty(&domain->nport_list)) {
drivers/scsi/elx/libefc/efc_domain.c
522
&domain->nport_list,
drivers/scsi/elx/libefc/efc_domain.c
53
domain_trace(domain, "EFC_HW_DOMAIN_LOST:\n");
drivers/scsi/elx/libefc/efc_domain.c
531
if (efc_cmd_domain_free(efc, domain))
drivers/scsi/elx/libefc/efc_domain.c
549
domain_sm_trace(domain);
drivers/scsi/elx/libefc/efc_domain.c
55
efc_domain_post_event(domain, EFC_EVT_DOMAIN_LOST, NULL);
drivers/scsi/elx/libefc/efc_domain.c
561
domain->domain_notify_pend = true;
drivers/scsi/elx/libefc/efc_domain.c
564
domain->attached = true;
drivers/scsi/elx/libefc/efc_domain.c
571
domain->req_accept_frames = true;
drivers/scsi/elx/libefc/efc_domain.c
580
&domain->nport_list, list_entry) {
drivers/scsi/elx/libefc/efc_domain.c
587
domain->domain_notify_pend = false;
drivers/scsi/elx/libefc/efc_domain.c
59
domain_trace(domain, "EFC_HW_DOMAIN_ALLOC_OK:\n");
drivers/scsi/elx/libefc/efc_domain.c
60
efc_domain_post_event(domain, EFC_EVT_DOMAIN_ALLOC_OK, NULL);
drivers/scsi/elx/libefc/efc_domain.c
629
domain_sm_trace(domain);
drivers/scsi/elx/libefc/efc_domain.c
634
if (efc_vport_start(domain)) {
drivers/scsi/elx/libefc/efc_domain.c
635
efc_log_debug(domain->efc,
drivers/scsi/elx/libefc/efc_domain.c
64
domain_trace(domain, "EFC_HW_DOMAIN_ALLOC_FAIL:\n");
drivers/scsi/elx/libefc/efc_domain.c
641
if (!list_empty(&domain->nport_list)) {
drivers/scsi/elx/libefc/efc_domain.c
65
efc_domain_post_event(domain, EFC_EVT_DOMAIN_ALLOC_FAIL,
drivers/scsi/elx/libefc/efc_domain.c
651
&domain->nport_list,
drivers/scsi/elx/libefc/efc_domain.c
660
if (efc_cmd_domain_free(efc, domain))
drivers/scsi/elx/libefc/efc_domain.c
679
WARN_ON(!domain->attached);
drivers/scsi/elx/libefc/efc_domain.c
685
WARN_ON(domain->nport->fc_id != fc_id);
drivers/scsi/elx/libefc/efc_domain.c
70
domain_trace(domain, "EFC_HW_DOMAIN_ATTACH_OK:\n");
drivers/scsi/elx/libefc/efc_domain.c
700
domain_sm_trace(domain);
drivers/scsi/elx/libefc/efc_domain.c
71
efc_domain_post_event(domain, EFC_EVT_DOMAIN_ATTACH_OK, NULL);
drivers/scsi/elx/libefc/efc_domain.c
711
rc = efc_cmd_domain_free(efc, domain);
drivers/scsi/elx/libefc/efc_domain.c
729
domain_sm_trace(domain);
drivers/scsi/elx/libefc/efc_domain.c
734
if (domain->domain_found_pending) {
drivers/scsi/elx/libefc/efc_domain.c
742
u64 fcf_wwn = domain->fcf_wwn;
drivers/scsi/elx/libefc/efc_domain.c
743
struct efc_domain_record drec = domain->pending_drec;
drivers/scsi/elx/libefc/efc_domain.c
746
domain->req_domain_free = true;
drivers/scsi/elx/libefc/efc_domain.c
747
domain = efc_domain_alloc(efc, fcf_wwn);
drivers/scsi/elx/libefc/efc_domain.c
749
if (!domain) {
drivers/scsi/elx/libefc/efc_domain.c
75
domain_trace(domain, "EFC_HW_DOMAIN_ATTACH_FAIL:\n");
drivers/scsi/elx/libefc/efc_domain.c
76
efc_domain_post_event(domain,
drivers/scsi/elx/libefc/efc_domain.c
760
efc_sm_transition(&domain->drvsm, __efc_domain_init,
drivers/scsi/elx/libefc/efc_domain.c
762
efc_sm_post_event(&domain->drvsm,
drivers/scsi/elx/libefc/efc_domain.c
765
domain->req_domain_free = true;
drivers/scsi/elx/libefc/efc_domain.c
779
domain_sm_trace(domain);
drivers/scsi/elx/libefc/efc_domain.c
788
if (!list_empty(&domain->nport_list)) {
drivers/scsi/elx/libefc/efc_domain.c
798
&domain->nport_list,
drivers/scsi/elx/libefc/efc_domain.c
807
if (efc_cmd_domain_free(efc, domain))
drivers/scsi/elx/libefc/efc_domain.c
81
domain_trace(domain, "EFC_HW_DOMAIN_FREE_OK:\n");
drivers/scsi/elx/libefc/efc_domain.c
82
efc_domain_post_event(domain, EFC_EVT_DOMAIN_FREE_OK, NULL);
drivers/scsi/elx/libefc/efc_domain.c
824
__efc_domain_attach_internal(struct efc_domain *domain, u32 s_id)
drivers/scsi/elx/libefc/efc_domain.c
826
memcpy(domain->dma.virt,
drivers/scsi/elx/libefc/efc_domain.c
827
((uint8_t *)domain->flogi_service_params) + 4,
drivers/scsi/elx/libefc/efc_domain.c
829
(void)efc_sm_post_event(&domain->drvsm, EFC_EVT_DOMAIN_REQ_ATTACH,
drivers/scsi/elx/libefc/efc_domain.c
834
efc_domain_attach(struct efc_domain *domain, u32 s_id)
drivers/scsi/elx/libefc/efc_domain.c
836
__efc_domain_attach_internal(domain, s_id);
drivers/scsi/elx/libefc/efc_domain.c
840
efc_domain_post_event(struct efc_domain *domain,
drivers/scsi/elx/libefc/efc_domain.c
846
rc = efc_sm_post_event(&domain->drvsm, event, arg);
drivers/scsi/elx/libefc/efc_domain.c
848
req_domain_free = domain->req_domain_free;
drivers/scsi/elx/libefc/efc_domain.c
849
domain->req_domain_free = false;
drivers/scsi/elx/libefc/efc_domain.c
852
efc_domain_free(domain);
drivers/scsi/elx/libefc/efc_domain.c
858
efct_domain_process_pending(struct efc_domain *domain)
drivers/scsi/elx/libefc/efc_domain.c
86
domain_trace(domain, "EFC_HW_DOMAIN_FREE_FAIL:\n");
drivers/scsi/elx/libefc/efc_domain.c
860
struct efc *efc = domain->efc;
drivers/scsi/elx/libefc/efc_domain.c
87
efc_domain_post_event(domain, EFC_EVT_DOMAIN_FREE_FAIL, NULL);
drivers/scsi/elx/libefc/efc_domain.c
893
if (efc_domain_dispatch_frame(domain, seq))
drivers/scsi/elx/libefc/efc_domain.c
907
struct efc_domain *domain = efc->domain;
drivers/scsi/elx/libefc/efc_domain.c
914
if (!domain || efc->hold_frames || !list_empty(&efc->pend_frames)) {
drivers/scsi/elx/libefc/efc_domain.c
922
if (domain) {
drivers/scsi/elx/libefc/efc_domain.c
924
efct_domain_process_pending(domain);
drivers/scsi/elx/libefc/efc_domain.c
932
if (efc_domain_dispatch_frame(domain, seq))
drivers/scsi/elx/libefc/efc_domain.c
940
struct efc_domain *domain = (struct efc_domain *)arg;
drivers/scsi/elx/libefc/efc_domain.c
941
struct efc *efc = domain->efc;
drivers/scsi/elx/libefc/efc_domain.c
95
if (efc->domain && domain->req_accept_frames) {
drivers/scsi/elx/libefc/efc_domain.c
96
domain->req_accept_frames = false;
drivers/scsi/elx/libefc/efc_domain.c
961
nport = efc_nport_find(domain, d_id);
drivers/scsi/elx/libefc/efc_domain.c
971
nport = domain->nport;
drivers/scsi/elx/libefc/efc_domain.h
17
efc_domain_free(struct efc_domain *domain);
drivers/scsi/elx/libefc/efc_domain.h
42
efc_domain_attach(struct efc_domain *domain, u32 s_id);
drivers/scsi/elx/libefc/efc_domain.h
44
efc_domain_post_event(struct efc_domain *domain, enum efc_sm_event event,
drivers/scsi/elx/libefc/efc_domain.h
47
__efc_domain_attach_internal(struct efc_domain *domain, u32 s_id);
drivers/scsi/elx/libefc/efc_fabric.c
1097
remote_sp = (struct fc_els_flogi *)nport->domain->flogi_service_params;
drivers/scsi/elx/libefc/efc_fabric.c
1264
if (!node->nport->domain->attached) {
drivers/scsi/elx/libefc/efc_fabric.c
1266
efc_domain_attach(node->nport->domain,
drivers/scsi/elx/libefc/efc_fabric.c
150
memcpy(node->nport->domain->flogi_service_params,
drivers/scsi/elx/libefc/efc_fabric.c
160
WARN_ON(node->nport->domain->attached);
drivers/scsi/elx/libefc/efc_fabric.c
161
efc_domain_attach(node->nport->domain,
drivers/scsi/elx/libefc/efc_fabric.c
183
if (node->nport->domain->attached &&
drivers/scsi/elx/libefc/efc_fabric.c
184
!node->nport->domain->domain_notify_pend) {
drivers/scsi/elx/libefc/efc_node.c
196
struct efc_domain *domain = nport->domain;
drivers/scsi/elx/libefc/efc_node.c
199
if (!domain->attached) {
drivers/scsi/elx/libefc/efc_node.h
177
efc_process_node_pending(struct efc_node *domain);
drivers/scsi/elx/libefc/efc_nport.c
105
memcpy(nport->service_params, domain->service_params,
drivers/scsi/elx/libefc/efc_nport.c
121
if (list_empty(&domain->nport_list))
drivers/scsi/elx/libefc/efc_nport.c
122
domain->nport = nport;
drivers/scsi/elx/libefc/efc_nport.c
125
list_add_tail(&nport->list_entry, &domain->nport_list);
drivers/scsi/elx/libefc/efc_nport.c
127
kref_get(&domain->ref);
drivers/scsi/elx/libefc/efc_nport.c
129
efc_log_debug(domain->efc, "New Nport [%s]\n", nport->display_name);
drivers/scsi/elx/libefc/efc_nport.c
137
struct efc_domain *domain;
drivers/scsi/elx/libefc/efc_nport.c
142
domain = nport->domain;
drivers/scsi/elx/libefc/efc_nport.c
143
efc_log_debug(domain->efc, "[%s] free nport\n", nport->display_name);
drivers/scsi/elx/libefc/efc_nport.c
149
if (nport == domain->nport)
drivers/scsi/elx/libefc/efc_nport.c
150
domain->nport = NULL;
drivers/scsi/elx/libefc/efc_nport.c
153
xa_erase(&domain->lookup, nport->fc_id);
drivers/scsi/elx/libefc/efc_nport.c
155
if (list_empty(&domain->nport_list))
drivers/scsi/elx/libefc/efc_nport.c
156
efc_domain_post_event(domain, EFC_EVT_ALL_CHILD_NODES_FREE,
drivers/scsi/elx/libefc/efc_nport.c
159
kref_put(&domain->ref, domain->release);
drivers/scsi/elx/libefc/efc_nport.c
164
efc_nport_find(struct efc_domain *domain, u32 d_id)
drivers/scsi/elx/libefc/efc_nport.c
169
nport = xa_load(&domain->lookup, d_id);
drivers/scsi/elx/libefc/efc_nport.c
185
rc = xa_err(xa_store(&nport->domain->lookup, fc_id, nport, GFP_ATOMIC));
drivers/scsi/elx/libefc/efc_nport.c
275
struct efc_domain *domain = nport->domain;
drivers/scsi/elx/libefc/efc_nport.c
296
xa_erase(&domain->lookup, nport->fc_id);
drivers/scsi/elx/libefc/efc_nport.c
324
struct efc_domain *domain = nport->domain;
drivers/scsi/elx/libefc/efc_nport.c
331
WARN_ON(nport != domain->nport);
drivers/scsi/elx/libefc/efc_nport.c
366
if (efc_cmd_nport_alloc(efc, nport, nport->domain,
drivers/scsi/elx/libefc/efc_nport.c
45
efc_nport_find_wwn(struct efc_domain *domain, uint64_t wwnn, uint64_t wwpn)
drivers/scsi/elx/libefc/efc_nport.c
50
list_for_each_entry(nport, &domain->nport_list, list_entry) {
drivers/scsi/elx/libefc/efc_nport.c
543
struct efc_domain *domain = nport->domain;
drivers/scsi/elx/libefc/efc_nport.c
561
xa_erase(&domain->lookup, nport->fc_id);
drivers/scsi/elx/libefc/efc_nport.c
598
efc_vport_nport_alloc(struct efc_domain *domain, struct efc_vport *vport)
drivers/scsi/elx/libefc/efc_nport.c
602
lockdep_assert_held(&domain->efc->lock);
drivers/scsi/elx/libefc/efc_nport.c
604
nport = efc_nport_alloc(domain, vport->wwpn, vport->wwnn, vport->fc_id,
drivers/scsi/elx/libefc/efc_nport.c
621
efc_vport_start(struct efc_domain *domain)
drivers/scsi/elx/libefc/efc_nport.c
623
struct efc *efc = domain->efc;
drivers/scsi/elx/libefc/efc_nport.c
633
if (efc_vport_nport_alloc(domain, vport))
drivers/scsi/elx/libefc/efc_nport.c
643
efc_nport_vport_new(struct efc_domain *domain, uint64_t wwpn, uint64_t wwnn,
drivers/scsi/elx/libefc/efc_nport.c
647
struct efc *efc = domain->efc;
drivers/scsi/elx/libefc/efc_nport.c
652
if (ini && domain->efc->enable_ini == 0) {
drivers/scsi/elx/libefc/efc_nport.c
657
if (tgt && domain->efc->enable_tgt == 0) {
drivers/scsi/elx/libefc/efc_nport.c
66
efc_nport_alloc(struct efc_domain *domain, uint64_t wwpn, uint64_t wwnn,
drivers/scsi/elx/libefc/efc_nport.c
666
vport = efc_vport_create_spec(domain->efc, wwnn, wwpn, fc_id, ini, tgt,
drivers/scsi/elx/libefc/efc_nport.c
674
rc = efc_vport_nport_alloc(domain, vport);
drivers/scsi/elx/libefc/efc_nport.c
681
efc_nport_vport_del(struct efc *efc, struct efc_domain *domain,
drivers/scsi/elx/libefc/efc_nport.c
700
if (!domain) {
drivers/scsi/elx/libefc/efc_nport.c
706
list_for_each_entry(nport, &domain->nport_list, list_entry) {
drivers/scsi/elx/libefc/efc_nport.c
71
if (domain->efc->enable_ini)
drivers/scsi/elx/libefc/efc_nport.c
76
nport = efc_nport_find_wwn(domain, wwnn, wwpn);
drivers/scsi/elx/libefc/efc_nport.c
78
efc_log_err(domain->efc,
drivers/scsi/elx/libefc/efc_nport.c
93
nport->efc = domain->efc;
drivers/scsi/elx/libefc/efc_nport.c
95
nport->domain = domain;
drivers/scsi/elx/libefc/efc_nport.c
97
nport->instance_index = domain->nport_count++;
drivers/scsi/elx/libefc/efc_nport.h
16
efc_nport_find(struct efc_domain *domain, u32 d_id);
drivers/scsi/elx/libefc/efc_nport.h
18
efc_nport_alloc(struct efc_domain *domain, uint64_t wwpn, uint64_t wwnn,
drivers/scsi/elx/libefc/efc_nport.h
48
efc_vport_start(struct efc_domain *domain);
drivers/scsi/elx/libefc/efclib.h
149
struct efc_domain *domain;
drivers/scsi/elx/libefc/efclib.h
547
struct efc_domain *domain;
drivers/scsi/elx/libefc/efclib.h
592
int efc_nport_vport_new(struct efc_domain *domain, u64 wwpn,
drivers/scsi/elx/libefc/efclib.h
595
int efc_nport_vport_del(struct efc *efc, struct efc_domain *domain,
drivers/scsi/hpsa.c
6356
pciinfo.domain = pci_domain_nr(h->pdev->bus);
drivers/scsi/hpsa_cmd.h
707
unsigned short domain;
drivers/scsi/lpfc/lpfc_els.c
7973
if ((ns_did.un.b.domain == rscn_did.un.b.domain)
drivers/scsi/lpfc/lpfc_els.c
7979
if ((ns_did.un.b.domain == rscn_did.un.b.domain)
drivers/scsi/lpfc/lpfc_els.c
7984
if (ns_did.un.b.domain == rscn_did.un.b.domain)
drivers/scsi/lpfc/lpfc_hbadisc.c
5532
if ((mydid.un.b.domain == 0) && (mydid.un.b.area == 0)) {
drivers/scsi/lpfc/lpfc_hbadisc.c
5539
if ((mydid.un.b.domain == matchdid.un.b.domain) &&
drivers/scsi/lpfc/lpfc_hbadisc.c
5549
if ((ndlpdid.un.b.domain == 0) &&
drivers/scsi/lpfc/lpfc_hbadisc.c
5560
if ((mydid.un.b.domain == ndlpdid.un.b.domain) &&
drivers/scsi/lpfc/lpfc_hbadisc.c
5562
if ((matchdid.un.b.domain == 0) &&
drivers/scsi/lpfc/lpfc_hw.h
1062
uint8_t domain;
drivers/scsi/lpfc/lpfc_hw.h
1068
uint8_t domain;
drivers/scsi/qla2xxx/qla_attr.c
2551
fc_host_port_id(shost) = vha->d_id.b.domain << 16 |
drivers/scsi/qla2xxx/qla_attr.c
2679
port_id = fcport->d_id.b.domain << 16 |
drivers/scsi/qla2xxx/qla_attr.c
856
did.b.domain = (type & 0x00ff0000) >> 16;
drivers/scsi/qla2xxx/qla_attr.c
861
did.b.domain, did.b.area, did.b.al_pa);
drivers/scsi/qla2xxx/qla_bsg.c
1873
vha->bidir_fcport.d_id.b.domain = vha->d_id.b.domain;
drivers/scsi/qla2xxx/qla_bsg.c
2276
uint8_t domain, area, al_pa, state;
drivers/scsi/qla2xxx/qla_bsg.c
2291
&area, &domain, &topo, &sw_cap);
drivers/scsi/qla2xxx/qla_bsg.c
368
fcport->d_id.b.domain =
drivers/scsi/qla2xxx/qla_bsg.c
426
fcport->d_id.b.domain, fcport->d_id.b.area, fcport->d_id.b.al_pa);
drivers/scsi/qla2xxx/qla_bsg.c
540
fcport->d_id.b.domain = bsg_request->rqst_data.h_ct.port_id[2];
drivers/scsi/qla2xxx/qla_bsg.c
563
fcport->loop_id, fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_def.h
3097
uint8_t domain;
drivers/scsi/qla2xxx/qla_def.h
3104
uint8_t domain;
drivers/scsi/qla2xxx/qla_def.h
3416
uint8_t domain;
drivers/scsi/qla2xxx/qla_def.h
433
res.domain = id.domain;
drivers/scsi/qla2xxx/qla_def.h
44
uint8_t domain;
drivers/scsi/qla2xxx/qla_def.h
444
res.domain = id.domain;
drivers/scsi/qla2xxx/qla_def.h
455
res.b.domain = id.domain;
drivers/scsi/qla2xxx/qla_def.h
467
res.domain = port_id.b.domain;
drivers/scsi/qla2xxx/qla_def.h
53
uint8_t domain;
drivers/scsi/qla2xxx/qla_def.h
63
uint8_t domain;
drivers/scsi/qla2xxx/qla_def.h
69
uint8_t domain;
drivers/scsi/qla2xxx/qla_dfs.c
164
sess->d_id.b.domain, sess->d_id.b.area,
drivers/scsi/qla2xxx/qla_edif.c
1582
portid.b.domain = sa_frame.port_id.b.domain;
drivers/scsi/qla2xxx/qla_edif.c
2254
id.b.domain = (data >> 16) & 0xff;
drivers/scsi/qla2xxx/qla_edif.c
2462
sa_update_iocb->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_edif.c
2517
sa_update_iocb->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_edif.c
2553
a.did.b.domain = p->s_id[2];
drivers/scsi/qla2xxx/qla_edif.c
2605
purex->pur_info.pur_did.b.domain = p->d_id[2];
drivers/scsi/qla2xxx/qla_edif.c
2621
beid.domain = purex->pur_info.pur_did.b.domain;
drivers/scsi/qla2xxx/qla_edif.c
3149
cmd_pkt->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_edif.c
3557
d_id.b.domain = bsg_request->rqst_data.h_els.port_id[0];
drivers/scsi/qla2xxx/qla_edif.c
768
portid.b.domain = appplogiok.u.d_id.b.domain;
drivers/scsi/qla2xxx/qla_edif.c
883
portid.b.domain = appplogifail.u.d_id.b.domain;
drivers/scsi/qla2xxx/qla_edif.c
971
tdid.b.domain = app_req.remote_pid.domain;
drivers/scsi/qla2xxx/qla_edif_bsg.h
100
uint8_t domain;
drivers/scsi/qla2xxx/qla_edif_bsg.h
94
uint8_t domain;
drivers/scsi/qla2xxx/qla_gs.c
1017
sns_cmd->p.cmd.param[2] = fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_gs.c
1035
fcport->d_id.b.domain = sns_cmd->p.gan_data[17];
drivers/scsi/qla2xxx/qla_gs.c
1044
fcport->d_id.b.domain = 0xf0;
drivers/scsi/qla2xxx/qla_gs.c
1050
fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_gs.c
1106
list[i].d_id.b.domain = entry[1];
drivers/scsi/qla2xxx/qla_gs.c
1156
sns_cmd->p.cmd.param[2] = list[i].d_id.b.domain;
drivers/scsi/qla2xxx/qla_gs.c
1212
sns_cmd->p.cmd.param[2] = list[i].d_id.b.domain;
drivers/scsi/qla2xxx/qla_gs.c
1237
list[i].d_id.b.domain, list[i].d_id.b.area,
drivers/scsi/qla2xxx/qla_gs.c
1272
sns_cmd->p.cmd.param[2] = vha->d_id.b.domain;
drivers/scsi/qla2xxx/qla_gs.c
132
routine, ms_pkt->entry_status, vha->d_id.b.domain,
drivers/scsi/qla2xxx/qla_gs.c
1321
sns_cmd->p.cmd.param[2] = vha->d_id.b.domain;
drivers/scsi/qla2xxx/qla_gs.c
148
routine, vha->d_id.b.domain,
drivers/scsi/qla2xxx/qla_gs.c
184
vha->d_id.b.domain, vha->d_id.b.area,
drivers/scsi/qla2xxx/qla_gs.c
256
fcport->d_id.b.domain = 0xf0;
drivers/scsi/qla2xxx/qla_gs.c
262
fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_gs.c
2847
fcport->loop_id, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_gs.c
3012
ct_req->req.gff_id.port_id[0] = fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_gs.c
3185
if ((rp->id.b.domain & 0xf0) == 0xf0)
drivers/scsi/qla2xxx/qla_gs.c
3349
id.b.domain = d->port_id[0];
drivers/scsi/qla2xxx/qla_gs.c
491
list[i].d_id.b.domain, list[i].d_id.b.area,
drivers/scsi/qla2xxx/qla_init.c
1103
id.b.domain = e->port_id[2];
drivers/scsi/qla2xxx/qla_init.c
2562
cid.b.domain = (ea->iop[1] >> 16) & 0xff;
drivers/scsi/qla2xxx/qla_init.c
3089
uint8_t domain, area, al_pa;
drivers/scsi/qla2xxx/qla_init.c
3103
&area, &domain, &topo, &sw_cap);
drivers/scsi/qla2xxx/qla_init.c
430
sp->handle, fcport->loop_id, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_init.c
490
sp->handle, fcport->loop_id, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_init.c
5056
uint8_t domain;
drivers/scsi/qla2xxx/qla_init.c
5065
&loop_id, &al_pa, &area, &domain, &topo, &sw_cap);
drivers/scsi/qla2xxx/qla_init.c
5141
id.b.domain = domain;
drivers/scsi/qla2xxx/qla_init.c
5557
port_state_str[state], fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_init.c
5875
uint8_t domain, area, al_pa;
drivers/scsi/qla2xxx/qla_init.c
5941
domain = gid->domain;
drivers/scsi/qla2xxx/qla_init.c
5951
if ((domain & 0xf0) == 0xf0)
drivers/scsi/qla2xxx/qla_init.c
5955
if (area && domain && ((area != vha->d_id.b.area) ||
drivers/scsi/qla2xxx/qla_init.c
5956
(domain != vha->d_id.b.domain)) &&
drivers/scsi/qla2xxx/qla_init.c
5968
new_fcport->d_id.b.domain = domain;
drivers/scsi/qla2xxx/qla_init.c
6145
rport_ids.port_id = fcport->d_id.b.domain << 16 |
drivers/scsi/qla2xxx/qla_init.c
6602
new_fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_init.c
6623
if ((new_fcport->d_id.b.domain & 0xf0) == 0xf0)
drivers/scsi/qla2xxx/qla_init.c
6684
fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_init.c
6688
new_fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_init.c
6829
fcport->loop_id, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_init.c
6834
fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_init.c
6854
fcport->loop_id, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_init.c
6912
fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_init.c
6924
"jiffies=%lx.\n", mb[0], fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_init.c
6930
fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_init.c
769
fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_init.c
775
id.b.domain = e->port_id[2];
drivers/scsi/qla2xxx/qla_init.c
950
id.b.domain = e->port_id[0];
drivers/scsi/qla2xxx/qla_init.c
9827
fcport->loop_id, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_init.c
9834
fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_iocb.c
1636
cmd_pkt->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
1842
cmd_pkt->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
2003
cmd_pkt->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
2225
cmd_pkt->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
2410
logio->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
2440
logio->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
2462
mbx->mb2 = cpu_to_le16(sp->fcport->d_id.b.domain);
drivers/scsi/qla2xxx/qla_iocb.c
2487
logio->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
2502
mbx->mb2 = cpu_to_le16(sp->fcport->d_id.b.domain);
drivers/scsi/qla2xxx/qla_iocb.c
2561
tsk->port_id[2] = fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
2664
sp->name, sp->handle, fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_iocb.c
2695
sp->name, sp->handle, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_iocb.c
2731
fcport->d_id.b.domain = remote_did.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
2736
fcport->d_id.b.domain, fcport->d_id.b.area, fcport->d_id.b.al_pa);
drivers/scsi/qla2xxx/qla_iocb.c
2763
logo_pyld.s_id[2] = vha->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
2783
sp->name, sp->handle, fcport->loop_id, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_iocb.c
2813
els_iocb->d_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
2817
els_iocb->s_id[0] = vha->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
2975
cid.b.domain = (le32_to_cpu(fw_status[2]) >> 16)
drivers/scsi/qla2xxx/qla_iocb.c
3159
els_iocb->d_id[2] = a->did.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
3163
els_iocb->s_id[0] = vha->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
3199
els_iocb->d_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
3532
cmd_pkt->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
3603
cmd_pkt->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
3702
abt_iocb->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
3842
logio->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
4208
cmd_pkt->port_id[2] = vha->d_id.b.domain;
drivers/scsi/qla2xxx/qla_iocb.c
4409
cmd_pkt->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_isr.c
1771
host_pid = (vha->d_id.b.domain << 16) | (vha->d_id.b.area << 8)
drivers/scsi/qla2xxx/qla_isr.c
2123
fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_isr.c
2141
type, sp->handle, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_isr.c
2171
fcport->d_id.b.domain, fcport->d_id.b.area, fcport->d_id.b.al_pa,
drivers/scsi/qla2xxx/qla_isr.c
2547
type, fcport->port_name, sp->handle, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_isr.c
2719
fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_isr.c
3635
"port state= %s comp_status %x.\n", fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_isr.c
3689
cp->device->id, cp->device->lun, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_mbx.c
1744
uint8_t *area, uint8_t *domain, uint16_t *top, uint16_t *sw_cap)
drivers/scsi/qla2xxx/qla_mbx.c
1776
*domain = LSB(mcp->mb[3]);
drivers/scsi/qla2xxx/qla_mbx.c
2107
fcport->d_id.b.domain = pd24->port_id[0];
drivers/scsi/qla2xxx/qla_mbx.c
2133
pd->slave_state, fcport->d_id.b.domain,
drivers/scsi/qla2xxx/qla_mbx.c
2152
fcport->d_id.b.domain = pd->port_id[0];
drivers/scsi/qla2xxx/qla_mbx.c
2549
qla24xx_login_fabric(scsi_qla_host_t *vha, uint16_t loop_id, uint8_t domain,
drivers/scsi/qla2xxx/qla_mbx.c
2586
lg->port_id[2] = domain;
drivers/scsi/qla2xxx/qla_mbx.c
2685
qla2x00_login_fabric(scsi_qla_host_t *vha, uint16_t loop_id, uint8_t domain,
drivers/scsi/qla2xxx/qla_mbx.c
2705
mcp->mb[2] = domain;
drivers/scsi/qla2xxx/qla_mbx.c
2779
fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_mbx.c
2825
qla24xx_fabric_logout(scsi_qla_host_t *vha, uint16_t loop_id, uint8_t domain,
drivers/scsi/qla2xxx/qla_mbx.c
2854
lg->port_id[2] = domain;
drivers/scsi/qla2xxx/qla_mbx.c
2900
qla2x00_fabric_logout(scsi_qla_host_t *vha, uint16_t loop_id, uint8_t domain,
drivers/scsi/qla2xxx/qla_mbx.c
3344
abt->port_id[2] = fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_mbx.c
3430
tsk->p.tsk.port_id[2] = fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_mbx.c
4045
id.b.domain = rptid_entry->port_id[2];
drivers/scsi/qla2xxx/qla_mbx.c
4230
vha->d_id.b.domain = rptid_entry->port_id[2];
drivers/scsi/qla2xxx/qla_mbx.c
4253
fcport->d_id.b.domain =
drivers/scsi/qla2xxx/qla_mbx.c
6786
fcport->d_id.b.domain = pd->port_id[0];
drivers/scsi/qla2xxx/qla_mr.c
1937
vha->d_id.b.domain = pinfo->port_id[0];
drivers/scsi/qla2xxx/qla_nvme.c
1261
id.b.domain = p->s_id.domain;
drivers/scsi/qla2xxx/qla_nvme.c
1264
d_id.b.domain = p->d_id[2];
drivers/scsi/qla2xxx/qla_nvme.c
679
cmd_pkt->port_id[2] = sp->fcport->d_id.b.domain;
drivers/scsi/qla2xxx/qla_target.c
1356
gid->domain == s_id.domain) {
drivers/scsi/qla2xxx/qla_target.c
1444
fcport->loop_id, sess->d_id.b.domain, sess->d_id.b.area,
drivers/scsi/qla2xxx/qla_target.c
171
vha->d_id.b.domain == d_id.domain &&
drivers/scsi/qla2xxx/qla_target.c
2133
"tag=%d, param=%x)\n", vha->vp_idx, abts->fcp_hdr_le.s_id.domain,
drivers/scsi/qla2xxx/qla_target.c
326
atio->u.isp24.fcp_hdr.d_id.domain,
drivers/scsi/qla2xxx/qla_target.c
4916
fcport->d_id.b.domain, fcport->d_id.b.area,
drivers/scsi/qla2xxx/qla_target.c
5005
key = (((u32)s_id->b.domain << 16) |
drivers/scsi/qla2xxx/qla_target.c
5047
port_id.b.domain = iocb->u.isp24.port_id[2];
drivers/scsi/qla2xxx/qla_target.c
5332
s_id.domain = iocb->u.isp24.port_id[2];
drivers/scsi/qla2xxx/qla_target.c
6124
port_id.b.domain = iocb->u.isp24.port_id[2];
drivers/scsi/qla2xxx/qla_target.c
7280
if (s_id.domain == 0xFF && s_id.area == 0xFC) {
drivers/scsi/qla2xxx/qla_target.c
7287
s_id.domain, s_id.area, s_id.al_pa);
drivers/scsi/qla2xxx/qla_target.c
7304
vha->vp_idx, s_id.domain, s_id.area, s_id.al_pa);
drivers/scsi/qla2xxx/qla_target.c
826
port_id.b.domain = iocb->u.isp24.port_id[2];
drivers/scsi/qla2xxx/qla_target.c
935
logo->id.b.domain, logo->id.b.area, logo->id.b.al_pa,
drivers/scsi/qla2xxx/qla_target.c
956
sess->d_id.b.domain, sess->d_id.b.area, sess->d_id.b.al_pa,
drivers/scsi/qla2xxx/qla_target.h
1072
((_s_id.b.domain == 0xff) && ((_s_id.b.area & 0xf0) == 0xf0))
drivers/scsi/qla2xxx/qla_target.h
1119
return s_id.domain << 16 |
drivers/scsi/qla2xxx/tcm_qla2xxx.c
1477
sess->loop_id, loop_id, sess->d_id.b.domain,
drivers/scsi/qla2xxx/tcm_qla2xxx.c
1478
sess->d_id.b.area, sess->d_id.b.al_pa, s_id.b.domain,
drivers/scsi/qla2xxx/tcm_qla2xxx.c
1499
key = (((u32) sess->d_id.b.domain << 16) |
drivers/scsi/qla2xxx/tcm_qla2xxx.c
1506
sess->d_id.b.domain, sess->d_id.b.area,
drivers/scsi/qla2xxx/tcm_qla2xxx.c
1510
sess->d_id.b.domain, sess->d_id.b.area,
drivers/scsi/qla2xxx/tcm_qla2xxx.c
1513
key = (((u32) s_id.b.domain << 16) |
drivers/scsi/qla2xxx/tcm_qla2xxx.c
1519
s_id.b.domain, s_id.b.area, s_id.b.al_pa);
drivers/scsi/smartpqi/smartpqi_init.c
6682
pci_info.domain = pci_domain_nr(pci_dev->bus);
drivers/sh/intc/core.c
182
static bool __init intc_map(struct irq_domain *domain, int irq)
drivers/sh/intc/core.c
189
if (irq_domain_associate(domain, irq, irq)) {
drivers/sh/intc/core.c
329
if (!intc_map(d->domain, irq))
drivers/sh/intc/core.c
347
if (!intc_map(d->domain, irq2))
drivers/sh/intc/internals.h
71
struct irq_domain *domain;
drivers/sh/intc/irqdomain.c
62
d->domain = irq_domain_create_linear(NULL, hw->nr_vectors, &intc_evt_ops, NULL);
drivers/sh/intc/irqdomain.c
64
d->domain = irq_domain_create_tree(NULL, &intc_evt_ops, NULL);
drivers/sh/intc/irqdomain.c
66
BUG_ON(!d->domain);
drivers/soc/dove/pmu.c
143
static int pmu_domain_power_off(struct generic_pm_domain *domain)
drivers/soc/dove/pmu.c
145
struct pmu_domain *pmu_dom = to_pmu_domain(domain);
drivers/soc/dove/pmu.c
177
static int pmu_domain_power_on(struct generic_pm_domain *domain)
drivers/soc/dove/pmu.c
179
struct pmu_domain *pmu_dom = to_pmu_domain(domain);
drivers/soc/dove/pmu.c
211
static void __pmu_domain_register(struct pmu_domain *domain,
drivers/soc/dove/pmu.c
214
unsigned int val = readl_relaxed(domain->pmu->pmu_base + PMU_PWR);
drivers/soc/dove/pmu.c
216
domain->base.power_off = pmu_domain_power_off;
drivers/soc/dove/pmu.c
217
domain->base.power_on = pmu_domain_power_on;
drivers/soc/dove/pmu.c
219
pm_genpd_init(&domain->base, NULL, !(val & domain->pwr_mask));
drivers/soc/dove/pmu.c
222
of_genpd_add_provider_simple(np, &domain->base);
drivers/soc/dove/pmu.c
230
struct irq_domain *domain = pmu->irq_domain;
drivers/soc/dove/pmu.c
246
generic_handle_irq(irq_find_mapping(domain, hwirq));
drivers/soc/dove/pmu.c
269
struct irq_domain *domain;
drivers/soc/dove/pmu.c
276
domain = irq_domain_create_linear(of_fwnode_handle(pmu->of_node), NR_PMU_IRQS,
drivers/soc/dove/pmu.c
278
if (!domain) {
drivers/soc/dove/pmu.c
283
ret = irq_alloc_domain_generic_chips(domain, NR_PMU_IRQS, 1, name,
drivers/soc/dove/pmu.c
289
irq_domain_remove(domain);
drivers/soc/dove/pmu.c
293
gc = irq_get_domain_generic_chip(domain, 0);
drivers/soc/dove/pmu.c
299
pmu->irq_domain = domain;
drivers/soc/dove/pmu.c
325
struct pmu_domain *domain;
drivers/soc/dove/pmu.c
327
domain = kzalloc_obj(*domain);
drivers/soc/dove/pmu.c
328
if (domain) {
drivers/soc/dove/pmu.c
329
domain->pmu = pmu;
drivers/soc/dove/pmu.c
330
domain->pwr_mask = domain_initdata->pwr_mask;
drivers/soc/dove/pmu.c
331
domain->rst_mask = domain_initdata->rst_mask;
drivers/soc/dove/pmu.c
332
domain->iso_mask = domain_initdata->iso_mask;
drivers/soc/dove/pmu.c
333
domain->base.name = domain_initdata->name;
drivers/soc/dove/pmu.c
335
__pmu_domain_register(domain, NULL);
drivers/soc/dove/pmu.c
409
struct pmu_domain *domain;
drivers/soc/dove/pmu.c
411
domain = kzalloc_obj(*domain);
drivers/soc/dove/pmu.c
412
if (!domain)
drivers/soc/dove/pmu.c
415
domain->pmu = pmu;
drivers/soc/dove/pmu.c
416
domain->base.name = kasprintf(GFP_KERNEL, "%pOFn", np);
drivers/soc/dove/pmu.c
417
if (!domain->base.name) {
drivers/soc/dove/pmu.c
418
kfree(domain);
drivers/soc/dove/pmu.c
423
&domain->pwr_mask);
drivers/soc/dove/pmu.c
425
&domain->iso_mask);
drivers/soc/dove/pmu.c
436
domain->rst_mask = BIT(args.args[0]);
drivers/soc/dove/pmu.c
440
__pmu_domain_register(domain, np);
drivers/soc/qcom/qcom_pd_mapper.c
100
domain->name = domain_name;
drivers/soc/qcom/qcom_pd_mapper.c
101
domain->instance_id = instance_id;
drivers/soc/qcom/qcom_pd_mapper.c
102
list_add_tail(&domain->list, &service->domains);
drivers/soc/qcom/qcom_pd_mapper.c
108
const struct qcom_pdm_domain_data *domain)
drivers/soc/qcom/qcom_pd_mapper.c
115
domain->domain,
drivers/soc/qcom/qcom_pd_mapper.c
116
domain->instance_id);
drivers/soc/qcom/qcom_pd_mapper.c
120
for (i = 0; domain->services[i]; i++) {
drivers/soc/qcom/qcom_pd_mapper.c
122
domain->services[i],
drivers/soc/qcom/qcom_pd_mapper.c
123
domain->domain,
drivers/soc/qcom/qcom_pd_mapper.c
124
domain->instance_id);
drivers/soc/qcom/qcom_pd_mapper.c
136
struct qcom_pdm_domain *domain, *tdomain;
drivers/soc/qcom/qcom_pd_mapper.c
139
list_for_each_entry_safe(domain, tdomain, &service->domains, list) {
drivers/soc/qcom/qcom_pd_mapper.c
140
list_del(&domain->list);
drivers/soc/qcom/qcom_pd_mapper.c
141
kfree(domain);
drivers/soc/qcom/qcom_pd_mapper.c
180
struct qcom_pdm_domain *domain;
drivers/soc/qcom/qcom_pd_mapper.c
185
list_for_each_entry(domain, &service->domains, list) {
drivers/soc/qcom/qcom_pd_mapper.c
191
strscpy(rsp->domain_list[j].name, domain->name,
drivers/soc/qcom/qcom_pd_mapper.c
193
rsp->domain_list[j].instance = domain->instance_id;
drivers/soc/qcom/qcom_pd_mapper.c
195
pr_debug("PDM: found %s / %d\n", domain->name,
drivers/soc/qcom/qcom_pd_mapper.c
196
domain->instance_id);
drivers/soc/qcom/qcom_pd_mapper.c
25
const char *domain;
drivers/soc/qcom/qcom_pd_mapper.c
255
.domain = "msm/adsp/audio_pd",
drivers/soc/qcom/qcom_pd_mapper.c
264
.domain = "msm/adsp/charger_pd",
drivers/soc/qcom/qcom_pd_mapper.c
270
.domain = "msm/adsp/root_pd",
drivers/soc/qcom/qcom_pd_mapper.c
276
.domain = "msm/adsp/root_pd",
drivers/soc/qcom/qcom_pd_mapper.c
285
.domain = "msm/adsp/sensor_pd",
drivers/soc/qcom/qcom_pd_mapper.c
291
.domain = "msm/adsp/audio_pd",
drivers/soc/qcom/qcom_pd_mapper.c
297
.domain = "msm/adsp/root_pd",
drivers/soc/qcom/qcom_pd_mapper.c
303
.domain = "msm/cdsp/root_pd",
drivers/soc/qcom/qcom_pd_mapper.c
309
.domain = "msm/slpi/root_pd",
drivers/soc/qcom/qcom_pd_mapper.c
315
.domain = "msm/slpi/sensor_pd",
drivers/soc/qcom/qcom_pd_mapper.c
321
.domain = "msm/modem/root_pd",
drivers/soc/qcom/qcom_pd_mapper.c
329
.domain = "msm/modem/root_pd",
drivers/soc/qcom/qcom_pd_mapper.c
338
.domain = "msm/modem/root_pd",
drivers/soc/qcom/qcom_pd_mapper.c
348
.domain = "msm/modem/root_pd",
drivers/soc/qcom/qcom_pd_mapper.c
354
.domain = "msm/modem/wlan_pd",
drivers/soc/qcom/qcom_pd_mapper.c
71
struct qcom_pdm_domain *domain;
drivers/soc/qcom/qcom_pd_mapper.c
75
list_for_each_entry(domain, &service->domains, list) {
drivers/soc/qcom/qcom_pd_mapper.c
76
if (!strcmp(domain->name, domain_name))
drivers/soc/qcom/qcom_pd_mapper.c
90
domain = kzalloc_obj(*domain);
drivers/soc/qcom/qcom_pd_mapper.c
91
if (!domain) {
drivers/soc/qcom/smp2p.c
103
struct irq_domain *domain;
drivers/soc/qcom/smp2p.c
272
irq_pin = irq_find_mapping(entry->domain, i);
drivers/soc/qcom/smp2p.c
402
entry->domain = irq_domain_create_linear(of_fwnode_handle(node), 32, &smp2p_irq_ops, entry);
drivers/soc/qcom/smp2p.c
403
if (!entry->domain) {
drivers/soc/qcom/smp2p.c
655
irq_domain_remove(entry->domain);
drivers/soc/qcom/smp2p.c
680
irq_domain_remove(entry->domain);
drivers/soc/qcom/smsm.c
112
struct irq_domain *domain;
drivers/soc/qcom/smsm.c
228
irq_pin = irq_find_mapping(entry->domain, i);
drivers/soc/qcom/smsm.c
233
irq_pin = irq_find_mapping(entry->domain, i);
drivers/soc/qcom/smsm.c
459
entry->domain = irq_domain_create_linear(of_fwnode_handle(node), 32, &smsm_irq_ops, entry);
drivers/soc/qcom/smsm.c
460
if (!entry->domain) {
drivers/soc/qcom/smsm.c
650
if (smsm->entries[id].domain)
drivers/soc/qcom/smsm.c
651
irq_domain_remove(smsm->entries[id].domain);
drivers/soc/qcom/smsm.c
668
if (smsm->entries[id].domain)
drivers/soc/qcom/smsm.c
669
irq_domain_remove(smsm->entries[id].domain);
drivers/soc/tegra/pmc.c
2092
irq = irq_find_mapping(pmc->domain, hwirq);
drivers/soc/tegra/pmc.c
2447
static int tegra_pmc_irq_translate(struct irq_domain *domain,
drivers/soc/tegra/pmc.c
2461
static int tegra_pmc_irq_alloc(struct irq_domain *domain, unsigned int virq,
drivers/soc/tegra/pmc.c
2464
struct tegra_pmc *pmc = domain->host_data;
drivers/soc/tegra/pmc.c
2483
err = irq_domain_set_hwirq_and_chip(domain, virq,
drivers/soc/tegra/pmc.c
2491
err = irq_domain_disconnect_hierarchy(domain->parent, virq);
drivers/soc/tegra/pmc.c
2501
err = irq_domain_alloc_irqs_parent(domain, virq,
drivers/soc/tegra/pmc.c
2513
err = irq_domain_set_hwirq_and_chip(domain, virq,
drivers/soc/tegra/pmc.c
2518
if (!err && domain->parent)
drivers/soc/tegra/pmc.c
2519
err = irq_domain_disconnect_hierarchy(domain->parent,
drivers/soc/tegra/pmc.c
2527
err = irq_domain_disconnect_hierarchy(domain, virq);
drivers/soc/tegra/pmc.c
2737
pmc->domain = irq_domain_create_hierarchy(parent, 0, 96, dev_fwnode(pmc->dev),
drivers/soc/tegra/pmc.c
2739
if (!pmc->domain) {
drivers/soc/tegra/pmc.c
473
struct irq_domain *domain;
drivers/soc/tegra/pmc.c
499
to_powergate(struct generic_pm_domain *domain)
drivers/soc/tegra/pmc.c
501
return container_of(domain, struct tegra_powergate, genpd);
drivers/soc/tegra/pmc.c
918
static int tegra_genpd_power_on(struct generic_pm_domain *domain)
drivers/soc/tegra/pmc.c
920
struct tegra_powergate *pg = to_powergate(domain);
drivers/soc/tegra/pmc.c
937
static int tegra_genpd_power_off(struct generic_pm_domain *domain)
drivers/soc/tegra/pmc.c
939
struct tegra_powergate *pg = to_powergate(domain);
drivers/soc/ti/k3-ringacc.c
1373
dev->msi.domain = of_msi_get_domain(dev, dev->of_node,
drivers/soc/ti/k3-ringacc.c
1375
if (!dev->msi.domain)
drivers/soc/ti/ti_sci_inta_msi.c
52
struct irq_domain *domain;
drivers/soc/ti/ti_sci_inta_msi.c
57
domain = msi_create_irq_domain(fwnode, info, parent);
drivers/soc/ti/ti_sci_inta_msi.c
58
if (domain)
drivers/soc/ti/ti_sci_inta_msi.c
59
irq_domain_update_bus_token(domain, DOMAIN_BUS_TI_SCI_INTA_MSI);
drivers/soc/ti/ti_sci_inta_msi.c
61
return domain;
drivers/soc/xilinx/xlnx_event_manager.c
582
struct irq_domain *domain;
drivers/soc/xilinx/xlnx_event_manager.c
595
domain = irq_find_host(interrupt_parent);
drivers/soc/xilinx/xlnx_event_manager.c
599
sgi_fwspec.fwnode = domain->fwnode;
drivers/soundwire/irq.c
34
bus->domain = irq_domain_create_linear(fwnode, SDW_FW_MAX_DEVICES,
drivers/soundwire/irq.c
36
if (!bus->domain) {
drivers/soundwire/irq.c
46
irq_domain_remove(bus->domain);
drivers/soundwire/irq.c
58
slave->irq = irq_create_mapping(slave->bus->domain, slave->index);
drivers/spmi/spmi-pmic-arb.c
1000
qpnpint_irq_domain_map(bus, domain, virq + i, hwirq + i,
drivers/spmi/spmi-pmic-arb.c
171
struct irq_domain *domain;
drivers/spmi/spmi-pmic-arb.c
1943
bus->domain = irq_domain_create_tree(of_fwnode_handle(node), &pmic_arb_irq_domain_ops, bus);
drivers/spmi/spmi-pmic-arb.c
1944
if (!bus->domain) {
drivers/spmi/spmi-pmic-arb.c
1995
irq_domain_remove(bus->domain);
drivers/spmi/spmi-pmic-arb.c
685
irq = irq_find_mapping(bus->domain,
drivers/spmi/spmi-pmic-arb.c
881
static int qpnpint_irq_domain_activate(struct irq_domain *domain,
drivers/spmi/spmi-pmic-arb.c
965
struct irq_domain *domain, unsigned int virq,
drivers/spmi/spmi-pmic-arb.c
981
irq_domain_set_info(domain, virq, hwirq, &pmic_arb_irqchip, bus,
drivers/spmi/spmi-pmic-arb.c
985
static int qpnpint_irq_domain_alloc(struct irq_domain *domain,
drivers/spmi/spmi-pmic-arb.c
989
struct spmi_pmic_arb_bus *bus = domain->host_data;
drivers/spmi/spmi-pmic-arb.c
995
ret = qpnpint_irq_domain_translate(domain, fwspec, &hwirq, &type);
drivers/staging/greybus/gpio.c
386
irq = irq_find_mapping(ggc->chip.irq.domain, event->which);
drivers/staging/greybus/gpio.c
47
return d->domain->host_data;
drivers/thermal/intel/int340x_thermal/processor_thermal_rapl.c
51
enum rapl_domain_type domain;
drivers/thermal/intel/int340x_thermal/processor_thermal_rapl.c
57
for (domain = RAPL_DOMAIN_PACKAGE; domain < RAPL_DOMAIN_MAX; domain++) {
drivers/thermal/intel/int340x_thermal/processor_thermal_rapl.c
59
if (rapl_regs->regs[domain][reg])
drivers/thermal/intel/int340x_thermal/processor_thermal_rapl.c
60
rapl_mmio_priv.regs[domain][reg].mmio =
drivers/thermal/intel/int340x_thermal/processor_thermal_rapl.c
62
rapl_regs->regs[domain][reg];
drivers/thermal/intel/int340x_thermal/processor_thermal_rapl.c
63
rapl_mmio_priv.limits[domain] = rapl_regs->limits[domain];
drivers/thermal/qcom/lmh.c
214
lmh_data->domain = irq_domain_create_linear(dev_fwnode(dev), 1, &lmh_irq_ops, lmh_data);
drivers/thermal/qcom/lmh.c
215
if (!lmh_data->domain) {
drivers/thermal/qcom/lmh.c
227
irq_domain_remove(lmh_data->domain);
drivers/thermal/qcom/lmh.c
37
struct irq_domain *domain;
drivers/thermal/qcom/lmh.c
44
int irq = irq_find_mapping(lmh_data->domain, 0);
drivers/thermal/tegra/soctherm.c
1032
irq_find_mapping(soc_irq_cdata.domain, 0));
drivers/thermal/tegra/soctherm.c
1036
irq_find_mapping(soc_irq_cdata.domain, 1));
drivers/thermal/tegra/soctherm.c
1040
irq_find_mapping(soc_irq_cdata.domain, 2));
drivers/thermal/tegra/soctherm.c
1044
irq_find_mapping(soc_irq_cdata.domain, 3));
drivers/thermal/tegra/soctherm.c
1244
soc_irq_cdata.domain = irq_domain_create_linear(fwnode, num_irqs, &soctherm_oc_domain_ops,
drivers/thermal/tegra/soctherm.c
1246
if (!soc_irq_cdata.domain) {
drivers/thermal/tegra/soctherm.c
355
struct irq_domain *domain;
drivers/thermal/ti-soc-thermal/dra752-thermal-data.c
336
.domain = "cpu",
drivers/thermal/ti-soc-thermal/dra752-thermal-data.c
345
.domain = "gpu",
drivers/thermal/ti-soc-thermal/dra752-thermal-data.c
352
.domain = "core",
drivers/thermal/ti-soc-thermal/dra752-thermal-data.c
359
.domain = "dspeve",
drivers/thermal/ti-soc-thermal/dra752-thermal-data.c
366
.domain = "iva",
drivers/thermal/ti-soc-thermal/omap3-thermal-data.c
150
.domain = "cpu",
drivers/thermal/ti-soc-thermal/omap3-thermal-data.c
82
.domain = "cpu",
drivers/thermal/ti-soc-thermal/omap4-thermal-data.c
205
.domain = "cpu",
drivers/thermal/ti-soc-thermal/omap4-thermal-data.c
236
.domain = "cpu",
drivers/thermal/ti-soc-thermal/omap4-thermal-data.c
74
.domain = "cpu",
drivers/thermal/ti-soc-thermal/omap5-thermal-data.c
283
.domain = "cpu",
drivers/thermal/ti-soc-thermal/omap5-thermal-data.c
292
.domain = "gpu",
drivers/thermal/ti-soc-thermal/omap5-thermal-data.c
299
.domain = "core",
drivers/thermal/ti-soc-thermal/ti-bandgap.c
1008
char *domain;
drivers/thermal/ti-soc-thermal/ti-bandgap.c
1017
domain = bgp->conf->sensors[i].domain;
drivers/thermal/ti-soc-thermal/ti-bandgap.c
1018
ret = bgp->conf->expose_sensor(bgp, i, domain);
drivers/thermal/ti-soc-thermal/ti-bandgap.c
243
__func__, bgp->conf->sensors[i].domain,
drivers/thermal/ti-soc-thermal/ti-bandgap.h
235
char *domain;
drivers/thermal/ti-soc-thermal/ti-bandgap.h
332
int (*expose_sensor)(struct ti_bandgap *bgp, int id, char *domain);
drivers/thermal/ti-soc-thermal/ti-thermal-common.c
163
char *domain)
drivers/thermal/ti-soc-thermal/ti-thermal.h
42
int ti_thermal_expose_sensor(struct ti_bandgap *bgp, int id, char *domain);
drivers/thermal/ti-soc-thermal/ti-thermal.h
49
int ti_thermal_expose_sensor(struct ti_bandgap *bgp, int id, char *domain)
drivers/tty/vcc.c
26
char *domain;
drivers/tty/vcc.c
488
rv = scnprintf(buf, PAGE_SIZE, "%s\n", port->domain);
drivers/tty/vcc.c
534
static DEVICE_ATTR_ADMIN_RO(domain);
drivers/tty/vcc.c
565
const char *domain;
drivers/tty/vcc.c
624
domain = mdesc_get_property(hp, node, "vcc-domain-name", NULL);
drivers/tty/vcc.c
625
if (!domain) {
drivers/tty/vcc.c
630
port->domain = kstrdup(domain, GFP_KERNEL);
drivers/tty/vcc.c
631
if (!port->domain) {
drivers/tty/vcc.c
658
kfree(port->domain);
drivers/tty/vcc.c
714
kfree(port->domain);
drivers/tty/vcc.c
994
kfree(port->domain);
drivers/uio/uio_pci_generic_sva.c
30
struct iommu_domain *domain;
drivers/uio/uio_pci_generic_sva.c
35
domain = iommu_get_domain_for_dev(&udev->pdev->dev);
drivers/uio/uio_pci_generic_sva.c
36
if (domain)
drivers/uio/uio_pci_generic_sva.c
37
iommu_detach_device(domain, &udev->pdev->dev);
drivers/usb/host/xhci.c
250
struct iommu_domain *domain;
drivers/usb/host/xhci.c
268
domain = iommu_get_domain_for_dev(dev);
drivers/usb/host/xhci.c
269
if (!(xhci->quirks & XHCI_ZERO_64B_REGS) || !domain ||
drivers/usb/host/xhci.c
270
domain->type == IOMMU_DOMAIN_IDENTITY)
drivers/vdpa/vdpa_user/iova_domain.c
100
spin_unlock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
103
static int vduse_domain_map_bounce_page(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
123
map = &domain->bounce_maps[iova >> BOUNCE_MAP_SHIFT];
drivers/vdpa/vdpa_user/iova_domain.c
125
head_map = &domain->bounce_maps[(iova & PAGE_MASK) >> BOUNCE_MAP_SHIFT];
drivers/vdpa/vdpa_user/iova_domain.c
142
static void vduse_domain_unmap_bounce_page(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
149
map = &domain->bounce_maps[iova >> BOUNCE_MAP_SHIFT];
drivers/vdpa/vdpa_user/iova_domain.c
184
static void vduse_domain_bounce(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
194
if (iova >= domain->bounce_size)
drivers/vdpa/vdpa_user/iova_domain.c
198
map = &domain->bounce_maps[iova >> BOUNCE_MAP_SHIFT];
drivers/vdpa/vdpa_user/iova_domain.c
20
static int vduse_iotlb_add_range(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
207
page = domain->user_bounce_pages ?
drivers/vdpa/vdpa_user/iova_domain.c
219
vduse_domain_get_coherent_page(struct vduse_iova_domain *domain, u64 iova)
drivers/vdpa/vdpa_user/iova_domain.c
226
spin_lock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
227
map = vhost_iotlb_itree_first(domain->iotlb, start, last);
drivers/vdpa/vdpa_user/iova_domain.c
234
spin_unlock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
240
vduse_domain_get_bounce_page(struct vduse_iova_domain *domain, u64 iova)
drivers/vdpa/vdpa_user/iova_domain.c
245
read_lock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
246
map = &domain->bounce_maps[iova >> BOUNCE_MAP_SHIFT];
drivers/vdpa/vdpa_user/iova_domain.c
247
if (domain->user_bounce_pages || !map->bounce_page)
drivers/vdpa/vdpa_user/iova_domain.c
253
read_unlock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
259
vduse_domain_free_kernel_bounce_pages(struct vduse_iova_domain *domain)
drivers/vdpa/vdpa_user/iova_domain.c
264
bounce_pfns = domain->bounce_size >> BOUNCE_MAP_SHIFT;
drivers/vdpa/vdpa_user/iova_domain.c
267
map = &domain->bounce_maps[pfn];
drivers/vdpa/vdpa_user/iova_domain.c
280
int vduse_domain_add_user_bounce_pages(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
286
int bounce_pfns = domain->bounce_size >> BOUNCE_MAP_SHIFT;
drivers/vdpa/vdpa_user/iova_domain.c
291
if (count != (domain->bounce_size >> PAGE_SHIFT))
drivers/vdpa/vdpa_user/iova_domain.c
294
write_lock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
296
if (domain->user_bounce_pages)
drivers/vdpa/vdpa_user/iova_domain.c
301
head_map = &domain->bounce_maps[(i * inner_pages)];
drivers/vdpa/vdpa_user/iova_domain.c
306
map = &domain->bounce_maps[(i * inner_pages + j)];
drivers/vdpa/vdpa_user/iova_domain.c
318
domain->user_bounce_pages = true;
drivers/vdpa/vdpa_user/iova_domain.c
321
write_unlock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
326
void vduse_domain_remove_user_bounce_pages(struct vduse_iova_domain *domain)
drivers/vdpa/vdpa_user/iova_domain.c
331
int bounce_pfns = domain->bounce_size >> BOUNCE_MAP_SHIFT;
drivers/vdpa/vdpa_user/iova_domain.c
335
write_lock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
336
if (!domain->user_bounce_pages)
drivers/vdpa/vdpa_user/iova_domain.c
339
count = domain->bounce_size >> PAGE_SHIFT;
drivers/vdpa/vdpa_user/iova_domain.c
342
head_map = &domain->bounce_maps[(i * inner_pages)];
drivers/vdpa/vdpa_user/iova_domain.c
35
ret = vhost_iotlb_add_range_ctx(domain->iotlb, start, last,
drivers/vdpa/vdpa_user/iova_domain.c
350
map = &domain->bounce_maps[(i * inner_pages + j)];
drivers/vdpa/vdpa_user/iova_domain.c
363
domain->user_bounce_pages = false;
drivers/vdpa/vdpa_user/iova_domain.c
365
write_unlock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
368
void vduse_domain_reset_bounce_map(struct vduse_iova_domain *domain)
drivers/vdpa/vdpa_user/iova_domain.c
370
if (!domain->bounce_map)
drivers/vdpa/vdpa_user/iova_domain.c
373
spin_lock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
374
if (!domain->bounce_map)
drivers/vdpa/vdpa_user/iova_domain.c
377
vduse_iotlb_del_range(domain, 0, domain->bounce_size - 1);
drivers/vdpa/vdpa_user/iova_domain.c
378
domain->bounce_map = 0;
drivers/vdpa/vdpa_user/iova_domain.c
380
spin_unlock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
383
static int vduse_domain_init_bounce_map(struct vduse_iova_domain *domain)
drivers/vdpa/vdpa_user/iova_domain.c
387
if (domain->bounce_map)
drivers/vdpa/vdpa_user/iova_domain.c
390
spin_lock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
391
if (domain->bounce_map)
drivers/vdpa/vdpa_user/iova_domain.c
394
ret = vduse_iotlb_add_range(domain, 0, domain->bounce_size - 1,
drivers/vdpa/vdpa_user/iova_domain.c
395
0, VHOST_MAP_RW, domain->file, 0);
drivers/vdpa/vdpa_user/iova_domain.c
399
domain->bounce_map = 1;
drivers/vdpa/vdpa_user/iova_domain.c
401
spin_unlock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
427
void vduse_domain_sync_single_for_device(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
431
read_lock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
433
vduse_domain_bounce(domain, dma_addr, size, DMA_TO_DEVICE);
drivers/vdpa/vdpa_user/iova_domain.c
434
read_unlock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
437
void vduse_domain_sync_single_for_cpu(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
441
read_lock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
443
vduse_domain_bounce(domain, dma_addr, size, DMA_FROM_DEVICE);
drivers/vdpa/vdpa_user/iova_domain.c
444
read_unlock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
447
dma_addr_t vduse_domain_map_page(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
45
static void vduse_iotlb_del_range(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
452
struct iova_domain *iovad = &domain->stream_iovad;
drivers/vdpa/vdpa_user/iova_domain.c
453
unsigned long limit = domain->bounce_size - 1;
drivers/vdpa/vdpa_user/iova_domain.c
460
if (vduse_domain_init_bounce_map(domain))
drivers/vdpa/vdpa_user/iova_domain.c
463
read_lock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
464
if (vduse_domain_map_bounce_page(domain, (u64)iova, (u64)size, pa))
drivers/vdpa/vdpa_user/iova_domain.c
469
vduse_domain_bounce(domain, iova, size, DMA_TO_DEVICE);
drivers/vdpa/vdpa_user/iova_domain.c
471
read_unlock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
475
read_unlock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
481
void vduse_domain_unmap_page(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
485
struct iova_domain *iovad = &domain->stream_iovad;
drivers/vdpa/vdpa_user/iova_domain.c
486
read_lock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
489
vduse_domain_bounce(domain, dma_addr, size, DMA_FROM_DEVICE);
drivers/vdpa/vdpa_user/iova_domain.c
491
vduse_domain_unmap_bounce_page(domain, (u64)dma_addr, (u64)size);
drivers/vdpa/vdpa_user/iova_domain.c
492
read_unlock(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
496
dma_addr_t vduse_domain_alloc_coherent(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
499
struct iova_domain *iovad = &domain->consistent_iovad;
drivers/vdpa/vdpa_user/iova_domain.c
500
unsigned long limit = domain->iova_limit;
drivers/vdpa/vdpa_user/iova_domain.c
506
spin_lock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
507
if (vduse_iotlb_add_range(domain, (u64)iova, (u64)iova + size - 1,
drivers/vdpa/vdpa_user/iova_domain.c
509
domain->file, (u64)iova)) {
drivers/vdpa/vdpa_user/iova_domain.c
51
while ((map = vhost_iotlb_itree_first(domain->iotlb, start, last))) {
drivers/vdpa/vdpa_user/iova_domain.c
510
spin_unlock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
513
spin_unlock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
523
void vduse_domain_free_coherent(struct vduse_iova_domain *domain, size_t size,
drivers/vdpa/vdpa_user/iova_domain.c
526
struct iova_domain *iovad = &domain->consistent_iovad;
drivers/vdpa/vdpa_user/iova_domain.c
530
spin_lock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
531
map = vhost_iotlb_itree_first(domain->iotlb, (u64)dma_addr,
drivers/vdpa/vdpa_user/iova_domain.c
534
spin_unlock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
540
vhost_iotlb_map_free(domain->iotlb, map);
drivers/vdpa/vdpa_user/iova_domain.c
541
spin_unlock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
548
struct vduse_iova_domain *domain = vmf->vma->vm_private_data;
drivers/vdpa/vdpa_user/iova_domain.c
55
vhost_iotlb_map_free(domain->iotlb, map);
drivers/vdpa/vdpa_user/iova_domain.c
552
if (!domain)
drivers/vdpa/vdpa_user/iova_domain.c
555
if (iova < domain->bounce_size)
drivers/vdpa/vdpa_user/iova_domain.c
556
page = vduse_domain_get_bounce_page(domain, iova);
drivers/vdpa/vdpa_user/iova_domain.c
558
page = vduse_domain_get_coherent_page(domain, iova);
drivers/vdpa/vdpa_user/iova_domain.c
574
struct vduse_iova_domain *domain = file->private_data;
drivers/vdpa/vdpa_user/iova_domain.c
577
vma->vm_private_data = domain;
drivers/vdpa/vdpa_user/iova_domain.c
585
struct vduse_iova_domain *domain = file->private_data;
drivers/vdpa/vdpa_user/iova_domain.c
587
spin_lock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
588
vduse_iotlb_del_range(domain, 0, ULLONG_MAX);
drivers/vdpa/vdpa_user/iova_domain.c
589
vduse_domain_remove_user_bounce_pages(domain);
drivers/vdpa/vdpa_user/iova_domain.c
59
int vduse_domain_set_map(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
590
vduse_domain_free_kernel_bounce_pages(domain);
drivers/vdpa/vdpa_user/iova_domain.c
591
spin_unlock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
592
put_iova_domain(&domain->stream_iovad);
drivers/vdpa/vdpa_user/iova_domain.c
593
put_iova_domain(&domain->consistent_iovad);
drivers/vdpa/vdpa_user/iova_domain.c
594
vhost_iotlb_free(domain->iotlb);
drivers/vdpa/vdpa_user/iova_domain.c
595
vfree(domain->bounce_maps);
drivers/vdpa/vdpa_user/iova_domain.c
596
kfree(domain);
drivers/vdpa/vdpa_user/iova_domain.c
607
void vduse_domain_destroy(struct vduse_iova_domain *domain)
drivers/vdpa/vdpa_user/iova_domain.c
609
fput(domain->file);
drivers/vdpa/vdpa_user/iova_domain.c
615
struct vduse_iova_domain *domain;
drivers/vdpa/vdpa_user/iova_domain.c
625
domain = kzalloc_obj(*domain);
drivers/vdpa/vdpa_user/iova_domain.c
626
if (!domain)
drivers/vdpa/vdpa_user/iova_domain.c
629
domain->iotlb = vhost_iotlb_alloc(0, 0);
drivers/vdpa/vdpa_user/iova_domain.c
630
if (!domain->iotlb)
drivers/vdpa/vdpa_user/iova_domain.c
633
domain->iova_limit = iova_limit;
drivers/vdpa/vdpa_user/iova_domain.c
634
domain->bounce_size = PAGE_ALIGN(bounce_size);
drivers/vdpa/vdpa_user/iova_domain.c
635
domain->bounce_maps = vzalloc(bounce_pfns *
drivers/vdpa/vdpa_user/iova_domain.c
637
if (!domain->bounce_maps)
drivers/vdpa/vdpa_user/iova_domain.c
641
map = &domain->bounce_maps[pfn];
drivers/vdpa/vdpa_user/iova_domain.c
645
domain, O_RDWR);
drivers/vdpa/vdpa_user/iova_domain.c
649
domain->file = file;
drivers/vdpa/vdpa_user/iova_domain.c
650
rwlock_init(&domain->bounce_lock);
drivers/vdpa/vdpa_user/iova_domain.c
651
spin_lock_init(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
652
init_iova_domain(&domain->stream_iovad,
drivers/vdpa/vdpa_user/iova_domain.c
654
ret = iova_domain_init_rcaches(&domain->stream_iovad);
drivers/vdpa/vdpa_user/iova_domain.c
657
init_iova_domain(&domain->consistent_iovad,
drivers/vdpa/vdpa_user/iova_domain.c
659
ret = iova_domain_init_rcaches(&domain->consistent_iovad);
drivers/vdpa/vdpa_user/iova_domain.c
663
return domain;
drivers/vdpa/vdpa_user/iova_domain.c
665
put_iova_domain(&domain->stream_iovad);
drivers/vdpa/vdpa_user/iova_domain.c
669
vfree(domain->bounce_maps);
drivers/vdpa/vdpa_user/iova_domain.c
67
spin_lock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
671
vhost_iotlb_free(domain->iotlb);
drivers/vdpa/vdpa_user/iova_domain.c
673
kfree(domain);
drivers/vdpa/vdpa_user/iova_domain.c
68
vduse_iotlb_del_range(domain, start, last);
drivers/vdpa/vdpa_user/iova_domain.c
73
ret = vduse_iotlb_add_range(domain, map->start, map->last,
drivers/vdpa/vdpa_user/iova_domain.c
80
spin_unlock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
84
vduse_iotlb_del_range(domain, start, last);
drivers/vdpa/vdpa_user/iova_domain.c
85
spin_unlock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
89
void vduse_domain_clear_map(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.c
95
spin_lock(&domain->iotlb_lock);
drivers/vdpa/vdpa_user/iova_domain.c
98
vduse_iotlb_del_range(domain, map->start, map->last);
drivers/vdpa/vdpa_user/iova_domain.h
45
int vduse_domain_set_map(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.h
48
void vduse_domain_clear_map(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.h
51
void vduse_domain_sync_single_for_device(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.h
55
void vduse_domain_sync_single_for_cpu(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.h
59
dma_addr_t vduse_domain_map_page(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.h
64
void vduse_domain_unmap_page(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.h
68
dma_addr_t vduse_domain_alloc_coherent(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.h
71
void vduse_domain_free_coherent(struct vduse_iova_domain *domain, size_t size,
drivers/vdpa/vdpa_user/iova_domain.h
74
void vduse_domain_reset_bounce_map(struct vduse_iova_domain *domain);
drivers/vdpa/vdpa_user/iova_domain.h
76
int vduse_domain_add_user_bounce_pages(struct vduse_iova_domain *domain,
drivers/vdpa/vdpa_user/iova_domain.h
79
void vduse_domain_remove_user_bounce_pages(struct vduse_iova_domain *domain);
drivers/vdpa/vdpa_user/iova_domain.h
81
void vduse_domain_destroy(struct vduse_iova_domain *domain);
drivers/vdpa/vdpa_user/vduse_dev.c
1008
struct vduse_iova_domain *domain;
drivers/vdpa/vdpa_user/vduse_dev.c
101
struct vduse_iova_domain *domain;
drivers/vdpa/vdpa_user/vduse_dev.c
1011
domain = token.group->as->domain;
drivers/vdpa/vdpa_user/vduse_dev.c
1012
vduse_domain_free_coherent(domain, size, dma_addr, attrs);
drivers/vdpa/vdpa_user/vduse_dev.c
1024
return dma_addr < token.group->as->domain->bounce_size;
drivers/vdpa/vdpa_user/vduse_dev.c
1040
return token.group->as->domain->bounce_size;
drivers/vdpa/vdpa_user/vduse_dev.c
1191
if (!dev->as[asid].domain)
drivers/vdpa/vdpa_user/vduse_dev.c
1195
size != dev->as[asid].domain->bounce_size)
drivers/vdpa/vdpa_user/vduse_dev.c
1198
vduse_domain_remove_user_bounce_pages(dev->as[asid].domain);
drivers/vdpa/vdpa_user/vduse_dev.c
1221
if (!dev->as[asid].domain || !dev->as[asid].domain->bounce_map ||
drivers/vdpa/vdpa_user/vduse_dev.c
1222
size != dev->as[asid].domain->bounce_size ||
drivers/vdpa/vdpa_user/vduse_dev.c
1252
ret = vduse_domain_add_user_bounce_pages(dev->as[asid].domain,
drivers/vdpa/vdpa_user/vduse_dev.c
1310
if (!dev->as[asid].domain)
drivers/vdpa/vdpa_user/vduse_dev.c
1313
spin_lock(&dev->as[asid].domain->iotlb_lock);
drivers/vdpa/vdpa_user/vduse_dev.c
1314
map = vhost_iotlb_itree_first(dev->as[asid].domain->iotlb,
drivers/vdpa/vdpa_user/vduse_dev.c
1330
if (dev->as[asid].domain->bounce_map && map->start == 0 &&
drivers/vdpa/vdpa_user/vduse_dev.c
1331
map->last == dev->as[asid].domain->bounce_size - 1)
drivers/vdpa/vdpa_user/vduse_dev.c
1337
spin_unlock(&dev->as[asid].domain->iotlb_lock);
drivers/vdpa/vdpa_user/vduse_dev.c
1627
if (dev->as[i].domain)
drivers/vdpa/vdpa_user/vduse_dev.c
1629
dev->as[i].domain->bounce_size);
drivers/vdpa/vdpa_user/vduse_dev.c
1899
if (dev->as[i].domain)
drivers/vdpa/vdpa_user/vduse_dev.c
1900
vduse_domain_destroy(dev->as[i].domain);
drivers/vdpa/vdpa_user/vduse_dev.c
2025
if (dev->as[0].domain)
drivers/vdpa/vdpa_user/vduse_dev.c
2298
dev->as[i].domain = vduse_domain_create(VDUSE_IOVA_SIZE - 1,
drivers/vdpa/vdpa_user/vduse_dev.c
2300
if (!dev->as[i].domain) {
drivers/vdpa/vdpa_user/vduse_dev.c
2319
if (dev->as[j].domain) {
drivers/vdpa/vdpa_user/vduse_dev.c
2320
vduse_domain_destroy(dev->as[j].domain);
drivers/vdpa/vdpa_user/vduse_dev.c
2321
dev->as[j].domain = NULL;
drivers/vdpa/vdpa_user/vduse_dev.c
469
struct vduse_iova_domain *domain = dev->as[i].domain;
drivers/vdpa/vdpa_user/vduse_dev.c
471
if (domain && domain->bounce_map)
drivers/vdpa/vdpa_user/vduse_dev.c
472
vduse_domain_reset_bounce_map(domain);
drivers/vdpa/vdpa_user/vduse_dev.c
859
ret = vduse_domain_set_map(dev->as[asid].domain, iotlb);
drivers/vdpa/vdpa_user/vduse_dev.c
865
vduse_domain_clear_map(dev->as[asid].domain, iotlb);
drivers/vdpa/vdpa_user/vduse_dev.c
917
struct vduse_iova_domain *domain;
drivers/vdpa/vdpa_user/vduse_dev.c
923
domain = token.group->as->domain;
drivers/vdpa/vdpa_user/vduse_dev.c
924
vduse_domain_sync_single_for_device(domain, dma_addr, size, dir);
drivers/vdpa/vdpa_user/vduse_dev.c
931
struct vduse_iova_domain *domain;
drivers/vdpa/vdpa_user/vduse_dev.c
937
domain = token.group->as->domain;
drivers/vdpa/vdpa_user/vduse_dev.c
938
vduse_domain_sync_single_for_cpu(domain, dma_addr, size, dir);
drivers/vdpa/vdpa_user/vduse_dev.c
946
struct vduse_iova_domain *domain;
drivers/vdpa/vdpa_user/vduse_dev.c
952
domain = token.group->as->domain;
drivers/vdpa/vdpa_user/vduse_dev.c
953
return vduse_domain_map_page(domain, page, offset, size, dir, attrs);
drivers/vdpa/vdpa_user/vduse_dev.c
960
struct vduse_iova_domain *domain;
drivers/vdpa/vdpa_user/vduse_dev.c
966
domain = token.group->as->domain;
drivers/vdpa/vdpa_user/vduse_dev.c
967
vduse_domain_unmap_page(domain, dma_addr, size, dir, attrs);
drivers/vdpa/vdpa_user/vduse_dev.c
984
struct vduse_iova_domain *domain;
drivers/vdpa/vdpa_user/vduse_dev.c
987
domain = token.group->as->domain;
drivers/vdpa/vdpa_user/vduse_dev.c
988
*dma_addr = vduse_domain_alloc_coherent(domain, size, addr);
drivers/vfio/vfio_iommu_type1.c
1059
static long vfio_sync_unpin(struct vfio_dma *dma, struct vfio_domain *domain,
drivers/vfio/vfio_iommu_type1.c
1066
iommu_iotlb_sync(domain->domain, iotlb_gather);
drivers/vfio/vfio_iommu_type1.c
1092
static size_t unmap_unpin_fast(struct vfio_domain *domain,
drivers/vfio/vfio_iommu_type1.c
1103
unmapped = iommu_unmap_fast(domain->domain, iova, len,
drivers/vfio/vfio_iommu_type1.c
1123
*unlocked += vfio_sync_unpin(dma, domain, unmapped_list,
drivers/vfio/vfio_iommu_type1.c
1131
static size_t unmap_unpin_slow(struct vfio_domain *domain,
drivers/vfio/vfio_iommu_type1.c
1136
size_t unmapped = iommu_unmap(domain->domain, iova, len);
drivers/vfio/vfio_iommu_type1.c
1151
struct vfio_domain *domain, *d;
drivers/vfio/vfio_iommu_type1.c
1171
domain = d = list_first_entry(&iommu->domain_list,
drivers/vfio/vfio_iommu_type1.c
1175
iommu_unmap(d->domain, dma->iova, dma->size);
drivers/vfio/vfio_iommu_type1.c
1185
phys = iommu_iova_to_phys(domain->domain, iova);
drivers/vfio/vfio_iommu_type1.c
1197
next = iommu_iova_to_phys(domain->domain, iova + len);
drivers/vfio/vfio_iommu_type1.c
1206
unmapped = unmap_unpin_fast(domain, dma, iova, len, phys,
drivers/vfio/vfio_iommu_type1.c
1211
unmapped = unmap_unpin_slow(domain, dma, iova, len,
drivers/vfio/vfio_iommu_type1.c
1223
unlocked += vfio_sync_unpin(dma, domain, &unmapped_region_list,
drivers/vfio/vfio_iommu_type1.c
1250
struct vfio_domain *domain;
drivers/vfio/vfio_iommu_type1.c
1254
list_for_each_entry(domain, &iommu->domain_list, next)
drivers/vfio/vfio_iommu_type1.c
1255
iommu->pgsize_bitmap &= domain->domain->pgsize_bitmap;
drivers/vfio/vfio_iommu_type1.c
1561
ret = iommu_map(d->domain, iova, (phys_addr_t)pfn << PAGE_SHIFT,
drivers/vfio/vfio_iommu_type1.c
1574
iommu_unmap(d->domain, iova, npage << PAGE_SHIFT);
drivers/vfio/vfio_iommu_type1.c
1805
struct vfio_domain *domain)
drivers/vfio/vfio_iommu_type1.c
1842
phys = iommu_iova_to_phys(d->domain, iova);
drivers/vfio/vfio_iommu_type1.c
1853
p == iommu_iova_to_phys(d->domain, i)) {
drivers/vfio/vfio_iommu_type1.c
1878
ret = iommu_map(domain->domain, iova, phys, size,
drivers/vfio/vfio_iommu_type1.c
1912
iommu_unmap(domain->domain, dma->iova, dma->size);
drivers/vfio/vfio_iommu_type1.c
1922
phys = iommu_iova_to_phys(domain->domain, iova);
drivers/vfio/vfio_iommu_type1.c
1932
p == iommu_iova_to_phys(domain->domain, i)) {
drivers/vfio/vfio_iommu_type1.c
1938
iommu_unmap(domain->domain, iova, size);
drivers/vfio/vfio_iommu_type1.c
1948
static struct vfio_iommu_group *find_iommu_group(struct vfio_domain *domain,
drivers/vfio/vfio_iommu_type1.c
1953
list_for_each_entry(g, &domain->group_list, next) {
drivers/vfio/vfio_iommu_type1.c
1966
struct vfio_domain *domain;
drivers/vfio/vfio_iommu_type1.c
1968
list_for_each_entry(domain, &iommu->domain_list, next) {
drivers/vfio/vfio_iommu_type1.c
1969
group = find_iommu_group(domain, iommu_group);
drivers/vfio/vfio_iommu_type1.c
2231
struct iommu_domain **domain = data;
drivers/vfio/vfio_iommu_type1.c
2233
*domain = iommu_paging_domain_alloc(dev);
drivers/vfio/vfio_iommu_type1.c
2242
struct vfio_domain *domain, *d;
drivers/vfio/vfio_iommu_type1.c
2281
domain = kzalloc_obj(*domain);
drivers/vfio/vfio_iommu_type1.c
2282
if (!domain)
drivers/vfio/vfio_iommu_type1.c
2290
iommu_group_for_each_dev(iommu_group, &domain->domain,
drivers/vfio/vfio_iommu_type1.c
2292
if (IS_ERR(domain->domain)) {
drivers/vfio/vfio_iommu_type1.c
2293
ret = PTR_ERR(domain->domain);
drivers/vfio/vfio_iommu_type1.c
2297
ret = iommu_attach_group(domain->domain, group->iommu_group);
drivers/vfio/vfio_iommu_type1.c
2302
geo = &domain->domain->geometry;
drivers/vfio/vfio_iommu_type1.c
2338
INIT_LIST_HEAD(&domain->group_list);
drivers/vfio/vfio_iommu_type1.c
2339
list_add(&group->next, &domain->group_list);
drivers/vfio/vfio_iommu_type1.c
2354
if (domain->domain->ops->enforce_cache_coherency)
drivers/vfio/vfio_iommu_type1.c
2355
domain->enforce_cache_coherency =
drivers/vfio/vfio_iommu_type1.c
2356
domain->domain->ops->enforce_cache_coherency(
drivers/vfio/vfio_iommu_type1.c
2357
domain->domain);
drivers/vfio/vfio_iommu_type1.c
2367
if (d->domain->ops == domain->domain->ops &&
drivers/vfio/vfio_iommu_type1.c
2369
domain->enforce_cache_coherency) {
drivers/vfio/vfio_iommu_type1.c
2370
iommu_detach_group(domain->domain, group->iommu_group);
drivers/vfio/vfio_iommu_type1.c
2371
if (!iommu_attach_group(d->domain,
drivers/vfio/vfio_iommu_type1.c
2374
iommu_domain_free(domain->domain);
drivers/vfio/vfio_iommu_type1.c
2375
kfree(domain);
drivers/vfio/vfio_iommu_type1.c
2379
ret = iommu_attach_group(domain->domain,
drivers/vfio/vfio_iommu_type1.c
2387
ret = vfio_iommu_replay(iommu, domain);
drivers/vfio/vfio_iommu_type1.c
2392
ret = iommu_get_msi_cookie(domain->domain, resv_msi_base);
drivers/vfio/vfio_iommu_type1.c
2397
list_add(&domain->next, &iommu->domain_list);
drivers/vfio/vfio_iommu_type1.c
2415
iommu_detach_group(domain->domain, group->iommu_group);
drivers/vfio/vfio_iommu_type1.c
2417
iommu_domain_free(domain->domain);
drivers/vfio/vfio_iommu_type1.c
2421
kfree(domain);
drivers/vfio/vfio_iommu_type1.c
2468
struct vfio_domain *domain;
drivers/vfio/vfio_iommu_type1.c
2476
list_for_each_entry(domain, &iommu->domain_list, next) {
drivers/vfio/vfio_iommu_type1.c
2477
struct iommu_domain_geometry *geo = &domain->domain->geometry;
drivers/vfio/vfio_iommu_type1.c
2543
struct vfio_domain *domain;
drivers/vfio/vfio_iommu_type1.c
2571
list_for_each_entry(domain, &iommu->domain_list, next) {
drivers/vfio/vfio_iommu_type1.c
2572
group = find_iommu_group(domain, iommu_group);
drivers/vfio/vfio_iommu_type1.c
2576
iommu_detach_group(domain->domain, group->iommu_group);
drivers/vfio/vfio_iommu_type1.c
2587
if (list_empty(&domain->group_list)) {
drivers/vfio/vfio_iommu_type1.c
2597
iommu_domain_free(domain->domain);
drivers/vfio/vfio_iommu_type1.c
2598
list_del(&domain->next);
drivers/vfio/vfio_iommu_type1.c
2599
kfree(domain);
drivers/vfio/vfio_iommu_type1.c
2657
static void vfio_release_domain(struct vfio_domain *domain)
drivers/vfio/vfio_iommu_type1.c
2662
&domain->group_list, next) {
drivers/vfio/vfio_iommu_type1.c
2663
iommu_detach_group(domain->domain, group->iommu_group);
drivers/vfio/vfio_iommu_type1.c
2668
iommu_domain_free(domain->domain);
drivers/vfio/vfio_iommu_type1.c
2674
struct vfio_domain *domain, *domain_tmp;
drivers/vfio/vfio_iommu_type1.c
2685
list_for_each_entry_safe(domain, domain_tmp,
drivers/vfio/vfio_iommu_type1.c
2687
vfio_release_domain(domain);
drivers/vfio/vfio_iommu_type1.c
2688
list_del(&domain->next);
drivers/vfio/vfio_iommu_type1.c
2689
kfree(domain);
drivers/vfio/vfio_iommu_type1.c
2699
struct vfio_domain *domain;
drivers/vfio/vfio_iommu_type1.c
2703
list_for_each_entry(domain, &iommu->domain_list, next) {
drivers/vfio/vfio_iommu_type1.c
2704
if (!(domain->enforce_cache_coherency)) {
drivers/vfio/vfio_iommu_type1.c
3234
struct iommu_domain *domain = ERR_PTR(-ENODEV);
drivers/vfio/vfio_iommu_type1.c
3244
domain = d->domain;
drivers/vfio/vfio_iommu_type1.c
3250
return domain;
drivers/vfio/vfio_iommu_type1.c
82
struct iommu_domain *domain;
drivers/vhost/vdpa.c
1012
r = iommu_map(v->domain, iova, pa, size,
drivers/vhost/vdpa.c
1337
v->domain = iommu_paging_domain_alloc(dma_dev);
drivers/vhost/vdpa.c
1338
if (IS_ERR(v->domain)) {
drivers/vhost/vdpa.c
1339
ret = PTR_ERR(v->domain);
drivers/vhost/vdpa.c
1340
v->domain = NULL;
drivers/vhost/vdpa.c
1344
ret = iommu_attach_device(v->domain, dma_dev);
drivers/vhost/vdpa.c
1351
iommu_domain_free(v->domain);
drivers/vhost/vdpa.c
1352
v->domain = NULL;
drivers/vhost/vdpa.c
1362
if (v->domain) {
drivers/vhost/vdpa.c
1363
iommu_detach_device(v->domain, dma_dev);
drivers/vhost/vdpa.c
1364
iommu_domain_free(v->domain);
drivers/vhost/vdpa.c
1367
v->domain = NULL;
drivers/vhost/vdpa.c
1378
} else if (v->domain && v->domain->geometry.force_aperture) {
drivers/vhost/vdpa.c
1379
range->first = v->domain->geometry.aperture_start;
drivers/vhost/vdpa.c
1380
range->last = v->domain->geometry.aperture_end;
drivers/vhost/vdpa.c
47
struct iommu_domain *domain;
drivers/vhost/vdpa.c
916
iommu_unmap(v->domain, map->start, map->size);
drivers/video/fbdev/nvidia/nv_hw.c
686
int domain = pci_domain_nr(par->pci_dev->bus);
drivers/video/fbdev/nvidia/nv_hw.c
690
dev = pci_get_domain_bus_and_slot(domain, 0, 3);
drivers/video/fbdev/nvidia/nv_hw.c
698
dev = pci_get_domain_bus_and_slot(domain, 0, 5);
drivers/video/fbdev/nvidia/nv_hw.c
711
dev = pci_get_domain_bus_and_slot(domain, 0, 1);
drivers/video/fbdev/nvidia/nv_hw.c
717
dev = pci_get_domain_bus_and_slot(domain, 0, 3);
drivers/video/fbdev/nvidia/nv_hw.c
725
dev = pci_get_domain_bus_and_slot(domain, 0, 2);
drivers/video/fbdev/riva/nv_driver.c
162
int domain = pci_domain_nr(par->pdev->bus);
drivers/video/fbdev/riva/nv_driver.c
230
dev = pci_get_domain_bus_and_slot(domain, 0, 1);
drivers/video/fbdev/riva/nv_driver.c
235
dev = pci_get_domain_bus_and_slot(domain, 0, 1);
drivers/video/fbdev/riva/riva_hw.c
1103
int domain = pci_domain_nr(pdev->bus);
drivers/video/fbdev/riva/riva_hw.c
1105
dev = pci_get_domain_bus_and_slot(domain, 0, 3);
drivers/video/fbdev/riva/riva_hw.c
1120
dev = pci_get_domain_bus_and_slot(domain, 0, 1);
drivers/video/fbdev/riva/riva_hw.c
2077
int domain = pci_domain_nr(pdev->bus);
drivers/video/fbdev/riva/riva_hw.c
2090
dev = pci_get_domain_bus_and_slot(domain, 0, 1);
drivers/video/fbdev/riva/riva_hw.c
2095
dev = pci_get_domain_bus_and_slot(domain, 0, 1);
drivers/watchdog/octeon-wdt-main.c
312
struct irq_domain *domain;
drivers/watchdog/octeon-wdt-main.c
315
domain = octeon_irq_get_block_domain(node,
drivers/watchdog/octeon-wdt-main.c
318
irq = irq_find_mapping(domain, hwirq);
drivers/watchdog/octeon-wdt-main.c
352
struct irq_domain *domain;
drivers/watchdog/octeon-wdt-main.c
368
domain = octeon_irq_get_block_domain(node, WD_BLOCK_NUMBER);
drivers/watchdog/octeon-wdt-main.c
372
irq = irq_create_mapping(domain, hwirq);
drivers/xen/pci.c
302
domid_t domain;
drivers/xen/pci.c
324
int domain = -ENODEV;
drivers/xen/pci.c
329
domain = owner->domain;
drivers/xen/pci.c
331
return domain;
drivers/xen/pci.c
335
int xen_register_device_domain_owner(struct pci_dev *dev, uint16_t domain)
drivers/xen/pci.c
349
owner->domain = domain;
drivers/xen/privcmd.c
245
domid_t domain;
drivers/xen/privcmd.c
269
st->domain, NULL);
drivers/xen/privcmd.c
323
state.domain = mmapcmd.dom;
drivers/xen/privcmd.c
340
domid_t domain;
drivers/xen/privcmd.c
377
st->domain, cur_pages);
drivers/xen/privcmd.c
571
state.domain = m.dom;
drivers/xen/pvcalls-back.c
270
if (req->u.socket.domain != AF_INET ||
drivers/xen/pvcalls-front.c
323
req->u.socket.domain = AF_INET;
drivers/xen/xen-acpi-processor.c
378
acpi_psd[acpi_id].domain);
drivers/xen/xen-pciback/passthrough.c
125
unsigned int domain, bus;
drivers/xen/xen-pciback/passthrough.c
144
domain = (unsigned int)pci_domain_nr(dev_entry->dev->bus);
drivers/xen/xen-pciback/passthrough.c
148
err = publish_root_cb(pdev, domain, bus);
drivers/xen/xen-pciback/passthrough.c
179
unsigned int *domain, unsigned int *bus,
drivers/xen/xen-pciback/passthrough.c
182
*domain = pci_domain_nr(pcidev->bus);
drivers/xen/xen-pciback/passthrough.c
21
unsigned int domain,
drivers/xen/xen-pciback/passthrough.c
32
if (domain == (unsigned int)pci_domain_nr(dev_entry->dev->bus)
drivers/xen/xen-pciback/passthrough.c
51
unsigned int domain, bus, devfn;
drivers/xen/xen-pciback/passthrough.c
64
domain = (unsigned int)pci_domain_nr(dev->bus);
drivers/xen/xen-pciback/passthrough.c
67
err = publish_cb(pdev, domain, bus, devfn, devid);
drivers/xen/xen-pciback/pci_stub.c
1047
static inline int str_to_slot(const char *buf, int *domain, int *bus,
drivers/xen/xen-pciback/pci_stub.c
1052
switch (sscanf(buf, " %x:%x:%x.%x %n", domain, bus, slot, func,
drivers/xen/xen-pciback/pci_stub.c
1056
sscanf(buf, " %x:%x:%x.* %n", domain, bus, slot, &parsed);
drivers/xen/xen-pciback/pci_stub.c
1060
sscanf(buf, " %x:%x:*.* %n", domain, bus, &parsed);
drivers/xen/xen-pciback/pci_stub.c
1067
*domain = 0;
drivers/xen/xen-pciback/pci_stub.c
1084
static inline int str_to_quirk(const char *buf, int *domain, int *bus, int
drivers/xen/xen-pciback/pci_stub.c
1089
sscanf(buf, " %x:%x:%x.%x-%x:%x:%x %n", domain, bus, slot, func,
drivers/xen/xen-pciback/pci_stub.c
1095
*domain = 0;
drivers/xen/xen-pciback/pci_stub.c
1104
static int pcistub_device_id_add(int domain, int bus, int slot, int func)
drivers/xen/xen-pciback/pci_stub.c
1111
rc = pcistub_device_id_add(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
1117
rc = pcistub_device_id_add(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
1124
!pci_domains_supported ? domain :
drivers/xen/xen-pciback/pci_stub.c
1126
domain < 0 || domain > 0xffff)
drivers/xen/xen-pciback/pci_stub.c
1137
domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
1139
pcistub_device_id_add_list(pci_dev_id, domain, bus, devfn);
drivers/xen/xen-pciback/pci_stub.c
1144
static int pcistub_device_id_remove(int domain, int bus, int slot, int func)
drivers/xen/xen-pciback/pci_stub.c
1153
if (pci_dev_id->domain == domain && pci_dev_id->bus == bus
drivers/xen/xen-pciback/pci_stub.c
1165
domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
1173
static int pcistub_reg_add(int domain, int bus, int slot, int func,
drivers/xen/xen-pciback/pci_stub.c
1185
psdev = pcistub_device_find(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
1218
int domain, bus, slot, func;
drivers/xen/xen-pciback/pci_stub.c
1221
err = str_to_slot(buf, &domain, &bus, &slot, &func);
drivers/xen/xen-pciback/pci_stub.c
1225
err = pcistub_device_id_add(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
1237
int domain, bus, slot, func;
drivers/xen/xen-pciback/pci_stub.c
1240
err = str_to_slot(buf, &domain, &bus, &slot, &func);
drivers/xen/xen-pciback/pci_stub.c
1244
err = pcistub_device_id_remove(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
1266
pci_dev_id->domain, pci_dev_id->bus,
drivers/xen/xen-pciback/pci_stub.c
1310
int domain, bus, slot, func;
drivers/xen/xen-pciback/pci_stub.c
1313
err = str_to_slot(buf, &domain, &bus, &slot, &func);
drivers/xen/xen-pciback/pci_stub.c
1317
psdev = pcistub_device_find(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
1348
int domain, bus, slot, func, reg, size, mask;
drivers/xen/xen-pciback/pci_stub.c
1351
err = str_to_quirk(buf, &domain, &bus, &slot, &func, ®, &size,
drivers/xen/xen-pciback/pci_stub.c
1356
err = pcistub_reg_add(domain, bus, slot, func, reg, size, mask);
drivers/xen/xen-pciback/pci_stub.c
1412
int domain, bus, slot, func;
drivers/xen/xen-pciback/pci_stub.c
1417
err = str_to_slot(buf, &domain, &bus, &slot, &func);
drivers/xen/xen-pciback/pci_stub.c
1421
psdev = pcistub_device_find(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
1476
int domain, bus, slot, func;
drivers/xen/xen-pciback/pci_stub.c
1481
err = str_to_slot(buf, &domain, &bus, &slot, &func);
drivers/xen/xen-pciback/pci_stub.c
1485
psdev = pcistub_device_find(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
1554
int domain, bus, slot, func;
drivers/xen/xen-pciback/pci_stub.c
1563
&domain, &bus, &slot, &func, &parsed);
drivers/xen/xen-pciback/pci_stub.c
1569
&domain, &bus, &slot, &parsed);
drivers/xen/xen-pciback/pci_stub.c
1575
&domain, &bus, &parsed);
drivers/xen/xen-pciback/pci_stub.c
1580
domain = 0;
drivers/xen/xen-pciback/pci_stub.c
1603
err = pcistub_device_id_add(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
176
static struct pcistub_device *pcistub_device_find_locked(int domain, int bus,
drivers/xen/xen-pciback/pci_stub.c
183
&& domain == pci_domain_nr(psdev->dev->bus)
drivers/xen/xen-pciback/pci_stub.c
194
static struct pcistub_device *pcistub_device_find(int domain, int bus,
drivers/xen/xen-pciback/pci_stub.c
202
psdev = pcistub_device_find_locked(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
233
int domain = (sbdf >> 16) & 0xffff;
drivers/xen/xen-pciback/pci_stub.c
238
psdev = pcistub_device_find(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
248
int domain, int bus,
drivers/xen/xen-pciback/pci_stub.c
257
psdev = pcistub_device_find_locked(domain, bus, slot, func);
drivers/xen/xen-pciback/pci_stub.c
343
if (pci_domain_nr(dev->bus) == pdev_id->domain
drivers/xen/xen-pciback/pci_stub.c
45
int domain;
drivers/xen/xen-pciback/pci_stub.c
530
int domain, int bus, unsigned int devfn)
drivers/xen/xen-pciback/pci_stub.c
539
if (pci_dev_id->domain == domain && pci_dev_id->bus == bus &&
drivers/xen/xen-pciback/pci_stub.c
547
new->domain = domain;
drivers/xen/xen-pciback/pci_stub.c
755
&aer_op->domain, &aer_op->bus, &aer_op->devfn);
drivers/xen/xen-pciback/pci_stub.c
763
aer_cmd, aer_op->domain, aer_op->bus, aer_op->devfn);
drivers/xen/xen-pciback/pciback.h
107
unsigned int *domain, unsigned int *bus,
drivers/xen/xen-pciback/pciback.h
115
unsigned int domain, unsigned int bus,
drivers/xen/xen-pciback/pciback.h
141
xen_pcibk_get_pci_dev(struct xen_pcibk_device *pdev, unsigned int domain,
drivers/xen/xen-pciback/pciback.h
145
return xen_pcibk_backend->get(pdev, domain, bus, devfn);
drivers/xen/xen-pciback/pciback.h
157
unsigned int *domain,
drivers/xen/xen-pciback/pciback.h
162
return xen_pcibk_backend->find(pcidev, pdev, domain, bus,
drivers/xen/xen-pciback/pciback.h
68
int domain, int bus,
drivers/xen/xen-pciback/pciback.h
93
unsigned int domain, unsigned int bus,
drivers/xen/xen-pciback/pciback.h
96
unsigned int domain, unsigned int bus);
drivers/xen/xen-pciback/pciback_ops.c
333
dev = xen_pcibk_get_pci_dev(pdev, op->domain, op->bus, op->devfn);
drivers/xen/xen-pciback/vpci.c
234
unsigned int *domain, unsigned int *bus,
drivers/xen/xen-pciback/vpci.c
248
*domain = 0;
drivers/xen/xen-pciback/vpci.c
32
unsigned int domain,
drivers/xen/xen-pciback/vpci.c
40
if (domain != 0 || bus != 0)
drivers/xen/xen-pciback/xenbus.c
204
unsigned int domain, unsigned int bus,
drivers/xen/xen-pciback/xenbus.c
219
"%04x:%02x:%02x.%02x", domain, bus,
drivers/xen/xen-pciback/xenbus.c
227
int domain, int bus, int slot, int func,
drivers/xen/xen-pciback/xenbus.c
234
domain, bus, slot, func);
drivers/xen/xen-pciback/xenbus.c
236
dev = pcistub_get_pci_dev_by_slot(pdev, domain, bus, slot, func);
drivers/xen/xen-pciback/xenbus.c
243
domain, bus, slot, func);
drivers/xen/xen-pciback/xenbus.c
274
int domain, int bus, int slot, int func)
drivers/xen/xen-pciback/xenbus.c
280
domain, bus, slot, func);
drivers/xen/xen-pciback/xenbus.c
282
dev = xen_pcibk_get_pci_dev(pdev, domain, bus, PCI_DEVFN(slot, func));
drivers/xen/xen-pciback/xenbus.c
287
domain, bus, slot, func);
drivers/xen/xen-pciback/xenbus.c
303
unsigned int domain, unsigned int bus)
drivers/xen/xen-pciback/xenbus.c
335
if (d == domain && b == bus) {
drivers/xen/xen-pciback/xenbus.c
348
root_num, domain, bus);
drivers/xen/xen-pciback/xenbus.c
351
"%04x:%02x", domain, bus);
drivers/xen/xen-pciback/xenbus.c
367
int domain, bus, slot, func;
drivers/xen/xen-pciback/xenbus.c
416
&domain, &bus, &slot, &func);
drivers/xen/xen-pciback/xenbus.c
431
err = xen_pcibk_export_device(pdev, domain, bus, slot,
drivers/xen/xen-pciback/xenbus.c
470
&domain, &bus, &slot, &func);
drivers/xen/xen-pciback/xenbus.c
485
err = xen_pcibk_remove_device(pdev, domain, bus, slot,
drivers/xen/xen-pciback/xenbus.c
565
int domain, bus, slot, func;
drivers/xen/xen-pciback/xenbus.c
602
"%x:%x:%x.%x", &domain, &bus, &slot, &func);
drivers/xen/xen-pciback/xenbus.c
616
err = xen_pcibk_export_device(pdev, domain, bus, slot, func, i);
fs/erofs/fscache.c
371
static void erofs_fscache_domain_put(struct erofs_domain *domain)
fs/erofs/fscache.c
374
if (refcount_dec_and_test(&domain->ref)) {
fs/erofs/fscache.c
375
list_del(&domain->list);
fs/erofs/fscache.c
380
fscache_relinquish_volume(domain->volume, NULL, false);
fs/erofs/fscache.c
382
kfree_sensitive(domain->domain_id);
fs/erofs/fscache.c
383
kfree(domain);
fs/erofs/fscache.c
417
struct erofs_domain *domain;
fs/erofs/fscache.c
420
domain = kzalloc_obj(struct erofs_domain);
fs/erofs/fscache.c
421
if (!domain)
fs/erofs/fscache.c
424
domain->domain_id = kstrdup(sbi->domain_id, GFP_KERNEL);
fs/erofs/fscache.c
425
if (!domain->domain_id) {
fs/erofs/fscache.c
426
kfree(domain);
fs/erofs/fscache.c
443
domain->volume = sbi->volume;
fs/erofs/fscache.c
444
refcount_set(&domain->ref, 1);
fs/erofs/fscache.c
445
list_add(&domain->list, &erofs_domain_list);
fs/erofs/fscache.c
446
sbi->domain = domain;
fs/erofs/fscache.c
449
kfree_sensitive(domain->domain_id);
fs/erofs/fscache.c
450
kfree(domain);
fs/erofs/fscache.c
457
struct erofs_domain *domain;
fs/erofs/fscache.c
461
list_for_each_entry(domain, &erofs_domain_list, list) {
fs/erofs/fscache.c
462
if (!strcmp(domain->domain_id, sbi->domain_id)) {
fs/erofs/fscache.c
463
sbi->domain = domain;
fs/erofs/fscache.c
464
sbi->volume = domain->volume;
fs/erofs/fscache.c
465
refcount_inc(&domain->ref);
fs/erofs/fscache.c
543
struct erofs_domain *domain = EROFS_SB(sb)->domain;
fs/erofs/fscache.c
555
refcount_inc(&domain->ref);
fs/erofs/fscache.c
556
ctx->domain = domain;
fs/erofs/fscache.c
565
struct erofs_domain *domain = EROFS_SB(sb)->domain;
fs/erofs/fscache.c
570
if (ctx->domain != domain || strcmp(ctx->name, name))
fs/erofs/fscache.c
576
domain->domain_id);
fs/erofs/fscache.c
598
struct erofs_domain *domain = NULL;
fs/erofs/fscache.c
602
if (!ctx->domain)
fs/erofs/fscache.c
607
domain = ctx->domain;
fs/erofs/fscache.c
612
if (domain)
fs/erofs/fscache.c
613
erofs_fscache_domain_put(domain);
fs/erofs/fscache.c
656
if (sbi->domain)
fs/erofs/fscache.c
657
erofs_fscache_domain_put(sbi->domain);
fs/erofs/fscache.c
663
sbi->domain = NULL;
fs/erofs/internal.h
165
struct erofs_domain *domain;
fs/erofs/internal.h
93
struct erofs_domain *domain;
fs/nfs/nfs4xdr.c
5806
memcpy(res->impl_id->domain, dummy_str, dummy);
fs/nfs/super.c
614
impl_id->name, impl_id->domain,
fs/nfs/sysfs.c
304
if (!impl_id || strlen(impl_id->domain) == 0)
fs/nfs/sysfs.c
306
return sysfs_emit(buf, "%s\n", impl_id->domain);
fs/nfsd/trace.h
2025
const char *domain,
fs/nfsd/trace.h
2029
TP_ARGS(net, domain, path, maxsize),
fs/nfsd/trace.h
2033
__string(domain, domain)
fs/nfsd/trace.h
2039
__assign_str(domain);
fs/nfsd/trace.h
2043
__get_str(domain), __get_str(path), __entry->maxsize
fs/ocfs2/dlm/dlmapi.h
181
struct dlm_ctxt * dlm_register_domain(const char *domain, u32 key,
fs/ocfs2/dlm/dlmcommon.h
693
u8 domain[O2NM_MAX_NAME_LEN];
fs/ocfs2/dlm/dlmcommon.h
702
u8 domain[O2NM_MAX_NAME_LEN];
fs/ocfs2/dlm/dlmcommon.h
710
u8 domain[O2NM_MAX_NAME_LEN];
fs/ocfs2/dlm/dlmdomain.c
1329
cancel->domain);
fs/ocfs2/dlm/dlmdomain.c
1332
dlm = __dlm_lookup_domain_full(cancel->domain, cancel->name_len);
fs/ocfs2/dlm/dlmdomain.c
1358
memcpy(cancel_msg.domain, dlm->name, cancel_msg.name_len);
fs/ocfs2/dlm/dlmdomain.c
1425
memcpy(join_msg.domain, dlm->name, join_msg.name_len);
fs/ocfs2/dlm/dlmdomain.c
1510
memcpy(assert_msg.domain, dlm->name, assert_msg.name_len);
fs/ocfs2/dlm/dlmdomain.c
1942
static struct dlm_ctxt *dlm_alloc_ctxt(const char *domain,
fs/ocfs2/dlm/dlmdomain.c
1956
dlm->name = kstrdup(domain, GFP_KERNEL);
fs/ocfs2/dlm/dlmdomain.c
2097
struct dlm_ctxt * dlm_register_domain(const char *domain,
fs/ocfs2/dlm/dlmdomain.c
2105
if (strlen(domain) >= O2NM_MAX_NAME_LEN) {
fs/ocfs2/dlm/dlmdomain.c
2111
mlog(0, "register called for domain \"%s\"\n", domain);
fs/ocfs2/dlm/dlmdomain.c
2123
dlm = __dlm_lookup_domain(domain);
fs/ocfs2/dlm/dlmdomain.c
2131
domain));
fs/ocfs2/dlm/dlmdomain.c
2140
"\"%s\"\n", domain);
fs/ocfs2/dlm/dlmdomain.c
2158
new_ctxt = dlm_alloc_ctxt(domain, key);
fs/ocfs2/dlm/dlmdomain.c
244
static struct dlm_ctxt * __dlm_lookup_domain_full(const char *domain, int len)
fs/ocfs2/dlm/dlmdomain.c
254
memcmp(tmp->name, domain, len)==0)
fs/ocfs2/dlm/dlmdomain.c
262
static struct dlm_ctxt * __dlm_lookup_domain(const char *domain)
fs/ocfs2/dlm/dlmdomain.c
266
return __dlm_lookup_domain_full(domain, strlen(domain));
fs/ocfs2/dlm/dlmdomain.c
273
static int dlm_wait_on_domain_helper(const char *domain)
fs/ocfs2/dlm/dlmdomain.c
280
tmp = __dlm_lookup_domain(domain);
fs/ocfs2/dlm/dlmdomain.c
803
query->domain);
fs/ocfs2/dlm/dlmdomain.c
821
dlm = __dlm_lookup_domain_full(query->domain, query->name_len);
fs/ocfs2/dlm/dlmdomain.c
918
assert->domain);
fs/ocfs2/dlm/dlmdomain.c
921
dlm = __dlm_lookup_domain_full(assert->domain, assert->name_len);
fs/ocfs2/dlmfs/dlmfs.c
412
const struct qstr *domain = &dentry->d_name;
fs/ocfs2/dlmfs/dlmfs.c
416
mlog(0, "mkdir %.*s\n", domain->len, domain->name);
fs/ocfs2/dlmfs/dlmfs.c
419
if (domain->len >= GROUP_NAME_MAX) {
fs/ocfs2/dlmfs/dlmfs.c
434
conn = user_dlm_register(domain);
fs/ocfs2/dlmfs/dlmfs.c
438
status, domain->len, domain->name);
fs/smb/client/cifsencrypt.c
265
wchar_t *domain;
fs/smb/client/cifsencrypt.c
293
domain = kmalloc(2 + (len * 2), GFP_KERNEL);
fs/smb/client/cifsencrypt.c
294
if (domain == NULL)
fs/smb/client/cifsencrypt.c
297
len = cifs_strtoUTF16((__le16 *)domain, ses->domainName, len,
fs/smb/client/cifsencrypt.c
299
hmac_md5_update(&hmac_ctx, (const u8 *)domain, 2 * len);
fs/smb/client/cifsencrypt.c
300
kfree(domain);
fs/smb/server/auth.c
106
domain = kzalloc(2 + UNICODE_LEN(len), KSMBD_DEFAULT_GFP);
fs/smb/server/auth.c
107
if (!domain) {
fs/smb/server/auth.c
112
conv_len = smb_strtoUTF16((__le16 *)domain, dname, len,
fs/smb/server/auth.c
119
hmac_md5_update(&ctx, (const u8 *)domain, UNICODE_LEN(conv_len));
fs/smb/server/auth.c
124
kfree(domain);
fs/smb/server/auth.c
79
wchar_t *domain = NULL;
fs/smb/server/smbacl.c
1531
memcpy(&server_conf.domain_sid, &domain, sizeof(struct smb_sid));
fs/smb/server/smbacl.c
20
static const struct smb_sid domain = {1, 4, {0, 0, 0, 0, 0, 5},
fs/xfs/libxfs/xfs_fs.h
1113
__u32 domain;
fs/xfs/libxfs/xfs_refcount.c
1022
enum xfs_refc_domain domain,
fs/xfs/libxfs/xfs_refcount.c
103
cur->bc_rec.rc.rc_domain = domain;
fs/xfs/libxfs/xfs_refcount.c
1040
error = xfs_refcount_find_left_extents(cur, &left, &cleft, domain,
fs/xfs/libxfs/xfs_refcount.c
1044
error = xfs_refcount_find_right_extents(cur, &right, &cright, domain,
fs/xfs/libxfs/xfs_refcount.c
2172
enum xfs_refc_domain domain,
fs/xfs/libxfs/xfs_refcount.c
2184
low.rc.rc_domain = high.rc.rc_domain = domain;
fs/xfs/libxfs/xfs_refcount.c
425
enum xfs_refc_domain domain,
fs/xfs/libxfs/xfs_refcount.c
434
error = xfs_refcount_lookup_le(cur, domain, agbno, &found_rec);
fs/xfs/libxfs/xfs_refcount.c
448
if (rcext.rc_domain != domain)
fs/xfs/libxfs/xfs_refcount.c
54
enum xfs_refc_domain domain,
fs/xfs/libxfs/xfs_refcount.c
59
xfs_refcount_encode_startblock(bno, domain),
fs/xfs/libxfs/xfs_refcount.c
63
cur->bc_rec.rc.rc_domain = domain;
fs/xfs/libxfs/xfs_refcount.c
705
enum xfs_refc_domain domain,
fs/xfs/libxfs/xfs_refcount.c
714
error = xfs_refcount_lookup_le(cur, domain, agbno - 1, &found_rec);
fs/xfs/libxfs/xfs_refcount.c
729
if (tmp.rc_domain != domain)
fs/xfs/libxfs/xfs_refcount.c
74
enum xfs_refc_domain domain,
fs/xfs/libxfs/xfs_refcount.c
749
if (tmp.rc_domain != domain)
fs/xfs/libxfs/xfs_refcount.c
768
cleft->rc_domain = domain;
fs/xfs/libxfs/xfs_refcount.c
779
cleft->rc_domain = domain;
fs/xfs/libxfs/xfs_refcount.c
79
xfs_refcount_encode_startblock(bno, domain),
fs/xfs/libxfs/xfs_refcount.c
798
enum xfs_refc_domain domain,
fs/xfs/libxfs/xfs_refcount.c
807
error = xfs_refcount_lookup_ge(cur, domain, agbno + aglen, &found_rec);
fs/xfs/libxfs/xfs_refcount.c
822
if (tmp.rc_domain != domain)
fs/xfs/libxfs/xfs_refcount.c
83
cur->bc_rec.rc.rc_domain = domain;
fs/xfs/libxfs/xfs_refcount.c
842
if (tmp.rc_domain != domain)
fs/xfs/libxfs/xfs_refcount.c
861
cright->rc_domain = domain;
fs/xfs/libxfs/xfs_refcount.c
872
cright->rc_domain = domain;
fs/xfs/libxfs/xfs_refcount.c
94
enum xfs_refc_domain domain,
fs/xfs/libxfs/xfs_refcount.c
99
xfs_refcount_encode_startblock(bno, domain),
fs/xfs/libxfs/xfs_refcount.h
119
enum xfs_refc_domain domain, xfs_agblock_t bno,
fs/xfs/libxfs/xfs_refcount.h
18
enum xfs_refc_domain domain, xfs_agblock_t bno, int *stat);
fs/xfs/libxfs/xfs_refcount.h
20
enum xfs_refc_domain domain, xfs_agblock_t bno, int *stat);
fs/xfs/libxfs/xfs_refcount.h
22
enum xfs_refc_domain domain, xfs_agblock_t bno, int *stat);
fs/xfs/libxfs/xfs_refcount.h
29
enum xfs_refc_domain domain)
fs/xfs/libxfs/xfs_refcount.h
39
if (domain != XFS_REFC_DOMAIN_SHARED)
fs/xfs/scrub/refcount_repair.c
164
enum xfs_refc_domain domain,
fs/xfs/scrub/refcount_repair.c
172
.rc_domain = domain,
fs/xfs/scrub/rtrefcount_repair.c
161
enum xfs_refc_domain domain,
fs/xfs/scrub/rtrefcount_repair.c
170
.rc_domain = domain,
fs/xfs/scrub/trace.h
2170
__field(enum xfs_refc_domain, domain)
fs/xfs/scrub/trace.h
2180
__entry->domain = rec->rc_domain;
fs/xfs/scrub/trace.h
2189
__print_symbolic(__entry->domain, XFS_REFC_DOMAIN_STRINGS),
fs/xfs/scrub/trace.h
944
__field(enum xfs_refc_domain, domain)
fs/xfs/scrub/trace.h
953
__entry->domain = irec->rc_domain;
fs/xfs/scrub/trace.h
962
__print_symbolic(__entry->domain, XFS_REFC_DOMAIN_STRINGS),
fs/xfs/xfs_healthmon.c
1215
running_event->domain = XFS_HEALTHMON_MOUNT;
fs/xfs/xfs_healthmon.c
1229
hm->unmount_event->domain = XFS_HEALTHMON_MOUNT;
fs/xfs/xfs_healthmon.c
184
existing->domain != new->domain)
fs/xfs/xfs_healthmon.c
200
switch (existing->domain) {
fs/xfs/xfs_healthmon.c
319
.domain = XFS_HEALTHMON_MOUNT,
fs/xfs/xfs_healthmon.c
465
.domain = XFS_HEALTHMON_FS,
fs/xfs/xfs_healthmon.c
501
event.domain = XFS_HEALTHMON_RTGROUP;
fs/xfs/xfs_healthmon.c
507
event.domain = XFS_HEALTHMON_AG;
fs/xfs/xfs_healthmon.c
535
.domain = XFS_HEALTHMON_INODE,
fs/xfs/xfs_healthmon.c
562
.domain = XFS_HEALTHMON_MOUNT,
fs/xfs/xfs_healthmon.c
603
.domain = media_error_domain(fdev),
fs/xfs/xfs_healthmon.c
648
.domain = XFS_HEALTHMON_FILERANGE,
fs/xfs/xfs_healthmon.c
753
if (event->domain < 0 || event->domain >= ARRAY_SIZE(domain_map) ||
fs/xfs/xfs_healthmon.c
757
hme.domain = domain_map[event->domain];
fs/xfs/xfs_healthmon.c
761
switch (event->domain) {
fs/xfs/xfs_healthmon.h
116
enum xfs_healthmon_domain domain;
fs/xfs/xfs_trace.h
3643
__field(enum xfs_refc_domain, domain)
fs/xfs/xfs_trace.h
3652
__entry->domain = irec->rc_domain;
fs/xfs/xfs_trace.h
3661
__print_symbolic(__entry->domain, XFS_REFC_DOMAIN_STRINGS),
fs/xfs/xfs_trace.h
3681
__field(enum xfs_refc_domain, domain)
fs/xfs/xfs_trace.h
3691
__entry->domain = irec->rc_domain;
fs/xfs/xfs_trace.h
3701
__print_symbolic(__entry->domain, XFS_REFC_DOMAIN_STRINGS),
fs/xfs/xfs_trace.h
6068
__field(unsigned int, domain)
fs/xfs/xfs_trace.h
6080
__entry->domain = event->domain;
fs/xfs/xfs_trace.h
6088
switch (__entry->domain) {
fs/xfs/xfs_trace.h
6129
__print_symbolic(__entry->domain, XFS_HEALTHMON_DOMAIN_STRINGS),
fs/xfs/xfs_trace.h
6159
__field(unsigned int, domain)
fs/xfs/xfs_trace.h
6167
__entry->domain = event->domain;
fs/xfs/xfs_trace.h
6175
__print_symbolic(__entry->domain, XFS_HEALTHMON_DOMAIN_STRINGS),
fs/xfs/xfs_trace.h
6189
__field(unsigned int, domain)
fs/xfs/xfs_trace.h
6198
__entry->domain = event->domain;
fs/xfs/xfs_trace.h
6207
__print_symbolic(__entry->domain, XFS_HEALTHMON_DOMAIN_STRINGS),
fs/xfs/xfs_trace.h
6222
__field(unsigned int, domain)
fs/xfs/xfs_trace.h
6232
__entry->domain = event->domain;
fs/xfs/xfs_trace.h
6242
__print_symbolic(__entry->domain, XFS_HEALTHMON_DOMAIN_STRINGS),
include/acpi/processor.h
106
u64 domain;
include/acpi/processor.h
147
u64 domain;
include/linux/aer.h
72
void aer_recover_queue(int domain, unsigned int bus, unsigned int devfn,
include/linux/async.h
112
struct async_domain *domain)
include/linux/async.h
114
return async_schedule_node_domain(func, dev, dev_to_node(dev), domain);
include/linux/async.h
118
extern void async_synchronize_full_domain(struct async_domain *domain);
include/linux/async.h
121
struct async_domain *domain);
include/linux/async.h
42
struct async_domain *domain);
include/linux/async.h
70
struct async_domain *domain)
include/linux/async.h
72
return async_schedule_node_domain(func, data, NUMA_NO_NODE, domain);
include/linux/cs5535.h
221
int domain);
include/linux/device.h
387
struct irq_domain *domain;
include/linux/device.h
821
return dev->msi.domain;
include/linux/device.h
830
dev->msi.domain = d;
include/linux/generic_pt/iommu.h
195
phys_addr_t pt_iommu_##fmt##_iova_to_phys(struct iommu_domain *domain, \
include/linux/generic_pt/iommu.h
197
int pt_iommu_##fmt##_map_pages(struct iommu_domain *domain, \
include/linux/generic_pt/iommu.h
202
struct iommu_domain *domain, unsigned long iova, \
include/linux/generic_pt/iommu.h
206
struct iommu_domain *domain, unsigned long iova, size_t size, \
include/linux/generic_pt/iommu.h
244
static_assert(offsetof(s, pt_iommu_memb.domain) == \
include/linux/generic_pt/iommu.h
41
struct iommu_domain domain;
include/linux/gpio/driver.h
65
struct irq_domain *domain;
include/linux/gpio/driver.h
689
struct irq_domain *domain);
include/linux/gpio/driver.h
695
struct irq_domain *domain)
include/linux/intel_rapl.h
93
struct rapl_domain *domain;
include/linux/iommu.h
1044
static inline void iommu_iotlb_gather_add_page(struct iommu_domain *domain,
include/linux/iommu.h
1055
iommu_iotlb_sync(domain, gather);
include/linux/iommu.h
1126
struct iommu_domain *domain;
include/linux/iommu.h
1185
int iommu_attach_device_pasid(struct iommu_domain *domain,
include/linux/iommu.h
1188
void iommu_detach_device_pasid(struct iommu_domain *domain,
include/linux/iommu.h
1223
static inline void iommu_domain_free(struct iommu_domain *domain)
include/linux/iommu.h
1227
static inline int iommu_attach_device(struct iommu_domain *domain,
include/linux/iommu.h
1233
static inline void iommu_detach_device(struct iommu_domain *domain,
include/linux/iommu.h
1243
static inline int iommu_map(struct iommu_domain *domain, unsigned long iova,
include/linux/iommu.h
1249
static inline size_t iommu_unmap(struct iommu_domain *domain,
include/linux/iommu.h
1255
static inline size_t iommu_unmap_fast(struct iommu_domain *domain,
include/linux/iommu.h
1262
static inline ssize_t iommu_map_sg(struct iommu_domain *domain,
include/linux/iommu.h
1269
static inline void iommu_flush_iotlb_all(struct iommu_domain *domain)
include/linux/iommu.h
1273
static inline void iommu_iotlb_sync(struct iommu_domain *domain,
include/linux/iommu.h
1278
static inline phys_addr_t iommu_iova_to_phys(struct iommu_domain *domain, dma_addr_t iova)
include/linux/iommu.h
1283
static inline void iommu_set_fault_handler(struct iommu_domain *domain,
include/linux/iommu.h
1317
static inline int iommu_attach_group(struct iommu_domain *domain,
include/linux/iommu.h
1323
static inline void iommu_detach_group(struct iommu_domain *domain,
include/linux/iommu.h
1381
static inline int iommu_set_pgtable_quirks(struct iommu_domain *domain,
include/linux/iommu.h
1403
static inline void iommu_iotlb_gather_add_page(struct iommu_domain *domain,
include/linux/iommu.h
1501
static inline int iommu_attach_device_pasid(struct iommu_domain *domain,
include/linux/iommu.h
1508
static inline void iommu_detach_device_pasid(struct iommu_domain *domain,
include/linux/iommu.h
1560
static inline ssize_t iommu_map_sgtable(struct iommu_domain *domain,
include/linux/iommu.h
1563
return iommu_map_sg(domain, iova, sgt->sgl, sgt->orig_nents, prot,
include/linux/iommu.h
1575
int iommu_get_msi_cookie(struct iommu_domain *domain, dma_addr_t base);
include/linux/iommu.h
1577
static inline int iommu_get_msi_cookie(struct iommu_domain *domain, dma_addr_t base)
include/linux/iommu.h
253
static inline bool iommu_is_dma_domain(struct iommu_domain *domain)
include/linux/iommu.h
255
return domain->type & __IOMMU_DOMAIN_DMA_API;
include/linux/iommu.h
385
int (*set_dirty_tracking)(struct iommu_domain *domain, bool enabled);
include/linux/iommu.h
386
int (*read_and_clear_dirty)(struct iommu_domain *domain,
include/linux/iommu.h
754
int (*attach_dev)(struct iommu_domain *domain, struct device *dev,
include/linux/iommu.h
756
int (*set_dev_pasid)(struct iommu_domain *domain, struct device *dev,
include/linux/iommu.h
759
int (*map_pages)(struct iommu_domain *domain, unsigned long iova,
include/linux/iommu.h
762
size_t (*unmap_pages)(struct iommu_domain *domain, unsigned long iova,
include/linux/iommu.h
766
void (*flush_iotlb_all)(struct iommu_domain *domain);
include/linux/iommu.h
767
int (*iotlb_sync_map)(struct iommu_domain *domain, unsigned long iova,
include/linux/iommu.h
769
void (*iotlb_sync)(struct iommu_domain *domain,
include/linux/iommu.h
771
int (*cache_invalidate_user)(struct iommu_domain *domain,
include/linux/iommu.h
774
phys_addr_t (*iova_to_phys)(struct iommu_domain *domain,
include/linux/iommu.h
777
bool (*enforce_cache_coherency)(struct iommu_domain *domain);
include/linux/iommu.h
778
int (*set_pgtable_quirks)(struct iommu_domain *domain,
include/linux/iommu.h
781
void (*free)(struct iommu_domain *domain);
include/linux/iommu.h
869
int iommu_deferred_attach(struct device *dev, struct iommu_domain *domain);
include/linux/iommu.h
907
extern void iommu_domain_free(struct iommu_domain *domain);
include/linux/iommu.h
908
extern int iommu_attach_device(struct iommu_domain *domain,
include/linux/iommu.h
910
extern void iommu_detach_device(struct iommu_domain *domain,
include/linux/iommu.h
915
extern int iommu_map(struct iommu_domain *domain, unsigned long iova,
include/linux/iommu.h
917
int iommu_map_nosync(struct iommu_domain *domain, unsigned long iova,
include/linux/iommu.h
919
int iommu_sync_map(struct iommu_domain *domain, unsigned long iova,
include/linux/iommu.h
921
extern size_t iommu_unmap(struct iommu_domain *domain, unsigned long iova,
include/linux/iommu.h
923
extern size_t iommu_unmap_fast(struct iommu_domain *domain,
include/linux/iommu.h
926
extern ssize_t iommu_map_sg(struct iommu_domain *domain, unsigned long iova,
include/linux/iommu.h
929
extern phys_addr_t iommu_iova_to_phys(struct iommu_domain *domain, dma_addr_t iova);
include/linux/iommu.h
930
extern void iommu_set_fault_handler(struct iommu_domain *domain,
include/linux/iommu.h
944
extern int iommu_attach_group(struct iommu_domain *domain,
include/linux/iommu.h
946
extern void iommu_detach_group(struct iommu_domain *domain,
include/linux/iommu.h
966
int iommu_set_pgtable_quirks(struct iommu_domain *domain,
include/linux/iommu.h
971
extern int report_iommu_fault(struct iommu_domain *domain, struct device *dev,
include/linux/iommu.h
974
static inline void iommu_flush_iotlb_all(struct iommu_domain *domain)
include/linux/iommu.h
976
if (domain->ops->flush_iotlb_all)
include/linux/iommu.h
977
domain->ops->flush_iotlb_all(domain);
include/linux/iommu.h
980
static inline void iommu_iotlb_sync(struct iommu_domain *domain,
include/linux/iommu.h
983
if (domain->ops->iotlb_sync &&
include/linux/iommu.h
985
domain->ops->iotlb_sync(domain, iotlb_gather);
include/linux/irq.h
1070
struct irq_domain *domain;
include/linux/irq.h
183
struct irq_domain *domain;
include/linux/irq_sim.h
20
int (*irq_sim_irq_requested)(struct irq_domain *domain,
include/linux/irq_sim.h
22
void (*irq_sim_irq_released)(struct irq_domain *domain,
include/linux/irq_sim.h
41
void irq_domain_remove_sim(struct irq_domain *domain);
include/linux/irqchip/arm-gic-v3.h
642
struct irq_domain *domain, u8 irq_prio);
include/linux/irqchip/arm-gic-v4.h
154
int its_init_v4(struct irq_domain *domain,
include/linux/irqchip/arm-gic-v4.h
21
struct irq_domain *domain;
include/linux/irqchip/irq-msi-lib.h
24
bool msi_lib_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
include/linux/irqdesc.h
199
int generic_handle_domain_irq(struct irq_domain *domain, irq_hw_number_t hwirq);
include/linux/irqdesc.h
200
int generic_handle_domain_irq_safe(struct irq_domain *domain, irq_hw_number_t hwirq);
include/linux/irqdesc.h
201
int generic_handle_domain_nmi(struct irq_domain *domain, irq_hw_number_t hwirq);
include/linux/irqdesc.h
202
bool generic_handle_demux_domain_irq(struct irq_domain *domain, irq_hw_number_t hwirq);
include/linux/irqdomain.h
372
void irq_set_default_domain(struct irq_domain *domain);
include/linux/irqdomain.h
384
void irq_domain_update_bus_token(struct irq_domain *domain, enum irq_domain_bus_token bus_token);
include/linux/irqdomain.h
431
unsigned int irq_create_direct_mapping(struct irq_domain *domain);
include/linux/irqdomain.h
475
void irq_domain_remove(struct irq_domain *domain);
include/linux/irqdomain.h
477
int irq_domain_associate(struct irq_domain *domain, unsigned int irq, irq_hw_number_t hwirq);
include/linux/irqdomain.h
478
void irq_domain_associate_many(struct irq_domain *domain, unsigned int irq_base,
include/linux/irqdomain.h
481
unsigned int irq_create_mapping_affinity(struct irq_domain *domain, irq_hw_number_t hwirq,
include/linux/irqdomain.h
498
static inline unsigned int irq_create_mapping(struct irq_domain *domain, irq_hw_number_t hwirq)
include/linux/irqdomain.h
500
return irq_create_mapping_affinity(domain, hwirq, NULL);
include/linux/irqdomain.h
503
struct irq_desc *__irq_resolve_mapping(struct irq_domain *domain,
include/linux/irqdomain.h
514
static inline struct irq_desc *irq_resolve_mapping(struct irq_domain *domain,
include/linux/irqdomain.h
517
return __irq_resolve_mapping(domain, hwirq, NULL);
include/linux/irqdomain.h
527
static inline unsigned int irq_find_mapping(struct irq_domain *domain,
include/linux/irqdomain.h
532
if (__irq_resolve_mapping(domain, hwirq, &irq))
include/linux/irqdomain.h
562
int irq_reserve_ipi(struct irq_domain *domain, const struct cpumask *dest);
include/linux/irqdomain.h
566
struct irq_data *irq_domain_get_irq_data(struct irq_domain *domain, unsigned int virq);
include/linux/irqdomain.h
567
void irq_domain_set_info(struct irq_domain *domain, unsigned int virq, irq_hw_number_t hwirq,
include/linux/irqdomain.h
608
int __irq_domain_alloc_irqs(struct irq_domain *domain, int irq_base, unsigned int nr_irqs,
include/linux/irqdomain.h
624
static inline int irq_domain_alloc_irqs(struct irq_domain *domain, unsigned int nr_irqs,
include/linux/irqdomain.h
627
return __irq_domain_alloc_irqs(domain, -1, nr_irqs, node, arg, false, NULL);
include/linux/irqdomain.h
630
int irq_domain_set_hwirq_and_chip(struct irq_domain *domain, unsigned int virq,
include/linux/irqdomain.h
633
void irq_domain_free_irqs_common(struct irq_domain *domain, unsigned int virq,
include/linux/irqdomain.h
635
void irq_domain_free_irqs_top(struct irq_domain *domain, unsigned int virq, unsigned int nr_irqs);
include/linux/irqdomain.h
637
int irq_domain_push_irq(struct irq_domain *domain, int virq, void *arg);
include/linux/irqdomain.h
638
int irq_domain_pop_irq(struct irq_domain *domain, int virq);
include/linux/irqdomain.h
640
int irq_domain_alloc_irqs_parent(struct irq_domain *domain, unsigned int irq_base,
include/linux/irqdomain.h
643
void irq_domain_free_irqs_parent(struct irq_domain *domain, unsigned int irq_base,
include/linux/irqdomain.h
646
int irq_domain_disconnect_hierarchy(struct irq_domain *domain, unsigned int virq);
include/linux/irqdomain.h
650
static inline bool irq_domain_is_hierarchy(struct irq_domain *domain)
include/linux/irqdomain.h
652
return domain->flags & IRQ_DOMAIN_FLAG_HIERARCHY;
include/linux/irqdomain.h
655
static inline bool irq_domain_is_ipi(struct irq_domain *domain)
include/linux/irqdomain.h
657
return domain->flags & (IRQ_DOMAIN_FLAG_IPI_PER_CPU | IRQ_DOMAIN_FLAG_IPI_SINGLE);
include/linux/irqdomain.h
660
static inline bool irq_domain_is_ipi_per_cpu(struct irq_domain *domain)
include/linux/irqdomain.h
662
return domain->flags & IRQ_DOMAIN_FLAG_IPI_PER_CPU;
include/linux/irqdomain.h
665
static inline bool irq_domain_is_ipi_single(struct irq_domain *domain)
include/linux/irqdomain.h
667
return domain->flags & IRQ_DOMAIN_FLAG_IPI_SINGLE;
include/linux/irqdomain.h
670
static inline bool irq_domain_is_msi(struct irq_domain *domain)
include/linux/irqdomain.h
672
return domain->flags & IRQ_DOMAIN_FLAG_MSI;
include/linux/irqdomain.h
675
static inline bool irq_domain_is_msi_parent(struct irq_domain *domain)
include/linux/irqdomain.h
677
return domain->flags & IRQ_DOMAIN_FLAG_MSI_PARENT;
include/linux/irqdomain.h
680
static inline bool irq_domain_is_msi_device(struct irq_domain *domain)
include/linux/irqdomain.h
682
return domain->flags & IRQ_DOMAIN_FLAG_MSI_DEVICE;
include/linux/irqdomain.h
685
static inline bool irq_domain_is_msi_immutable(struct irq_domain *domain)
include/linux/irqdomain.h
687
return domain->flags & IRQ_DOMAIN_FLAG_MSI_IMMUTABLE;
include/linux/irqdomain.h
690
static inline int irq_domain_alloc_irqs(struct irq_domain *domain, unsigned int nr_irqs,
include/linux/irqdomain.h
698
static inline bool irq_domain_is_hierarchy(struct irq_domain *domain)
include/linux/irqdomain.h
703
static inline bool irq_domain_is_ipi(struct irq_domain *domain)
include/linux/irqdomain.h
708
static inline bool irq_domain_is_ipi_per_cpu(struct irq_domain *domain)
include/linux/irqdomain.h
713
static inline bool irq_domain_is_ipi_single(struct irq_domain *domain)
include/linux/irqdomain.h
718
static inline bool irq_domain_is_msi(struct irq_domain *domain)
include/linux/irqdomain.h
723
static inline bool irq_domain_is_msi_parent(struct irq_domain *domain)
include/linux/irqdomain.h
728
static inline bool irq_domain_is_msi_device(struct irq_domain *domain)
include/linux/irqdomain.h
740
int msi_device_domain_alloc_wired(struct irq_domain *domain, unsigned int hwirq, unsigned int type);
include/linux/irqdomain.h
741
void msi_device_domain_free_wired(struct irq_domain *domain, unsigned int virq);
include/linux/irqdomain.h
743
static inline int msi_device_domain_alloc_wired(struct irq_domain *domain, unsigned int hwirq,
include/linux/irqdomain.h
749
static inline void msi_device_domain_free_wired(struct irq_domain *domain, unsigned int virq)
include/linux/mfd/abx500/ab8500.h
353
struct irq_domain *domain;
include/linux/mfd/imx25-tsadc.h
10
struct irq_domain *domain;
include/linux/mfd/nct6694.h
89
struct irq_domain *domain;
include/linux/mfd/stmpe.h
128
struct irq_domain *domain;
include/linux/mfd/tc3589x.h
124
struct irq_domain *domain;
include/linux/mod_devicetable.h
886
__u8 domain;
include/linux/moxtet.h
46
struct irq_domain *domain;
include/linux/msi.h
228
struct irq_domain *domain;
include/linux/msi.h
455
int (*msi_init)(struct irq_domain *domain,
include/linux/msi.h
459
void (*msi_free)(struct irq_domain *domain,
include/linux/msi.h
462
int (*msi_prepare)(struct irq_domain *domain,
include/linux/msi.h
465
void (*msi_teardown)(struct irq_domain *domain,
include/linux/msi.h
467
void (*prepare_desc)(struct irq_domain *domain, msi_alloc_info_t *arg,
include/linux/msi.h
471
int (*domain_alloc_irqs)(struct irq_domain *domain,
include/linux/msi.h
473
void (*domain_free_irqs)(struct irq_domain *domain,
include/linux/msi.h
475
int (*msi_translate)(struct irq_domain *domain, struct irq_fwspec *fwspec,
include/linux/msi.h
624
bool (*init_dev_msi_info)(struct device *dev, struct irq_domain *domain,
include/linux/msi.h
629
bool msi_parent_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
include/linux/msi.h
670
struct msi_domain_info *msi_get_domain_info(struct irq_domain *domain);
include/linux/msi.h
705
u32 pci_msi_domain_get_msi_rid(struct irq_domain *domain, struct pci_dev *pdev);
include/linux/msi.h
706
u32 pci_msi_map_rid_ctlr_node(struct irq_domain *domain, struct pci_dev *pdev,
include/linux/msi.h
709
void pci_msix_prepare_desc(struct irq_domain *domain, msi_alloc_info_t *arg,
include/linux/nfs_xdr.h
1376
char domain[NFS4_OPAQUE_LIMIT + 1];
include/linux/omap-iommu.h
19
int omap_iommu_domain_deactivate(struct iommu_domain *domain);
include/linux/omap-iommu.h
20
int omap_iommu_domain_activate(struct iommu_domain *domain);
include/linux/omap-iommu.h
25
static inline int omap_iommu_domain_deactivate(struct iommu_domain *domain)
include/linux/omap-iommu.h
30
static inline int omap_iommu_domain_activate(struct iommu_domain *domain)
include/linux/pci.h
1220
struct pci_bus *pci_find_bus(int domain, int busnr);
include/linux/pci.h
1298
struct pci_dev *pci_get_domain_bus_and_slot(int domain, unsigned int bus,
include/linux/pci.h
2196
static inline struct pci_dev *pci_get_domain_bus_and_slot(int domain,
include/linux/pci.h
882
int raw_pci_read(unsigned int domain, unsigned int bus, unsigned int devfn,
include/linux/pci.h
884
int raw_pci_write(unsigned int domain, unsigned int bus, unsigned int devfn,
include/linux/pm_domain.h
156
bool (*system_power_down_ok)(struct dev_pm_domain *domain);
include/linux/pm_domain.h
157
bool (*power_down_ok)(struct dev_pm_domain *domain);
include/linux/pm_domain.h
196
struct dev_pm_domain domain; /* PM domain operations */
include/linux/pm_domain.h
218
int (*power_off)(struct generic_pm_domain *domain);
include/linux/pm_domain.h
219
int (*power_on)(struct generic_pm_domain *domain);
include/linux/pm_domain.h
225
int (*set_hwmode_dev)(struct generic_pm_domain *domain,
include/linux/pm_domain.h
227
bool (*get_hwmode_dev)(struct generic_pm_domain *domain,
include/linux/pm_domain.h
229
int (*attach_dev)(struct generic_pm_domain *domain,
include/linux/pm_domain.h
231
void (*detach_dev)(struct generic_pm_domain *domain,
include/linux/pm_domain.h
257
return container_of(pd, struct generic_pm_domain, domain);
include/linux/remoteproc.h
551
struct iommu_domain *domain;
include/linux/scmi_protocol.h
162
(const struct scmi_protocol_handle *ph, u32 domain);
include/linux/scmi_protocol.h
163
int (*limits_set)(const struct scmi_protocol_handle *ph, u32 domain,
include/linux/scmi_protocol.h
165
int (*limits_get)(const struct scmi_protocol_handle *ph, u32 domain,
include/linux/scmi_protocol.h
167
int (*level_set)(const struct scmi_protocol_handle *ph, u32 domain,
include/linux/scmi_protocol.h
169
int (*level_get)(const struct scmi_protocol_handle *ph, u32 domain,
include/linux/scmi_protocol.h
172
u32 domain);
include/linux/scmi_protocol.h
174
u32 domain, u32 *rate_limit);
include/linux/scmi_protocol.h
176
struct device *dev, u32 domain);
include/linux/scmi_protocol.h
177
int (*freq_set)(const struct scmi_protocol_handle *ph, u32 domain,
include/linux/scmi_protocol.h
179
int (*freq_get)(const struct scmi_protocol_handle *ph, u32 domain,
include/linux/scmi_protocol.h
181
int (*est_power_get)(const struct scmi_protocol_handle *ph, u32 domain,
include/linux/scmi_protocol.h
184
u32 domain);
include/linux/scmi_protocol.h
186
u32 domain, u32 *rate_limit);
include/linux/scmi_protocol.h
202
u32 domain);
include/linux/scmi_protocol.h
210
int (*state_set)(const struct scmi_protocol_handle *ph, u32 domain,
include/linux/scmi_protocol.h
212
int (*state_get)(const struct scmi_protocol_handle *ph, u32 domain,
include/linux/scmi_protocol.h
541
u32 domain);
include/linux/scmi_protocol.h
542
int (*latency_get)(const struct scmi_protocol_handle *ph, u32 domain);
include/linux/scmi_protocol.h
543
int (*reset)(const struct scmi_protocol_handle *ph, u32 domain);
include/linux/scmi_protocol.h
544
int (*assert)(const struct scmi_protocol_handle *ph, u32 domain);
include/linux/scmi_protocol.h
545
int (*deassert)(const struct scmi_protocol_handle *ph, u32 domain);
include/linux/soundwire/sdw.h
1042
struct irq_domain *domain;
include/linux/sunrpc/gss_api.h
87
struct auth_domain *domain;
include/linux/surface_aggregator/device.h
100
.domain = d, \
include/linux/surface_aggregator/device.h
56
u8 domain;
include/linux/vga_switcheroo.h
174
int vga_switcheroo_init_domain_pm_ops(struct device *dev, struct dev_pm_domain *domain);
include/linux/vga_switcheroo.h
195
static inline int vga_switcheroo_init_domain_pm_ops(struct device *dev, struct dev_pm_domain *domain) { return -EINVAL; }
include/net/netlabel.h
196
char *domain;
include/net/netlabel.h
367
kfree(secattr->domain);
include/net/netlabel.h
407
int netlbl_cfg_map_del(const char *domain,
include/net/netlabel.h
412
int netlbl_cfg_unlbl_map_add(const char *domain,
include/net/netlabel.h
434
const char *domain,
include/net/netlabel.h
442
const char *domain,
include/net/netlabel.h
511
static inline int netlbl_cfg_map_del(const char *domain,
include/net/netlabel.h
519
static inline int netlbl_cfg_unlbl_map_add(const char *domain,
include/net/netlabel.h
557
const char *domain,
include/net/netlabel.h
575
const char *domain,
include/sound/q6usboffload.h
17
struct iommu_domain *domain;
include/trace/events/kyber.h
16
TP_PROTO(dev_t dev, const char *domain, const char *type,
include/trace/events/kyber.h
20
TP_ARGS(dev, domain, type, percentile, numerator, denominator, samples),
include/trace/events/kyber.h
24
__array( char, domain, DOMAIN_LEN )
include/trace/events/kyber.h
34
strscpy(__entry->domain, domain, sizeof(__entry->domain));
include/trace/events/kyber.h
43
MAJOR(__entry->dev), MINOR(__entry->dev), __entry->domain,
include/trace/events/kyber.h
50
TP_PROTO(dev_t dev, const char *domain, unsigned int depth),
include/trace/events/kyber.h
52
TP_ARGS(dev, domain, depth),
include/trace/events/kyber.h
56
__array( char, domain, DOMAIN_LEN )
include/trace/events/kyber.h
62
strscpy(__entry->domain, domain, sizeof(__entry->domain));
include/trace/events/kyber.h
67
MAJOR(__entry->dev), MINOR(__entry->dev), __entry->domain,
include/trace/events/kyber.h
73
TP_PROTO(dev_t dev, const char *domain),
include/trace/events/kyber.h
75
TP_ARGS(dev, domain),
include/trace/events/kyber.h
79
__array( char, domain, DOMAIN_LEN )
include/trace/events/kyber.h
84
strscpy(__entry->domain, domain, sizeof(__entry->domain));
include/trace/events/kyber.h
88
__entry->domain)
include/uapi/drm/amdgpu_drm.h
745
__u32 domain;
include/uapi/drm/etnaviv_drm.h
174
__u8 domain; /* in, pm domain */
include/uapi/drm/etnaviv_drm.h
261
__u8 domain; /* in, pm domain index */
include/uapi/drm/nouveau_drm.h
144
__u32 domain;
include/uapi/drm/nouveau_drm.h
161
__u32 domain;
include/uapi/drm/radeon_drm.h
889
__u32 domain;
include/uapi/linux/cciss_ioctl.h
16
unsigned short domain;
include/uapi/linux/tipc.h
166
__u32 domain;
include/uapi/linux/virtio_iommu.h
101
__le32 domain;
include/uapi/linux/virtio_iommu.h
111
__le32 domain;
include/uapi/linux/virtio_iommu.h
76
__le32 domain;
include/uapi/linux/virtio_iommu.h
85
__le32 domain;
include/xen/interface/io/pciif.h
60
uint32_t domain; /* PCI Domain/Segment */
include/xen/interface/io/pciif.h
84
uint32_t domain; /* PCI Domain/Segment*/
include/xen/interface/io/pvcalls.h
45
uint32_t domain;
include/xen/interface/platform.h
337
uint32_t domain; /* domain number of one dependent group */
include/xen/interface/platform.h
390
uint64_t domain;
include/xen/pci.h
23
uint16_t domain)
include/xen/pci.h
9
int xen_register_device_domain_owner(struct pci_dev *dev, uint16_t domain);
io_uring/net.c
1714
bctx->socket.family = sock->domain;
io_uring/net.c
1726
sock->domain = READ_ONCE(sqe->fd);
io_uring/net.c
1752
file = __sys_socket_file(sock->domain, sock->type, sock->protocol);
io_uring/net.c
39
int domain;
kernel/async.c
152
struct async_domain *domain,
kernel/async.c
163
entry->domain = domain;
kernel/async.c
170
list_add_tail(&entry->domain_list, &domain->pending);
kernel/async.c
171
if (domain->registered)
kernel/async.c
201
int node, struct async_domain *domain)
kernel/async.c
225
return __async_schedule_node_domain(func, data, node, domain, entry);
kernel/async.c
295
void async_synchronize_full_domain(struct async_domain *domain)
kernel/async.c
297
async_synchronize_cookie_domain(ASYNC_COOKIE_MAX, domain);
kernel/async.c
310
void async_synchronize_cookie_domain(async_cookie_t cookie, struct async_domain *domain)
kernel/async.c
317
wait_event(async_done, lowest_in_progress(domain) >= cookie);
kernel/async.c
76
struct async_domain *domain;
kernel/async.c
89
static async_cookie_t lowest_in_progress(struct async_domain *domain)
kernel/async.c
97
if (domain) {
kernel/async.c
98
if (!list_empty(&domain->pending))
kernel/async.c
99
first = list_first_entry(&domain->pending,
kernel/irq/chip.c
1530
if (data->domain)
kernel/irq/chip.c
1531
return data->domain->pm_dev;
kernel/irq/debugfs.c
82
data->domain ? data->domain->name : "");
kernel/irq/debugfs.c
85
if (data->domain && data->domain->ops && data->domain->ops->debug_show)
kernel/irq/debugfs.c
86
data->domain->ops->debug_show(m, NULL, data, ind + 1);
kernel/irq/devres.c
336
struct irq_domain **domain = res;
kernel/irq/devres.c
338
irq_domain_remove(*domain);
kernel/irq/devres.c
353
struct irq_domain *domain;
kernel/irq/devres.c
360
domain = irq_domain_instantiate(info);
kernel/irq/devres.c
361
if (!IS_ERR(domain)) {
kernel/irq/devres.c
362
*dr = domain;
kernel/irq/devres.c
368
return domain;
kernel/irq/generic-chip.c
321
gc->domain = d;
kernel/irq/generic-chip.c
617
if (gc->domain) {
kernel/irq/generic-chip.c
618
virq = irq_find_mapping(gc->domain, gc->irq_base + i);
kernel/irq/generic-chip.c
638
if (!gc->domain)
kernel/irq/generic-chip.c
648
virq = irq_find_mapping(gc->domain, gc->irq_base + __ffs(gc->installed));
kernel/irq/ipi-mux.c
157
struct irq_domain *domain;
kernel/irq/ipi-mux.c
177
domain = irq_domain_create_linear(fwnode, nr_ipi,
kernel/irq/ipi-mux.c
179
if (!domain) {
kernel/irq/ipi-mux.c
185
domain->flags |= IRQ_DOMAIN_FLAG_IPI_SINGLE;
kernel/irq/ipi-mux.c
186
irq_domain_update_bus_token(domain, DOMAIN_BUS_IPI);
kernel/irq/ipi-mux.c
188
rc = irq_domain_alloc_irqs(domain, nr_ipi, NUMA_NO_NODE, NULL);
kernel/irq/ipi-mux.c
194
ipi_mux_domain = domain;
kernel/irq/ipi-mux.c
200
irq_domain_remove(domain);
kernel/irq/ipi.c
119
struct irq_domain *domain;
kernel/irq/ipi.c
125
domain = data->domain;
kernel/irq/ipi.c
126
if (WARN_ON(domain == NULL))
kernel/irq/ipi.c
129
if (!irq_domain_is_ipi(domain)) {
kernel/irq/ipi.c
142
if (irq_domain_is_ipi_per_cpu(domain)) {
kernel/irq/ipi.c
181
if (irq_domain_is_ipi_per_cpu(data->domain))
kernel/irq/ipi.c
23
int irq_reserve_ipi(struct irq_domain *domain,
kernel/irq/ipi.c
247
if (irq_domain_is_ipi_per_cpu(data->domain) &&
kernel/irq/ipi.c
289
if (irq_domain_is_ipi_per_cpu(data->domain)) {
kernel/irq/ipi.c
30
if (!domain ||!irq_domain_is_ipi(domain)) {
kernel/irq/ipi.c
46
if (irq_domain_is_ipi_single(domain)) {
kernel/irq/ipi.c
84
virq = __irq_domain_alloc_irqs(domain, virq, nr_irqs, NUMA_NO_NODE,
kernel/irq/irq_sim.c
113
work_ctx->ops.irq_sim_irq_released(work_ctx->domain, hwirq,
kernel/irq/irq_sim.c
140
irqnum = irq_find_mapping(work_ctx->domain, offset);
kernel/irq/irq_sim.c
145
static int irq_sim_domain_map(struct irq_domain *domain,
kernel/irq/irq_sim.c
148
struct irq_sim_work_ctx *work_ctx = domain->host_data;
kernel/irq/irq_sim.c
164
static void irq_sim_domain_unmap(struct irq_domain *domain, unsigned int virq)
kernel/irq/irq_sim.c
169
irqd = irq_domain_get_irq_data(domain, virq);
kernel/irq/irq_sim.c
18
struct irq_domain *domain;
kernel/irq/irq_sim.c
214
work_ctx->domain = irq_domain_create_linear(fwnode, num_irqs,
kernel/irq/irq_sim.c
217
if (!work_ctx->domain)
kernel/irq/irq_sim.c
228
return no_free_ptr(work_ctx)->domain;
kernel/irq/irq_sim.c
238
void irq_domain_remove_sim(struct irq_domain *domain)
kernel/irq/irq_sim.c
240
struct irq_sim_work_ctx *work_ctx = domain->host_data;
kernel/irq/irq_sim.c
246
irq_domain_remove(domain);
kernel/irq/irq_sim.c
252
struct irq_domain *domain = data;
kernel/irq/irq_sim.c
254
irq_domain_remove_sim(domain);
kernel/irq/irq_sim.c
284
struct irq_domain *domain;
kernel/irq/irq_sim.c
287
domain = irq_domain_create_sim_full(fwnode, num_irqs, ops, data);
kernel/irq/irq_sim.c
288
if (IS_ERR(domain))
kernel/irq/irq_sim.c
289
return domain;
kernel/irq/irq_sim.c
291
ret = devm_add_action_or_reset(dev, devm_irq_domain_remove_sim, domain);
kernel/irq/irq_sim.c
295
return domain;
kernel/irq/irq_sim.c
99
return work_ctx->ops.irq_sim_irq_requested(work_ctx->domain,
kernel/irq/irqdesc.c
285
if (desc->irq_data.domain)
kernel/irq/irqdesc.c
729
int generic_handle_domain_irq(struct irq_domain *domain, irq_hw_number_t hwirq)
kernel/irq/irqdesc.c
731
return handle_irq_desc(irq_resolve_mapping(domain, hwirq));
kernel/irq/irqdesc.c
747
int generic_handle_domain_irq_safe(struct irq_domain *domain, irq_hw_number_t hwirq)
kernel/irq/irqdesc.c
753
ret = handle_irq_desc(irq_resolve_mapping(domain, hwirq));
kernel/irq/irqdesc.c
770
int generic_handle_domain_nmi(struct irq_domain *domain, irq_hw_number_t hwirq)
kernel/irq/irqdesc.c
773
return handle_irq_desc(irq_resolve_mapping(domain, hwirq));
kernel/irq/irqdesc.c
838
bool generic_handle_demux_domain_irq(struct irq_domain *domain, irq_hw_number_t hwirq)
kernel/irq/irqdesc.c
840
struct irq_desc *desc = irq_resolve_mapping(domain, hwirq);
kernel/irq/irqdomain.c
1001
mutex_unlock(&domain->root->mutex);
kernel/irq/irqdomain.c
1025
struct irq_domain *domain;
kernel/irq/irqdomain.c
1031
domain = irq_data->domain;
kernel/irq/irqdomain.c
1032
if (WARN_ON(domain == NULL))
kernel/irq/irqdomain.c
1035
if (irq_domain_is_hierarchy(domain)) {
kernel/irq/irqdomain.c
1036
irq_domain_free_one_irq(domain, virq);
kernel/irq/irqdomain.c
1038
irq_domain_disassociate(domain, virq);
kernel/irq/irqdomain.c
1052
struct irq_desc *__irq_resolve_mapping(struct irq_domain *domain,
kernel/irq/irqdomain.c
1060
if (domain == NULL)
kernel/irq/irqdomain.c
1061
domain = irq_default_domain;
kernel/irq/irqdomain.c
1062
if (domain == NULL)
kernel/irq/irqdomain.c
1065
if (irq_domain_is_nomap(domain)) {
kernel/irq/irqdomain.c
1066
if (hwirq < domain->hwirq_max) {
kernel/irq/irqdomain.c
1067
data = irq_domain_get_irq_data(domain, hwirq);
kernel/irq/irqdomain.c
1079
if (hwirq < domain->revmap_size)
kernel/irq/irqdomain.c
1080
data = rcu_dereference(domain->revmap[hwirq]);
kernel/irq/irqdomain.c
1082
data = radix_tree_lookup(&domain->revmap_tree, hwirq);
kernel/irq/irqdomain.c
1324
struct irq_domain *domain = data->domain;
kernel/irq/irqdomain.c
1326
domain->mapcount++;
kernel/irq/irqdomain.c
1327
irq_domain_set_mapping(domain, data->hwirq, data);
kernel/irq/irqdomain.c
1343
struct irq_domain *domain = data->domain;
kernel/irq/irqdomain.c
1346
domain->mapcount--;
kernel/irq/irqdomain.c
1347
irq_domain_clear_mapping(domain, hwirq);
kernel/irq/irqdomain.c
1351
static struct irq_data *irq_domain_insert_irq_data(struct irq_domain *domain,
kernel/irq/irqdomain.c
1362
irq_data->domain = domain;
kernel/irq/irqdomain.c
1388
irq_data->domain = NULL;
kernel/irq/irqdomain.c
1407
int irq_domain_disconnect_hierarchy(struct irq_domain *domain,
kernel/irq/irqdomain.c
1412
irqd = irq_domain_get_irq_data(domain, virq);
kernel/irq/irqdomain.c
143
static int alloc_name(struct irq_domain *domain, char *base, enum irq_domain_bus_token bus_token)
kernel/irq/irqdomain.c
1459
virq, tail->parent_data->domain->name);
kernel/irq/irqdomain.c
146
domain->name = kasprintf(GFP_KERNEL, "%s", base);
kernel/irq/irqdomain.c
1470
static int irq_domain_alloc_irq_data(struct irq_domain *domain,
kernel/irq/irqdomain.c
148
domain->name = kasprintf(GFP_KERNEL, "%s-%d", base, bus_token);
kernel/irq/irqdomain.c
1480
irq_data->domain = domain;
kernel/irq/irqdomain.c
1482
for (parent = domain->parent; parent; parent = parent->parent) {
kernel/irq/irqdomain.c
149
if (!domain->name)
kernel/irq/irqdomain.c
1499
struct irq_data *irq_domain_get_irq_data(struct irq_domain *domain,
kernel/irq/irqdomain.c
1506
if (irq_data->domain == domain)
kernel/irq/irqdomain.c
152
domain->flags |= IRQ_DOMAIN_NAME_ALLOCATED;
kernel/irq/irqdomain.c
1521
int irq_domain_set_hwirq_and_chip(struct irq_domain *domain, unsigned int virq,
kernel/irq/irqdomain.c
1526
struct irq_data *irq_data = irq_domain_get_irq_data(domain, virq);
kernel/irq/irqdomain.c
1550
void irq_domain_set_info(struct irq_domain *domain, unsigned int virq,
kernel/irq/irqdomain.c
1555
irq_domain_set_hwirq_and_chip(domain, virq, hwirq, chip, chip_data);
kernel/irq/irqdomain.c
156
static int alloc_fwnode_name(struct irq_domain *domain, const struct fwnode_handle *fwnode,
kernel/irq/irqdomain.c
1567
void irq_domain_free_irqs_common(struct irq_domain *domain, unsigned int virq,
kernel/irq/irqdomain.c
1574
irq_data = irq_domain_get_irq_data(domain, virq + i);
kernel/irq/irqdomain.c
1578
irq_domain_free_irqs_parent(domain, virq, nr_irqs);
kernel/irq/irqdomain.c
1588
void irq_domain_free_irqs_top(struct irq_domain *domain, unsigned int virq,
kernel/irq/irqdomain.c
1597
irq_domain_free_irqs_common(domain, virq, nr_irqs);
kernel/irq/irqdomain.c
1601
static void irq_domain_free_irqs_hierarchy(struct irq_domain *domain,
kernel/irq/irqdomain.c
1607
if (!domain->ops->free)
kernel/irq/irqdomain.c
1611
if (irq_domain_get_irq_data(domain, irq_base + i))
kernel/irq/irqdomain.c
1612
domain->ops->free(domain, irq_base + i, 1);
kernel/irq/irqdomain.c
1616
static int irq_domain_alloc_irqs_hierarchy(struct irq_domain *domain, unsigned int irq_base,
kernel/irq/irqdomain.c
1619
if (!domain->ops->alloc) {
kernel/irq/irqdomain.c
1624
return domain->ops->alloc(domain, irq_base, nr_irqs, arg);
kernel/irq/irqdomain.c
1627
static int irq_domain_alloc_irqs_locked(struct irq_domain *domain, int irq_base,
kernel/irq/irqdomain.c
1645
if (irq_domain_alloc_irq_data(domain, virq, nr_irqs)) {
kernel/irq/irqdomain.c
1651
ret = irq_domain_alloc_irqs_hierarchy(domain, virq, nr_irqs, arg);
kernel/irq/irqdomain.c
1695
int __irq_domain_alloc_irqs(struct irq_domain *domain, int irq_base,
kernel/irq/irqdomain.c
1701
if (domain == NULL) {
kernel/irq/irqdomain.c
1702
domain = irq_default_domain;
kernel/irq/irqdomain.c
1703
if (WARN(!domain, "domain is NULL; cannot allocate IRQ\n"))
kernel/irq/irqdomain.c
1707
mutex_lock(&domain->root->mutex);
kernel/irq/irqdomain.c
1708
ret = irq_domain_alloc_irqs_locked(domain, irq_base, nr_irqs, node, arg,
kernel/irq/irqdomain.c
1710
mutex_unlock(&domain->root->mutex);
kernel/irq/irqdomain.c
1721
lockdep_assert_held(&d->domain->root->mutex);
kernel/irq/irqdomain.c
1723
if (irq_domain_is_nomap(d->domain))
kernel/irq/irqdomain.c
1727
if (d->hwirq < d->domain->revmap_size) {
kernel/irq/irqdomain.c
1729
rcu_assign_pointer(d->domain->revmap[d->hwirq], d);
kernel/irq/irqdomain.c
1731
slot = radix_tree_lookup_slot(&d->domain->revmap_tree, d->hwirq);
kernel/irq/irqdomain.c
1733
radix_tree_replace_slot(&d->domain->revmap_tree, slot, d);
kernel/irq/irqdomain.c
1748
int irq_domain_push_irq(struct irq_domain *domain, int virq, void *arg)
kernel/irq/irqdomain.c
175
domain->name = strreplace(name, '/', ':');
kernel/irq/irqdomain.c
176
domain->flags |= IRQ_DOMAIN_NAME_ALLOCATED;
kernel/irq/irqdomain.c
1770
if (domain == NULL)
kernel/irq/irqdomain.c
1773
if (WARN_ON(!irq_domain_is_hierarchy(domain)))
kernel/irq/irqdomain.c
1779
if (domain->parent != irq_data->domain)
kernel/irq/irqdomain.c
1787
mutex_lock(&domain->root->mutex);
kernel/irq/irqdomain.c
1797
irq_data->domain = domain;
kernel/irq/irqdomain.c
180
static int alloc_unknown_name(struct irq_domain *domain, enum irq_domain_bus_token bus_token)
kernel/irq/irqdomain.c
1804
rv = irq_domain_alloc_irqs_hierarchy(domain, virq, 1, arg);
kernel/irq/irqdomain.c
1813
irq_domain_set_mapping(domain, irq_data->hwirq, irq_data);
kernel/irq/irqdomain.c
1815
mutex_unlock(&domain->root->mutex);
kernel/irq/irqdomain.c
1829
int irq_domain_pop_irq(struct irq_domain *domain, int virq)
kernel/irq/irqdomain.c
1851
if (domain == NULL)
kernel/irq/irqdomain.c
1857
tmp_irq_data = irq_domain_get_irq_data(domain, virq);
kernel/irq/irqdomain.c
186
domain->name = kasprintf(GFP_KERNEL, "unknown-%d", id);
kernel/irq/irqdomain.c
1863
if (WARN_ON(irq_data->domain != domain))
kernel/irq/irqdomain.c
1870
mutex_lock(&domain->root->mutex);
kernel/irq/irqdomain.c
1874
irq_domain_clear_mapping(domain, irq_data->hwirq);
kernel/irq/irqdomain.c
1875
irq_domain_free_irqs_hierarchy(domain, virq, 1);
kernel/irq/irqdomain.c
188
domain->name = kasprintf(GFP_KERNEL, "unknown-%d-%d", id, bus_token);
kernel/irq/irqdomain.c
1882
mutex_unlock(&domain->root->mutex);
kernel/irq/irqdomain.c
189
if (!domain->name)
kernel/irq/irqdomain.c
1898
struct irq_domain *domain;
kernel/irq/irqdomain.c
1901
if (WARN(!data || !data->domain || !data->domain->ops->free,
kernel/irq/irqdomain.c
1905
domain = data->domain;
kernel/irq/irqdomain.c
1907
mutex_lock(&domain->root->mutex);
kernel/irq/irqdomain.c
1910
irq_domain_free_irqs_hierarchy(domain, virq, nr_irqs);
kernel/irq/irqdomain.c
1911
mutex_unlock(&domain->root->mutex);
kernel/irq/irqdomain.c
1918
static void irq_domain_free_one_irq(struct irq_domain *domain, unsigned int virq)
kernel/irq/irqdomain.c
192
domain->flags |= IRQ_DOMAIN_NAME_ALLOCATED;
kernel/irq/irqdomain.c
1920
if (irq_domain_is_msi_device(domain))
kernel/irq/irqdomain.c
1921
msi_device_domain_free_wired(domain, virq);
kernel/irq/irqdomain.c
1933
int irq_domain_alloc_irqs_parent(struct irq_domain *domain,
kernel/irq/irqdomain.c
1937
if (!domain->parent)
kernel/irq/irqdomain.c
1940
return irq_domain_alloc_irqs_hierarchy(domain->parent, irq_base,
kernel/irq/irqdomain.c
1951
void irq_domain_free_irqs_parent(struct irq_domain *domain,
kernel/irq/irqdomain.c
1954
if (!domain->parent)
kernel/irq/irqdomain.c
1957
irq_domain_free_irqs_hierarchy(domain->parent, irq_base, nr_irqs);
kernel/irq/irqdomain.c
196
static int irq_domain_set_name(struct irq_domain *domain, const struct irq_domain_info *info)
kernel/irq/irqdomain.c
1963
if (irq_data && irq_data->domain) {
kernel/irq/irqdomain.c
1964
struct irq_domain *domain = irq_data->domain;
kernel/irq/irqdomain.c
1966
if (domain->ops->deactivate)
kernel/irq/irqdomain.c
1967
domain->ops->deactivate(domain, irq_data);
kernel/irq/irqdomain.c
1977
if (irqd && irqd->domain) {
kernel/irq/irqdomain.c
1978
struct irq_domain *domain = irqd->domain;
kernel/irq/irqdomain.c
1983
if (!ret && domain->ops->activate) {
kernel/irq/irqdomain.c
1984
ret = domain->ops->activate(domain, irqd, reserve);
kernel/irq/irqdomain.c
2029
static void irq_domain_check_hierarchy(struct irq_domain *domain)
kernel/irq/irqdomain.c
2032
if (domain->ops->alloc)
kernel/irq/irqdomain.c
2033
domain->flags |= IRQ_DOMAIN_FLAG_HIERARCHY;
kernel/irq/irqdomain.c
2041
struct irq_data *irq_domain_get_irq_data(struct irq_domain *domain,
kernel/irq/irqdomain.c
2046
return (irq_data && irq_data->domain == domain) ? irq_data : NULL;
kernel/irq/irqdomain.c
2061
void irq_domain_set_info(struct irq_domain *domain, unsigned int virq,
kernel/irq/irqdomain.c
2071
static int irq_domain_alloc_irqs_locked(struct irq_domain *domain, int irq_base,
kernel/irq/irqdomain.c
2078
static void irq_domain_check_hierarchy(struct irq_domain *domain) { }
kernel/irq/irqdomain.c
2079
static void irq_domain_free_one_irq(struct irq_domain *domain, unsigned int virq) { }
kernel/irq/irqdomain.c
218
return alloc_name(domain, fwid->name, bus_token);
kernel/irq/irqdomain.c
220
domain->name = fwid->name;
kernel/irq/irqdomain.c
222
return alloc_name(domain, fwid->name, bus_token);
kernel/irq/irqdomain.c
226
return alloc_fwnode_name(domain, fwnode, bus_token, info->name_suffix);
kernel/irq/irqdomain.c
229
if (domain->name)
kernel/irq/irqdomain.c
234
return alloc_unknown_name(domain, bus_token);
kernel/irq/irqdomain.c
239
struct irq_domain *domain;
kernel/irq/irqdomain.c
247
domain = kzalloc_node(struct_size(domain, revmap, info->size),
kernel/irq/irqdomain.c
249
if (!domain)
kernel/irq/irqdomain.c
252
err = irq_domain_set_name(domain, info);
kernel/irq/irqdomain.c
254
kfree(domain);
kernel/irq/irqdomain.c
258
domain->fwnode = fwnode_handle_get(info->fwnode);
kernel/irq/irqdomain.c
259
fwnode_dev_initialized(domain->fwnode, true);
kernel/irq/irqdomain.c
262
INIT_RADIX_TREE(&domain->revmap_tree, GFP_KERNEL);
kernel/irq/irqdomain.c
263
domain->ops = info->ops;
kernel/irq/irqdomain.c
264
domain->host_data = info->host_data;
kernel/irq/irqdomain.c
265
domain->bus_token = info->bus_token;
kernel/irq/irqdomain.c
266
domain->hwirq_max = info->hwirq_max;
kernel/irq/irqdomain.c
269
domain->flags |= IRQ_DOMAIN_FLAG_NO_MAP;
kernel/irq/irqdomain.c
271
domain->revmap_size = info->size;
kernel/irq/irqdomain.c
28
static int irq_domain_alloc_irqs_locked(struct irq_domain *domain, int irq_base,
kernel/irq/irqdomain.c
281
mutex_init(&domain->mutex);
kernel/irq/irqdomain.c
282
domain->root = domain;
kernel/irq/irqdomain.c
284
irq_domain_check_hierarchy(domain);
kernel/irq/irqdomain.c
286
return domain;
kernel/irq/irqdomain.c
289
static void __irq_domain_publish(struct irq_domain *domain)
kernel/irq/irqdomain.c
292
debugfs_add_domain_dir(domain);
kernel/irq/irqdomain.c
293
list_add(&domain->link, &irq_domain_list);
kernel/irq/irqdomain.c
296
pr_debug("Added domain %s\n", domain->name);
kernel/irq/irqdomain.c
299
static void irq_domain_free(struct irq_domain *domain)
kernel/irq/irqdomain.c
301
fwnode_dev_initialized(domain->fwnode, false);
kernel/irq/irqdomain.c
302
fwnode_handle_put(domain->fwnode);
kernel/irq/irqdomain.c
303
if (domain->flags & IRQ_DOMAIN_NAME_ALLOCATED)
kernel/irq/irqdomain.c
304
kfree(domain->name);
kernel/irq/irqdomain.c
305
kfree(domain);
kernel/irq/irqdomain.c
31
static void irq_domain_check_hierarchy(struct irq_domain *domain);
kernel/irq/irqdomain.c
32
static void irq_domain_free_one_irq(struct irq_domain *domain, unsigned int virq);
kernel/irq/irqdomain.c
323
struct irq_domain *domain;
kernel/irq/irqdomain.c
326
domain = __irq_domain_create(info);
kernel/irq/irqdomain.c
327
if (IS_ERR(domain))
kernel/irq/irqdomain.c
328
return domain;
kernel/irq/irqdomain.c
330
domain->flags |= info->domain_flags;
kernel/irq/irqdomain.c
331
domain->exit = info->exit;
kernel/irq/irqdomain.c
332
domain->dev = info->dev;
kernel/irq/irqdomain.c
336
domain->root = info->parent->root;
kernel/irq/irqdomain.c
337
domain->parent = info->parent;
kernel/irq/irqdomain.c
342
err = irq_domain_alloc_generic_chips(domain, info->dgc_info);
kernel/irq/irqdomain.c
348
err = info->init(domain);
kernel/irq/irqdomain.c
353
__irq_domain_publish(domain);
kernel/irq/irqdomain.c
364
irq_domain_associate_many(domain, info->virq_base, info->hwirq_base,
kernel/irq/irqdomain.c
368
return domain;
kernel/irq/irqdomain.c
372
irq_domain_remove_generic_chips(domain);
kernel/irq/irqdomain.c
374
irq_domain_free(domain);
kernel/irq/irqdomain.c
398
void irq_domain_remove(struct irq_domain *domain)
kernel/irq/irqdomain.c
400
if (domain->exit)
kernel/irq/irqdomain.c
401
domain->exit(domain);
kernel/irq/irqdomain.c
404
debugfs_remove_domain_dir(domain);
kernel/irq/irqdomain.c
406
WARN_ON(!radix_tree_empty(&domain->revmap_tree));
kernel/irq/irqdomain.c
408
list_del(&domain->link);
kernel/irq/irqdomain.c
413
if (unlikely(irq_default_domain == domain))
kernel/irq/irqdomain.c
418
if (domain->flags & IRQ_DOMAIN_FLAG_DESTROY_GC)
kernel/irq/irqdomain.c
419
irq_domain_remove_generic_chips(domain);
kernel/irq/irqdomain.c
421
pr_debug("Removed domain %s\n", domain->name);
kernel/irq/irqdomain.c
422
irq_domain_free(domain);
kernel/irq/irqdomain.c
426
void irq_domain_update_bus_token(struct irq_domain *domain,
kernel/irq/irqdomain.c
431
if (domain->bus_token == bus_token)
kernel/irq/irqdomain.c
436
domain->bus_token = bus_token;
kernel/irq/irqdomain.c
438
name = kasprintf(GFP_KERNEL, "%s-%d", domain->name, bus_token);
kernel/irq/irqdomain.c
444
debugfs_remove_domain_dir(domain);
kernel/irq/irqdomain.c
446
if (domain->flags & IRQ_DOMAIN_NAME_ALLOCATED)
kernel/irq/irqdomain.c
447
kfree(domain->name);
kernel/irq/irqdomain.c
449
domain->flags |= IRQ_DOMAIN_NAME_ALLOCATED;
kernel/irq/irqdomain.c
451
domain->name = name;
kernel/irq/irqdomain.c
452
debugfs_add_domain_dir(domain);
kernel/irq/irqdomain.c
490
struct irq_domain *domain = __irq_domain_instantiate(&info, true, false);
kernel/irq/irqdomain.c
492
return IS_ERR(domain) ? NULL : domain;
kernel/irq/irqdomain.c
512
struct irq_domain *domain = __irq_domain_instantiate(&info, false, true);
kernel/irq/irqdomain.c
514
return IS_ERR(domain) ? NULL : domain;
kernel/irq/irqdomain.c
570
void irq_set_default_domain(struct irq_domain *domain)
kernel/irq/irqdomain.c
572
pr_debug("Default domain set to @0x%p\n", domain);
kernel/irq/irqdomain.c
574
irq_default_domain = domain;
kernel/irq/irqdomain.c
593
static bool irq_domain_is_nomap(struct irq_domain *domain)
kernel/irq/irqdomain.c
596
(domain->flags & IRQ_DOMAIN_FLAG_NO_MAP);
kernel/irq/irqdomain.c
599
static void irq_domain_clear_mapping(struct irq_domain *domain,
kernel/irq/irqdomain.c
602
lockdep_assert_held(&domain->root->mutex);
kernel/irq/irqdomain.c
604
if (irq_domain_is_nomap(domain))
kernel/irq/irqdomain.c
607
if (hwirq < domain->revmap_size)
kernel/irq/irqdomain.c
608
rcu_assign_pointer(domain->revmap[hwirq], NULL);
kernel/irq/irqdomain.c
610
radix_tree_delete(&domain->revmap_tree, hwirq);
kernel/irq/irqdomain.c
613
static void irq_domain_set_mapping(struct irq_domain *domain,
kernel/irq/irqdomain.c
621
lockdep_assert_held(&domain->root->mutex);
kernel/irq/irqdomain.c
623
if (irq_domain_is_nomap(domain))
kernel/irq/irqdomain.c
626
if (hwirq < domain->revmap_size)
kernel/irq/irqdomain.c
627
rcu_assign_pointer(domain->revmap[hwirq], irq_data);
kernel/irq/irqdomain.c
629
radix_tree_insert(&domain->revmap_tree, hwirq, irq_data);
kernel/irq/irqdomain.c
632
static void irq_domain_disassociate(struct irq_domain *domain, unsigned int irq)
kernel/irq/irqdomain.c
637
if (WARN(!irq_data || irq_data->domain != domain,
kernel/irq/irqdomain.c
643
mutex_lock(&domain->root->mutex);
kernel/irq/irqdomain.c
654
if (domain->ops->unmap)
kernel/irq/irqdomain.c
655
domain->ops->unmap(domain, irq);
kernel/irq/irqdomain.c
658
irq_data->domain = NULL;
kernel/irq/irqdomain.c
660
domain->mapcount--;
kernel/irq/irqdomain.c
663
irq_domain_clear_mapping(domain, hwirq);
kernel/irq/irqdomain.c
665
mutex_unlock(&domain->root->mutex);
kernel/irq/irqdomain.c
668
static int irq_domain_associate_locked(struct irq_domain *domain, unsigned int virq,
kernel/irq/irqdomain.c
674
if (WARN(hwirq >= domain->hwirq_max,
kernel/irq/irqdomain.c
675
"error: hwirq 0x%x is too large for %s\n", (int)hwirq, domain->name))
kernel/irq/irqdomain.c
679
if (WARN(irq_data->domain, "error: virq%i is already associated", virq))
kernel/irq/irqdomain.c
683
irq_data->domain = domain;
kernel/irq/irqdomain.c
684
if (domain->ops->map) {
kernel/irq/irqdomain.c
685
ret = domain->ops->map(domain, virq, hwirq);
kernel/irq/irqdomain.c
694
domain->name, hwirq, virq, ret);
kernel/irq/irqdomain.c
696
irq_data->domain = NULL;
kernel/irq/irqdomain.c
702
domain->mapcount++;
kernel/irq/irqdomain.c
703
irq_domain_set_mapping(domain, hwirq, irq_data);
kernel/irq/irqdomain.c
710
int irq_domain_associate(struct irq_domain *domain, unsigned int virq,
kernel/irq/irqdomain.c
715
mutex_lock(&domain->root->mutex);
kernel/irq/irqdomain.c
716
ret = irq_domain_associate_locked(domain, virq, hwirq);
kernel/irq/irqdomain.c
717
mutex_unlock(&domain->root->mutex);
kernel/irq/irqdomain.c
723
void irq_domain_associate_many(struct irq_domain *domain, unsigned int irq_base,
kernel/irq/irqdomain.c
729
of_node = irq_domain_get_of_node(domain);
kernel/irq/irqdomain.c
734
irq_domain_associate(domain, irq_base + i, hwirq_base + i);
kernel/irq/irqdomain.c
749
unsigned int irq_create_direct_mapping(struct irq_domain *domain)
kernel/irq/irqdomain.c
754
if (domain == NULL)
kernel/irq/irqdomain.c
755
domain = irq_default_domain;
kernel/irq/irqdomain.c
757
of_node = irq_domain_get_of_node(domain);
kernel/irq/irqdomain.c
763
if (virq >= domain->hwirq_max) {
kernel/irq/irqdomain.c
765
domain->hwirq_max);
kernel/irq/irqdomain.c
771
if (irq_domain_associate(domain, virq, virq)) {
kernel/irq/irqdomain.c
781
static unsigned int irq_create_mapping_affinity_locked(struct irq_domain *domain,
kernel/irq/irqdomain.c
785
struct device_node *of_node = irq_domain_get_of_node(domain);
kernel/irq/irqdomain.c
788
pr_debug("irq_create_mapping(0x%p, 0x%lx)\n", domain, hwirq);
kernel/irq/irqdomain.c
798
if (irq_domain_associate_locked(domain, virq, hwirq)) {
kernel/irq/irqdomain.c
820
unsigned int irq_create_mapping_affinity(struct irq_domain *domain,
kernel/irq/irqdomain.c
827
if (domain == NULL)
kernel/irq/irqdomain.c
828
domain = irq_default_domain;
kernel/irq/irqdomain.c
829
if (domain == NULL) {
kernel/irq/irqdomain.c
834
mutex_lock(&domain->root->mutex);
kernel/irq/irqdomain.c
837
virq = irq_find_mapping(domain, hwirq);
kernel/irq/irqdomain.c
843
virq = irq_create_mapping_affinity_locked(domain, hwirq, affinity);
kernel/irq/irqdomain.c
845
mutex_unlock(&domain->root->mutex);
kernel/irq/irqdomain.c
884
struct irq_domain *domain;
kernel/irq/irqdomain.c
887
domain = irq_find_matching_fwspec(fwspec, DOMAIN_BUS_WIRED);
kernel/irq/irqdomain.c
888
if (!domain)
kernel/irq/irqdomain.c
889
domain = irq_find_matching_fwspec(fwspec, DOMAIN_BUS_ANY);
kernel/irq/irqdomain.c
891
domain = irq_default_domain;
kernel/irq/irqdomain.c
894
return domain;
kernel/irq/irqdomain.c
900
struct irq_domain *domain = fwspec_to_domain(fwspec);
kernel/irq/irqdomain.c
904
if (!domain || !domain->ops->get_fwspec_info)
kernel/irq/irqdomain.c
907
return domain->ops->get_fwspec_info(fwspec, info);
kernel/irq/irqdomain.c
914
struct irq_domain *domain;
kernel/irq/irqdomain.c
919
domain = fwspec_to_domain(fwspec);
kernel/irq/irqdomain.c
920
if (!domain) {
kernel/irq/irqdomain.c
926
if (irq_domain_translate(domain, fwspec, &hwirq, &type))
kernel/irq/irqdomain.c
936
mutex_lock(&domain->root->mutex);
kernel/irq/irqdomain.c
942
virq = irq_find_mapping(domain, hwirq);
kernel/irq/irqdomain.c
973
if (irq_domain_is_hierarchy(domain)) {
kernel/irq/irqdomain.c
974
if (irq_domain_is_msi_device(domain)) {
kernel/irq/irqdomain.c
975
mutex_unlock(&domain->root->mutex);
kernel/irq/irqdomain.c
976
virq = msi_device_domain_alloc_wired(domain, hwirq, type);
kernel/irq/irqdomain.c
977
mutex_lock(&domain->root->mutex);
kernel/irq/irqdomain.c
979
virq = irq_domain_alloc_irqs_locked(domain, -1, 1, NUMA_NO_NODE,
kernel/irq/irqdomain.c
987
virq = irq_create_mapping_affinity_locked(domain, hwirq, NULL);
kernel/irq/msi.c
1034
struct irq_domain *domain, *parent = dev->msi.domain;
kernel/irq/msi.c
1090
domain = __msi_create_irq_domain(fwnode, &bundle->info, IRQ_DOMAIN_FLAG_MSI_DEVICE, parent);
kernel/irq/msi.c
1091
if (!domain)
kernel/irq/msi.c
1094
dev->msi.data->__domains[domid].domain = domain;
kernel/irq/msi.c
1096
if (msi_domain_prepare_irqs(domain, dev, hwsize, &bundle->alloc_info)) {
kernel/irq/msi.c
1097
dev->msi.data->__domains[domid].domain = NULL;
kernel/irq/msi.c
1098
irq_domain_remove(domain);
kernel/irq/msi.c
1117
struct irq_domain *domain;
kernel/irq/msi.c
1120
domain = msi_get_device_domain(dev, domid);
kernel/irq/msi.c
1121
if (!domain || !irq_domain_is_msi_device(domain))
kernel/irq/msi.c
1124
dev->msi.data->__domains[domid].domain = NULL;
kernel/irq/msi.c
1125
info = domain->host_data;
kernel/irq/msi.c
1127
info->ops->msi_teardown(domain, info->alloc_data);
kernel/irq/msi.c
1129
if (irq_domain_is_msi_device(domain))
kernel/irq/msi.c
1130
fwnode = domain->fwnode;
kernel/irq/msi.c
1131
irq_domain_remove(domain);
kernel/irq/msi.c
1148
struct irq_domain *domain;
kernel/irq/msi.c
1151
domain = msi_get_device_domain(dev, domid);
kernel/irq/msi.c
1152
if (domain && irq_domain_is_msi_device(domain)) {
kernel/irq/msi.c
1153
info = domain->host_data;
kernel/irq/msi.c
1159
static int msi_domain_prepare_irqs(struct irq_domain *domain, struct device *dev,
kernel/irq/msi.c
1162
struct msi_domain_info *info = domain->host_data;
kernel/irq/msi.c
1165
return ops->msi_prepare(domain, dev, nvec, arg);
kernel/irq/msi.c
1179
static bool msi_check_reservation_mode(struct irq_domain *domain,
kernel/irq/msi.c
1185
switch(domain->bus_token) {
kernel/irq/msi.c
1209
static int msi_handle_pci_fail(struct irq_domain *domain, struct msi_desc *desc,
kernel/irq/msi.c
1212
switch(domain->bus_token) {
kernel/irq/msi.c
1235
static int msi_init_virq(struct irq_domain *domain, int virq, unsigned int vflags)
kernel/irq/msi.c
1237
struct irq_data *irqd = irq_domain_get_irq_data(domain, virq);
kernel/irq/msi.c
1274
static int populate_alloc_info(struct irq_domain *domain, struct device *dev,
kernel/irq/msi.c
1277
struct msi_domain_info *info = domain->host_data;
kernel/irq/msi.c
1286
return msi_domain_prepare_irqs(domain, dev, nirqs, arg);
kernel/irq/msi.c
1292
static int __msi_domain_alloc_irqs(struct device *dev, struct irq_domain *domain,
kernel/irq/msi.c
1296
struct msi_domain_info *info = domain->host_data;
kernel/irq/msi.c
1304
ret = populate_alloc_info(domain, dev, ctrl->nirqs, &arg);
kernel/irq/msi.c
1320
if (msi_check_reservation_mode(domain, info, dev))
kernel/irq/msi.c
1332
ops->prepare_desc(domain, &arg, desc);
kernel/irq/msi.c
1336
virq = __irq_domain_alloc_irqs(domain, -1, desc->nvec_used,
kernel/irq/msi.c
1340
return msi_handle_pci_fail(domain, desc, allocated);
kernel/irq/msi.c
1345
ret = msi_init_virq(domain, virq + i, vflags);
kernel/irq/msi.c
1373
struct irq_domain *domain;
kernel/irq/msi.c
1379
domain = msi_get_device_domain(dev, ctrl->domid);
kernel/irq/msi.c
1380
if (!domain)
kernel/irq/msi.c
1383
info = domain->host_data;
kernel/irq/msi.c
1391
return ops->domain_alloc_irqs(domain, dev, ctrl->nirqs);
kernel/irq/msi.c
1393
return __msi_domain_alloc_irqs(dev, domain, ctrl);
kernel/irq/msi.c
1483
struct irq_domain *domain;
kernel/irq/msi.c
1488
domain = msi_get_device_domain(dev, domid);
kernel/irq/msi.c
1489
if (!domain) {
kernel/irq/msi.c
1511
ret = __msi_domain_alloc_irqs(dev, domain, &ctrl);
kernel/irq/msi.c
1576
int msi_device_domain_alloc_wired(struct irq_domain *domain, unsigned int hwirq,
kernel/irq/msi.c
1581
struct device *dev = domain->dev;
kernel/irq/msi.c
1584
if (WARN_ON_ONCE(!dev || domain->bus_token != DOMAIN_BUS_WIRED_TO_MSI))
kernel/irq/msi.c
1590
if (WARN_ON_ONCE(msi_get_device_domain(dev, domid) != domain))
kernel/irq/msi.c
1597
static void __msi_domain_free_irqs(struct device *dev, struct irq_domain *domain,
kernel/irq/msi.c
1601
struct msi_domain_info *info = domain->host_data;
kernel/irq/msi.c
1614
irqd = irq_domain_get_irq_data(domain, desc->irq + i);
kernel/irq/msi.c
1630
struct irq_domain *domain;
kernel/irq/msi.c
1635
domain = msi_get_device_domain(dev, ctrl->domid);
kernel/irq/msi.c
1636
if (!domain)
kernel/irq/msi.c
1639
info = domain->host_data;
kernel/irq/msi.c
1643
ops->domain_free_irqs(domain, dev);
kernel/irq/msi.c
1645
__msi_domain_free_irqs(dev, domain, ctrl);
kernel/irq/msi.c
1726
void msi_device_domain_free_wired(struct irq_domain *domain, unsigned int virq)
kernel/irq/msi.c
1729
struct device *dev = domain->dev;
kernel/irq/msi.c
1731
if (WARN_ON_ONCE(!dev || !desc || domain->bus_token != DOMAIN_BUS_WIRED_TO_MSI))
kernel/irq/msi.c
1735
if (WARN_ON_ONCE(msi_get_device_domain(dev, MSI_DEFAULT_DOMAIN) != domain))
kernel/irq/msi.c
1747
struct msi_domain_info *msi_get_domain_info(struct irq_domain *domain)
kernel/irq/msi.c
1749
return (struct msi_domain_info *)domain->host_data;
kernel/irq/msi.c
1770
struct irq_domain *domain = dev_get_msi_domain(dev);
kernel/irq/msi.c
1772
for (; domain; domain = domain->parent)
kernel/irq/msi.c
1773
if (domain->flags & IRQ_DOMAIN_FLAG_ISOLATED_MSI)
kernel/irq/msi.c
186
(dev->msi.domain &&
kernel/irq/msi.c
187
!dev->msi.data->__domains[ctrl->domid].domain)))
kernel/irq/msi.c
337
if (dev->msi.domain && !irq_domain_is_msi_parent(dev->msi.domain))
kernel/irq/msi.c
338
md->__domains[MSI_DEFAULT_DOMAIN].domain = dev->msi.domain;
kernel/irq/msi.c
603
struct irq_domain *domain;
kernel/irq/msi.c
610
domain = dev->msi.data->__domains[domid].domain;
kernel/irq/msi.c
611
if (!domain)
kernel/irq/msi.c
614
if (WARN_ON_ONCE(irq_domain_is_msi_parent(domain)))
kernel/irq/msi.c
617
return domain;
kernel/irq/msi.c
62
static int msi_domain_prepare_irqs(struct irq_domain *domain, struct device *dev,
kernel/irq/msi.c
623
struct irq_domain *domain;
kernel/irq/msi.c
625
domain = msi_get_device_domain(dev, domid);
kernel/irq/msi.c
626
if (domain) {
kernel/irq/msi.c
627
info = domain->host_data;
kernel/irq/msi.c
640
static void msi_check_level(struct irq_domain *domain, struct msi_msg *msg)
kernel/irq/msi.c
642
struct msi_domain_info *info = domain->host_data;
kernel/irq/msi.c
674
msi_check_level(irq_data->domain, msg);
kernel/irq/msi.c
681
static int msi_domain_activate(struct irq_domain *domain,
kernel/irq/msi.c
687
msi_check_level(irq_data->domain, msg);
kernel/irq/msi.c
692
static void msi_domain_deactivate(struct irq_domain *domain,
kernel/irq/msi.c
701
static int msi_domain_alloc(struct irq_domain *domain, unsigned int virq,
kernel/irq/msi.c
704
struct msi_domain_info *info = domain->host_data;
kernel/irq/msi.c
709
if (irq_resolve_mapping(domain, hwirq))
kernel/irq/msi.c
712
if (domain->parent) {
kernel/irq/msi.c
713
ret = irq_domain_alloc_irqs_parent(domain, virq, nr_irqs, arg);
kernel/irq/msi.c
719
ret = ops->msi_init(domain, info, virq + i, hwirq + i, arg);
kernel/irq/msi.c
723
ops->msi_free(domain, info, virq + i);
kernel/irq/msi.c
725
irq_domain_free_irqs_top(domain, virq, nr_irqs);
kernel/irq/msi.c
733
static void msi_domain_free(struct irq_domain *domain, unsigned int virq,
kernel/irq/msi.c
736
struct msi_domain_info *info = domain->host_data;
kernel/irq/msi.c
741
info->ops->msi_free(domain, info, virq + i);
kernel/irq/msi.c
743
irq_domain_free_irqs_top(domain, virq, nr_irqs);
kernel/irq/msi.c
746
static int msi_domain_translate(struct irq_domain *domain, struct irq_fwspec *fwspec,
kernel/irq/msi.c
749
struct msi_domain_info *info = domain->host_data;
kernel/irq/msi.c
757
return info->ops->msi_translate(domain, fwspec, hwirq, type);
kernel/irq/msi.c
792
static int msi_domain_ops_prepare(struct irq_domain *domain, struct device *dev,
kernel/irq/msi.c
799
static void msi_domain_ops_teardown(struct irq_domain *domain, msi_alloc_info_t *arg)
kernel/irq/msi.c
809
static int msi_domain_ops_init(struct irq_domain *domain,
kernel/irq/msi.c
814
irq_domain_set_hwirq_and_chip(domain, virq, hwirq, info->chip,
kernel/irq/msi.c
870
struct irq_domain *domain;
kernel/irq/msi.c
887
domain = irq_domain_create_hierarchy(parent, flags | IRQ_DOMAIN_FLAG_MSI, 0,
kernel/irq/msi.c
890
if (domain) {
kernel/irq/msi.c
891
irq_domain_update_bus_token(domain, info->bus_token);
kernel/irq/msi.c
892
domain->dev = info->dev;
kernel/irq/msi.c
894
domain->pm_dev = parent->pm_dev;
kernel/irq/msi.c
897
return domain;
kernel/irq/msi.c
968
bool msi_parent_init_dev_msi_info(struct device *dev, struct irq_domain *domain,
kernel/irq/msi.c
972
struct irq_domain *parent = domain->parent;
kernel/irq/proc.c
506
if (desc->irq_data.domain)
net/bridge/br_cfm.c
507
if (create->domain == BR_CFM_VLAN) {
net/bridge/br_cfm.c
512
if (create->domain != BR_CFM_PORT) {
net/bridge/br_cfm.c
541
if (create->domain == BR_CFM_PORT) {
net/bridge/br_cfm_netlink.c
123
create.domain = nla_get_u32(tb[IFLA_BRIDGE_CFM_MEP_CREATE_DOMAIN]);
net/bridge/br_cfm_netlink.c
471
mep->create.domain))
net/bridge/br_mrp.c
179
memset(hdr->domain, 0xff, MRP_DOMAIN_UUID_LENGTH);
net/bridge/br_private_cfm.h
10
enum br_cfm_domain domain; /* Domain for this MEP */
net/bridge/br_private_mrp.h
118
__u8 domain[MRP_DOMAIN_UUID_LENGTH];
net/netlabel/netlabel_domainhash.c
137
static struct netlbl_dom_map *netlbl_domhsh_search(const char *domain,
net/netlabel/netlabel_domainhash.c
144
if (domain != NULL) {
net/netlabel/netlabel_domainhash.c
145
bkt = netlbl_domhsh_hash(domain);
net/netlabel/netlabel_domainhash.c
151
strcmp(iter->domain, domain) == 0)
net/netlabel/netlabel_domainhash.c
172
static struct netlbl_dom_map *netlbl_domhsh_search_def(const char *domain,
net/netlabel/netlabel_domainhash.c
177
entry = netlbl_domhsh_search(domain, family);
net/netlabel/netlabel_domainhash.c
222
entry->domain ? entry->domain : "(default)");
net/netlabel/netlabel_domainhash.c
425
if (entry->domain != NULL)
net/netlabel/netlabel_domainhash.c
426
entry_old = netlbl_domhsh_search(entry->domain, entry->family);
net/netlabel/netlabel_domainhash.c
428
entry_old = netlbl_domhsh_search_def(entry->domain,
net/netlabel/netlabel_domainhash.c
433
if (entry->domain != NULL) {
net/netlabel/netlabel_domainhash.c
434
u32 bkt = netlbl_domhsh_hash(entry->domain);
net/netlabel/netlabel_domainhash.c
614
entry->domain ? entry->domain : "(default)");
net/netlabel/netlabel_domainhash.c
658
int netlbl_domhsh_remove_af4(const char *domain,
net/netlabel/netlabel_domainhash.c
673
if (domain)
net/netlabel/netlabel_domainhash.c
674
entry_map = netlbl_domhsh_search(domain, AF_INET);
net/netlabel/netlabel_domainhash.c
676
entry_map = netlbl_domhsh_search_def(domain, AF_INET);
net/netlabel/netlabel_domainhash.c
727
int netlbl_domhsh_remove_af6(const char *domain,
net/netlabel/netlabel_domainhash.c
740
if (domain)
net/netlabel/netlabel_domainhash.c
741
entry_map = netlbl_domhsh_search(domain, AF_INET6);
net/netlabel/netlabel_domainhash.c
743
entry_map = netlbl_domhsh_search_def(domain, AF_INET6);
net/netlabel/netlabel_domainhash.c
792
int netlbl_domhsh_remove(const char *domain, u16 family,
net/netlabel/netlabel_domainhash.c
801
if (domain)
net/netlabel/netlabel_domainhash.c
802
entry = netlbl_domhsh_search(domain, AF_INET);
net/netlabel/netlabel_domainhash.c
804
entry = netlbl_domhsh_search_def(domain, AF_INET);
net/netlabel/netlabel_domainhash.c
812
if (domain)
net/netlabel/netlabel_domainhash.c
813
entry = netlbl_domhsh_search(domain, AF_INET6);
net/netlabel/netlabel_domainhash.c
815
entry = netlbl_domhsh_search_def(domain, AF_INET6);
net/netlabel/netlabel_domainhash.c
855
struct netlbl_dom_map *netlbl_domhsh_getentry(const char *domain, u16 family)
net/netlabel/netlabel_domainhash.c
859
return netlbl_domhsh_search_def(domain, family);
net/netlabel/netlabel_domainhash.c
873
struct netlbl_dommap_def *netlbl_domhsh_getentry_af4(const char *domain,
net/netlabel/netlabel_domainhash.c
879
dom_iter = netlbl_domhsh_search_def(domain, AF_INET);
net/netlabel/netlabel_domainhash.c
90
kfree(ptr->domain);
net/netlabel/netlabel_domainhash.c
903
struct netlbl_dommap_def *netlbl_domhsh_getentry_af6(const char *domain,
net/netlabel/netlabel_domainhash.c
909
dom_iter = netlbl_domhsh_search_def(domain, AF_INET6);
net/netlabel/netlabel_domainhash.h
59
char *domain;
net/netlabel/netlabel_domainhash.h
78
int netlbl_domhsh_remove_af4(const char *domain,
net/netlabel/netlabel_domainhash.h
82
int netlbl_domhsh_remove_af6(const char *domain,
net/netlabel/netlabel_domainhash.h
86
int netlbl_domhsh_remove(const char *domain, u16 family,
net/netlabel/netlabel_domainhash.h
89
struct netlbl_dom_map *netlbl_domhsh_getentry(const char *domain, u16 family);
net/netlabel/netlabel_domainhash.h
90
struct netlbl_dommap_def *netlbl_domhsh_getentry_af4(const char *domain,
net/netlabel/netlabel_domainhash.h
93
struct netlbl_dommap_def *netlbl_domhsh_getentry_af6(const char *domain,
net/netlabel/netlabel_domainhash.h
95
int netlbl_domhsh_remove_af6(const char *domain,
net/netlabel/netlabel_kapi.c
110
if (domain != NULL) {
net/netlabel/netlabel_kapi.c
111
entry->domain = kstrdup(domain, GFP_ATOMIC);
net/netlabel/netlabel_kapi.c
112
if (entry->domain == NULL)
net/netlabel/netlabel_kapi.c
1144
entry = netlbl_domhsh_getentry_af4(secattr->domain,
net/netlabel/netlabel_kapi.c
1174
entry = netlbl_domhsh_getentry_af6(secattr->domain,
net/netlabel/netlabel_kapi.c
1225
entry = netlbl_domhsh_getentry_af4(secattr->domain,
net/netlabel/netlabel_kapi.c
1246
entry = netlbl_domhsh_getentry_af6(secattr->domain,
net/netlabel/netlabel_kapi.c
1323
entry = netlbl_domhsh_getentry_af4(secattr->domain,
net/netlabel/netlabel_kapi.c
1346
entry = netlbl_domhsh_getentry_af6(secattr->domain,
net/netlabel/netlabel_kapi.c
183
kfree(entry->domain);
net/netlabel/netlabel_kapi.c
324
const char *domain,
net/netlabel/netlabel_kapi.c
343
if (domain != NULL) {
net/netlabel/netlabel_kapi.c
344
entry->domain = kstrdup(domain, GFP_ATOMIC);
net/netlabel/netlabel_kapi.c
345
if (entry->domain == NULL)
net/netlabel/netlabel_kapi.c
389
kfree(entry->domain);
net/netlabel/netlabel_kapi.c
449
const char *domain,
net/netlabel/netlabel_kapi.c
469
if (domain != NULL) {
net/netlabel/netlabel_kapi.c
470
entry->domain = kstrdup(domain, GFP_ATOMIC);
net/netlabel/netlabel_kapi.c
471
if (entry->domain == NULL)
net/netlabel/netlabel_kapi.c
519
kfree(entry->domain);
net/netlabel/netlabel_kapi.c
56
int netlbl_cfg_map_del(const char *domain,
net/netlabel/netlabel_kapi.c
63
return netlbl_domhsh_remove(domain, family, audit_info);
net/netlabel/netlabel_kapi.c
67
return netlbl_domhsh_remove_af4(domain, addr, mask,
net/netlabel/netlabel_kapi.c
71
return netlbl_domhsh_remove_af6(domain, addr, mask,
net/netlabel/netlabel_kapi.c
95
int netlbl_cfg_unlbl_map_add(const char *domain,
net/netlabel/netlabel_kapi.c
988
dom_entry = netlbl_domhsh_getentry(secattr->domain, family);
net/netlabel/netlabel_mgmt.c
262
kfree(entry->domain);
net/netlabel/netlabel_mgmt.c
290
if (entry->domain != NULL) {
net/netlabel/netlabel_mgmt.c
292
NLBL_MGMT_A_DOMAIN, entry->domain);
net/netlabel/netlabel_mgmt.c
453
char *domain;
net/netlabel/netlabel_mgmt.c
461
domain = nla_data(info->attrs[NLBL_MGMT_A_DOMAIN]);
net/netlabel/netlabel_mgmt.c
462
return netlbl_domhsh_remove(domain, AF_UNSPEC, &audit_info);
net/netlabel/netlabel_mgmt.c
94
entry->domain = kmalloc(tmp_size, GFP_KERNEL);
net/netlabel/netlabel_mgmt.c
95
if (entry->domain == NULL) {
net/netlabel/netlabel_mgmt.c
99
nla_strscpy(entry->domain,
net/sunrpc/auth_gss/gss_mech_switch.c
39
if (pf->domain)
net/sunrpc/auth_gss/gss_mech_switch.c
40
auth_domain_put(pf->domain);
net/sunrpc/auth_gss/gss_mech_switch.c
79
pf->domain = dom;
net/tipc/addr.c
41
bool tipc_in_scope(bool legacy_format, u32 domain, u32 addr)
net/tipc/addr.c
43
if (!domain || (domain == addr))
net/tipc/addr.c
47
if (domain == tipc_cluster_mask(addr)) /* domain <Z.C.0> */
net/tipc/addr.c
49
if (domain == (addr & TIPC_ZONE_CLUSTER_MASK)) /* domain <Z.C.0> */
net/tipc/addr.c
51
if (domain == (addr & TIPC_ZONE_MASK)) /* domain <Z.0.0> */
net/tipc/addr.h
130
bool tipc_in_scope(bool legacy_format, u32 domain, u32 addr);
net/tipc/bearer.c
1013
u32 domain = 0;
net/tipc/bearer.c
1033
domain = nla_get_u32(attrs[TIPC_NLA_BEARER_DOMAIN]);
net/tipc/bearer.c
1047
return tipc_enable_bearer(net, bearer, domain, prio, attrs,
net/tipc/bearer.c
343
b->domain = disc_domain;
net/tipc/bearer.h
170
u32 domain;
net/tipc/discover.c
248
if (!tipc_in_scope(legacy, b->domain, src))
net/tipc/discover.c
305
if (tipc_node(d->domain) && d->num_nodes) {
net/tipc/discover.c
374
d->domain = b->domain;
net/tipc/discover.c
411
d->domain = b->domain;
net/tipc/discover.c
66
u32 domain;
net/tipc/discover.c
85
u32 dest_domain = b->domain;
net/tipc/monitor.c
216
struct tipc_mon_domain *dom_aft = peer->domain;
net/tipc/monitor.c
248
struct tipc_mon_domain *dom = peer->domain;
net/tipc/monitor.c
274
struct tipc_mon_domain *dom = self->domain;
net/tipc/monitor.c
369
kfree(peer->domain);
net/tipc/monitor.c
380
kfree(peer->domain);
net/tipc/monitor.c
381
peer->domain = NULL;
net/tipc/monitor.c
461
dom = peer->domain;
net/tipc/monitor.c
462
peer->domain = NULL;
net/tipc/monitor.c
538
dom = peer->domain;
net/tipc/monitor.c
546
peer->domain = dom;
net/tipc/monitor.c
671
self->domain = dom;
net/tipc/monitor.c
698
kfree(peer->domain);
net/tipc/monitor.c
704
kfree(self->domain);
net/tipc/monitor.c
747
struct tipc_mon_domain *dom = peer->domain;
net/tipc/monitor.c
77
struct tipc_mon_domain *domain;
net/tipc/socket.c
1706
srcaddr->sock.addr.name.domain = 0;
net/tipc/socket.c
1718
srcaddr->member.addr.name.domain = 0;
net/tipc/socket.c
766
addr->addr.name.domain = 0;
scripts/mod/devicetable-offsets.c
259
DEVID_FIELD(ssam_device_id, domain);
scripts/mod/file2alias.c
1309
DEF_FIELD(symval, ssam_device_id, domain);
scripts/mod/file2alias.c
1320
domain, category, alias);
security/landlock/audit.c
137
get_hierarchy(const struct landlock_ruleset *const domain, const size_t layer)
security/landlock/audit.c
139
struct landlock_hierarchy *hierarchy = domain->hierarchy;
security/landlock/audit.c
142
if (WARN_ON_ONCE(layer >= domain->num_layers))
security/landlock/audit.c
145
for (i = domain->num_layers - 1; i > layer; i--) {
security/landlock/audit.c
184
static size_t get_denied_layer(const struct landlock_ruleset *const domain,
security/landlock/audit.c
197
return domain->num_layers - 1;
security/landlock/audit.c
367
if (WARN_ON_ONCE(!subject || !subject->domain ||
security/landlock/audit.c
368
!subject->domain->hierarchy || !request))
security/landlock/audit.c
378
youngest_layer = get_denied_layer(subject->domain,
security/landlock/audit.c
387
get_hierarchy(subject->domain, youngest_layer);
security/landlock/audit.c
391
get_hierarchy(subject->domain, youngest_layer);
security/landlock/cred.c
25
if (old_llcred->domain) {
security/landlock/cred.c
26
landlock_get_ruleset(old_llcred->domain);
security/landlock/cred.c
40
struct landlock_ruleset *const dom = landlock_cred(cred)->domain;
security/landlock/cred.h
129
const struct landlock_ruleset *domain;
security/landlock/cred.h
135
domain = landlock_cred(cred)->domain;
security/landlock/cred.h
136
if (!domain)
security/landlock/cred.h
139
for (layer_level = domain->num_layers - 1; layer_level >= 0;
security/landlock/cred.h
142
.masks = domain->access_masks[layer_level],
security/landlock/cred.h
36
struct landlock_ruleset *domain;
security/landlock/cred.h
73
landlock_put_ruleset(dst->domain);
security/landlock/cred.h
77
landlock_get_ruleset(src->domain);
security/landlock/cred.h
82
return landlock_cred(current_cred())->domain;
security/landlock/cred.h
91
return landlock_cred(__task_cred(task))->domain;
security/landlock/fs.c
1029
static bool collect_domain_accesses(const struct landlock_ruleset *const domain,
security/landlock/fs.c
1036
if (WARN_ON_ONCE(!domain || !mnt_root || !dir || !layer_masks_dom))
security/landlock/fs.c
1041
if (!landlock_init_layer_masks(domain, LANDLOCK_MASK_ACCESS_FS,
security/landlock/fs.c
1050
if (landlock_unmask_layers(find_rule(domain, dir),
security/landlock/fs.c
1170
subject->domain,
security/landlock/fs.c
1173
if (is_access_to_paths_allowed(subject->domain, new_dir,
security/landlock/fs.c
1200
allow_parent1 = collect_domain_accesses(subject->domain, mnt_dir.dentry,
security/landlock/fs.c
1203
allow_parent2 = collect_domain_accesses(subject->domain, mnt_dir.dentry,
security/landlock/fs.c
1217
subject->domain, &mnt_dir, access_request_parent1,
security/landlock/fs.c
1644
subject->domain, &file->f_path,
security/landlock/fs.c
1645
landlock_init_layer_masks(subject->domain,
security/landlock/fs.c
1803
landlock_get_ruleset(new_subject->domain);
security/landlock/fs.c
1808
prev_dom = landlock_file(file)->fown_subject.domain;
security/landlock/fs.c
1820
landlock_put_ruleset_deferred(landlock_file(file)->fown_subject.domain);
security/landlock/fs.c
364
find_rule(const struct landlock_ruleset *const domain,
security/landlock/fs.c
380
rule = landlock_find_rule(domain, id);
security/landlock/fs.c
742
is_access_to_paths_allowed(const struct landlock_ruleset *const domain,
security/landlock/fs.c
787
landlock_union_access_masks(domain).fs;
security/landlock/fs.c
799
if (landlock_init_layer_masks(domain, LANDLOCK_MASK_ACCESS_FS,
security/landlock/fs.c
802
landlock_unmask_layers(find_rule(domain, dentry_child1),
security/landlock/fs.c
808
if (landlock_init_layer_masks(domain, LANDLOCK_MASK_ACCESS_FS,
security/landlock/fs.c
811
landlock_unmask_layers(find_rule(domain, dentry_child2),
security/landlock/fs.c
866
rule = find_rule(domain, walker_path.dentry);
security/landlock/fs.c
960
access_request = landlock_init_layer_masks(subject->domain,
security/landlock/fs.c
963
if (is_access_to_paths_allowed(subject->domain, path, access_request,
security/landlock/net.c
190
rule = landlock_find_rule(subject->domain, id);
security/landlock/net.c
191
access_request = landlock_init_layer_masks(subject->domain,
security/landlock/ruleset.c
679
landlock_init_layer_masks(const struct landlock_ruleset *const domain,
security/landlock/ruleset.c
707
for (size_t i = 0; i < domain->num_layers; i++) {
security/landlock/ruleset.c
708
const access_mask_t handled = get_access_mask(domain, i);
security/landlock/ruleset.c
713
for (size_t i = domain->num_layers; i < ARRAY_SIZE(masks->access); i++)
security/landlock/ruleset.h
230
landlock_union_access_masks(const struct landlock_ruleset *const domain)
security/landlock/ruleset.h
235
for (layer_level = 0; layer_level < domain->num_layers; layer_level++) {
security/landlock/ruleset.h
237
.masks = domain->access_masks[layer_level],
security/landlock/ruleset.h
308
landlock_init_layer_masks(const struct landlock_ruleset *const domain,
security/landlock/syscalls.c
551
landlock_merge_ruleset(new_llcred->domain, ruleset);
security/landlock/syscalls.c
565
landlock_put_ruleset(new_llcred->domain);
security/landlock/syscalls.c
566
new_llcred->domain = new_dom;
security/landlock/task.c
100
err = domain_ptrace(parent_subject->domain, child_dom);
security/landlock/task.c
117
.layer_plus_one = parent_subject->domain->num_layers,
security/landlock/task.c
145
err = domain_ptrace(parent_subject->domain, child_dom);
security/landlock/task.c
163
.layer_plus_one = parent_subject->domain->num_layers,
security/landlock/task.c
236
const struct landlock_ruleset *const domain)
security/landlock/task.c
242
dom_other = landlock_cred(other->sk_socket->file->f_cred)->domain;
security/landlock/task.c
243
return domain_is_scoped(domain, dom_other,
security/landlock/task.c
281
if (!sock_is_scoped(other, subject->domain))
security/landlock/task.c
318
if (!sock_is_scoped(other->sk, subject->domain))
security/landlock/task.c
371
is_scoped = domain_is_scoped(subject->domain,
security/landlock/task.c
407
if (!subject->domain)
security/landlock/task.c
412
is_scoped = domain_is_scoped(subject->domain,
security/selinux/include/security.h
325
int security_node_sid(u16 domain, const void *addr, u32 addrlen, u32 *out_sid);
security/selinux/ss/services.c
2671
int security_node_sid(u16 domain,
security/selinux/ss/services.c
2693
switch (domain) {
security/selinux/ss/services.c
3973
secattr->domain = kstrdup(sym_name(policydb, SYM_TYPES, ctx->type - 1),
security/selinux/ss/services.c
3975
if (secattr->domain == NULL)
security/smack/smack_access.c
552
skp->smk_netlabel.domain = skp->smk_known;
security/tomoyo/audit.c
253
const char *domainname = r->domain->domainname->name;
security/tomoyo/audit.c
373
if (!tomoyo_get_audit(r->domain->ns, r->profile, r->type,
security/tomoyo/common.c
1015
domain = tomoyo_task(p)->domain_info;
security/tomoyo/common.c
1019
domain = tomoyo_find_domain(data + 7);
security/tomoyo/common.c
1021
domain = tomoyo_find_domain_by_qid(pid);
security/tomoyo/common.c
1024
head->w.domain = domain;
security/tomoyo/common.c
1030
if (domain)
security/tomoyo/common.c
1031
head->r.domain = &domain->list;
security/tomoyo/common.c
1035
if (domain && domain->is_deleted)
security/tomoyo/common.c
1098
struct tomoyo_domain_info *domain;
security/tomoyo/common.c
1106
list_for_each_entry_rcu(domain, &tomoyo_domain_list, list,
security/tomoyo/common.c
1109
if (domain == &tomoyo_kernel_domain)
security/tomoyo/common.c
1111
if (domain->is_deleted ||
security/tomoyo/common.c
1112
tomoyo_pathcmp(domain->domainname, &name))
security/tomoyo/common.c
1114
domain->is_deleted = true;
security/tomoyo/common.c
1186
struct tomoyo_domain_info *domain = head->w.domain;
security/tomoyo/common.c
1194
domain = NULL;
security/tomoyo/common.c
1198
domain = tomoyo_find_domain(data);
security/tomoyo/common.c
1200
domain = tomoyo_assign_domain(data, false);
security/tomoyo/common.c
1201
head->w.domain = domain;
security/tomoyo/common.c
1204
if (!domain)
security/tomoyo/common.c
1206
ns = domain->ns;
security/tomoyo/common.c
1211
domain->profile = (u8) idx;
security/tomoyo/common.c
1217
set_bit(idx, domain->group);
security/tomoyo/common.c
1219
clear_bit(idx, domain->group);
security/tomoyo/common.c
1227
domain->flags[idx] = !is_delete;
security/tomoyo/common.c
1230
return tomoyo_write_domain2(ns, &domain->acl_info_list, data,
security/tomoyo/common.c
1639
list_for_each_cookie(head->r.domain, &tomoyo_domain_list) {
security/tomoyo/common.c
1640
struct tomoyo_domain_info *domain =
security/tomoyo/common.c
1641
list_entry(head->r.domain, typeof(*domain), list);
security/tomoyo/common.c
1646
if (domain->is_deleted &&
security/tomoyo/common.c
1650
tomoyo_set_string(head, domain->domainname->name);
security/tomoyo/common.c
1653
domain->profile);
security/tomoyo/common.c
1655
if (domain->flags[i])
security/tomoyo/common.c
1663
if (!test_bit(i, domain->group))
security/tomoyo/common.c
1674
if (!tomoyo_read_domain2(head, &domain->acl_info_list))
security/tomoyo/common.c
1720
struct tomoyo_domain_info *domain = NULL;
security/tomoyo/common.c
1740
domain = tomoyo_task(p)->domain_info;
security/tomoyo/common.c
1742
if (!domain)
security/tomoyo/common.c
1744
tomoyo_io_printf(head, "%u %u ", pid, domain->profile);
security/tomoyo/common.c
1745
tomoyo_set_string(head, domain->domainname->name);
security/tomoyo/common.c
1978
struct tomoyo_domain_info *domain;
security/tomoyo/common.c
2132
static void tomoyo_add_entry(struct tomoyo_domain_info *domain, char *header)
security/tomoyo/common.c
2177
if (!tomoyo_write_domain2(domain->ns, &domain->acl_info_list, buffer,
security/tomoyo/common.c
2238
tomoyo_add_entry(r->domain, entry.query);
security/tomoyo/common.c
2242
entry.domain = r->domain;
security/tomoyo/common.c
2299
struct tomoyo_domain_info *domain = NULL;
security/tomoyo/common.c
2305
domain = ptr->domain;
security/tomoyo/common.c
2309
return domain;
security/tomoyo/common.c
2844
head->w.domain = NULL;
security/tomoyo/common.c
2916
struct tomoyo_domain_info *domain;
security/tomoyo/common.c
2921
list_for_each_entry_rcu(domain, &tomoyo_domain_list, list,
security/tomoyo/common.c
2923
const u8 profile = domain->profile;
security/tomoyo/common.c
2924
struct tomoyo_policy_namespace *ns = domain->ns;
security/tomoyo/common.c
2936
profile, domain->domainname->name);
security/tomoyo/common.c
999
struct tomoyo_domain_info *domain = NULL;
security/tomoyo/common.h
1000
int tomoyo_check_open_permission(struct tomoyo_domain_info *domain,
security/tomoyo/common.h
1010
struct tomoyo_domain_info *domain,
security/tomoyo/common.h
431
struct tomoyo_domain_info *domain;
security/tomoyo/common.h
803
struct list_head *domain;
security/tomoyo/common.h
824
struct tomoyo_domain_info *domain;
security/tomoyo/domain.c
165
const struct tomoyo_domain_info *domain = r->domain;
security/tomoyo/domain.c
167
const struct list_head *list = &domain->acl_info_list;
security/tomoyo/domain.c
184
if (!test_bit(i, domain->group))
security/tomoyo/domain.c
186
list = &domain->ns->acl_group[i++];
security/tomoyo/domain.c
564
const struct tomoyo_domain_info *domain = tomoyo_domain();
security/tomoyo/domain.c
566
e.profile = domain->profile;
security/tomoyo/domain.c
567
memcpy(e.group, domain->group, sizeof(e.group));
security/tomoyo/domain.c
629
ee->r.profile = r->domain->profile;
security/tomoyo/domain.c
630
ee->r.mode = tomoyo_get_mode(r->domain->ns, ee->r.profile,
security/tomoyo/domain.c
705
struct tomoyo_domain_info *domain = NULL;
security/tomoyo/domain.c
835
domain = old_domain;
security/tomoyo/domain.c
846
domain = old_domain;
security/tomoyo/domain.c
856
if (!domain)
security/tomoyo/domain.c
857
domain = tomoyo_assign_domain(ee->tmp, true);
security/tomoyo/domain.c
858
if (domain)
security/tomoyo/domain.c
876
if (!domain)
security/tomoyo/domain.c
877
domain = old_domain;
security/tomoyo/domain.c
883
s->domain_info = domain;
security/tomoyo/domain.c
884
atomic_inc(&domain->users);
security/tomoyo/domain.c
888
ee->r.domain = domain;
security/tomoyo/file.c
584
r->mode = tomoyo_get_mode(r->domain->ns, r->profile, r->type);
security/tomoyo/file.c
616
r->mode = tomoyo_get_mode(r->domain->ns, r->profile, r->type);
security/tomoyo/file.c
758
int tomoyo_check_open_permission(struct tomoyo_domain_info *domain,
security/tomoyo/file.c
774
tomoyo_init_request_info(&r, domain, TOMOYO_MAC_FILE_OPEN)
security/tomoyo/gc.c
252
struct tomoyo_domain_info *domain =
security/tomoyo/gc.c
253
container_of(element, typeof(*domain), list);
security/tomoyo/gc.c
262
list_for_each_entry_safe(acl, tmp, &domain->acl_info_list, list) {
security/tomoyo/gc.c
266
tomoyo_put_name(domain->domainname);
security/tomoyo/gc.c
49
if (head->r.domain == element || head->r.group == element ||
security/tomoyo/gc.c
50
head->r.acl == element || &head->w.domain->list == element)
security/tomoyo/gc.c
534
struct tomoyo_domain_info *domain;
security/tomoyo/gc.c
537
list_for_each_entry_safe(domain, tmp, &tomoyo_domain_list,
security/tomoyo/gc.c
539
tomoyo_collect_acl(&domain->acl_info_list);
security/tomoyo/gc.c
540
if (!domain->is_deleted || atomic_read(&domain->users))
security/tomoyo/gc.c
542
tomoyo_try_to_gc(TOMOYO_ID_DOMAIN, &domain->list);
security/tomoyo/securityfs_if.c
103
const char *domain = tomoyo_domain()->domainname->name;
security/tomoyo/securityfs_if.c
104
loff_t len = strlen(domain);
security/tomoyo/securityfs_if.c
112
if (copy_to_user(buf, domain + pos, len))
security/tomoyo/securityfs_if.c
238
struct tomoyo_domain_info *domain;
security/tomoyo/securityfs_if.c
243
domain = tomoyo_domain();
security/tomoyo/securityfs_if.c
245
if (domain != &tomoyo_kernel_domain)
security/tomoyo/util.c
1020
struct tomoyo_domain_info *domain, const u8 index)
security/tomoyo/util.c
1025
if (!domain)
security/tomoyo/util.c
1026
domain = tomoyo_domain();
security/tomoyo/util.c
1027
r->domain = domain;
security/tomoyo/util.c
1028
profile = domain->profile;
security/tomoyo/util.c
1031
r->mode = tomoyo_get_mode(domain->ns, profile, index);
security/tomoyo/util.c
1047
struct tomoyo_domain_info *domain = r->domain;
security/tomoyo/util.c
1052
if (!domain)
security/tomoyo/util.c
1054
if (READ_ONCE(domain->flags[TOMOYO_DIF_QUOTA_WARNED]))
security/tomoyo/util.c
1056
list_for_each_entry_rcu(ptr, &domain->acl_info_list, list,
security/tomoyo/util.c
1095
if (count < tomoyo_profile(domain->ns, domain->profile)->
security/tomoyo/util.c
1098
WRITE_ONCE(domain->flags[TOMOYO_DIF_QUOTA_WARNED], true);
security/tomoyo/util.c
1103
domain->domainname->name);
security/tomoyo/util.c
614
struct tomoyo_domain_info *domain;
security/tomoyo/util.c
619
list_for_each_entry_rcu(domain, &tomoyo_domain_list, list,
security/tomoyo/util.c
621
if (!domain->is_deleted &&
security/tomoyo/util.c
622
!tomoyo_pathcmp(&name, domain->domainname))
security/tomoyo/util.c
623
return domain;
sound/firewire/amdtp-stream.c
1160
const struct amdtp_domain *d = s->domain;
sound/firewire/amdtp-stream.c
1207
if (s == s->domain->irq_target) {
sound/firewire/amdtp-stream.c
1231
struct amdtp_domain *d = s->domain;
sound/firewire/amdtp-stream.c
1266
struct amdtp_domain *d = s->domain;
sound/firewire/amdtp-stream.c
1341
struct amdtp_domain *d = s->domain;
sound/firewire/amdtp-stream.c
1395
struct amdtp_domain *d = s->domain;
sound/firewire/amdtp-stream.c
1446
struct amdtp_domain *d = s->domain;
sound/firewire/amdtp-stream.c
1562
struct amdtp_domain *d = s->domain;
sound/firewire/amdtp-stream.c
1572
struct amdtp_domain *d = s->domain;
sound/firewire/amdtp-stream.c
1582
struct amdtp_domain *d = s->domain;
sound/firewire/amdtp-stream.c
1640
struct amdtp_domain *d = s->domain;
sound/firewire/amdtp-stream.c
1669
bool is_irq_target = (s == s->domain->irq_target);
sound/firewire/amdtp-stream.c
1732
if (s->domain->replay.enable) {
sound/firewire/amdtp-stream.c
1841
if (s->domain->replay.enable)
sound/firewire/amdtp-stream.c
1933
if (s->domain->replay.enable)
sound/firewire/amdtp-stream.c
2001
s->domain = d;
sound/firewire/amdtp-stream.c
563
struct amdtp_domain *d = s->domain;
sound/firewire/amdtp-stream.h
213
struct amdtp_domain *domain;
sound/firewire/bebob/bebob.h
118
struct amdtp_domain domain;
sound/firewire/bebob/bebob_pcm.c
136
struct amdtp_domain *d = &bebob->domain;
sound/firewire/bebob/bebob_pcm.c
305
return amdtp_domain_stream_pcm_pointer(&bebob->domain,
sound/firewire/bebob/bebob_pcm.c
312
return amdtp_domain_stream_pcm_pointer(&bebob->domain,
sound/firewire/bebob/bebob_pcm.c
320
return amdtp_domain_stream_pcm_ack(&bebob->domain, &bebob->tx_stream);
sound/firewire/bebob/bebob_pcm.c
327
return amdtp_domain_stream_pcm_ack(&bebob->domain, &bebob->rx_stream);
sound/firewire/bebob/bebob_stream.c
427
return amdtp_domain_add_stream(&bebob->domain, stream,
sound/firewire/bebob/bebob_stream.c
491
err = amdtp_domain_init(&bebob->domain);
sound/firewire/bebob/bebob_stream.c
544
amdtp_domain_stop(&bebob->domain);
sound/firewire/bebob/bebob_stream.c
581
err = amdtp_domain_set_events_per_period(&bebob->domain,
sound/firewire/bebob/bebob_stream.c
604
amdtp_domain_stop(&bebob->domain);
sound/firewire/bebob/bebob_stream.c
644
err = amdtp_domain_start(&bebob->domain, tx_init_skip_cycles, true, false);
sound/firewire/bebob/bebob_stream.c
663
if (!amdtp_domain_wait_ready(&bebob->domain, READY_TIMEOUT_MS)) {
sound/firewire/bebob/bebob_stream.c
671
amdtp_domain_stop(&bebob->domain);
sound/firewire/bebob/bebob_stream.c
679
amdtp_domain_stop(&bebob->domain);
sound/firewire/bebob/bebob_stream.c
693
amdtp_domain_destroy(&bebob->domain);
sound/firewire/dice/dice-pcm.c
167
struct amdtp_domain *d = &dice->domain;
sound/firewire/dice/dice-pcm.c
366
return amdtp_domain_stream_pcm_pointer(&dice->domain, stream);
sound/firewire/dice/dice-pcm.c
373
return amdtp_domain_stream_pcm_pointer(&dice->domain, stream);
sound/firewire/dice/dice-pcm.c
381
return amdtp_domain_stream_pcm_ack(&dice->domain, stream);
sound/firewire/dice/dice-pcm.c
389
return amdtp_domain_stream_pcm_ack(&dice->domain, stream);
sound/firewire/dice/dice-stream.c
282
amdtp_domain_stop(&dice->domain);
sound/firewire/dice/dice-stream.c
314
err = amdtp_domain_set_events_per_period(&dice->domain,
sound/firewire/dice/dice-stream.c
368
err = amdtp_domain_add_stream(&dice->domain, stream,
sound/firewire/dice/dice-stream.c
402
amdtp_domain_stop(&dice->domain);
sound/firewire/dice/dice-stream.c
453
err = amdtp_domain_start(&dice->domain, 0, true, false);
sound/firewire/dice/dice-stream.c
457
if (!amdtp_domain_wait_ready(&dice->domain, READY_TIMEOUT_MS)) {
sound/firewire/dice/dice-stream.c
465
amdtp_domain_stop(&dice->domain);
sound/firewire/dice/dice-stream.c
483
amdtp_domain_stop(&dice->domain);
sound/firewire/dice/dice-stream.c
564
err = amdtp_domain_init(&dice->domain);
sound/firewire/dice/dice-stream.c
584
amdtp_domain_destroy(&dice->domain);
sound/firewire/dice/dice-stream.c
602
amdtp_domain_stop(&dice->domain);
sound/firewire/dice/dice.h
113
struct amdtp_domain domain;
sound/firewire/digi00x/digi00x-pcm.c
103
struct amdtp_domain *d = &dg00x->domain;
sound/firewire/digi00x/digi00x-pcm.c
284
return amdtp_domain_stream_pcm_pointer(&dg00x->domain, &dg00x->tx_stream);
sound/firewire/digi00x/digi00x-pcm.c
291
return amdtp_domain_stream_pcm_pointer(&dg00x->domain, &dg00x->rx_stream);
sound/firewire/digi00x/digi00x-pcm.c
298
return amdtp_domain_stream_pcm_ack(&dg00x->domain, &dg00x->tx_stream);
sound/firewire/digi00x/digi00x-pcm.c
305
return amdtp_domain_stream_pcm_ack(&dg00x->domain, &dg00x->rx_stream);
sound/firewire/digi00x/digi00x-stream.c
267
err = amdtp_domain_init(&dg00x->domain);
sound/firewire/digi00x/digi00x-stream.c
282
amdtp_domain_destroy(&dg00x->domain);
sound/firewire/digi00x/digi00x-stream.c
302
amdtp_domain_stop(&dg00x->domain);
sound/firewire/digi00x/digi00x-stream.c
323
err = amdtp_domain_set_events_per_period(&dg00x->domain,
sound/firewire/digi00x/digi00x-stream.c
345
amdtp_domain_stop(&dg00x->domain);
sound/firewire/digi00x/digi00x-stream.c
370
err = amdtp_domain_add_stream(&dg00x->domain, &dg00x->rx_stream,
sound/firewire/digi00x/digi00x-stream.c
375
err = amdtp_domain_add_stream(&dg00x->domain, &dg00x->tx_stream,
sound/firewire/digi00x/digi00x-stream.c
384
err = amdtp_domain_start(&dg00x->domain, 0, true, true);
sound/firewire/digi00x/digi00x-stream.c
388
if (!amdtp_domain_wait_ready(&dg00x->domain, READY_TIMEOUT_MS)) {
sound/firewire/digi00x/digi00x-stream.c
396
amdtp_domain_stop(&dg00x->domain);
sound/firewire/digi00x/digi00x-stream.c
405
amdtp_domain_stop(&dg00x->domain);
sound/firewire/digi00x/digi00x.h
60
struct amdtp_domain domain;
sound/firewire/fireface/ff-pcm.c
142
struct amdtp_domain *d = &ff->domain;
sound/firewire/fireface/ff-pcm.c
325
return amdtp_domain_stream_pcm_pointer(&ff->domain, &ff->tx_stream);
sound/firewire/fireface/ff-pcm.c
332
return amdtp_domain_stream_pcm_pointer(&ff->domain, &ff->rx_stream);
sound/firewire/fireface/ff-pcm.c
339
return amdtp_domain_stream_pcm_ack(&ff->domain, &ff->tx_stream);
sound/firewire/fireface/ff-pcm.c
346
return amdtp_domain_stream_pcm_ack(&ff->domain, &ff->rx_stream);
sound/firewire/fireface/ff-stream.c
103
amdtp_domain_destroy(&ff->domain);
sound/firewire/fireface/ff-stream.c
125
amdtp_domain_stop(&ff->domain);
sound/firewire/fireface/ff-stream.c
156
err = amdtp_domain_set_events_per_period(&ff->domain,
sound/firewire/fireface/ff-stream.c
177
amdtp_domain_stop(&ff->domain);
sound/firewire/fireface/ff-stream.c
192
err = amdtp_domain_add_stream(&ff->domain, &ff->rx_stream,
sound/firewire/fireface/ff-stream.c
197
err = amdtp_domain_add_stream(&ff->domain, &ff->tx_stream,
sound/firewire/fireface/ff-stream.c
206
err = amdtp_domain_start(&ff->domain, 0, true, true);
sound/firewire/fireface/ff-stream.c
210
if (!amdtp_domain_wait_ready(&ff->domain, READY_TIMEOUT_MS)) {
sound/firewire/fireface/ff-stream.c
222
amdtp_domain_stop(&ff->domain);
sound/firewire/fireface/ff-stream.c
231
amdtp_domain_stop(&ff->domain);
sound/firewire/fireface/ff-stream.c
241
amdtp_domain_stop(&ff->domain);
sound/firewire/fireface/ff-stream.c
88
err = amdtp_domain_init(&ff->domain);
sound/firewire/fireface/ff.h
99
struct amdtp_domain domain;
sound/firewire/fireworks/fireworks.h
108
struct amdtp_domain domain;
sound/firewire/fireworks/fireworks_pcm.c
176
struct amdtp_domain *d = &efw->domain;
sound/firewire/fireworks/fireworks_pcm.c
336
return amdtp_domain_stream_pcm_pointer(&efw->domain, &efw->tx_stream);
sound/firewire/fireworks/fireworks_pcm.c
342
return amdtp_domain_stream_pcm_pointer(&efw->domain, &efw->rx_stream);
sound/firewire/fireworks/fireworks_pcm.c
349
return amdtp_domain_stream_pcm_ack(&efw->domain, &efw->tx_stream);
sound/firewire/fireworks/fireworks_pcm.c
356
return amdtp_domain_stream_pcm_ack(&efw->domain, &efw->rx_stream);
sound/firewire/fireworks/fireworks_stream.c
142
err = amdtp_domain_init(&efw->domain);
sound/firewire/fireworks/fireworks_stream.c
205
amdtp_domain_stop(&efw->domain);
sound/firewire/fireworks/fireworks_stream.c
235
err = amdtp_domain_set_events_per_period(&efw->domain,
sound/firewire/fireworks/fireworks_stream.c
258
amdtp_domain_stop(&efw->domain);
sound/firewire/fireworks/fireworks_stream.c
288
err = amdtp_domain_start(&efw->domain, tx_init_skip_cycles, true, false);
sound/firewire/fireworks/fireworks_stream.c
292
if (!amdtp_domain_wait_ready(&efw->domain, READY_TIMEOUT_MS)) {
sound/firewire/fireworks/fireworks_stream.c
300
amdtp_domain_stop(&efw->domain);
sound/firewire/fireworks/fireworks_stream.c
311
amdtp_domain_stop(&efw->domain);
sound/firewire/fireworks/fireworks_stream.c
323
amdtp_domain_stop(&efw->domain);
sound/firewire/fireworks/fireworks_stream.c
334
amdtp_domain_destroy(&efw->domain);
sound/firewire/fireworks/fireworks_stream.c
82
err = amdtp_domain_add_stream(&efw->domain, stream,
sound/firewire/motu/amdtp-motu.c
341
p->cache->tx_cycle_count = (s->domain->processing_cycle.tx_start % CYCLES_PER_SECOND);
sound/firewire/motu/amdtp-motu.c
404
p->cache->rx_cycle_count = (s->domain->processing_cycle.rx_start % CYCLES_PER_SECOND);
sound/firewire/motu/motu-pcm.c
133
struct amdtp_domain *d = &motu->domain;
sound/firewire/motu/motu-pcm.c
305
return amdtp_domain_stream_pcm_pointer(&motu->domain, &motu->tx_stream);
sound/firewire/motu/motu-pcm.c
311
return amdtp_domain_stream_pcm_pointer(&motu->domain, &motu->rx_stream);
sound/firewire/motu/motu-pcm.c
318
return amdtp_domain_stream_pcm_ack(&motu->domain, &motu->tx_stream);
sound/firewire/motu/motu-pcm.c
325
return amdtp_domain_stream_pcm_ack(&motu->domain, &motu->rx_stream);
sound/firewire/motu/motu-stream.c
150
amdtp_domain_stop(&motu->domain);
sound/firewire/motu/motu-stream.c
180
err = amdtp_domain_set_events_per_period(&motu->domain,
sound/firewire/motu/motu-stream.c
237
amdtp_domain_stop(&motu->domain);
sound/firewire/motu/motu-stream.c
275
err = amdtp_domain_add_stream(&motu->domain, &motu->tx_stream,
sound/firewire/motu/motu-stream.c
280
err = amdtp_domain_add_stream(&motu->domain, &motu->rx_stream,
sound/firewire/motu/motu-stream.c
293
err = amdtp_domain_start(&motu->domain, 0, true, false);
sound/firewire/motu/motu-stream.c
297
if (!amdtp_domain_wait_ready(&motu->domain, READY_TIMEOUT_MS)) {
sound/firewire/motu/motu-stream.c
313
amdtp_domain_stop(&motu->domain);
sound/firewire/motu/motu-stream.c
321
amdtp_domain_stop(&motu->domain);
sound/firewire/motu/motu-stream.c
381
err = amdtp_domain_init(&motu->domain);
sound/firewire/motu/motu-stream.c
394
amdtp_domain_destroy(&motu->domain);
sound/firewire/motu/motu.h
79
struct amdtp_domain domain;
sound/firewire/oxfw/oxfw-pcm.c
173
struct amdtp_domain *d = &oxfw->domain;
sound/firewire/oxfw/oxfw-pcm.c
370
return amdtp_domain_stream_pcm_pointer(&oxfw->domain, &oxfw->tx_stream);
sound/firewire/oxfw/oxfw-pcm.c
376
return amdtp_domain_stream_pcm_pointer(&oxfw->domain, &oxfw->rx_stream);
sound/firewire/oxfw/oxfw-pcm.c
383
return amdtp_domain_stream_pcm_ack(&oxfw->domain, &oxfw->tx_stream);
sound/firewire/oxfw/oxfw-pcm.c
390
return amdtp_domain_stream_pcm_ack(&oxfw->domain, &oxfw->rx_stream);
sound/firewire/oxfw/oxfw-stream.c
117
err = amdtp_domain_add_stream(&oxfw->domain, stream,
sound/firewire/oxfw/oxfw-stream.c
287
amdtp_domain_stop(&oxfw->domain);
sound/firewire/oxfw/oxfw-stream.c
319
err = amdtp_domain_set_events_per_period(&oxfw->domain,
sound/firewire/oxfw/oxfw-stream.c
341
amdtp_domain_stop(&oxfw->domain);
sound/firewire/oxfw/oxfw-stream.c
385
err = amdtp_domain_start(&oxfw->domain, tx_init_skip_cycles, replay_seq, false);
sound/firewire/oxfw/oxfw-stream.c
389
if (!amdtp_domain_wait_ready(&oxfw->domain, READY_TIMEOUT_MS)) {
sound/firewire/oxfw/oxfw-stream.c
397
amdtp_domain_stop(&oxfw->domain);
sound/firewire/oxfw/oxfw-stream.c
409
amdtp_domain_stop(&oxfw->domain);
sound/firewire/oxfw/oxfw-stream.c
450
err = amdtp_domain_init(&oxfw->domain);
sound/firewire/oxfw/oxfw-stream.c
464
amdtp_domain_destroy(&oxfw->domain);
sound/firewire/oxfw/oxfw-stream.c
474
amdtp_domain_stop(&oxfw->domain);
sound/firewire/oxfw/oxfw.h
92
struct amdtp_domain domain;
sound/firewire/tascam/tascam-pcm.c
213
return amdtp_domain_stream_pcm_pointer(&tscm->domain, &tscm->tx_stream);
sound/firewire/tascam/tascam-pcm.c
220
return amdtp_domain_stream_pcm_pointer(&tscm->domain, &tscm->rx_stream);
sound/firewire/tascam/tascam-pcm.c
227
return amdtp_domain_stream_pcm_ack(&tscm->domain, &tscm->tx_stream);
sound/firewire/tascam/tascam-pcm.c
234
return amdtp_domain_stream_pcm_ack(&tscm->domain, &tscm->rx_stream);
sound/firewire/tascam/tascam-pcm.c
46
struct amdtp_domain *d = &tscm->domain;
sound/firewire/tascam/tascam-stream.c
360
err = amdtp_domain_init(&tscm->domain);
sound/firewire/tascam/tascam-stream.c
372
amdtp_domain_stop(&tscm->domain);
sound/firewire/tascam/tascam-stream.c
382
amdtp_domain_destroy(&tscm->domain);
sound/firewire/tascam/tascam-stream.c
400
amdtp_domain_stop(&tscm->domain);
sound/firewire/tascam/tascam-stream.c
421
err = amdtp_domain_set_events_per_period(&tscm->domain,
sound/firewire/tascam/tascam-stream.c
445
amdtp_domain_stop(&tscm->domain);
sound/firewire/tascam/tascam-stream.c
470
err = amdtp_domain_add_stream(&tscm->domain, &tscm->rx_stream, tscm->rx_resources.channel,
sound/firewire/tascam/tascam-stream.c
475
err = amdtp_domain_add_stream(&tscm->domain, &tscm->tx_stream, tscm->tx_resources.channel,
sound/firewire/tascam/tascam-stream.c
492
err = amdtp_domain_start(&tscm->domain, tx_init_skip_cycles, true, true);
sound/firewire/tascam/tascam-stream.c
496
if (!amdtp_domain_wait_ready(&tscm->domain, READY_TIMEOUT_MS)) {
sound/firewire/tascam/tascam-stream.c
504
amdtp_domain_stop(&tscm->domain);
sound/firewire/tascam/tascam-stream.c
513
amdtp_domain_stop(&tscm->domain);
sound/firewire/tascam/tascam.h
99
struct amdtp_domain domain;
sound/pci/emu10k1/emu10k1_main.c
1445
struct iommu_domain *domain;
sound/pci/emu10k1/emu10k1_main.c
1449
domain = iommu_get_domain_for_dev(emu->card->dev);
sound/pci/emu10k1/emu10k1_main.c
1450
if (!domain || domain->type == IOMMU_DOMAIN_IDENTITY)
sound/soc/codecs/cs48l32.h
168
#define CS48L32_RATE_CONTROL(name, domain) SOC_ENUM(name, cs48l32_sample_rate[(domain) - 1])
sound/soc/codecs/rt5677.c
4831
return irq_create_mapping(rt5677->domain, irq);
sound/soc/codecs/rt5677.c
5338
virq = irq_find_mapping(rt5677->domain, i);
sound/soc/codecs/rt5677.c
5398
virq = irq_find_mapping(rt5677->domain, i);
sound/soc/codecs/rt5677.c
5514
rt5677->domain = irq_domain_create_linear(dev_fwnode(&i2c->dev),
sound/soc/codecs/rt5677.c
5516
if (!rt5677->domain) {
sound/soc/codecs/rt5677.h
1796
struct irq_domain *domain;
sound/soc/intel/avs/avs.h
307
u8 core_id, u8 domain, void *param, u32 param_size,
sound/soc/intel/avs/dsp.c
226
u8 core_id, u8 domain, void *param, u32 param_size,
sound/soc/intel/avs/dsp.c
256
core_id, domain, param, param_size);
sound/soc/intel/avs/messages.c
143
u8 ppl_id, u8 core_id, u8 domain,
sound/soc/intel/avs/messages.c
155
msg.ext.init_instance.proc_domain = domain;
sound/soc/intel/avs/messages.h
324
u8 ppl_id, u8 core_id, u8 domain,
sound/soc/intel/avs/path.c
435
t->domain, cfg, cfg_size, &mod->instance_id);
sound/soc/intel/avs/path.c
470
t->domain, cfg, cfg_size, &mod->instance_id);
sound/soc/intel/avs/path.c
595
t->domain, cfg, cfg_size, &mod->instance_id);
sound/soc/intel/avs/path.c
631
t->core_id, t->domain, &cfg, sizeof(cfg),
sound/soc/intel/avs/path.c
648
t->core_id, t->domain, &cfg, sizeof(cfg),
sound/soc/intel/avs/path.c
668
t->core_id, t->domain, &cfg, sizeof(cfg),
sound/soc/intel/avs/path.c
687
t->core_id, t->domain, &cfg, sizeof(cfg),
sound/soc/intel/avs/path.c
705
t->core_id, t->domain, &cfg, sizeof(cfg),
sound/soc/intel/avs/path.c
722
t->core_id, t->domain, &cfg, sizeof(cfg),
sound/soc/intel/avs/path.c
739
t->core_id, t->domain, &cfg, sizeof(cfg),
sound/soc/intel/avs/path.c
755
t->core_id, t->domain, &cfg, sizeof(cfg),
sound/soc/intel/avs/path.c
794
t->core_id, t->domain, cfg, cfg_size,
sound/soc/intel/avs/topology.c
1183
.offset = offsetof(struct avs_tplg_module, domain),
sound/soc/intel/avs/topology.h
221
u8 domain;
sound/soc/qcom/qdsp6/audioreach.c
338
cfg->domain.proc_domain = cont->proc_domain;
sound/soc/qcom/qdsp6/audioreach.c
55
struct apm_cont_prop_id_domain domain;
sound/soc/qcom/qdsp6/q6usb.c
392
data->priv.domain = iommu_get_domain_for_dev(&pdev->dev);
sound/soc/sof/ipc4-topology.c
42
enum sof_comp_domain domain;
sound/soc/sof/ipc4-topology.c
56
return sof_domains[i].domain;
sound/soc/ti/j721e-evm.c
176
struct j721e_audio_domain *domain = &priv->audio_domains[audio_domain];
sound/soc/ti/j721e-evm.c
205
if (domain->parent_clk_id == -1 || priv->hsdiv_rates[domain->parent_clk_id] != scki) {
sound/soc/ti/j721e-evm.c
212
if (domain->parent_clk_id != clk_id) {
sound/soc/ti/j721e-evm.c
213
ret = clk_set_parent(domain->codec.target,
sound/soc/ti/j721e-evm.c
214
domain->codec.parent[clk_id]);
sound/soc/ti/j721e-evm.c
218
ret = clk_set_parent(domain->mcasp.target,
sound/soc/ti/j721e-evm.c
219
domain->mcasp.parent[clk_id]);
sound/soc/ti/j721e-evm.c
223
domain->parent_clk_id = clk_id;
sound/soc/ti/j721e-evm.c
226
ret = clk_set_rate(domain->codec.target, scki);
sound/soc/ti/j721e-evm.c
233
ret = clk_set_rate(domain->mcasp.target, scki);
sound/soc/ti/j721e-evm.c
235
priv->hsdiv_rates[domain->parent_clk_id] = scki;
sound/soc/ti/j721e-evm.c
259
struct j721e_audio_domain *domain = &priv->audio_domains[domain_id];
sound/soc/ti/j721e-evm.c
268
domain->active++;
sound/soc/ti/j721e-evm.c
305
domain->active--;
sound/soc/ti/j721e-evm.c
318
struct j721e_audio_domain *domain = &priv->audio_domains[domain_id];
sound/soc/ti/j721e-evm.c
328
if (domain->rate && domain->rate != params_rate(params)) {
sound/soc/ti/j721e-evm.c
351
sysclk_rate = priv->hsdiv_rates[domain->parent_clk_id];
sound/soc/ti/j721e-evm.c
370
domain->rate = params_rate(params);
sound/soc/ti/j721e-evm.c
384
struct j721e_audio_domain *domain = &priv->audio_domains[domain_id];
sound/soc/ti/j721e-evm.c
388
domain->active--;
sound/soc/ti/j721e-evm.c
389
if (!domain->active) {
sound/soc/ti/j721e-evm.c
390
domain->rate = 0;
sound/soc/ti/j721e-evm.c
391
domain->active_link = 0;
sound/soc/ti/j721e-evm.c
407
struct j721e_audio_domain *domain = &priv->audio_domains[domain_id];
sound/soc/ti/j721e-evm.c
418
sysclk_rate = priv->hsdiv_rates[domain->parent_clk_id];
sound/soc/ti/j721e-evm.c
622
struct j721e_audio_domain *domain;
sound/soc/ti/j721e-evm.c
639
domain = &priv->audio_domains[J721E_AUDIO_DOMAIN_CPB];
sound/soc/ti/j721e-evm.c
640
ret = j721e_get_clocks(priv->dev, &domain->codec, "cpb-codec-scki");
sound/soc/ti/j721e-evm.c
644
ret = j721e_get_clocks(priv->dev, &domain->mcasp, "cpb-mcasp-auxclk");
sound/soc/ti/j721e-evm.c
724
struct j721e_audio_domain *domain;
sound/soc/ti/j721e-evm.c
751
domain = &priv->audio_domains[J721E_AUDIO_DOMAIN_IVI];
sound/soc/ti/j721e-evm.c
752
ret = j721e_get_clocks(priv->dev, &domain->codec, "ivi-codec-scki");
sound/soc/ti/j721e-evm.c
756
ret = j721e_get_clocks(priv->dev, &domain->mcasp, "ivi-mcasp-auxclk");
sound/usb/qcom/qc_audio_offload.c
534
umap_size = iommu_unmap(uaudio_qdev->data->domain, iova, mapped_iova_size);
sound/usb/qcom/qc_audio_offload.c
588
iommu_map(uaudio_qdev->data->domain, iova, pa, size, prot, GFP_KERNEL);
sound/usb/qcom/qc_audio_offload.c
621
ret = iommu_map(uaudio_qdev->data->domain, iova_sg, pa_sg, sg_len,
tools/lib/perf/include/perf/event.h
556
__u16 domain;
tools/perf/arch/x86/util/iostat.c
111
const struct iio_root_ports_list * const list, u32 domain, u8 bus)
tools/perf/arch/x86/util/iostat.c
119
if (rp && rp->domain == domain && rp->bus == bus)
tools/perf/arch/x86/util/iostat.c
149
u32 domain;
tools/perf/arch/x86/util/iostat.c
163
ret = sscanf(buf, "%04x:%02hhx", &domain, &bus);
tools/perf/arch/x86/util/iostat.c
170
rp = iio_root_port_new(domain, bus, die, pmu_idx);
tools/perf/arch/x86/util/iostat.c
226
static int iio_root_port_parse_str(u32 *domain, u8 *bus, char *str)
tools/perf/arch/x86/util/iostat.c
238
if (ret || sscanf(str, "%08x:%02hhx", domain, bus) != 2)
tools/perf/arch/x86/util/iostat.c
253
u32 domain;
tools/perf/arch/x86/util/iostat.c
267
if (!iio_root_port_parse_str(&domain, &bus, tok)) {
tools/perf/arch/x86/util/iostat.c
268
rp = iio_root_port_find_by_notation(*list, domain, bus);
tools/perf/arch/x86/util/iostat.c
277
domain, bus))
tools/perf/arch/x86/util/iostat.c
279
domain, bus);
tools/perf/arch/x86/util/iostat.c
413
config->csv_sep, rp->domain, rp->bus,
tools/perf/arch/x86/util/iostat.c
416
sprintf(prefix, "%04x:%02x%s", rp->domain, rp->bus,
tools/perf/arch/x86/util/iostat.c
62
u32 domain;
tools/perf/arch/x86/util/iostat.c
81
rp->die, rp->pmu_idx, rp->domain, rp->bus);
tools/perf/arch/x86/util/iostat.c
84
static struct iio_root_port *iio_root_port_new(u32 domain, u8 bus,
tools/perf/arch/x86/util/iostat.c
90
p->domain = domain;
tools/perf/builtin-sched.c
4305
dinfo1 = cd_info1->domains[ds1->domain];
tools/perf/builtin-sched.c
4308
dinfo2 = cd_info2->domains[ds2->domain];
tools/perf/builtin-sched.c
4311
if (dinfo2 && dinfo1->domain != dinfo2->domain) {
tools/perf/builtin-sched.c
4322
dinfo1->domain);
tools/perf/builtin-sched.c
4329
cs1->cpu, dinfo1->domain);
tools/perf/trace/beauty/socket.c
23
int domain = syscall_arg__val(arg, 0);
tools/perf/trace/beauty/socket.c
25
if (domain == AF_INET || domain == AF_INET6)
tools/perf/util/env.h
58
u32 domain;
tools/perf/util/event.c
605
size_t size = fprintf(fp, "\ndomain%u ", ds->domain);
tools/perf/util/header.c
1640
u32 cpu, domain;
tools/perf/util/header.c
1691
retval = sscanf(line, "domain%u %s %s %*s", &domain, dname,
tools/perf/util/header.c
1700
retval = sscanf(line, "domain%u %s %*s", &domain, cpumask);
tools/perf/util/header.c
1705
domain_info->domain = domain;
tools/perf/util/header.c
1706
if (domain > *max_sched_domains)
tools/perf/util/header.c
1707
*max_sched_domains = domain;
tools/perf/util/header.c
1764
ret = do_write(ff, &cd_map[i]->domains[j]->domain, sizeof(u32));
tools/perf/util/header.c
2449
fprintf(fp, "# Domain : %u\n", d_info->domain);
tools/perf/util/header.c
3614
u32 schedstat_version, max_sched_domains, cpu, domain, nr_domains;
tools/perf/util/header.c
3663
if (do_read_u32(ff, &domain))
tools/perf/util/header.c
3670
assert(cd_map[cpu]->domains[domain] == NULL);
tools/perf/util/header.c
3671
cd_map[cpu]->domains[domain] = d_info;
tools/perf/util/header.c
3672
d_info->domain = domain;
tools/perf/util/synthetic-events.c
2624
ds->domain = d_num;
tools/power/cpupower/utils/helpers/helpers.h
129
extern struct pci_dev *pci_acc_init(struct pci_access **pacc, int domain,
tools/power/cpupower/utils/helpers/pci.c
26
struct pci_dev *pci_acc_init(struct pci_access **pacc, int domain, int bus,
tools/power/cpupower/utils/helpers/pci.c
37
filter_nb_link.domain = domain;
tools/power/x86/turbostat/turbostat.c
4797
int get_rapl_counters(int cpu, unsigned int domain, struct core_data *c, struct pkg_data *p)
tools/power/x86/turbostat/turbostat.c
4804
fprintf(stderr, "%s: cpu%d domain%d\n", __func__, cpu, domain);
tools/power/x86/turbostat/turbostat.c
4807
assert(domain < rapl_counter_info_perdomain_size);
tools/power/x86/turbostat/turbostat.c
4809
rci = &rapl_counter_info_perdomain[domain];
tools/power/x86/turbostat/turbostat.c
5103
unsigned int domain;
tools/power/x86/turbostat/turbostat.c
5108
domain = cpu_to_domain(pp, cpu);
tools/power/x86/turbostat/turbostat.c
5109
assert(domain < pp->num_domains);
tools/power/x86/turbostat/turbostat.c
5111
fd_counter = pp->fd_perf_per_domain[domain];
tools/power/x86/turbostat/turbostat.c
5901
for (size_t domain = 0; domain < pp->num_domains; ++domain) {
tools/power/x86/turbostat/turbostat.c
5902
if (pp->fd_perf_per_domain[domain] != -1) {
tools/power/x86/turbostat/turbostat.c
5903
close(pp->fd_perf_per_domain[domain]);
tools/power/x86/turbostat/turbostat.c
5904
pp->fd_perf_per_domain[domain] = -1;
tools/testing/selftests/bpf/prog_tests/sock_create.c
126
.domain = AF_INET6,
tools/testing/selftests/bpf/prog_tests/sock_create.c
147
.domain = AF_INET,
tools/testing/selftests/bpf/prog_tests/sock_create.c
168
.domain = AF_INET6,
tools/testing/selftests/bpf/prog_tests/sock_create.c
20
int domain;
tools/testing/selftests/bpf/prog_tests/sock_create.c
201
.domain = AF_INET,
tools/testing/selftests/bpf/prog_tests/sock_create.c
234
.domain = AF_INET,
tools/testing/selftests/bpf/prog_tests/sock_create.c
250
.domain = AF_INET,
tools/testing/selftests/bpf/prog_tests/sock_create.c
297
sock_fd = socket(test->domain, test->type, test->protocol);
tools/testing/selftests/bpf/prog_tests/sock_create.c
43
.domain = AF_INET,
tools/testing/selftests/bpf/prog_tests/sock_create.c
64
.domain = AF_INET6,
tools/testing/selftests/bpf/prog_tests/sock_create.c
95
.domain = AF_INET,
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
123
.domain = AF_INET6,
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
153
.domain = AF_INET,
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
17
int domain;
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
183
.domain = AF_INET,
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
214
.domain = AF_INET,
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
245
.domain = AF_INET6,
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
260
.domain = AF_INET,
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
273
.domain = AF_INET6,
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
305
static int bind_sock(int domain, int type, const char *ip,
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
315
sockfd = socket(domain, type, 0);
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
321
if (domain == AF_INET) {
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
324
addr4->sin_family = domain;
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
326
if (inet_pton(domain, ip, (void *)&addr4->sin_addr) != 1)
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
328
} else if (domain == AF_INET6) {
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
331
addr6->sin6_family = domain;
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
333
if (inet_pton(domain, ip, (void *)&addr6->sin6_addr) != 1)
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
353
if (domain == AF_INET)
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
388
res = bind_sock(test->domain, test->type, test->ip, test->port,
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
81
.domain = AF_INET,
tools/testing/selftests/bpf/prog_tests/sock_post_bind.c
94
.domain = AF_INET6,
tools/testing/selftests/bpf/prog_tests/socket_helpers.h
421
int domain, type;
tools/testing/selftests/bpf/prog_tests/socket_helpers.h
423
opt_len = sizeof(domain);
tools/testing/selftests/bpf/prog_tests/socket_helpers.h
424
if (getsockopt(sock_fd, SOL_SOCKET, SO_DOMAIN, &domain, &opt_len))
tools/testing/selftests/bpf/prog_tests/socket_helpers.h
431
switch (domain) {
tools/testing/selftests/landlock/audit.h
338
size_t domain;
tools/testing/selftests/landlock/audit.h
347
records->domain = 0;
tools/testing/selftests/landlock/audit.h
364
records->domain++;
tools/testing/selftests/landlock/audit_test.c
415
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/audit_test.c
604
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/common.h
224
int domain;
tools/testing/selftests/landlock/fs_test.c
7155
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/fs_test.c
7208
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/fs_test.c
7225
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7242
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7259
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7285
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/fs_test.c
7303
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7323
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7343
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7363
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7383
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7403
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7423
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7443
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7468
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/fs_test.c
7493
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/fs_test.c
7515
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/fs_test.c
7542
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/fs_test.c
7580
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7598
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7622
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/fs_test.c
7643
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/net_test.c
1075
const int domain;
tools/testing/selftests/landlock/net_test.c
108
ret = socket(srv->protocol.domain, srv->protocol.type | SOCK_CLOEXEC,
tools/testing/selftests/landlock/net_test.c
1081
.domain = variant->domain,
tools/testing/selftests/landlock/net_test.c
1100
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
1107
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
1114
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
1121
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
1128
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
1135
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
1142
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
1149
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
122
switch (srv->protocol.domain) {
tools/testing/selftests/landlock/net_test.c
149
switch (srv->protocol.domain) {
tools/testing/selftests/landlock/net_test.c
1509
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
1563
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
1694
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
1704
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
1714
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
172
switch (prot->domain) {
tools/testing/selftests/landlock/net_test.c
1724
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
1940
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
195
switch (srv->protocol.domain) {
tools/testing/selftests/landlock/net_test.c
1950
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
1996
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/net_test.c
2024
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/net_test.c
231
switch (srv->protocol.domain) {
tools/testing/selftests/landlock/net_test.c
275
.domain = AF_UNSPEC,
tools/testing/selftests/landlock/net_test.c
302
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
314
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
325
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
336
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
348
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
359
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
370
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
380
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
390
.domain = AF_UNIX,
tools/testing/selftests/landlock/net_test.c
400
.domain = AF_UNIX,
tools/testing/selftests/landlock/net_test.c
410
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
422
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
433
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
444
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
456
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
467
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
478
.domain = AF_INET,
tools/testing/selftests/landlock/net_test.c
488
.domain = AF_INET6,
tools/testing/selftests/landlock/net_test.c
498
.domain = AF_UNIX,
tools/testing/selftests/landlock/net_test.c
508
.domain = AF_UNIX,
tools/testing/selftests/landlock/net_test.c
560
if (srv->protocol.domain == AF_UNIX) {
tools/testing/selftests/landlock/net_test.c
57
switch (prot.domain) {
tools/testing/selftests/landlock/net_test.c
60
srv->ipv4_addr.sin_family = prot.domain;
tools/testing/selftests/landlock/net_test.c
66
srv->ipv6_addr.sin6_family = prot.domain;
tools/testing/selftests/landlock/net_test.c
772
if (variant->prot.domain == AF_INET) {
tools/testing/selftests/landlock/net_test.c
778
} else if (variant->prot.domain == AF_INET6) {
tools/testing/selftests/landlock/net_test.c
800
if (variant->prot.domain == AF_INET) {
tools/testing/selftests/landlock/net_test.c
806
} else if (variant->prot.domain == AF_INET6) {
tools/testing/selftests/landlock/net_test.c
817
if (variant->prot.domain == AF_INET ||
tools/testing/selftests/landlock/net_test.c
818
variant->prot.domain == AF_INET6) {
tools/testing/selftests/landlock/net_test.c
883
if (self->srv0.protocol.domain == AF_UNIX &&
tools/testing/selftests/landlock/net_test.c
892
if (self->srv0.protocol.domain == AF_UNIX &&
tools/testing/selftests/landlock/net_test.c
91
return (prot->domain == AF_INET || prot->domain == AF_INET6) &&
tools/testing/selftests/landlock/net_test.c
922
if (self->srv0.protocol.domain == AF_UNIX &&
tools/testing/selftests/landlock/net_test.c
995
.domain = AF_INET,
tools/testing/selftests/landlock/ptrace_test.c
345
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/ptrace_test.c
384
EXPECT_EQ(1, records.domain);
tools/testing/selftests/landlock/ptrace_test.c
430
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/scoped_abstract_unix_test.c
315
EXPECT_EQ(0, records.domain);
tools/testing/selftests/landlock/scoped_abstract_unix_test.c
744
const int domain;
tools/testing/selftests/landlock/scoped_abstract_unix_test.c
750
.domain = SCOPE_SANDBOX,
tools/testing/selftests/landlock/scoped_abstract_unix_test.c
756
.domain = OTHER_SANDBOX,
tools/testing/selftests/landlock/scoped_abstract_unix_test.c
762
.domain = NO_SANDBOX,
tools/testing/selftests/landlock/scoped_abstract_unix_test.c
822
if (variant->domain == SCOPE_SANDBOX)
tools/testing/selftests/landlock/scoped_abstract_unix_test.c
825
else if (variant->domain == OTHER_SANDBOX)
tools/testing/selftests/landlock/scoped_abstract_unix_test.c
865
if (variant->domain == SCOPE_SANDBOX) {
tools/testing/selftests/landlock/scoped_abstract_unix_test.c
880
if (variant->domain == SCOPE_SANDBOX) {
tools/testing/selftests/landlock/scoped_abstract_unix_test.c
891
if (variant->domain == SCOPE_SANDBOX) {
tools/testing/selftests/landlock/scoped_abstract_unix_test.c
955
if (variant->domain != SCOPE_SANDBOX) {
tools/testing/selftests/net/bind_bhash.c
34
int domain = use_v6 ? AF_INET6 : AF_INET;
tools/testing/selftests/net/bind_bhash.c
36
sock_fd = socket(domain, SOCK_STREAM, 0);
tools/testing/selftests/net/bind_bhash.c
42
hint.ai_family = domain;
tools/testing/selftests/net/icmp_rfc4884.c
114
static void set_addr(struct sockaddr_inet *addr, int domain,
tools/testing/selftests/net/icmp_rfc4884.c
119
switch (domain) {
tools/testing/selftests/net/icmp_rfc4884.c
140
set_addr(&addr, info->domain, src_port);
tools/testing/selftests/net/icmp_rfc4884.c
335
.domain = AF_INET,
tools/testing/selftests/net/icmp_rfc4884.c
345
.domain = AF_INET6,
tools/testing/selftests/net/icmp_rfc4884.c
43
int domain;
tools/testing/selftests/net/icmp_rfc4884.c
656
dgram = socket(v->info.domain, SOCK_DGRAM, 0);
tools/testing/selftests/net/icmp_rfc4884.c
662
raw = socket(v->info.domain, SOCK_RAW, v->info.proto);
tools/testing/selftests/net/icmp_rfc4884.c
669
set_addr(&addr, v->info.domain, 0);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
105
fd = do_setup_tx(domain, type, protocol);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
212
static void do_test(int domain, int type, int protocol)
tools/testing/selftests/net/io_uring_zerocopy_tx.c
218
do_tx(domain, type, protocol);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
79
static int do_setup_tx(int domain, int type, int protocol)
tools/testing/selftests/net/io_uring_zerocopy_tx.c
83
fd = socket(domain, type, protocol);
tools/testing/selftests/net/io_uring_zerocopy_tx.c
94
static void do_tx(int domain, int type, int protocol)
tools/testing/selftests/net/ip_local_port_range.c
36
int domain, err;
tools/testing/selftests/net/ip_local_port_range.c
39
len = sizeof(domain);
tools/testing/selftests/net/ip_local_port_range.c
40
err = getsockopt(fd, SOL_SOCKET, SO_DOMAIN, &domain, &len);
tools/testing/selftests/net/ip_local_port_range.c
44
return domain;
tools/testing/selftests/net/msg_zerocopy.c
186
static bool do_sendmsg(int fd, struct msghdr *msg, bool do_zerocopy, int domain)
tools/testing/selftests/net/msg_zerocopy.c
199
if (domain == PF_RDS) {
tools/testing/selftests/net/msg_zerocopy.c
222
if (do_zerocopy && domain == PF_RDS) {
tools/testing/selftests/net/msg_zerocopy.c
298
static void setup_sockaddr(int domain, const char *str_addr,
tools/testing/selftests/net/msg_zerocopy.c
304
switch (domain) {
tools/testing/selftests/net/msg_zerocopy.c
326
static int do_setup_tx(int domain, int type, int protocol)
tools/testing/selftests/net/msg_zerocopy.c
330
fd = socket(domain, type, protocol);
tools/testing/selftests/net/msg_zerocopy.c
338
if (domain != PF_PACKET && domain != PF_RDS)
tools/testing/selftests/net/msg_zerocopy.c
342
if (domain == PF_RDS) {
tools/testing/selftests/net/msg_zerocopy.c
396
static bool do_recv_completion(int fd, int domain)
tools/testing/selftests/net/msg_zerocopy.c
405
if (domain == PF_RDS)
tools/testing/selftests/net/msg_zerocopy.c
466
static void do_recv_completions(int fd, int domain)
tools/testing/selftests/net/msg_zerocopy.c
468
while (do_recv_completion(fd, domain)) {}
tools/testing/selftests/net/msg_zerocopy.c
473
static void do_recv_remaining_completions(int fd, int domain)
tools/testing/selftests/net/msg_zerocopy.c
479
if (do_poll(fd, domain == PF_RDS ? POLLIN : POLLERR))
tools/testing/selftests/net/msg_zerocopy.c
480
do_recv_completions(fd, domain);
tools/testing/selftests/net/msg_zerocopy.c
488
static void do_tx(int domain, int type, int protocol)
tools/testing/selftests/net/msg_zerocopy.c
501
fd = do_setup_tx(domain, type, protocol);
tools/testing/selftests/net/msg_zerocopy.c
503
if (domain == PF_PACKET) {
tools/testing/selftests/net/msg_zerocopy.c
530
if (domain == PF_PACKET || protocol == IPPROTO_RAW) {
tools/testing/selftests/net/msg_zerocopy.c
540
if (domain == PF_RDS) {
tools/testing/selftests/net/msg_zerocopy.c
557
do_sendmsg(fd, &msg, cfg_zerocopy, domain);
tools/testing/selftests/net/msg_zerocopy.c
560
do_recv_completions(fd, domain);
tools/testing/selftests/net/msg_zerocopy.c
564
do_recv_completions(fd, domain);
tools/testing/selftests/net/msg_zerocopy.c
570
do_recv_remaining_completions(fd, domain);
tools/testing/selftests/net/msg_zerocopy.c
580
static int do_setup_rx(int domain, int type, int protocol)
tools/testing/selftests/net/msg_zerocopy.c
587
if (domain == PF_PACKET)
tools/testing/selftests/net/msg_zerocopy.c
593
fd = socket(domain, type, protocol);
tools/testing/selftests/net/msg_zerocopy.c
661
static void do_rx(int domain, int type, int protocol)
tools/testing/selftests/net/msg_zerocopy.c
667
fd = do_setup_rx(domain, type, protocol);
tools/testing/selftests/net/msg_zerocopy.c
686
static void do_test(int domain, int type, int protocol)
tools/testing/selftests/net/msg_zerocopy.c
690
if (cfg_cork && (domain == PF_PACKET || type != SOCK_DGRAM))
tools/testing/selftests/net/msg_zerocopy.c
699
do_rx(domain, type, protocol);
tools/testing/selftests/net/msg_zerocopy.c
701
do_tx(domain, type, protocol);
tools/testing/selftests/net/so_txtime.c
365
static void setup_sockaddr(int domain, const char *str_addr,
tools/testing/selftests/net/so_txtime.c
371
switch (domain) {
tools/testing/selftests/net/so_txtime.c
441
int domain = PF_UNSPEC;
tools/testing/selftests/net/so_txtime.c
447
if (domain != PF_UNSPEC)
tools/testing/selftests/net/so_txtime.c
449
domain = PF_INET;
tools/testing/selftests/net/so_txtime.c
455
if (domain != PF_UNSPEC)
tools/testing/selftests/net/so_txtime.c
457
domain = PF_INET6;
tools/testing/selftests/net/so_txtime.c
494
if (domain == PF_UNSPEC)
tools/testing/selftests/net/so_txtime.c
501
setup_sockaddr(domain, daddr, &cfg_dst_addr);
tools/testing/selftests/net/so_txtime.c
502
setup_sockaddr(domain, saddr, &cfg_src_addr);
tools/testing/selftests/net/socket.c
13
int domain;
tools/testing/selftests/net/socket.c
50
fd = socket(s->domain, s->type, s->protocol);
tools/testing/selftests/net/socket.c
65
s->domain, s->type, s->protocol,
tools/testing/selftests/net/socket.c
78
s->domain, s->type, s->protocol,
tools/testing/selftests/net/tcp_mmap.c
337
static void setup_sockaddr(int domain, const char *str_addr,
tools/testing/selftests/net/tcp_mmap.c
343
switch (domain) {
tools/testing/selftests/net/tcp_port_share.c
107
.domain = AF_INET6,
tools/testing/selftests/net/tcp_port_share.c
142
ln = socket(v->domain, SOCK_STREAM, 0);
tools/testing/selftests/net/tcp_port_share.c
146
make_inet_addr(v->domain, v->dst_ip, DST_PORT, &addr);
tools/testing/selftests/net/tcp_port_share.c
151
c1 = socket(v->domain, SOCK_STREAM, 0);
tools/testing/selftests/net/tcp_port_share.c
155
make_inet_addr(v->domain, v->src1_ip, 0, &addr);
tools/testing/selftests/net/tcp_port_share.c
158
make_inet_addr(v->domain, v->dst_ip, DST_PORT, &addr);
tools/testing/selftests/net/tcp_port_share.c
163
pb = socket(v->domain, SOCK_STREAM, 0);
tools/testing/selftests/net/tcp_port_share.c
167
make_inet_addr(v->domain, v->bind_ip, SRC_PORT, &addr);
tools/testing/selftests/net/tcp_port_share.c
171
c2 = socket(v->domain, SOCK_STREAM, 0);
tools/testing/selftests/net/tcp_port_share.c
175
make_inet_addr(v->domain, v->src2_ip, 0, &addr);
tools/testing/selftests/net/tcp_port_share.c
178
make_inet_addr(v->domain, v->dst_ip, DST_PORT, &addr);
tools/testing/selftests/net/tcp_port_share.c
204
ln = socket(v->domain, SOCK_STREAM, 0);
tools/testing/selftests/net/tcp_port_share.c
208
make_inet_addr(v->domain, v->dst_ip, DST_PORT, &addr);
tools/testing/selftests/net/tcp_port_share.c
213
c1 = socket(v->domain, SOCK_STREAM, 0);
tools/testing/selftests/net/tcp_port_share.c
217
make_inet_addr(v->domain, v->src1_ip, 0, &addr);
tools/testing/selftests/net/tcp_port_share.c
220
make_inet_addr(v->domain, v->dst_ip, DST_PORT, &addr);
tools/testing/selftests/net/tcp_port_share.c
228
make_inet_addr(v->domain, v->bind_ip, SRC_PORT, &addr);
tools/testing/selftests/net/tcp_port_share.c
232
pb = socket(v->domain, SOCK_STREAM, 0);
tools/testing/selftests/net/tcp_port_share.c
236
make_inet_addr(v->domain, v->bind_ip, SRC_PORT, &addr);
tools/testing/selftests/net/tcp_port_share.c
242
c2 = socket(v->domain, SOCK_STREAM, 0);
tools/testing/selftests/net/tcp_port_share.c
246
make_inet_addr(v->domain, v->src2_ip, 0, &addr);
tools/testing/selftests/net/tcp_port_share.c
249
make_inet_addr(v->domain, v->dst_ip, DST_PORT, &addr);
tools/testing/selftests/net/tcp_port_share.c
87
int domain;
tools/testing/selftests/net/tcp_port_share.c
99
.domain = AF_INET,
tools/testing/selftests/net/udpgso_bench_rx.c
61
static void setup_sockaddr(int domain, const char *str_addr, void *sockaddr)
tools/testing/selftests/net/udpgso_bench_rx.c
66
switch (domain) {
tools/testing/selftests/net/udpgso_bench_tx.c
116
static void setup_sockaddr(int domain, const char *str_addr, void *sockaddr)
tools/testing/selftests/net/udpgso_bench_tx.c
121
switch (domain) {