Symbol: vgic_cpu
arch/arm64/include/asm/kvm_host.h:895: struct vgic_cpu vgic_cpu;
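The entry above is the member declaration: every other reference reaches this state through vcpu->arch.vgic_cpu, i.e. the per-vCPU virtual GIC state embedded in struct kvm_vcpu_arch. As a reading aid, below is a hedged sketch of the struct layout reconstructed only from the fields the references in this listing touch; the authoritative definition is struct vgic_cpu in include/kvm/arm_vgic.h, which may have additional members and a different ordering.

/*
 * Sketch only: fields implied by the references listed below.
 * Not the authoritative definition (see include/kvm/arm_vgic.h).
 */
struct vgic_cpu {
	union {					/* shadow CPU interface for world switch */
		struct vgic_v2_cpu_if vgic_v2;
		struct vgic_v3_cpu_if vgic_v3;	/* also carries its_vpe for GICv4 */
	};
	struct vgic_irq *private_irqs;		/* per-vCPU SGI/PPI array */
	raw_spinlock_t ap_list_lock;		/* protects ap_list_head */
	struct list_head ap_list_head;		/* pending/active IRQs queued on this vCPU */
	struct vgic_io_device rd_iodev;		/* redistributor MMIO frame */
	struct vgic_redist_region *rdreg;	/* redistributor region backing rd_iodev */
	u32 rdreg_index;
	atomic_t syncr_busy;			/* GICR_SYNCR emulation */
	u64 pendbaser;				/* GICR_PENDBASER shadow */
	atomic_t ctlr;				/* GICR_CTLR shadow (LPI enable) */
	u32 num_pri_bits;
	u32 num_id_bits;
};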
arch/arm64/kvm/arm.c:618: (atomic_read(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vlpi_count) ||
arch/arm64/kvm/arm.c:703: &vcpu->arch.vgic_cpu.vgic_v3);
arch/arm64/kvm/arm.c:713: kvm_call_hyp(__vgic_v3_save_aprs, &vcpu->arch.vgic_cpu.vgic_v3);
arch/arm64/kvm/hyp/nvhe/hyp-main.c:138: hyp_vcpu->vcpu.arch.vgic_cpu.vgic_v3 = host_vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/hyp/nvhe/hyp-main.c:144: struct vgic_v3_cpu_if *hyp_cpu_if = &hyp_vcpu->vcpu.arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/hyp/nvhe/hyp-main.c:145: struct vgic_v3_cpu_if *host_cpu_if = &host_vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/hyp/nvhe/switch.c:114: __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3);
arch/arm64/kvm/hyp/nvhe/switch.c:115: __vgic_v3_deactivate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
arch/arm64/kvm/hyp/nvhe/switch.c:123: __vgic_v3_activate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
arch/arm64/kvm/hyp/nvhe/switch.c:124: __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3);
arch/arm64/kvm/hyp/vgic-v3-sr.c:1249: if (vcpu->arch.vgic_cpu.vgic_v3.vgic_hcr & ICH_HCR_EL2_TDIR) {
arch/arm64/kvm/hyp/vgic-v3-sr.c:559: unsigned int used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;
arch/arm64/kvm/hyp/vgic-v3-sr.c:598: unsigned int used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;
arch/arm64/kvm/sys_regs.c:663: p->regval = vcpu->arch.vgic_cpu.vgic_v3.vgic_sre;
arch/arm64/kvm/vgic-sys-reg-v3.c:17: struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic-sys-reg-v3.c:210: struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic-sys-reg-v3.c:220: struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic-sys-reg-v3.c:293: struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic-sys-reg-v3.c:62: struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-init.c:170: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-init.c:171: kfree(vgic_cpu->private_irqs);
arch/arm64/kvm/vgic/vgic-init.c:172: vgic_cpu->private_irqs = NULL;
arch/arm64/kvm/vgic/vgic-init.c:264: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-init.c:269: if (vgic_cpu->private_irqs)
arch/arm64/kvm/vgic/vgic-init.c:272: vgic_cpu->private_irqs = kzalloc_objs(struct vgic_irq,
arch/arm64/kvm/vgic/vgic-init.c:276: if (!vgic_cpu->private_irqs)
arch/arm64/kvm/vgic/vgic-init.c:284: struct vgic_irq *irq = &vgic_cpu->private_irqs[i];
arch/arm64/kvm/vgic/vgic-init.c:338: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-init.c:342: vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF;
arch/arm64/kvm/vgic/vgic-init.c:344: INIT_LIST_HEAD(&vgic_cpu->ap_list_head);
arch/arm64/kvm/vgic/vgic-init.c:345: raw_spin_lock_init(&vgic_cpu->ap_list_lock);
arch/arm64/kvm/vgic/vgic-init.c:346: atomic_set(&vgic_cpu->vgic_v3.its_vpe.vlpi_count, 0);
arch/arm64/kvm/vgic/vgic-init.c:460: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-init.c:468: INIT_LIST_HEAD(&vgic_cpu->ap_list_head);
arch/arm64/kvm/vgic/vgic-init.c:469: kfree(vgic_cpu->private_irqs);
arch/arm64/kvm/vgic/vgic-init.c:470: vgic_cpu->private_irqs = NULL;
arch/arm64/kvm/vgic/vgic-init.c:492: vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF;
arch/arm64/kvm/vgic/vgic-its.c:1310: if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.its_vm)
arch/arm64/kvm/vgic/vgic-its.c:1311: its_invall_vpe(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe);
arch/arm64/kvm/vgic/vgic-its.c:1798: if (!(vcpu->arch.vgic_cpu.pendbaser & GICR_PENDBASER_PTZ))
arch/arm64/kvm/vgic/vgic-its.c:331: map.vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
arch/arm64/kvm/vgic/vgic-its.c:391: gpa_t pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser);
arch/arm64/kvm/vgic/vgic-mmio-v2.c:383: return vcpu->arch.vgic_cpu.vgic_v2.vgic_apr;
arch/arm64/kvm/vgic/vgic-mmio-v2.c:385: struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic/vgic-mmio-v2.c:409: vcpu->arch.vgic_cpu.vgic_v2.vgic_apr = val;
arch/arm64/kvm/vgic/vgic-mmio-v2.c:411: struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:267: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:269: return atomic_read(&vgic_cpu->ctlr) == GICR_CTLR_ENABLE_LPIS;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:275: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:278: val = atomic_read(&vgic_cpu->ctlr);
arch/arm64/kvm/vgic/vgic-mmio-v3.c:289: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:300: ctlr = atomic_cmpxchg_acquire(&vgic_cpu->ctlr,
arch/arm64/kvm/vgic/vgic-mmio-v3.c:308: atomic_set_release(&vgic_cpu->ctlr, 0);
arch/arm64/kvm/vgic/vgic-mmio-v3.c:310: ctlr = atomic_cmpxchg_acquire(&vgic_cpu->ctlr, 0,
arch/arm64/kvm/vgic/vgic-mmio-v3.c:322: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:323: struct vgic_redist_region *iter, *rdreg = vgic_cpu->rdreg;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:328: if (vgic_cpu->rdreg_index < rdreg->free_index - 1) {
arch/arm64/kvm/vgic/vgic-mmio-v3.c:330: } else if (rdreg->count && vgic_cpu->rdreg_index == (rdreg->count - 1)) {
arch/arm64/kvm/vgic/vgic-mmio-v3.c:510: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:511: u64 value = vgic_cpu->pendbaser;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:522: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:530: old_pendbaser = READ_ONCE(vgic_cpu->pendbaser);
arch/arm64/kvm/vgic/vgic-mmio-v3.c:534: } while (cmpxchg64(&vgic_cpu->pendbaser, old_pendbaser,
arch/arm64/kvm/vgic/vgic-mmio-v3.c:541: return !!atomic_read(&vcpu->arch.vgic_cpu.syncr_busy);
arch/arm64/kvm/vgic/vgic-mmio-v3.c:547: atomic_inc(&vcpu->arch.vgic_cpu.syncr_busy);
arch/arm64/kvm/vgic/vgic-mmio-v3.c:551: atomic_dec(&vcpu->arch.vgic_cpu.syncr_busy);
arch/arm64/kvm/vgic/vgic-mmio-v3.c:782: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:783: struct vgic_io_device *rd_dev = &vcpu->arch.vgic_cpu.rd_iodev;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:791: if (!IS_VGIC_ADDR_UNDEF(vgic_cpu->rd_iodev.base_addr))
arch/arm64/kvm/vgic/vgic-mmio-v3.c:809: vgic_cpu->rdreg = rdreg;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:810: vgic_cpu->rdreg_index = rdreg->free_index;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:839: struct vgic_io_device *rd_dev = &vcpu->arch.vgic_cpu.rd_iodev;
arch/arm64/kvm/vgic/vgic-mmio-v3.c:963: if (vcpu->arch.vgic_cpu.rdreg == rdreg)
arch/arm64/kvm/vgic/vgic-mmio-v3.c:964: vcpu->arch.vgic_cpu.rdreg = NULL;
arch/arm64/kvm/vgic/vgic-v2.c:115: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-v2.c:116: struct vgic_v2_cpu_if *cpuif = &vgic_cpu->vgic_v2;
arch/arm64/kvm/vgic/vgic-v2.c:122: for (int lr = 0; lr < vgic_cpu->vgic_v2.used_lrs; lr++)
arch/arm64/kvm/vgic/vgic-v2.c:126: list_for_each_entry_continue(irq, &vgic_cpu->ap_list_head, ap_list) {
arch/arm64/kvm/vgic/vgic-v2.c:153: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-v2.c:154: struct vgic_v2_cpu_if *cpuif = &vgic_cpu->vgic_v2;
arch/arm64/kvm/vgic/vgic-v2.c:304: vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = val;
arch/arm64/kvm/vgic/vgic-v2.c:33: struct vgic_v2_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v2;
arch/arm64/kvm/vgic/vgic-v2.c:336: vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = 0;
arch/arm64/kvm/vgic/vgic-v2.c:341: struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
arch/arm64/kvm/vgic/vgic-v2.c:368: struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
arch/arm64/kvm/vgic/vgic-v2.c:401: vcpu->arch.vgic_cpu.vgic_v2.vgic_vmcr = 0;
arch/arm64/kvm/vgic/vgic-v2.c:548: struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
arch/arm64/kvm/vgic/vgic-v2.c:569: struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
arch/arm64/kvm/vgic/vgic-v2.c:571: u64 used_lrs = vcpu->arch.vgic_cpu.vgic_v2.used_lrs;
arch/arm64/kvm/vgic/vgic-v2.c:593: struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
arch/arm64/kvm/vgic/vgic-v2.c:609: struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
arch/arm64/kvm/vgic/vgic-v2.c:619: struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;
arch/arm64/kvm/vgic/vgic-v3-nested.c:326: struct vgic_v3_cpu_if *host_if = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic/vgic-v3-nested.c:360: vcpu->arch.vgic_cpu.vgic_v3.used_lrs = cpu_if->used_lrs;
arch/arm64/kvm/vgic/vgic-v3-nested.c:381: vcpu->arch.vgic_cpu.vgic_v3.used_lrs = 0;
arch/arm64/kvm/vgic/vgic-v3.c:148: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-v3.c:149: struct vgic_v3_cpu_if *cpuif = &vgic_cpu->vgic_v3;
arch/arm64/kvm/vgic/vgic-v3.c:166: list_for_each_entry_continue(irq, &vgic_cpu->ap_list_head, ap_list) {
arch/arm64/kvm/vgic/vgic-v3.c:197: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-v3.c:198: struct vgic_v3_cpu_if *cpuif = &vgic_cpu->vgic_v3;
arch/arm64/kvm/vgic/vgic-v3.c:27: struct vgic_v3_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic/vgic-v3.c:371: vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = val;
arch/arm64/kvm/vgic/vgic-v3.c:401: vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = 0;
arch/arm64/kvm/vgic/vgic-v3.c:406: struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic/vgic-v3.c:434: struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic/vgic-v3.c:468: struct vgic_v3_cpu_if *vgic_v3 = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic/vgic-v3.c:487: vcpu->arch.vgic_cpu.pendbaser = INITIAL_PENDBASER_VALUE;
arch/arm64/kvm/vgic/vgic-v3.c:492: vcpu->arch.vgic_cpu.num_id_bits = FIELD_GET(ICH_VTR_EL2_IDbits,
arch/arm64/kvm/vgic/vgic-v3.c:494: vcpu->arch.vgic_cpu.num_pri_bits = FIELD_GET(ICH_VTR_EL2_PRIbits,
arch/arm64/kvm/vgic/vgic-v3.c:500: struct vgic_v3_cpu_if *vgic_v3 = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic/vgic-v3.c:527: pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser);
arch/arm64/kvm/vgic/vgic-v3.c:618: pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser);
arch/arm64/kvm/vgic/vgic-v3.c:753: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic-v3.c:755: if (IS_VGIC_ADDR_UNDEF(vgic_cpu->rd_iodev.base_addr)) {
arch/arm64/kvm/vgic/vgic-v3.c:978: struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic/vgic-v3.c:997: struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;
arch/arm64/kvm/vgic/vgic-v4.c:100: raw_spin_unlock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);
arch/arm64/kvm/vgic/vgic-v4.c:117: struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
arch/arm64/kvm/vgic/vgic-v4.c:218: struct its_vpe *vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
arch/arm64/kvm/vgic/vgic-v4.c:267: dist->its_vm.vpes[i] = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
arch/arm64/kvm/vgic/vgic-v4.c:360: struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
arch/arm64/kvm/vgic/vgic-v4.c:370: struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
arch/arm64/kvm/vgic/vgic-v4.c:406: struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;
arch/arm64/kvm/vgic/vgic-v4.c:477: .vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe,
arch/arm64/kvm/vgic/vgic-v4.c:548: atomic_dec(&irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vlpi_count);
arch/arm64/kvm/vgic/vgic-v4.c:98: raw_spin_lock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);
arch/arm64/kvm/vgic/vgic-v4.c:99: vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last = true;
arch/arm64/kvm/vgic/vgic.c:1008: __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3);
arch/arm64/kvm/vgic/vgic.c:1046: __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3);
arch/arm64/kvm/vgic/vgic.c:1085: scoped_guard(raw_spinlock, &vcpu->arch.vgic_cpu.ap_list_lock)
arch/arm64/kvm/vgic/vgic.c:1099: __vgic_v3_activate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
arch/arm64/kvm/vgic/vgic.c:111: return &vcpu->arch.vgic_cpu.private_irqs[intid];
arch/arm64/kvm/vgic/vgic.c:1113: __vgic_v3_deactivate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
arch/arm64/kvm/vgic/vgic.c:1125: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic.c:1134: if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last)
arch/arm64/kvm/vgic/vgic.c:1139: raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
arch/arm64/kvm/vgic/vgic.c:1141: list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
arch/arm64/kvm/vgic/vgic.c:1152: raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
arch/arm64/kvm/vgic/vgic.c:181: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic.c:186: raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
arch/arm64/kvm/vgic/vgic.c:188: list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
arch/arm64/kvm/vgic/vgic.c:198: raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
arch/arm64/kvm/vgic/vgic.c:348: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic.c:351: lockdep_assert_held(&vgic_cpu->ap_list_lock);
arch/arm64/kvm/vgic/vgic.c:354: list_sort(&info, &vgic_cpu->ap_list_head, vgic_irq_cmp);
arch/arm64/kvm/vgic/vgic.c:445: raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
arch/arm64/kvm/vgic/vgic.c:462: raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock,
arch/arm64/kvm/vgic/vgic.c:475: list_add_tail(&irq->ap_list, &vcpu->arch.vgic_cpu.ap_list_head);
arch/arm64/kvm/vgic/vgic.c:484: raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock, flags);
arch/arm64/kvm/vgic/vgic.c:712: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic.c:719: raw_spin_lock(&vgic_cpu->ap_list_lock);
arch/arm64/kvm/vgic/vgic.c:721: list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
arch/arm64/kvm/vgic/vgic.c:760: raw_spin_unlock(&vgic_cpu->ap_list_lock);
arch/arm64/kvm/vgic/vgic.c:774: raw_spin_lock(&vcpuA->arch.vgic_cpu.ap_list_lock);
arch/arm64/kvm/vgic/vgic.c:775: raw_spin_lock_nested(&vcpuB->arch.vgic_cpu.ap_list_lock,
arch/arm64/kvm/vgic/vgic.c:789: struct vgic_cpu *new_cpu = &target_vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic.c:798: raw_spin_unlock(&vcpuB->arch.vgic_cpu.ap_list_lock);
arch/arm64/kvm/vgic/vgic.c:799: raw_spin_unlock(&vcpuA->arch.vgic_cpu.ap_list_lock);
arch/arm64/kvm/vgic/vgic.c:809: raw_spin_unlock(&vgic_cpu->ap_list_lock);
arch/arm64/kvm/vgic/vgic.c:849: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic.c:852: lockdep_assert_held(&vgic_cpu->ap_list_lock);
arch/arm64/kvm/vgic/vgic.c:856: list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
arch/arm64/kvm/vgic/vgic.c:954: struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
arch/arm64/kvm/vgic/vgic.c:959: lockdep_assert_held(&vgic_cpu->ap_list_lock);
arch/arm64/kvm/vgic/vgic.c:968: list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
arch/arm64/kvm/vgic/vgic.c:985: vcpu->arch.vgic_cpu.vgic_v2.used_lrs = count;
arch/arm64/kvm/vgic/vgic.c:988: vcpu->arch.vgic_cpu.vgic_v3.used_lrs = count;
arch/arm64/kvm/vgic/vgic.h:369: struct vgic_cpu *cpu_if = &vcpu->arch.vgic_cpu;
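The densest cluster of references is the ap_list handling in arch/arm64/kvm/vgic/vgic.c. As a reading aid, the sketch below condenses that recurring pattern into one hypothetical helper; the function name is illustrative, not a function in the tree, and only the field accesses mirror the references above (e.g. vgic.c:1139-1152).

/* Illustrative sketch of the ap_list walk pattern; not a verbatim copy. */
static void walk_ap_list(struct kvm_vcpu *vcpu)	/* hypothetical helper */
{
	struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
	struct vgic_irq *irq;
	unsigned long flags;

	raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);
	list_for_each_entry(irq, &vgic_cpu->ap_list_head, ap_list) {
		/* inspect or update the queued interrupt */
	}
	raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
}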