Symbol: kvm_nvhe_sym
arch/arm64/include/asm/hyp_image.h
61
#define KVM_NVHE_ALIAS(sym) kvm_nvhe_sym(sym) = sym;
arch/arm64/include/asm/hyp_image.h
64
#define KVM_NVHE_ALIAS_HYP(first, sec) kvm_nvhe_sym(first) = kvm_nvhe_sym(sec);
arch/arm64/include/asm/kvm_asm.h
114
#define DECLARE_KVM_NVHE_SYM(sym) extern char kvm_nvhe_sym(sym)[]
arch/arm64/include/asm/kvm_asm.h
127
DECLARE_PER_CPU(type, kvm_nvhe_sym(sym))
arch/arm64/include/asm/kvm_asm.h
141
base = kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[cpu]; \
arch/arm64/include/asm/kvm_asm.h
195
#define CHOOSE_NVHE_SYM(sym) kvm_nvhe_sym(sym)
arch/arm64/include/asm/kvm_asm.h
236
#define kvm_ksym_ref_nvhe(sym) kvm_ksym_ref(kvm_nvhe_sym(sym))
arch/arm64/include/asm/kvm_asm.h
247
extern unsigned long kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[];
arch/arm64/include/asm/kvm_host.h
705
extern struct fgt_masks kvm_nvhe_sym(hfgrtr_masks);
arch/arm64/include/asm/kvm_host.h
706
extern struct fgt_masks kvm_nvhe_sym(hfgwtr_masks);
arch/arm64/include/asm/kvm_host.h
707
extern struct fgt_masks kvm_nvhe_sym(hfgitr_masks);
arch/arm64/include/asm/kvm_host.h
708
extern struct fgt_masks kvm_nvhe_sym(hdfgrtr_masks);
arch/arm64/include/asm/kvm_host.h
709
extern struct fgt_masks kvm_nvhe_sym(hdfgwtr_masks);
arch/arm64/include/asm/kvm_host.h
710
extern struct fgt_masks kvm_nvhe_sym(hafgrtr_masks);
arch/arm64/include/asm/kvm_host.h
711
extern struct fgt_masks kvm_nvhe_sym(hfgrtr2_masks);
arch/arm64/include/asm/kvm_host.h
712
extern struct fgt_masks kvm_nvhe_sym(hfgwtr2_masks);
arch/arm64/include/asm/kvm_host.h
713
extern struct fgt_masks kvm_nvhe_sym(hfgitr2_masks);
arch/arm64/include/asm/kvm_host.h
714
extern struct fgt_masks kvm_nvhe_sym(hdfgrtr2_masks);
arch/arm64/include/asm/kvm_host.h
715
extern struct fgt_masks kvm_nvhe_sym(hdfgwtr2_masks);
arch/arm64/include/asm/kvm_host.h
716
extern struct fgt_masks kvm_nvhe_sym(ich_hfgrtr_masks);
arch/arm64/include/asm/kvm_host.h
717
extern struct fgt_masks kvm_nvhe_sym(ich_hfgwtr_masks);
arch/arm64/include/asm/kvm_host.h
718
extern struct fgt_masks kvm_nvhe_sym(ich_hfgitr_masks);
arch/arm64/include/asm/kvm_host.h
845
extern struct kvm_host_psci_config kvm_nvhe_sym(kvm_host_psci_config);
arch/arm64/include/asm/kvm_host.h
848
extern s64 kvm_nvhe_sym(hyp_physvirt_offset);
arch/arm64/include/asm/kvm_host.h
851
extern u64 kvm_nvhe_sym(hyp_cpu_logical_map)[NR_CPUS];
arch/arm64/include/asm/kvm_hyp.h
145
extern u64 kvm_nvhe_sym(id_aa64pfr0_el1_sys_val);
arch/arm64/include/asm/kvm_hyp.h
146
extern u64 kvm_nvhe_sym(id_aa64pfr1_el1_sys_val);
arch/arm64/include/asm/kvm_hyp.h
147
extern u64 kvm_nvhe_sym(id_aa64pfr2_el1_sys_val);
arch/arm64/include/asm/kvm_hyp.h
148
extern u64 kvm_nvhe_sym(id_aa64isar0_el1_sys_val);
arch/arm64/include/asm/kvm_hyp.h
149
extern u64 kvm_nvhe_sym(id_aa64isar1_el1_sys_val);
arch/arm64/include/asm/kvm_hyp.h
150
extern u64 kvm_nvhe_sym(id_aa64isar2_el1_sys_val);
arch/arm64/include/asm/kvm_hyp.h
151
extern u64 kvm_nvhe_sym(id_aa64mmfr0_el1_sys_val);
arch/arm64/include/asm/kvm_hyp.h
152
extern u64 kvm_nvhe_sym(id_aa64mmfr1_el1_sys_val);
arch/arm64/include/asm/kvm_hyp.h
153
extern u64 kvm_nvhe_sym(id_aa64mmfr2_el1_sys_val);
arch/arm64/include/asm/kvm_hyp.h
154
extern u64 kvm_nvhe_sym(id_aa64smfr0_el1_sys_val);
arch/arm64/include/asm/kvm_hyp.h
156
extern unsigned long kvm_nvhe_sym(__icache_flags);
arch/arm64/include/asm/kvm_hyp.h
157
extern unsigned int kvm_nvhe_sym(kvm_arm_vmid_bits);
arch/arm64/include/asm/kvm_hyp.h
158
extern unsigned int kvm_nvhe_sym(kvm_host_sve_max_vl);
arch/arm64/include/asm/kvm_hyp.h
159
extern unsigned long kvm_nvhe_sym(hyp_nr_cpus);
arch/arm64/include/asm/kvm_pkvm.h
125
for (i = 0; i < kvm_nvhe_sym(hyp_memblock_nr); i++) {
arch/arm64/include/asm/kvm_pkvm.h
126
struct memblock_region *reg = &kvm_nvhe_sym(hyp_memory)[i];
arch/arm64/include/asm/kvm_pkvm.h
72
extern struct memblock_region kvm_nvhe_sym(hyp_memory)[];
arch/arm64/include/asm/kvm_pkvm.h
73
extern unsigned int kvm_nvhe_sym(hyp_memblock_nr);
arch/arm64/include/asm/kvm_pkvm.h
93
for (i = 0; i < kvm_nvhe_sym(hyp_memblock_nr); i++) {
arch/arm64/include/asm/kvm_pkvm.h
94
res += hyp_vmemmap_memblock_size(&kvm_nvhe_sym(hyp_memory)[i],
arch/arm64/kvm/arm.c
2494
if (!kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[cpu])
arch/arm64/kvm/arm.c
2504
free_pages(kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[cpu], nvhe_percpu_order());
arch/arm64/kvm/arm.c
2511
void *per_cpu_base = kvm_ksym_ref(kvm_nvhe_sym(kvm_arm_hyp_percpu_base));
arch/arm64/kvm/arm.c
2557
kvm_nvhe_sym(id_aa64pfr0_el1_sys_val) = get_hyp_id_aa64pfr0_el1();
arch/arm64/kvm/arm.c
2558
kvm_nvhe_sym(id_aa64pfr1_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64PFR1_EL1);
arch/arm64/kvm/arm.c
2559
kvm_nvhe_sym(id_aa64pfr2_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64PFR2_EL1);
arch/arm64/kvm/arm.c
2560
kvm_nvhe_sym(id_aa64isar0_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64ISAR0_EL1);
arch/arm64/kvm/arm.c
2561
kvm_nvhe_sym(id_aa64isar1_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64ISAR1_EL1);
arch/arm64/kvm/arm.c
2562
kvm_nvhe_sym(id_aa64isar2_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64ISAR2_EL1);
arch/arm64/kvm/arm.c
2563
kvm_nvhe_sym(id_aa64mmfr0_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);
arch/arm64/kvm/arm.c
2564
kvm_nvhe_sym(id_aa64mmfr1_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64MMFR1_EL1);
arch/arm64/kvm/arm.c
2565
kvm_nvhe_sym(id_aa64mmfr2_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64MMFR2_EL1);
arch/arm64/kvm/arm.c
2566
kvm_nvhe_sym(id_aa64smfr0_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64SMFR0_EL1);
arch/arm64/kvm/arm.c
2567
kvm_nvhe_sym(__icache_flags) = __icache_flags;
arch/arm64/kvm/arm.c
2568
kvm_nvhe_sym(kvm_arm_vmid_bits) = kvm_arm_vmid_bits;
arch/arm64/kvm/arm.c
2571
kvm_nvhe_sym(hfgrtr_masks) = hfgrtr_masks;
arch/arm64/kvm/arm.c
2572
kvm_nvhe_sym(hfgwtr_masks) = hfgwtr_masks;
arch/arm64/kvm/arm.c
2573
kvm_nvhe_sym(hfgitr_masks) = hfgitr_masks;
arch/arm64/kvm/arm.c
2574
kvm_nvhe_sym(hdfgrtr_masks) = hdfgrtr_masks;
arch/arm64/kvm/arm.c
2575
kvm_nvhe_sym(hdfgwtr_masks) = hdfgwtr_masks;
arch/arm64/kvm/arm.c
2576
kvm_nvhe_sym(hafgrtr_masks) = hafgrtr_masks;
arch/arm64/kvm/arm.c
2577
kvm_nvhe_sym(hfgrtr2_masks) = hfgrtr2_masks;
arch/arm64/kvm/arm.c
2578
kvm_nvhe_sym(hfgwtr2_masks) = hfgwtr2_masks;
arch/arm64/kvm/arm.c
2579
kvm_nvhe_sym(hfgitr2_masks) = hfgitr2_masks;
arch/arm64/kvm/arm.c
2580
kvm_nvhe_sym(hdfgrtr2_masks) = hdfgrtr2_masks;
arch/arm64/kvm/arm.c
2581
kvm_nvhe_sym(hdfgwtr2_masks) = hdfgwtr2_masks;
arch/arm64/kvm/arm.c
2582
kvm_nvhe_sym(ich_hfgrtr_masks) = ich_hfgrtr_masks;
arch/arm64/kvm/arm.c
2583
kvm_nvhe_sym(ich_hfgwtr_masks) = ich_hfgwtr_masks;
arch/arm64/kvm/arm.c
2584
kvm_nvhe_sym(ich_hfgitr_masks) = ich_hfgitr_masks;
arch/arm64/kvm/arm.c
2727
kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[cpu] = (unsigned long)page_addr;
arch/arm64/kvm/arm.c
2730
kvm_nvhe_sym(hyp_nr_cpus) = num_possible_cpus();
arch/arm64/kvm/arm.c
2805
char *percpu_begin = (char *)kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[cpu];
arch/arm64/kvm/pkvm.c
21
static struct memblock_region *hyp_memory = kvm_nvhe_sym(hyp_memory);
arch/arm64/kvm/pkvm.c
22
static unsigned int *hyp_memblock_nr_ptr = &kvm_nvhe_sym(hyp_memblock_nr);
arch/arm64/kvm/reset.c
56
kvm_nvhe_sym(kvm_host_sve_max_vl) = kvm_host_sve_max_vl;
arch/arm64/kvm/stacktrace.c
223
i < ARRAY_SIZE(kvm_nvhe_sym(pkvm_stacktrace)) && stacktrace[i];