Searched refs:kvm_nvhe_sym (Results 1 – 8 of 8) sorted by relevance

/openbmc/linux/arch/arm64/include/asm/
kvm_hyp.h
136 extern u64 kvm_nvhe_sym(id_aa64pfr0_el1_sys_val);
137 extern u64 kvm_nvhe_sym(id_aa64pfr1_el1_sys_val);
138 extern u64 kvm_nvhe_sym(id_aa64isar0_el1_sys_val);
139 extern u64 kvm_nvhe_sym(id_aa64isar1_el1_sys_val);
140 extern u64 kvm_nvhe_sym(id_aa64isar2_el1_sys_val);
141 extern u64 kvm_nvhe_sym(id_aa64mmfr0_el1_sys_val);
142 extern u64 kvm_nvhe_sym(id_aa64mmfr1_el1_sys_val);
143 extern u64 kvm_nvhe_sym(id_aa64mmfr2_el1_sys_val);
144 extern u64 kvm_nvhe_sym(id_aa64smfr0_el1_sys_val);
146 extern unsigned long kvm_nvhe_sym(__icache_flags);
[all …]
hyp_image.h
18 #define kvm_nvhe_sym(sym) __kvm_nvhe_##sym macro
20 #define kvm_nvhe_sym(sym) sym macro
61 #define KVM_NVHE_ALIAS(sym) kvm_nvhe_sym(sym) = sym;
64 #define KVM_NVHE_ALIAS_HYP(first, sec) kvm_nvhe_sym(first) = kvm_nvhe_sym(sec);
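The two definitions at hyp_image.h:18 and :20 are the kernel-side and hyp-side arms of the same macro: outside the nVHE object every hyp symbol is referenced through a __kvm_nvhe_ prefix, while inside the hyp object the bare name is used. A minimal sketch of how this expands, assuming the usual __KVM_NVHE_HYPERVISOR__ guard around the two arms (the guard itself is not part of the search hits; foo below is a placeholder symbol name):

    #ifndef __KVM_NVHE_HYPERVISOR__
    #define kvm_nvhe_sym(sym)	__kvm_nvhe_##sym	/* kernel-proper view */
    #else
    #define kvm_nvhe_sym(sym)	sym			/* inside the nVHE hyp object */
    #endif

    /*
     * Kernel-side reference:
     *     extern u64 kvm_nvhe_sym(id_aa64pfr0_el1_sys_val);
     * expands to:
     *     extern u64 __kvm_nvhe_id_aa64pfr0_el1_sys_val;
     *
     * In the linker script, KVM_NVHE_ALIAS(foo) (hyp_image.h:61) becomes:
     *     __kvm_nvhe_foo = foo;
     * so hyp code that references foo resolves to the kernel-proper symbol.
     */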
kvm_pkvm.h
23 extern struct memblock_region kvm_nvhe_sym(hyp_memory)[];
24 extern unsigned int kvm_nvhe_sym(hyp_memblock_nr);
44 for (i = 0; i < kvm_nvhe_sym(hyp_memblock_nr); i++) { in hyp_vmemmap_pages()
45 res += hyp_vmemmap_memblock_size(&kvm_nvhe_sym(hyp_memory)[i], in hyp_vmemmap_pages()
75 for (i = 0; i < kvm_nvhe_sym(hyp_memblock_nr); i++) { in __hyp_pgtable_total_pages()
76 struct memblock_region *reg = &kvm_nvhe_sym(hyp_memory)[i]; in __hyp_pgtable_total_pages()
kvm_asm.h
87 #define DECLARE_KVM_NVHE_SYM(sym) extern char kvm_nvhe_sym(sym)[]
100 DECLARE_PER_CPU(type, kvm_nvhe_sym(sym))
114 base = kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[cpu]; \
168 #define CHOOSE_NVHE_SYM(sym) kvm_nvhe_sym(sym)
208 #define kvm_ksym_ref_nvhe(sym) kvm_ksym_ref(kvm_nvhe_sym(sym))
219 extern unsigned long kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[];
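kvm_asm.h layers its own helpers on top of the same mangling: DECLARE_KVM_NVHE_SYM at :87 declares the prefixed name, the per-CPU hit at :100 applies the mangling inside DECLARE_PER_CPU, and kvm_ksym_ref_nvhe at :208 turns the mangled name into something the kernel can dereference. A short sketch of the kernel-side expansions, using hyp_example_sym as a purely illustrative (hypothetical) symbol name:

    /* DECLARE_KVM_NVHE_SYM(hyp_example_sym) expands on the kernel side to: */
    extern char __kvm_nvhe_hyp_example_sym[];

    /*
     * The same mangling gives the per-CPU base array at :219 its real name,
     * __kvm_nvhe_kvm_arm_hyp_percpu_base[], which the lookup at :114 indexes
     * per CPU. CHOOSE_NVHE_SYM(sym) (:168) simply selects that mangled name,
     * and kvm_ksym_ref_nvhe(sym) (:208) is kvm_ksym_ref(__kvm_nvhe_##sym).
     */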
kvm_host.h
451 extern struct kvm_host_psci_config kvm_nvhe_sym(kvm_host_psci_config);
454 extern s64 kvm_nvhe_sym(hyp_physvirt_offset);
457 extern u64 kvm_nvhe_sym(hyp_cpu_logical_map)[NR_CPUS];
/openbmc/linux/arch/arm64/kvm/
arm.c
2072 free_pages(kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[cpu], nvhe_percpu_order()); in teardown_hyp_mode()
2078 void *per_cpu_base = kvm_ksym_ref(kvm_nvhe_sym(kvm_arm_hyp_percpu_base)); in do_pkvm_init()
2124 kvm_nvhe_sym(id_aa64pfr0_el1_sys_val) = get_hyp_id_aa64pfr0_el1(); in kvm_hyp_init_symbols()
2125 kvm_nvhe_sym(id_aa64pfr1_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64PFR1_EL1); in kvm_hyp_init_symbols()
2126 kvm_nvhe_sym(id_aa64isar0_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64ISAR0_EL1); in kvm_hyp_init_symbols()
2127 kvm_nvhe_sym(id_aa64isar1_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64ISAR1_EL1); in kvm_hyp_init_symbols()
2128 kvm_nvhe_sym(id_aa64isar2_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64ISAR2_EL1); in kvm_hyp_init_symbols()
2133 kvm_nvhe_sym(__icache_flags) = __icache_flags; in kvm_hyp_init_symbols()
2134 kvm_nvhe_sym(kvm_arm_vmid_bits) = kvm_arm_vmid_bits; in kvm_hyp_init_symbols()
2226 kvm_nvhe_sym(kvm_arm_hyp_percpu_base)[cpu] = (unsigned long)page_addr; in init_hyp_mode()
[all …]
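On the kernel side these mangled names behave like ordinary variables, so kvm_hyp_init_symbols() can seed the hyp's copies of the sanitised ID registers with plain assignments. A paraphrased, abridged sketch of that pattern from the arm.c hits above:

    static void kvm_hyp_init_symbols(void)
    {
    	/*
    	 * Each store goes to a __kvm_nvhe_-prefixed variable that lives in
    	 * the nVHE hyp object, e.g. __kvm_nvhe_id_aa64pfr1_el1_sys_val.
    	 */
    	kvm_nvhe_sym(id_aa64pfr1_el1_sys_val) = read_sanitised_ftr_reg(SYS_ID_AA64PFR1_EL1);
    	kvm_nvhe_sym(__icache_flags) = __icache_flags;
    	kvm_nvhe_sym(kvm_arm_vmid_bits) = kvm_arm_vmid_bits;
    	/* ... */
    }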
pkvm.c
20 static struct memblock_region *hyp_memory = kvm_nvhe_sym(hyp_memory);
21 static unsigned int *hyp_memblock_nr_ptr = &kvm_nvhe_sym(hyp_memblock_nr);
stacktrace.c
222 i < ARRAY_SIZE(kvm_nvhe_sym(pkvm_stacktrace)) && stacktrace[i]; in pkvm_dump_backtrace()