/*
 * Copyright (C) 2015 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#ifndef __ARM64_KVM_HYP_H__
#define __ARM64_KVM_HYP_H__

#include <linux/compiler.h>
#include <linux/kvm_host.h>
#include <asm/kvm_mmu.h>
#include <asm/sysreg.h>

/* Place a function in the .hyp.text section and keep ftrace out of it. */
#define __hyp_text __section(.hyp.text) notrace

/*
 * Convert a kernel virtual address to a HYP virtual address by masking
 * with HYP_PAGE_OFFSET_MASK.  When the CPU has VHE
 * (ARM64_HAS_VIRT_HOST_EXTN) the kernel already runs at EL2, so the
 * AND is patched to a NOP at boot and the address is returned
 * unchanged.
 */
static inline unsigned long __kern_hyp_va(unsigned long v)
{
	asm volatile(ALTERNATIVE("and %0, %0, %1",
				 "nop",
				 ARM64_HAS_VIRT_HOST_EXTN)
		     : "+r" (v) : "i" (HYP_PAGE_OFFSET_MASK));
	return v;
}

/* Type-preserving wrapper around __kern_hyp_va(). */
#define kern_hyp_va(v) (typeof(v))(__kern_hyp_va((unsigned long)(v)))

/*
 * Convert a HYP virtual address back to a kernel virtual address by
 * adding the PAGE_OFFSET - HYP_PAGE_OFFSET delta; the inverse of
 * __kern_hyp_va().  Patched to a NOP on VHE systems, as above.
 */
static inline unsigned long __hyp_kern_va(unsigned long v)
{
	u64 offset = PAGE_OFFSET - HYP_PAGE_OFFSET;
	asm volatile(ALTERNATIVE("add %0, %0, %1",
				 "nop",
				 ARM64_HAS_VIRT_HOST_EXTN)
		     : "+r" (v) : "r" (offset));
	return v;
}

/* Type-preserving wrapper around __hyp_kern_va(). */
#define hyp_kern_va(v) (typeof(v))(__hyp_kern_va((unsigned long)(v)))

/*
 * Read a system register whose encoding differs between non-VHE
 * (@nvh suffix, plain mrs) and VHE (@vh suffix, mrs_s with an explicit
 * encoding); the correct instruction is selected by boot-time
 * alternatives patching on ARM64_HAS_VIRT_HOST_EXTN.
 */
#define read_sysreg_elx(r,nvh,vh)					\
	({								\
		u64 reg;						\
		asm volatile(ALTERNATIVE("mrs %0, " __stringify(r##nvh),\
					 "mrs_s %0, " __stringify(r##vh),\
					 ARM64_HAS_VIRT_HOST_EXTN)	\
			     : "=r" (reg));				\
		reg;							\
	})

/*
 * Write counterpart of read_sysreg_elx(); "rZ" lets the compiler use
 * the zero register when @v is the constant 0.
 */
#define write_sysreg_elx(v,r,nvh,vh)					\
	do {								\
		u64 __val = (u64)(v);					\
		asm volatile(ALTERNATIVE("msr " __stringify(r##nvh) ", %x0",\
					 "msr_s " __stringify(r##vh) ", %x0",\
					 ARM64_HAS_VIRT_HOST_EXTN)	\
			     : : "rZ" (__val));				\
	} while (0)

/*
 * Unified accessors for registers that have a different encoding
 * between VHE and non-VHE. They must be specified without their "ELx"
 * encoding.
 */
#define read_sysreg_el2(r)						\
	({								\
		u64 reg;						\
		asm volatile(ALTERNATIVE("mrs %0, " __stringify(r##_EL2),\
					 "mrs %0, " __stringify(r##_EL1),\
					 ARM64_HAS_VIRT_HOST_EXTN)	\
			     : "=r" (reg));				\
		reg;							\
	})

#define write_sysreg_el2(v,r)						\
	do {								\
		u64 __val = (u64)(v);					\
		asm volatile(ALTERNATIVE("msr " __stringify(r##_EL2) ", %x0",\
					 "msr " __stringify(r##_EL1) ", %x0",\
					 ARM64_HAS_VIRT_HOST_EXTN)	\
			     : : "rZ" (__val));				\
	} while (0)

/* EL0/EL1 accessors: use the *_EL02/*_EL12 aliases when running VHE. */
#define read_sysreg_el0(r)	read_sysreg_elx(r, _EL0, _EL02)
#define write_sysreg_el0(v,r)	write_sysreg_elx(v, r, _EL0, _EL02)
#define read_sysreg_el1(r)	read_sysreg_elx(r, _EL1, _EL12)
#define write_sysreg_el1(v,r)	write_sysreg_elx(v, r, _EL1, _EL12)

/* The VHE specific system registers and their encoding */
/* All use op1 == 5, the *_EL12/*_EL02 alias space — see ARMv8.1 VHE. */
#define sctlr_EL12              sys_reg(3, 5, 1, 0, 0)
#define cpacr_EL12              sys_reg(3, 5, 1, 0, 2)
#define ttbr0_EL12              sys_reg(3, 5, 2, 0, 0)
#define ttbr1_EL12              sys_reg(3, 5, 2, 0, 1)
#define tcr_EL12                sys_reg(3, 5, 2, 0, 2)
#define afsr0_EL12              sys_reg(3, 5, 5, 1, 0)
#define afsr1_EL12              sys_reg(3, 5, 5, 1, 1)
#define esr_EL12                sys_reg(3, 5, 5, 2, 0)
#define far_EL12                sys_reg(3, 5, 6, 0, 0)
#define mair_EL12               sys_reg(3, 5, 10, 2, 0)
#define amair_EL12              sys_reg(3, 5, 10, 3, 0)
#define vbar_EL12               sys_reg(3, 5, 12, 0, 0)
#define contextidr_EL12         sys_reg(3, 5, 13, 0, 1)
#define cntkctl_EL12            sys_reg(3, 5, 14, 1, 0)
#define cntp_tval_EL02          sys_reg(3, 5, 14, 2, 0)
#define cntp_ctl_EL02           sys_reg(3, 5, 14, 2, 1)
#define cntp_cval_EL02          sys_reg(3, 5, 14, 2, 2)
#define cntv_tval_EL02          sys_reg(3, 5, 14, 3, 0)
#define cntv_ctl_EL02           sys_reg(3, 5, 14, 3, 1)
#define cntv_cval_EL02          sys_reg(3, 5, 14, 3, 2)
#define spsr_EL12               sys_reg(3, 5, 4, 0, 0)
#define elr_EL12                sys_reg(3, 5, 4, 0, 1)
/**
 * hyp_alternate_select - Generates patchable code sequences that are
 * used to switch between two implementations of a function, depending
 * on the availability of a feature.
 *
 * @fname: a symbol name that will be defined as a function returning a
 * function pointer whose type will match @orig and @alt
 * @orig: A pointer to the default function, as returned by @fname when
 * @cond doesn't hold
 * @alt: A pointer to the alternate function, as returned by @fname
 * when @cond holds
 * @cond: a CPU feature (as described in asm/cpufeature.h)
 */
#define hyp_alternate_select(fname, orig, alt, cond)			\
typeof(orig) * __hyp_text fname(void)					\
{									\
	typeof(alt) *val = orig;					\
	asm volatile(ALTERNATIVE("nop		\n",			\
				 "mov	%0, %1	\n",			\
				 cond)					\
		     : "+r" (val) : "r" (alt));				\
	return val;							\
}

/* vGIC (v2/v3) state save/restore, called from the world switch. */
void __vgic_v2_save_state(struct kvm_vcpu *vcpu);
void __vgic_v2_restore_state(struct kvm_vcpu *vcpu);

void __vgic_v3_save_state(struct kvm_vcpu *vcpu);
void __vgic_v3_restore_state(struct kvm_vcpu *vcpu);

/* Architected timer state save/restore. */
void __timer_save_state(struct kvm_vcpu *vcpu);
void __timer_restore_state(struct kvm_vcpu *vcpu);

/* System register context save/restore for host and guest. */
void __sysreg_save_host_state(struct kvm_cpu_context *ctxt);
void __sysreg_restore_host_state(struct kvm_cpu_context *ctxt);
void __sysreg_save_guest_state(struct kvm_cpu_context *ctxt);
void __sysreg_restore_guest_state(struct kvm_cpu_context *ctxt);
/* AArch32 guest system register state save/restore. */
void __sysreg32_save_state(struct kvm_vcpu *vcpu);
void __sysreg32_restore_state(struct kvm_vcpu *vcpu);

/* Debug register state save/restore. */
void __debug_save_state(struct kvm_vcpu *vcpu,
			struct kvm_guest_debug_arch *dbg,
			struct kvm_cpu_context *ctxt);
void __debug_restore_state(struct kvm_vcpu *vcpu,
			   struct kvm_guest_debug_arch *dbg,
			   struct kvm_cpu_context *ctxt);
void __debug_cond_save_host_state(struct kvm_vcpu *vcpu);
void __debug_cond_restore_host_state(struct kvm_vcpu *vcpu);

/* FP/SIMD register state save/restore. */
void __fpsimd_save_state(struct user_fpsimd_state *fp_regs);
void __fpsimd_restore_state(struct user_fpsimd_state *fp_regs);
bool __fpsimd_enabled(void);

/* Enter the guest; returns the exit code (see the world-switch code). */
u64 __guest_enter(struct kvm_vcpu *vcpu, struct kvm_cpu_context *host_ctxt);
void __noreturn __hyp_do_panic(unsigned long, ...);

#endif /* __ARM64_KVM_HYP_H__ */