/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright 2010-2011 Freescale Semiconductor, Inc.
 */

#ifndef ASM_KVM_BOOKE_HV_ASM_H
#define ASM_KVM_BOOKE_HV_ASM_H

#include <asm/feature-fixups.h>

#ifdef __ASSEMBLY__

/*
 * All exceptions from guest state must go through KVM
 * (except for those which are delivered directly to the guest) --
 * there are no exceptions for which we fall through directly to
 * the normal host handler.
 *
 * 32-bit host
 * Expected inputs (normal exceptions):
 *   SCRATCH0 = saved r10
 *   r10 = thread struct
 *   r11 = appropriate SRR1 variant (currently used as scratch)
 *   r13 = saved CR
 *   *(r10 + THREAD_NORMSAVE(0)) = saved r11
 *   *(r10 + THREAD_NORMSAVE(2)) = saved r13
 *
 * Expected inputs (crit/mcheck/debug exceptions):
 *   appropriate SCRATCH = saved r8
 *   r8 = exception level stack frame
 *   r9 = *(r8 + _CCR) = saved CR
 *   r11 = appropriate SRR1 variant (currently used as scratch)
 *   *(r8 + GPR9) = saved r9
 *   *(r8 + GPR10) = saved r10 (r10 not yet clobbered)
 *   *(r8 + GPR11) = saved r11
 *
 * 64-bit host
 * Expected inputs (GEN/GDBELL/DBG/CRIT/MC exception types):
 *   r10 = saved CR
 *   r13 = PACA_POINTER
 *   *(r13 + PACA_EX##type + EX_R10) = saved r10
 *   *(r13 + PACA_EX##type + EX_R11) = saved r11
 *   SPRN_SPRG_##type##_SCRATCH = saved r13
 *
 * Expected inputs (TLB exception type):
 *   r10 = saved CR
 *   r12 = extlb pointer
 *   r13 = PACA_POINTER
 *   *(r12 + EX_TLB_R10) = saved r10
 *   *(r12 + EX_TLB_R11) = saved r11
 *   *(r12 + EX_TLB_R13) = saved r13
 *   SPRN_SPRG_GEN_SCRATCH = saved r12
 *
 * Only the bolted version of TLB miss exception handlers is supported now.
 */
.macro DO_KVM intno srr1
#ifdef CONFIG_KVM_BOOKE_HV
BEGIN_FTR_SECTION
	mtocrf	0x80, r11	/* check MSR[GS] without clobbering reg */
	bf	3, 1975f
	b	kvmppc_handler_\intno\()_\srr1
1975:
END_FTR_SECTION_IFSET(CPU_FTR_EMB_HV)
#endif
.endm

#endif /* __ASSEMBLY__ */
#endif /* ASM_KVM_BOOKE_HV_ASM_H */
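
/*
 * Illustrative sketch (not part of this header): how a 32-bit normal-exception
 * prologue is expected to establish the inputs documented above before it
 * invokes DO_KVM.  This is a simplified paraphrase modeled on the booke
 * prologue macros in arch/powerpc/kernel/head_booke.h and may differ in
 * detail; the interrupt number (BOOKE_INTERRUPT_EXTERNAL) is chosen only as
 * an example.
 *
 *	mtspr	SPRN_SPRG_WSCRATCH0, r10	  SCRATCH0 = saved r10
 *	mfspr	r10, SPRN_SPRG_THREAD		  r10 = thread struct
 *	stw	r11, THREAD_NORMSAVE(0)(r10)	  save r11
 *	stw	r13, THREAD_NORMSAVE(2)(r10)	  save r13
 *	mfcr	r13				  r13 = saved CR
 *	mfspr	r11, SPRN_SRR1			  r11 = SRR1 variant (scratch)
 *	DO_KVM	BOOKE_INTERRUPT_EXTERNAL SPRN_SRR1
 *
 * If MSR[GS] is set in r11 (the exception was taken from guest context),
 * DO_KVM branches to the corresponding kvmppc_handler_* entry point;
 * otherwise execution falls through to the normal host handler with the
 * saved state above intact.
 */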