/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * MIPS SIMD Architecture (MSA) context handling code for KVM.
 *
 * Copyright (C) 2015 Imagination Technologies Ltd.
 */

#include <asm/asm.h>
#include <asm/asm-offsets.h>
#include <asm/asmmacro.h>
#include <asm/regdef.h>

	.set	noreorder
	.set	noat

/*
 * Save all 32 MSA 128-bit vector registers to the context pointed to by a0,
 * at offsets VCPU_FPR0..VCPU_FPR31.
 */
LEAF(__kvm_save_msa)
	st_d	0, VCPU_FPR0, a0
	st_d	1, VCPU_FPR1, a0
	st_d	2, VCPU_FPR2, a0
	st_d	3, VCPU_FPR3, a0
	st_d	4, VCPU_FPR4, a0
	st_d	5, VCPU_FPR5, a0
	st_d	6, VCPU_FPR6, a0
	st_d	7, VCPU_FPR7, a0
	st_d	8, VCPU_FPR8, a0
	st_d	9, VCPU_FPR9, a0
	st_d	10, VCPU_FPR10, a0
	st_d	11, VCPU_FPR11, a0
	st_d	12, VCPU_FPR12, a0
	st_d	13, VCPU_FPR13, a0
	st_d	14, VCPU_FPR14, a0
	st_d	15, VCPU_FPR15, a0
	st_d	16, VCPU_FPR16, a0
	st_d	17, VCPU_FPR17, a0
	st_d	18, VCPU_FPR18, a0
	st_d	19, VCPU_FPR19, a0
	st_d	20, VCPU_FPR20, a0
	st_d	21, VCPU_FPR21, a0
	st_d	22, VCPU_FPR22, a0
	st_d	23, VCPU_FPR23, a0
	st_d	24, VCPU_FPR24, a0
	st_d	25, VCPU_FPR25, a0
	st_d	26, VCPU_FPR26, a0
	st_d	27, VCPU_FPR27, a0
	st_d	28, VCPU_FPR28, a0
	st_d	29, VCPU_FPR29, a0
	st_d	30, VCPU_FPR30, a0
	st_d	31, VCPU_FPR31, a0
	jr	ra
	 nop
	END(__kvm_save_msa)

/*
 * Restore all 32 MSA 128-bit vector registers from the context pointed to
 * by a0.
 */
LEAF(__kvm_restore_msa)
	ld_d	0, VCPU_FPR0, a0
	ld_d	1, VCPU_FPR1, a0
	ld_d	2, VCPU_FPR2, a0
	ld_d	3, VCPU_FPR3, a0
	ld_d	4, VCPU_FPR4, a0
	ld_d	5, VCPU_FPR5, a0
	ld_d	6, VCPU_FPR6, a0
	ld_d	7, VCPU_FPR7, a0
	ld_d	8, VCPU_FPR8, a0
	ld_d	9, VCPU_FPR9, a0
	ld_d	10, VCPU_FPR10, a0
	ld_d	11, VCPU_FPR11, a0
	ld_d	12, VCPU_FPR12, a0
	ld_d	13, VCPU_FPR13, a0
	ld_d	14, VCPU_FPR14, a0
	ld_d	15, VCPU_FPR15, a0
	ld_d	16, VCPU_FPR16, a0
	ld_d	17, VCPU_FPR17, a0
	ld_d	18, VCPU_FPR18, a0
	ld_d	19, VCPU_FPR19, a0
	ld_d	20, VCPU_FPR20, a0
	ld_d	21, VCPU_FPR21, a0
	ld_d	22, VCPU_FPR22, a0
	ld_d	23, VCPU_FPR23, a0
	ld_d	24, VCPU_FPR24, a0
	ld_d	25, VCPU_FPR25, a0
	ld_d	26, VCPU_FPR26, a0
	ld_d	27, VCPU_FPR27, a0
	ld_d	28, VCPU_FPR28, a0
	ld_d	29, VCPU_FPR29, a0
	ld_d	30, VCPU_FPR30, a0
	ld_d	31, VCPU_FPR31, a0
	jr	ra
	 nop
	END(__kvm_restore_msa)

/*
 * Restore the upper 64 bits of MSA vector register \wr from \off(\base):
 * a single 64-bit load and insert_d on 64-bit kernels, otherwise two
 * endian-aware 32-bit loads inserted into word elements 2 and 3.
 */
	.macro	kvm_restore_msa_upper	wr, off, base
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	ld	$1, \off(\base)
	insert_d \wr, 1
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	lw	$1, \off(\base)
	insert_w \wr, 2
	lw	$1, (\off+4)(\base)
	insert_w \wr, 3
#else /* CONFIG_CPU_BIG_ENDIAN */
	lw	$1, (\off+4)(\base)
	insert_w \wr, 2
	lw	$1, \off(\base)
	insert_w \wr, 3
#endif
	.set	pop
	.endm

/*
 * Restore only the upper 64 bits of each MSA vector register; the lower
 * halves alias the scalar FP registers and are handled by the FPU context
 * code.
 */
LEAF(__kvm_restore_msa_upper)
	kvm_restore_msa_upper	0, VCPU_FPR0 +8, a0
	kvm_restore_msa_upper	1, VCPU_FPR1 +8, a0
	kvm_restore_msa_upper	2, VCPU_FPR2 +8, a0
	kvm_restore_msa_upper	3, VCPU_FPR3 +8, a0
	kvm_restore_msa_upper	4, VCPU_FPR4 +8, a0
	kvm_restore_msa_upper	5, VCPU_FPR5 +8, a0
	kvm_restore_msa_upper	6, VCPU_FPR6 +8, a0
	kvm_restore_msa_upper	7, VCPU_FPR7 +8, a0
	kvm_restore_msa_upper	8, VCPU_FPR8 +8, a0
	kvm_restore_msa_upper	9, VCPU_FPR9 +8, a0
	kvm_restore_msa_upper	10, VCPU_FPR10+8, a0
	kvm_restore_msa_upper	11, VCPU_FPR11+8, a0
	kvm_restore_msa_upper	12, VCPU_FPR12+8, a0
	kvm_restore_msa_upper	13, VCPU_FPR13+8, a0
	kvm_restore_msa_upper	14, VCPU_FPR14+8, a0
	kvm_restore_msa_upper	15, VCPU_FPR15+8, a0
	kvm_restore_msa_upper	16, VCPU_FPR16+8, a0
	kvm_restore_msa_upper	17, VCPU_FPR17+8, a0
	kvm_restore_msa_upper	18, VCPU_FPR18+8, a0
	kvm_restore_msa_upper	19, VCPU_FPR19+8, a0
	kvm_restore_msa_upper	20, VCPU_FPR20+8, a0
	kvm_restore_msa_upper	21, VCPU_FPR21+8, a0
	kvm_restore_msa_upper	22, VCPU_FPR22+8, a0
	kvm_restore_msa_upper	23, VCPU_FPR23+8, a0
	kvm_restore_msa_upper	24, VCPU_FPR24+8, a0
	kvm_restore_msa_upper	25, VCPU_FPR25+8, a0
	kvm_restore_msa_upper	26, VCPU_FPR26+8, a0
	kvm_restore_msa_upper	27, VCPU_FPR27+8, a0
	kvm_restore_msa_upper	28, VCPU_FPR28+8, a0
	kvm_restore_msa_upper	29, VCPU_FPR29+8, a0
	kvm_restore_msa_upper	30, VCPU_FPR30+8, a0
	kvm_restore_msa_upper	31, VCPU_FPR31+8, a0
	jr	ra
	 nop
	END(__kvm_restore_msa_upper)

LEAF(__kvm_restore_msacsr)
	lw	t0, VCPU_MSA_CSR(a0)
	/*
	 * The ctcmsa must stay at this offset in __kvm_restore_msacsr.
	 * See kvm_mips_csr_die_notify() which handles t0 containing a value
	 * which triggers an MSA FP Exception, which must be stepped over and
	 * ignored since the set cause bits must remain there for the guest.
	 */
	_ctcmsa	MSA_CSR, t0
	jr	ra
	 nop
	END(__kvm_restore_msacsr)