Lines matching refs:arch — one entry per matched line in the KVM HV P9 entry code, giving the source line number, the matched line, and the enclosing function.

14 mtspr(SPRN_TAR, vcpu->arch.tar); in load_spr_state()
18 current->thread.vrsave != vcpu->arch.vrsave) in load_spr_state()
19 mtspr(SPRN_VRSAVE, vcpu->arch.vrsave); in load_spr_state()
22 if (vcpu->arch.hfscr & HFSCR_EBB) { in load_spr_state()
23 if (current->thread.ebbhr != vcpu->arch.ebbhr) in load_spr_state()
24 mtspr(SPRN_EBBHR, vcpu->arch.ebbhr); in load_spr_state()
25 if (current->thread.ebbrr != vcpu->arch.ebbrr) in load_spr_state()
26 mtspr(SPRN_EBBRR, vcpu->arch.ebbrr); in load_spr_state()
27 if (current->thread.bescr != vcpu->arch.bescr) in load_spr_state()
28 mtspr(SPRN_BESCR, vcpu->arch.bescr); in load_spr_state()
32 current->thread.tidr != vcpu->arch.tid) in load_spr_state()
33 mtspr(SPRN_TIDR, vcpu->arch.tid); in load_spr_state()
34 if (host_os_sprs->iamr != vcpu->arch.iamr) in load_spr_state()
35 mtspr(SPRN_IAMR, vcpu->arch.iamr); in load_spr_state()
36 if (host_os_sprs->amr != vcpu->arch.amr) in load_spr_state()
37 mtspr(SPRN_AMR, vcpu->arch.amr); in load_spr_state()
38 if (vcpu->arch.uamor != 0) in load_spr_state()
39 mtspr(SPRN_UAMOR, vcpu->arch.uamor); in load_spr_state()
40 if (current->thread.fscr != vcpu->arch.fscr) in load_spr_state()
41 mtspr(SPRN_FSCR, vcpu->arch.fscr); in load_spr_state()
42 if (current->thread.dscr != vcpu->arch.dscr) in load_spr_state()
43 mtspr(SPRN_DSCR, vcpu->arch.dscr); in load_spr_state()
44 if (vcpu->arch.pspb != 0) in load_spr_state()
45 mtspr(SPRN_PSPB, vcpu->arch.pspb); in load_spr_state()
54 if (!(vcpu->arch.ctrl & 1)) in load_spr_state()
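
Every write in load_spr_state() above follows the same guard: an SPR is written only when the guest value differs from what is already in the register, with the host's values tracked in current->thread and host_os_sprs (SPR writes are slow enough on these chips that the compare is a win). A minimal sketch of the pattern, with the surrounding context reconstructed and therefore approximate:

    /*
     * Sketch only: guarded SPR loads on guest entry.  Host-side
     * shadow values live in current->thread / host_os_sprs, so a
     * write can be skipped when guest and host already agree.
     */
    static void load_spr_state(struct kvm_vcpu *vcpu,
                               struct p9_host_os_sprs *host_os_sprs)
    {
        mtspr(SPRN_TAR, vcpu->arch.tar);    /* written unconditionally */

        if (host_os_sprs->iamr != vcpu->arch.iamr)
            mtspr(SPRN_IAMR, vcpu->arch.iamr);
        if (current->thread.fscr != vcpu->arch.fscr)
            mtspr(SPRN_FSCR, vcpu->arch.fscr);
        if (current->thread.dscr != vcpu->arch.dscr)
            mtspr(SPRN_DSCR, vcpu->arch.dscr);

        /* UAMOR and PSPB are zero in the host, so only nonzero
         * guest values need a write at all. */
        if (vcpu->arch.uamor != 0)
            mtspr(SPRN_UAMOR, vcpu->arch.uamor);
        if (vcpu->arch.pspb != 0)
            mtspr(SPRN_PSPB, vcpu->arch.pspb);
    }
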
60 vcpu->arch.tar = mfspr(SPRN_TAR); in store_spr_state()
64 vcpu->arch.vrsave = mfspr(SPRN_VRSAVE); in store_spr_state()
67 if (vcpu->arch.hfscr & HFSCR_EBB) { in store_spr_state()
68 vcpu->arch.ebbhr = mfspr(SPRN_EBBHR); in store_spr_state()
69 vcpu->arch.ebbrr = mfspr(SPRN_EBBRR); in store_spr_state()
70 vcpu->arch.bescr = mfspr(SPRN_BESCR); in store_spr_state()
74 vcpu->arch.tid = mfspr(SPRN_TIDR); in store_spr_state()
75 vcpu->arch.iamr = mfspr(SPRN_IAMR); in store_spr_state()
76 vcpu->arch.amr = mfspr(SPRN_AMR); in store_spr_state()
77 vcpu->arch.uamor = mfspr(SPRN_UAMOR); in store_spr_state()
78 vcpu->arch.fscr = mfspr(SPRN_FSCR); in store_spr_state()
79 vcpu->arch.dscr = mfspr(SPRN_DSCR); in store_spr_state()
80 vcpu->arch.pspb = mfspr(SPRN_PSPB); in store_spr_state()
82 vcpu->arch.ctrl = mfspr(SPRN_CTRLF); in store_spr_state()
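
store_spr_state() is the asymmetric mirror: on exit the register contents are whatever the guest left there, so every SPR has to be read back unconditionally; the saved values then serve as the comparison baseline for the next entry. A sketch covering only the SPRs matched above:

    /* Sketch: unconditional read-back on exit; the guest may have
     * changed any of these while it ran. */
    static void store_spr_state(struct kvm_vcpu *vcpu)
    {
        vcpu->arch.tar  = mfspr(SPRN_TAR);
        vcpu->arch.fscr = mfspr(SPRN_FSCR);
        vcpu->arch.dscr = mfspr(SPRN_DSCR);
        vcpu->arch.pspb = mfspr(SPRN_PSPB);
        vcpu->arch.ctrl = mfspr(SPRN_CTRLF);    /* CTRL's read alias */
    }
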
94 unsigned long guest_msr = vcpu->arch.shregs.msr; in load_vcpu_state()
98 } else if (vcpu->arch.hfscr & HFSCR_TM) { in load_vcpu_state()
99 mtspr(SPRN_TEXASR, vcpu->arch.texasr); in load_vcpu_state()
100 mtspr(SPRN_TFHAR, vcpu->arch.tfhar); in load_vcpu_state()
101 mtspr(SPRN_TFIAR, vcpu->arch.tfiar); in load_vcpu_state()
108 load_fp_state(&vcpu->arch.fp); in load_vcpu_state()
110 load_vr_state(&vcpu->arch.vr); in load_vcpu_state()
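
load_vcpu_state() splits transactional-memory handling in two: a guest that is mid-transaction needs a full TM state restore, while a guest that merely has the facility enabled through HFSCR_TM needs only the three TM status SPRs; after that the FP and VMX register files are loaded wholesale. A sketch, using the kernel's MSR_TM_ACTIVE() test and with the TM-active branch elided:

    unsigned long guest_msr = vcpu->arch.shregs.msr;

    if (MSR_TM_ACTIVE(guest_msr)) {
        /* live transaction: full TM state restore (elided) */
    } else if (vcpu->arch.hfscr & HFSCR_TM) {
        /* facility on but no live transaction: status SPRs only */
        mtspr(SPRN_TEXASR, vcpu->arch.texasr);
        mtspr(SPRN_TFHAR, vcpu->arch.tfhar);
        mtspr(SPRN_TFIAR, vcpu->arch.tfiar);
    }

    load_fp_state(&vcpu->arch.fp);    /* FP/VSX register file */
    load_vr_state(&vcpu->arch.vr);    /* VMX register file */
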
121 store_fp_state(&vcpu->arch.fp); in store_vcpu_state()
123 store_vr_state(&vcpu->arch.vr); in store_vcpu_state()
129 unsigned long guest_msr = vcpu->arch.shregs.msr; in store_vcpu_state()
132 } else if (vcpu->arch.hfscr & HFSCR_TM) { in store_vcpu_state()
133 vcpu->arch.texasr = mfspr(SPRN_TEXASR); in store_vcpu_state()
134 vcpu->arch.tfhar = mfspr(SPRN_TFHAR); in store_vcpu_state()
135 vcpu->arch.tfiar = mfspr(SPRN_TFIAR); in store_vcpu_state()
137 if (!vcpu->arch.nested) { in store_vcpu_state()
138 vcpu->arch.load_tm++; /* see load_ebb comment */ in store_vcpu_state()
139 if (!vcpu->arch.load_tm) in store_vcpu_state()
140 vcpu->arch.hfscr &= ~HFSCR_TM; in store_vcpu_state()
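
The load_tm++ at line 138 above (and its load_ebb twin further down) implements a lazy facility shutoff: the counter is bumped on every exit for a non-nested guest, and when it wraps back to zero the facility bit is dropped from HFSCR. A guest that has genuinely stopped using TM then stops paying the mfspr/mtspr cost on every switch; if it touches TM again it takes a facility-unavailable interrupt and KVM sets HFSCR_TM again. Annotated:

    if (!vcpu->arch.nested) {
        vcpu->arch.load_tm++;          /* small counter; see load_ebb comment */
        if (!vcpu->arch.load_tm)       /* wrapped around to zero */
            vcpu->arch.hfscr &= ~HFSCR_TM;    /* stop saving/restoring TM */
    }
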
169 current->thread.tidr != vcpu->arch.tid) in restore_p9_host_os_sprs()
171 if (host_os_sprs->iamr != vcpu->arch.iamr) in restore_p9_host_os_sprs()
173 if (vcpu->arch.uamor != 0) in restore_p9_host_os_sprs()
175 if (host_os_sprs->amr != vcpu->arch.amr) in restore_p9_host_os_sprs()
177 if (current->thread.fscr != vcpu->arch.fscr) in restore_p9_host_os_sprs()
179 if (current->thread.dscr != vcpu->arch.dscr) in restore_p9_host_os_sprs()
181 if (vcpu->arch.pspb != 0) in restore_p9_host_os_sprs()
185 if (!(vcpu->arch.ctrl & 1)) in restore_p9_host_os_sprs()
190 vcpu->arch.vrsave != current->thread.vrsave) in restore_p9_host_os_sprs()
193 if (vcpu->arch.hfscr & HFSCR_EBB) { in restore_p9_host_os_sprs()
194 if (vcpu->arch.bescr != current->thread.bescr) in restore_p9_host_os_sprs()
196 if (vcpu->arch.ebbhr != current->thread.ebbhr) in restore_p9_host_os_sprs()
198 if (vcpu->arch.ebbrr != current->thread.ebbrr) in restore_p9_host_os_sprs()
201 if (!vcpu->arch.nested) { in restore_p9_host_os_sprs()
208 vcpu->arch.load_ebb++; in restore_p9_host_os_sprs()
209 if (!vcpu->arch.load_ebb) in restore_p9_host_os_sprs()
210 vcpu->arch.hfscr &= ~HFSCR_EBB; in restore_p9_host_os_sprs()
214 if (vcpu->arch.tar != current->thread.tar) in restore_p9_host_os_sprs()
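
restore_p9_host_os_sprs() walks the same SPR set in the other direction on exit, writing each host value back only when the guest's value, which is what is currently in the register, differs. Note how each compare mirrors the one in load_spr_state(). A sketch:

    /* Sketch: guarded restore of host SPRs after exit. */
    if (host_os_sprs->iamr != vcpu->arch.iamr)
        mtspr(SPRN_IAMR, host_os_sprs->iamr);
    if (vcpu->arch.uamor != 0)
        mtspr(SPRN_UAMOR, 0);    /* the nonzero-only guard implies host UAMOR == 0 */
    if (current->thread.fscr != vcpu->arch.fscr)
        mtspr(SPRN_FSCR, current->thread.fscr);
    if (vcpu->arch.tar != current->thread.tar)
        mtspr(SPRN_TAR, current->thread.tar);
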
222 struct kvmppc_vcore *vc = vcpu->arch.vcore; in accumulate_time()
229 curr = vcpu->arch.cur_activity; in accumulate_time()
230 vcpu->arch.cur_activity = next; in accumulate_time()
231 prev_tb = vcpu->arch.cur_tb_start; in accumulate_time()
232 vcpu->arch.cur_tb_start = tb; in accumulate_time()
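
accumulate_time() is a bucket switch: it closes the activity bucket the vcpu has been in since cur_tb_start, charging it the elapsed timebase ticks, and opens the next one. Simplified sketch (the real version also tracks min/max and a sequence count; those details are assumed away here):

    static void accumulate_time(struct kvm_vcpu *vcpu,
                                struct kvmhv_tb_accumulator *next)
    {
        struct kvmhv_tb_accumulator *curr;
        u64 tb = mftb();
        u64 prev_tb;

        curr = vcpu->arch.cur_activity;
        vcpu->arch.cur_activity = next;      /* open the next bucket */
        prev_tb = vcpu->arch.cur_tb_start;
        vcpu->arch.cur_tb_start = tb;

        if (curr)
            curr->tb_total += tb - prev_tb;  /* close out the old one */
    }
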
303 struct kvm_nested_guest *nested = vcpu->arch.nested; in switch_mmu_to_guest_radix()
307 lpid = nested ? nested->shadow_lpid : kvm->arch.lpid; in switch_mmu_to_guest_radix()
308 pid = vcpu->arch.pid; in switch_mmu_to_guest_radix()
332 lpid = kvm->arch.lpid; in switch_mmu_to_guest_hpt()
333 pid = vcpu->arch.pid; in switch_mmu_to_guest_hpt()
346 for (i = 0; i < vcpu->arch.slb_max; i++) in switch_mmu_to_guest_hpt()
347 mtslb(vcpu->arch.slb[i].orige, vcpu->arch.slb[i].origv); in switch_mmu_to_guest_hpt()
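
Switching the MMU to the guest means programming LPIDR and PIDR with the guest's IDs (a nested radix guest runs under its shadow LPID); an HPT guest additionally needs its SLB repopulated, since the entries were cleared while the host ran. Sketch, using the mtslb() helper the matches above imply:

    struct kvm_nested_guest *nested = vcpu->arch.nested;
    u32 lpid = nested ? nested->shadow_lpid : kvm->arch.lpid;

    mtspr(SPRN_LPID, lpid);
    mtspr(SPRN_PID, vcpu->arch.pid);

    /* HPT guests only: reload the saved, compacted SLB */
    for (i = 0; i < vcpu->arch.slb_max; i++)
        mtslb(vcpu->arch.slb[i].orige, vcpu->arch.slb[i].origv);
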
355 u32 lpid = kvm->arch.host_lpid; in switch_mmu_to_host()
356 u64 lpcr = kvm->arch.host_lpcr; in switch_mmu_to_host()
403 for (i = 0; i < vcpu->arch.slb_nr; i++) { in save_clear_guest_mmu()
409 vcpu->arch.slb[nr].orige = slbee | i; in save_clear_guest_mmu()
410 vcpu->arch.slb[nr].origv = slbev; in save_clear_guest_mmu()
414 vcpu->arch.slb_max = nr; in save_clear_guest_mmu()
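
Before switching back to the host LPID/LPCR (kvm->arch.host_lpid / host_lpcr), save_clear_guest_mmu() reads the guest SLB back and compacts it: only valid entries are kept, the hardware index is folded into the saved ESID word, and slb_max shrinks to the live count so the reload loop above touches no dead slots. The loop, annotated (mfslb() per the surrounding file):

    nr = 0;
    for (i = 0; i < vcpu->arch.slb_nr; i++) {
        u64 slbee, slbev;

        mfslb(i, &slbee, &slbev);            /* read one SLB entry */
        if (slbee & SLB_ESID_V) {            /* keep valid entries only */
            vcpu->arch.slb[nr].orige = slbee | i;    /* fold in the index */
            vcpu->arch.slb[nr].origv = slbev;
            nr++;
        }
    }
    vcpu->arch.slb_max = nr;    /* live entries, densely packed */
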
429 for (set = 1; set < kvm->arch.tlb_sets; ++set) { in flush_guest_tlb()
440 for (set = 0; set < kvm->arch.tlb_sets; ++set) { in flush_guest_tlb()
463 need_tlb_flush = &kvm->arch.need_tlb_flush; in check_need_tlb_flush()
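
flush_guest_tlb() walks every TLB congruence class, issuing one tlbiel per set, and check_need_tlb_flush() makes that conditional on this physical CPU's bit in the kvm->arch.need_tlb_flush cpumask, so the full flush only happens where stale guest translations might actually be present. A sketch, with tlbiel_set() standing in for the kernel's PPC_TLBIEL inline asm (hypothetical helper, not a real kernel function):

    /* flush: one invalidate per congruence class */
    unsigned long rb = PPC_BIT(52);      /* IS = 2: whole set */
    unsigned long set;

    for (set = 0; set < kvm->arch.tlb_sets; ++set) {
        tlbiel_set(rb);                  /* hypothetical wrapper */
        rb += PPC_BIT(51);               /* advance to the next set */
    }

    /* check: flush only if this pcpu has been flagged */
    if (cpumask_test_cpu(pcpu, &kvm->arch.need_tlb_flush)) {
        flush_guest_tlb(kvm);
        cpumask_clear_cpu(pcpu, &kvm->arch.need_tlb_flush);
    }
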
509 (vcpu->arch.hfscr & HFSCR_TM)) in kvmppc_msr_hard_disable_set_facilities()
536 struct kvm_nested_guest *nested = vcpu->arch.nested; in kvmhv_vcpu_entry_p9()
537 struct kvmppc_vcore *vc = vcpu->arch.vcore; in kvmhv_vcpu_entry_p9()
558 WARN_ON_ONCE(vcpu->arch.shregs.msr & MSR_HV); in kvmhv_vcpu_entry_p9()
559 WARN_ON_ONCE(!(vcpu->arch.shregs.msr & MSR_ME)); in kvmhv_vcpu_entry_p9()
561 vcpu->arch.ceded = 0; in kvmhv_vcpu_entry_p9()
608 mtspr(SPRN_PURR, vcpu->arch.purr); in kvmhv_vcpu_entry_p9()
609 mtspr(SPRN_SPURR, vcpu->arch.spurr); in kvmhv_vcpu_entry_p9()
613 if (vcpu->arch.doorbell_request) { in kvmhv_vcpu_entry_p9()
614 vcpu->arch.doorbell_request = 0; in kvmhv_vcpu_entry_p9()
619 if (vcpu->arch.dawr0 != host_dawr0) in kvmhv_vcpu_entry_p9()
620 mtspr(SPRN_DAWR0, vcpu->arch.dawr0); in kvmhv_vcpu_entry_p9()
621 if (vcpu->arch.dawrx0 != host_dawrx0) in kvmhv_vcpu_entry_p9()
622 mtspr(SPRN_DAWRX0, vcpu->arch.dawrx0); in kvmhv_vcpu_entry_p9()
624 if (vcpu->arch.dawr1 != host_dawr1) in kvmhv_vcpu_entry_p9()
625 mtspr(SPRN_DAWR1, vcpu->arch.dawr1); in kvmhv_vcpu_entry_p9()
626 if (vcpu->arch.dawrx1 != host_dawrx1) in kvmhv_vcpu_entry_p9()
627 mtspr(SPRN_DAWRX1, vcpu->arch.dawrx1); in kvmhv_vcpu_entry_p9()
630 if (vcpu->arch.ciabr != host_ciabr) in kvmhv_vcpu_entry_p9()
631 mtspr(SPRN_CIABR, vcpu->arch.ciabr); in kvmhv_vcpu_entry_p9()
635 mtspr(SPRN_PSSCR, vcpu->arch.psscr | PSSCR_EC | in kvmhv_vcpu_entry_p9()
638 if (vcpu->arch.psscr != host_psscr) in kvmhv_vcpu_entry_p9()
639 mtspr(SPRN_PSSCR_PR, vcpu->arch.psscr); in kvmhv_vcpu_entry_p9()
642 mtspr(SPRN_HFSCR, vcpu->arch.hfscr); in kvmhv_vcpu_entry_p9()
644 mtspr(SPRN_HSRR0, vcpu->arch.regs.nip); in kvmhv_vcpu_entry_p9()
645 mtspr(SPRN_HSRR1, (vcpu->arch.shregs.msr & ~MSR_HV) | MSR_ME); in kvmhv_vcpu_entry_p9()
664 mtspr(SPRN_SPRG0, vcpu->arch.shregs.sprg0); in kvmhv_vcpu_entry_p9()
665 mtspr(SPRN_SPRG1, vcpu->arch.shregs.sprg1); in kvmhv_vcpu_entry_p9()
666 mtspr(SPRN_SPRG2, vcpu->arch.shregs.sprg2); in kvmhv_vcpu_entry_p9()
667 mtspr(SPRN_SPRG3, vcpu->arch.shregs.sprg3); in kvmhv_vcpu_entry_p9()
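
Just before entry, a pending guest doorbell is materialised in DPDES, and the hardware debug registers are loaded under the usual differs-from-host guard (host_dawr0 and friends were sampled earlier in this function, per the comparisons above). Sketch:

    if (vcpu->arch.doorbell_request) {
        vcpu->arch.doorbell_request = 0;
        mtspr(SPRN_DPDES, 1);    /* raise this thread's doorbell bit */
    }

    if (vcpu->arch.dawr0 != host_dawr0)
        mtspr(SPRN_DAWR0, vcpu->arch.dawr0);
    if (vcpu->arch.dawrx0 != host_dawrx0)
        mtspr(SPRN_DAWRX0, vcpu->arch.dawrx0);
    if (vcpu->arch.ciabr != host_ciabr)
        mtspr(SPRN_CIABR, vcpu->arch.ciabr);
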
711 mtspr(SPRN_DEC, vcpu->arch.dec_expires - *tb); in kvmhv_vcpu_entry_p9()
716 mtspr(SPRN_DAR, vcpu->arch.shregs.dar); in kvmhv_vcpu_entry_p9()
717 mtspr(SPRN_DSISR, vcpu->arch.shregs.dsisr); in kvmhv_vcpu_entry_p9()
718 mtspr(SPRN_SRR0, vcpu->arch.shregs.srr0); in kvmhv_vcpu_entry_p9()
719 mtspr(SPRN_SRR1, vcpu->arch.shregs.srr1); in kvmhv_vcpu_entry_p9()
722 accumulate_time(vcpu, &vcpu->arch.in_guest); in kvmhv_vcpu_entry_p9()
726 accumulate_time(vcpu, &vcpu->arch.guest_exit); in kvmhv_vcpu_entry_p9()
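
The actual switch into guest context is bracketed by two accumulate_time() calls, moving the vcpu from the host bucket into in_guest and then into guest_exit; the decrementer is programmed with the ticks remaining until dec_expires, and the guest's SRR values are placed in the real registers so the guest sees them. Sketch, with enter_guest() as a stand-in for the low-level entry sequence (assumed name and signature):

    mtspr(SPRN_DEC, vcpu->arch.dec_expires - *tb);    /* ticks left */

    mtspr(SPRN_SRR0, vcpu->arch.shregs.srr0);
    mtspr(SPRN_SRR1, vcpu->arch.shregs.srr1);

    accumulate_time(vcpu, &vcpu->arch.in_guest);      /* host -> guest */
    trap = enter_guest(vcpu);                         /* stand-in */
    accumulate_time(vcpu, &vcpu->arch.guest_exit);    /* guest -> host */
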
730 vcpu->arch.shregs.srr0 = mfspr(SPRN_SRR0); in kvmhv_vcpu_entry_p9()
731 vcpu->arch.shregs.srr1 = mfspr(SPRN_SRR1); in kvmhv_vcpu_entry_p9()
732 vcpu->arch.shregs.dar = mfspr(SPRN_DAR); in kvmhv_vcpu_entry_p9()
733 vcpu->arch.shregs.dsisr = mfspr(SPRN_DSISR); in kvmhv_vcpu_entry_p9()
745 vcpu->arch.regs.gpr[1] = local_paca->kvm_hstate.scratch1; in kvmhv_vcpu_entry_p9()
746 vcpu->arch.regs.gpr[3] = local_paca->kvm_hstate.scratch2; in kvmhv_vcpu_entry_p9()
760 vcpu->arch.regs.gpr[9] = exsave[EX_R9/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
761 vcpu->arch.regs.gpr[10] = exsave[EX_R10/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
762 vcpu->arch.regs.gpr[11] = exsave[EX_R11/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
763 vcpu->arch.regs.gpr[12] = exsave[EX_R12/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
764 vcpu->arch.regs.gpr[13] = exsave[EX_R13/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
765 vcpu->arch.ppr = exsave[EX_PPR/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
766 vcpu->arch.cfar = exsave[EX_CFAR/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
767 vcpu->arch.regs.ctr = exsave[EX_CTR/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
769 vcpu->arch.last_inst = KVM_INST_FETCH_FAILED; in kvmhv_vcpu_entry_p9()
772 vcpu->arch.fault_dar = exsave[EX_DAR/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
773 vcpu->arch.fault_dsisr = exsave[EX_DSISR/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
780 vcpu->arch.emul_inst = mfspr(SPRN_HEIR); in kvmhv_vcpu_entry_p9()
783 vcpu->arch.fault_dar = exsave[EX_DAR/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
784 vcpu->arch.fault_dsisr = exsave[EX_DSISR/sizeof(u64)]; in kvmhv_vcpu_entry_p9()
785 vcpu->arch.fault_gpa = mfspr(SPRN_ASDR); in kvmhv_vcpu_entry_p9()
788 vcpu->arch.fault_gpa = mfspr(SPRN_ASDR); in kvmhv_vcpu_entry_p9()
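
On the way out, the guest's volatile registers are recovered from paca scratch slots and the exception save area; exsave is viewed as an array of u64, which is why byte offsets like EX_R9 are scaled by sizeof(u64). Fault details are captured only for the interrupt types that latch them. Sketch:

    vcpu->arch.regs.gpr[1] = local_paca->kvm_hstate.scratch1;
    vcpu->arch.regs.gpr[9] = exsave[EX_R9 / sizeof(u64)];
    vcpu->arch.cfar        = exsave[EX_CFAR / sizeof(u64)];

    if (trap == BOOK3S_INTERRUPT_H_DATA_STORAGE) {
        /* HDSI: faulting address, reason, and guest real address */
        vcpu->arch.fault_dar   = exsave[EX_DAR / sizeof(u64)];
        vcpu->arch.fault_dsisr = exsave[EX_DSISR / sizeof(u64)];
        vcpu->arch.fault_gpa   = mfspr(SPRN_ASDR);
    }
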
791 vcpu->arch.hfscr = mfspr(SPRN_HFSCR); in kvmhv_vcpu_entry_p9()
800 vcpu->arch.emul_inst = mfspr(SPRN_HEIR); in kvmhv_vcpu_entry_p9()
808 (vcpu->arch.shregs.msr & MSR_TS_S)) { in kvmhv_vcpu_entry_p9()
814 mtspr(SPRN_HSRR0, vcpu->arch.regs.nip); in kvmhv_vcpu_entry_p9()
815 mtspr(SPRN_HSRR1, vcpu->arch.shregs.msr); in kvmhv_vcpu_entry_p9()
825 local_paca->kvm_hstate.host_purr += purr - vcpu->arch.purr; in kvmhv_vcpu_entry_p9()
826 local_paca->kvm_hstate.host_spurr += spurr - vcpu->arch.spurr; in kvmhv_vcpu_entry_p9()
827 vcpu->arch.purr = purr; in kvmhv_vcpu_entry_p9()
828 vcpu->arch.spurr = spurr; in kvmhv_vcpu_entry_p9()
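
PURR and SPURR were loaded with the vcpu's values at entry (line 608 above), so the delta at exit is exactly what the guest consumed; the saved host values are advanced by that amount before they are put back, and the raw readings become the vcpu's baseline for its next entry. Annotated:

    purr = mfspr(SPRN_PURR);
    spurr = mfspr(SPRN_SPURR);
    /* advance host PURR/SPURR by the amount the guest used */
    local_paca->kvm_hstate.host_purr  += purr - vcpu->arch.purr;
    local_paca->kvm_hstate.host_spurr += spurr - vcpu->arch.spurr;
    vcpu->arch.purr  = purr;     /* baseline for the next entry */
    vcpu->arch.spurr = spurr;
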
830 vcpu->arch.ic = mfspr(SPRN_IC); in kvmhv_vcpu_entry_p9()
831 vcpu->arch.pid = mfspr(SPRN_PID); in kvmhv_vcpu_entry_p9()
832 vcpu->arch.psscr = mfspr(SPRN_PSSCR_PR); in kvmhv_vcpu_entry_p9()
834 vcpu->arch.shregs.sprg0 = mfspr(SPRN_SPRG0); in kvmhv_vcpu_entry_p9()
835 vcpu->arch.shregs.sprg1 = mfspr(SPRN_SPRG1); in kvmhv_vcpu_entry_p9()
836 vcpu->arch.shregs.sprg2 = mfspr(SPRN_SPRG2); in kvmhv_vcpu_entry_p9()
837 vcpu->arch.shregs.sprg3 = mfspr(SPRN_SPRG3); in kvmhv_vcpu_entry_p9()
841 vcpu->arch.doorbell_request = 1; in kvmhv_vcpu_entry_p9()
849 vcpu->arch.dec_expires = dec + *tb; in kvmhv_vcpu_entry_p9()
872 vcpu->arch.shregs.msr & MSR_TS_MASK) in kvmhv_vcpu_entry_p9()
888 if (vcpu->arch.ciabr != host_ciabr) in kvmhv_vcpu_entry_p9()
892 if (vcpu->arch.dawr0 != host_dawr0) in kvmhv_vcpu_entry_p9()
894 if (vcpu->arch.dawrx0 != host_dawrx0) in kvmhv_vcpu_entry_p9()
897 if (vcpu->arch.dawr1 != host_dawr1) in kvmhv_vcpu_entry_p9()
899 if (vcpu->arch.dawrx1 != host_dawrx1) in kvmhv_vcpu_entry_p9()