/openbmc/linux/arch/powerpc/kernel/

process.c
  169   msr_check_and_set(MSR_FP);   in giveup_fpu()
  171   msr_check_and_clear(MSR_FP);   in giveup_fpu()
  313   if (msr & MSR_FP)   in __giveup_vsx()
  441   if (usermsr & MSR_FP)   in giveup_all()
  530   new_msr |= MSR_FP;   in restore_math()
  536   if (((msr | new_msr) & (MSR_FP | MSR_VEC)) == (MSR_FP | MSR_VEC))   in restore_math()
  545   if (new_msr & MSR_FP) {   in restore_math()
  581   if (usermsr & MSR_FP)   in save_all()
  1139  if (msr_diff & MSR_FP)   in restore_tm_state()
  1202  if (usermsr & MSR_FP)   in kvmppc_save_user_regs()
  [all …]
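The process.c hits above show the usual bracket around flushing floating-point state: set MSR_FP, save the registers, clear MSR_FP again. A minimal user-space sketch of that pattern, with a mock MSR variable and a stub save routine standing in for the real kernel machinery (this is a simplified model, not the kernel implementation):

    #include <stdio.h>

    #define MSR_FP (1UL << 13)          /* FP-available bit, per the PowerPC MSR layout */

    static unsigned long mock_msr;      /* stand-in for the real machine state register */

    /* Simplified model: set the requested bits only if they are not already set. */
    static void msr_check_and_set(unsigned long bits)
    {
        if ((mock_msr & bits) != bits)
            mock_msr |= bits;
    }

    /* Simplified model: clear the requested bits only if any of them are set. */
    static void msr_check_and_clear(unsigned long bits)
    {
        if (mock_msr & bits)
            mock_msr &= ~bits;
    }

    static void save_fpu_state(void)
    {
        /* placeholder: the kernel saves the FP registers to the thread struct here */
        printf("saving FP registers, MSR_FP=%lu\n", (mock_msr & MSR_FP) ? 1UL : 0UL);
    }

    /* The giveup_fpu()-style bracket: enable, save, disable. */
    static void giveup_fpu_model(void)
    {
        msr_check_and_set(MSR_FP);      /* make the FP unit accessible */
        save_fpu_state();               /* flush the live FP state */
        msr_check_and_clear(MSR_FP);    /* leave FP disabled again */
    }

    int main(void)
    {
        giveup_fpu_model();
        return 0;
    }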
fpu.S
  98    ori r5,r5,MSR_FP|MSR_RI
  100   ori r5,r5,MSR_FP
  113   ori r9,r9,MSR_FP   /* enable FP for current */
  119   ori r12,r12,MSR_FP

signal_64.c
  237   msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);   in setup_tm_sigcontexts()
  285   if (msr & MSR_FP)   in setup_tm_sigcontexts()
  387   regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX));   in __unsafe_restore_sigcontext()
  506   regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX));   in restore_tm_sigcontexts()
  599   msr_check_and_set(msr & (MSR_FP | MSR_VEC));   in restore_tm_sigcontexts()
  600   if (msr & MSR_FP) {   in restore_tm_sigcontexts()
  602   regs_set_return_msr(regs, regs->msr | (MSR_FP | tsk->thread.fpexc_mode));   in restore_tm_sigcontexts()
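The signal_64.c hits show the sigreturn side of the same bookkeeping: the FP/VEC/VSX enable bits are stripped from regs->msr, and MSR_FP is put back only if the saved context had it, together with the thread's FP exception mode bits. A sketch of that computation, assuming the standard PowerPC bit positions; the parameter names are illustrative, not kernel APIs:

    #include <stdio.h>

    /* Bit values follow the standard PowerPC MSR layout (assumed for illustration). */
    #define MSR_FP   (1UL << 13)
    #define MSR_FE0  (1UL << 11)
    #define MSR_FE1  (1UL << 8)
    #define MSR_VEC  (1UL << 25)
    #define MSR_VSX  (1UL << 23)

    /*
     * Drop all FP-related enable bits from the live MSR, then re-enable FP
     * only if the saved signal context had it, merging in the thread's FP
     * exception mode (FE0/FE1) bits.
     */
    static unsigned long restored_msr(unsigned long regs_msr,
                                      unsigned long ctx_msr,
                                      unsigned long fpexc_mode)
    {
        regs_msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);
        if (ctx_msr & MSR_FP)
            regs_msr |= MSR_FP | fpexc_mode;
        return regs_msr;
    }

    int main(void)
    {
        unsigned long msr = restored_msr(MSR_FP | MSR_VEC, MSR_FP, MSR_FE0 | MSR_FE1);
        printf("restored MSR: %#lx\n", msr);   /* FP re-enabled, VEC left off */
        return 0;
    }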
signal_32.c
  418   if (msr & MSR_FP)   in save_tm_user_regs_unsafe()
  536   regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1));   in restore_user_regs()
  617   regs_set_return_msr(regs, regs->msr & ~(MSR_FP | MSR_FE0 | MSR_FE1));   in restore_tm_user_regs()
  700   msr_check_and_set(msr & (MSR_FP | MSR_VEC));   in restore_tm_user_regs()
  701   if (msr & MSR_FP) {   in restore_tm_user_regs()
  703   regs_set_return_msr(regs, regs->msr | (MSR_FP | current->thread.fpexc_mode));   in restore_tm_user_regs()

vector.S
  131   andi. r5,r12,MSR_FP
  204   ori r11,r10,MSR_FP

tm.S
  135   ori r15, r15, MSR_FP
  387   ori r5, r5, MSR_FP

interrupt.c
  208   unsigned long mathflags = MSR_FP;   in interrupt_exit_user_prepare_main()

cpu_setup_6xx.S
  310   ori r11,r10,MSR_FP
/openbmc/linux/arch/powerpc/lib/

ldstfp.S
  23    ori r7, r6, MSR_FP
  47    ori r7, r6, MSR_FP
  213   ori r7, r6, MSR_FP
  228   ori r7, r6, MSR_FP

sstep.c
  611   if (regs->msr & MSR_FP)   in do_fp_load()
  618   if (regs->msr & MSR_FP)   in do_fp_load()
  647   if (regs->msr & MSR_FP)   in do_fp_store()
  659   if (regs->msr & MSR_FP)   in do_fp_store()
  979   if (regs->msr & MSR_FP) {   in do_vsx_load()
  1025  if (regs->msr & MSR_FP) {   in do_vsx_store()
  3440  if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_FP))   in emulate_loadstore()
  3511  if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_FP))   in emulate_loadstore()
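The sstep.c helpers test regs->msr & MSR_FP before every emulated FP access to decide whether the value should go to the live register file or to the saved thread state. A schematic of that branch under made-up types and storage (only the MSR_FP test itself comes from the listing):

    #include <stdio.h>

    #define MSR_FP (1UL << 13)   /* assumed standard PowerPC mask value */

    struct fake_regs   { unsigned long msr; };
    struct fake_thread { double fpr[32]; };   /* stand-in for the saved FP image */

    static double live_fpr[32];               /* stand-in for the real register file */

    static void emulated_fp_load(struct fake_regs *regs, struct fake_thread *t,
                                 int rn, const double *src)
    {
        if (regs->msr & MSR_FP)
            live_fpr[rn] = *src;   /* FP unit enabled: the live register is current */
        else
            t->fpr[rn] = *src;     /* FP unit disabled: patch the saved thread copy */
    }

    int main(void)
    {
        struct fake_regs   regs = { .msr = 0 };
        struct fake_thread thr  = { { 0 } };
        double val = 1.5;

        emulated_fp_load(&regs, &thr, 3, &val);   /* goes to the saved image */
        regs.msr |= MSR_FP;
        emulated_fp_load(&regs, &thr, 3, &val);   /* goes to the live register */
        printf("saved copy: %f, live copy: %f\n", thr.fpr[3], live_fpr[3]);
        return 0;
    }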
/openbmc/linux/arch/powerpc/include/asm/

switch_to.h
  46    msr_check_and_clear(MSR_FP);   in disable_kernel_fp()
  81    msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);   in disable_kernel_vsx()

/openbmc/linux/arch/powerpc/kvm/

book3s_pr.c
  368   (MSR_FP | MSR_VEC | MSR_VSX);   in kvmppc_handle_lost_math_exts()
  373   if (ext_diff == MSR_FP)   in kvmppc_handle_lost_math_exts()
  547   if (kvmppc_get_msr(vcpu) & MSR_FP)   in kvmppc_set_msr_pr()
  813   msr |= MSR_FP | MSR_VEC;   in kvmppc_giveup_ext()
  823   if (msr & MSR_FP) {   in kvmppc_giveup_ext()
  829   if (t->regs->msr & MSR_FP)   in kvmppc_giveup_ext()
  894   msr = MSR_FP | MSR_VEC | MSR_VSX;   in kvmppc_handle_ext()
  906   if (msr & MSR_FP) {   in kvmppc_handle_ext()
  945   if (lost_ext & MSR_FP) {   in kvmppc_handle_lost_ext()
  1370  ext_msr = MSR_FP;   in kvmppc_handle_exit_pr()
  [all …]

emulate_loadstore.c
  30    if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {   in kvmppc_check_fp_disabled()
  247   MSR_FP);   in kvmppc_emulate_loadstore()

tm.S
  44    ori r8, r8, MSR_FP
  238   ori r5, r5, MSR_FP

booke.c
  160   if (!(current->thread.regs->msr & MSR_FP)) {   in kvmppc_load_guest_fp()
  165   current->thread.regs->msr |= MSR_FP;   in kvmppc_load_guest_fp()
  177   if (current->thread.regs->msr & MSR_FP)   in kvmppc_save_guest_fp()
  188   vcpu->arch.shadow_msr &= ~MSR_FP;   in kvmppc_vcpu_sync_fpu()
  189   vcpu->arch.shadow_msr |= vcpu->arch.shared->msr & MSR_FP;   in kvmppc_vcpu_sync_fpu()
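The last two booke.c hits (lines 188-189) are a compact idiom worth calling out: clear the FP bit in the shadow MSR, then copy in whatever the guest's MSR says. A self-contained sketch of that sync, with a made-up vcpu structure rather than the real one:

    #include <stdio.h>

    #define MSR_FP (1UL << 13)

    struct fake_vcpu {
        unsigned long shadow_msr;   /* MSR the hardware actually runs with */
        unsigned long guest_msr;    /* MSR the guest believes it has */
    };

    /* Mirror the guest's MSR_FP setting into the shadow MSR. */
    static void sync_fpu_bit(struct fake_vcpu *vcpu)
    {
        vcpu->shadow_msr &= ~MSR_FP;
        vcpu->shadow_msr |= vcpu->guest_msr & MSR_FP;
    }

    int main(void)
    {
        struct fake_vcpu v = { .shadow_msr = MSR_FP, .guest_msr = 0 };
        sync_fpu_bit(&v);
        printf("shadow MSR_FP now %lu\n", (v.shadow_msr & MSR_FP) ? 1UL : 0UL);   /* 0 */
        return 0;
    }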
book3s_paired_singles.c
  658   if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {   in kvmppc_emulate_paired_single()
  663   kvmppc_giveup_ext(vcpu, MSR_FP);   in kvmppc_emulate_paired_single()

/openbmc/qemu/target/ppc/

cpu_init.c
  2177  (1ull << MSR_FP) |
  2248  (1ull << MSR_FP) |
  2287  (1ull << MSR_FP) |
  2339  (1ull << MSR_FP) |
  2409  (1ull << MSR_FP) |
  2448  (1ull << MSR_FP) |
  2490  (1ull << MSR_FP) |   in POWERPC_FAMILY()
  2532  (1ull << MSR_FP) |   in POWERPC_FAMILY()
  2588  (1ull << MSR_FP) |   in POWERPC_FAMILY()
  2628  (1ull << MSR_FP) |   in POWERPC_FAMILY()
  [all …]

helper_regs.c
  140   QEMU_BUILD_BUG_ON(MSR_FP != HFLAGS_FP);   in hreg_compute_hflags_value()
  142   (1 << MSR_DR) | (1 << MSR_FP));   in hreg_compute_hflags_value()
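Note that the two trees spell MSR_FP differently: in the QEMU hits it is a bit position, which is why cpu_init.c and helper_regs.c shift it with (1ull << MSR_FP), while the U-Boot header later in this listing defines MSR_FP as the mask (1<<13) itself. A quick check that both conventions name the same bit (the macro names here are invented to keep the two apart):

    #include <assert.h>
    #include <stdio.h>

    /* QEMU-style: MSR_FP is the bit position, so callers shift it themselves. */
    #define QEMU_MSR_FP_BIT   13
    /* Kernel/U-Boot-style: MSR_FP is already the mask (1 << 13). */
    #define UBOOT_MSR_FP_MASK (1UL << 13)

    int main(void)
    {
        unsigned long long qemu_mask = 1ULL << QEMU_MSR_FP_BIT;   /* the (1ull << MSR_FP) idiom */
        assert(qemu_mask == UBOOT_MSR_FP_MASK);                   /* both name the same MSR bit */
        printf("FP mask: %#llx\n", qemu_mask);                    /* 0x2000 */
        return 0;
    }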
/openbmc/u-boot/arch/powerpc/cpu/mpc8xx/

traps.c
  62    regs->msr & MSR_PR ? 1 : 0, regs->msr & MSR_FP ? 1 : 0,   in show_regs()

/openbmc/u-boot/arch/powerpc/cpu/mpc86xx/

traps.c
  68    regs->msr & MSR_PR ? 1 : 0, regs->msr & MSR_FP ? 1 : 0,   in show_regs()

/openbmc/u-boot/arch/powerpc/cpu/mpc83xx/

traps.c
  59    regs->msr & MSR_FP ? 1 : 0,regs->msr&MSR_ME ? 1 : 0,   in show_regs()

start.S
  35    #define MSR_KERNEL (MSR_FP|MSR_RI)
  37    #define MSR_KERNEL (MSR_FP|MSR_ME|MSR_RI)
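The start.S hits build MSR_KERNEL out of these masks. Taking MSR_FP = 1<<13 from the U-Boot header below and assuming the standard PowerPC values for MSR_ME (1<<12) and MSR_RI (1<<1), the two variants evaluate as follows:

    #include <stdio.h>

    /* Mask values per the standard PowerPC MSR layout (assumed for this illustration). */
    #define MSR_FP (1UL << 13)   /* 0x2000, matches the U-Boot definition below */
    #define MSR_ME (1UL << 12)   /* 0x1000 */
    #define MSR_RI (1UL << 1)    /* 0x0002 */

    int main(void)
    {
        printf("MSR_KERNEL (FP|RI)    = %#lx\n", MSR_FP | MSR_RI);            /* 0x2002 */
        printf("MSR_KERNEL (FP|ME|RI) = %#lx\n", MSR_FP | MSR_ME | MSR_RI);   /* 0x3002 */
        return 0;
    }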
/openbmc/u-boot/arch/powerpc/cpu/mpc85xx/

traps.c
  95    regs->msr & MSR_FP ? 1 : 0,regs->msr&MSR_ME ? 1 : 0,   in show_regs()

/openbmc/qemu/hw/ppc/

spapr_cpu_core.c
  47    env->msr |= (1ULL << MSR_ME) | (1ULL << MSR_FP);   in spapr_reset_vcpu()

/openbmc/u-boot/arch/powerpc/include/asm/

processor.h
  29    #define MSR_FP (1<<13)   /* Floating Point enable */   (macro definition)
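With MSR_FP defined as the mask (1<<13), the show_regs() hits above reduce to a plain bit test. A minimal sketch of that register-dump idiom (the sample MSR value and the MSR_PR definition are assumptions based on the standard MSR layout):

    #include <stdio.h>

    #define MSR_FP (1 << 13)   /* Floating Point enable, as in the U-Boot header above */
    #define MSR_PR (1 << 14)   /* Problem state (user mode), assumed standard value */

    int main(void)
    {
        unsigned long msr = MSR_FP;   /* made-up sample value */
        /* Same ternary idiom as the traps.c show_regs() hits above. */
        printf("PR: %d  FP: %d\n",
               msr & MSR_PR ? 1 : 0,
               msr & MSR_FP ? 1 : 0);
        return 0;
    }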