/openbmc/linux/tools/testing/selftests/kvm/x86_64/

hyperv_features.c
    37  static bool is_write_only_msr(uint32_t msr)
    39          return msr == HV_X64_MSR_EOI;
    42  static void guest_msr(struct msr_data *msr)
    47          GUEST_ASSERT(msr->idx);
    49          if (msr->write)
    50                  vector = wrmsr_safe(msr->idx, msr->write_val);
    52          if (!vector && (!msr->write || !is_write_only_msr(msr->idx)))
    53                  vector = rdmsr_safe(msr->idx, &msr_val);
    55          if (msr->fault_expected)
    58                          msr->idx, msr->write ? "WR" : "RD", vector);
    [all …]
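
The selftest above distills the MSR conformance pattern: attempt the access with an exception-safe helper, capture the vector it reports, and compare against the expectation. A minimal sketch of that flow, assuming rdmsr_safe()/wrmsr_safe() helpers that return the raised vector (0 on success), as the snippet's calls suggest; struct msr_probe is a hypothetical stand-in for the test's msr_data:

    #include <stdint.h>

    #define GP_VECTOR 13    /* #GP, raised for a denied or unknown MSR */

    /* hypothetical stand-in for the selftest's struct msr_data */
    struct msr_probe {
            uint32_t idx;
            int      write;
            uint64_t write_val;
            int      fault_expected;
    };

    /* assumed helpers: return the exception vector, 0 on success */
    extern uint8_t rdmsr_safe(uint32_t msr, uint64_t *val);
    extern uint8_t wrmsr_safe(uint32_t msr, uint64_t val);

    /* returns 1 if the MSR behaved as expected, 0 otherwise */
    static int probe_msr(const struct msr_probe *m)
    {
            uint64_t val;
            uint8_t vector;

            vector = m->write ? wrmsr_safe(m->idx, m->write_val)
                              : rdmsr_safe(m->idx, &val);

            return m->fault_expected ? vector == GP_VECTOR : vector == 0;
    }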

userspace_msr_exit_test.c
    86  static void deny_msr(uint8_t *bitmap, u32 msr)
    88          u32 idx = msr & (KVM_MSR_FILTER_MAX_BITMAP_SIZE - 1);
   151  static noinline uint64_t test_rdmsr(uint32_t msr)
   158                  "=a"(a), "=d"(d) : "c"(msr) : "memory");
   167  static noinline void test_wrmsr(uint32_t msr, uint64_t value)
   175                  "a"(a), "d"(d), "c"(msr) : "memory");
   185  static noinline uint64_t test_em_rdmsr(uint32_t msr)
   192                  "=a"(a), "=d"(d) : "c"(msr) : "memory");
   201  static noinline void test_em_wrmsr(uint32_t msr, uint64_t value)
   209                  "a"(a), "d"(d), "c"(msr) : "memory");
    [all …]
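
test_rdmsr()/test_wrmsr() are thin wrappers around the raw instructions: RDMSR returns the value split across EDX:EAX and WRMSR consumes it the same way, with the MSR index in ECX. A self-contained sketch of those wrappers (ring 0 only; an unimplemented index raises #GP):

    #include <stdint.h>

    static inline uint64_t raw_rdmsr(uint32_t msr)
    {
            uint32_t lo, hi;

            __asm__ volatile("rdmsr" : "=a"(lo), "=d"(hi) : "c"(msr) : "memory");
            return ((uint64_t)hi << 32) | lo;
    }

    static inline void raw_wrmsr(uint32_t msr, uint64_t value)
    {
            uint32_t lo = (uint32_t)value, hi = (uint32_t)(value >> 32);

            __asm__ volatile("wrmsr" : : "c"(msr), "a"(lo), "d"(hi) : "memory");
    }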

kvm_pv_test.c
    20  #define TEST_MSR(msr) { .idx = msr, .name = #msr }
    22  #define PR_MSR(msr) ucall(UCALL_PR_MSR, 1, msr)
    41  static void test_msr(struct msr_data *msr)
    46          PR_MSR(msr);
    48          vector = rdmsr_safe(msr->idx, &ignored);
    51          vector = wrmsr_safe(msr->idx, 0);
  in pr_msr():
   100          struct msr_data *msr = (struct msr_data *)uc->args[0];
   102          pr_info("testing msr: %s (%#x)\n", msr->name, msr->idx);
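
TEST_MSR() leans on the preprocessor's # stringizing operator so each table entry carries its own name for logging, which is what pr_msr() prints. A standalone illustration of the same trick (the two MSR indices follow the legacy KVM paravirt ABI):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    struct msr_data {
            uint32_t    idx;
            const char *name;
    };

    /* # stringizes the macro argument, so the name rides along for free */
    #define TEST_MSR(msr) { .idx = msr, .name = #msr }

    #define MSR_KVM_WALL_CLOCK  0x11    /* legacy KVM paravirt MSRs */
    #define MSR_KVM_SYSTEM_TIME 0x12

    static const struct msr_data msrs_to_test[] = {
            TEST_MSR(MSR_KVM_WALL_CLOCK),
            TEST_MSR(MSR_KVM_SYSTEM_TIME),
    };

    int main(void)
    {
            for (size_t i = 0; i < sizeof(msrs_to_test) / sizeof(msrs_to_test[0]); i++)
                    printf("testing msr: %s (%#x)\n",
                           msrs_to_test[i].name, msrs_to_test[i].idx);
            return 0;
    }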

/openbmc/u-boot/arch/x86/cpu/ivybridge/

model_206ax.c
  in enable_vmx():
    34          msr_t msr;
    41          msr = msr_read(MSR_IA32_FEATURE_CONTROL);
    43          if (msr.lo & (1 << 0)) {
    54          msr.hi = 0;
    55          msr.lo = 0;
    76          msr.lo |= (1 << 2);
    78          msr.lo |= (1 << 1);
    81          msr_write(MSR_IA32_FEATURE_CONTROL, msr);
  in set_power_limits():
   163          msr_t msr = msr_read(MSR_PLATFORM_INFO);
   172          if (!(msr.lo & PLATFORM_INFO_SET_TDP))
    [all …]
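
enable_vmx() works around IA32_FEATURE_CONTROL's write-once semantics: once the lock bit (bit 0) is set, the VMX enable bits (bit 1 inside SMX, bit 2 outside SMX) are frozen until reset. A condensed sketch of that flow; the msr_t layout and helper signatures are assumed to match the U-Boot header further down, and setting both enable bits unconditionally is a simplification:

    #define MSR_IA32_FEATURE_CONTROL    0x3a

    #define FEATURE_CONTROL_LOCK        (1 << 0)
    #define FEATURE_CONTROL_VMX_IN_SMX  (1 << 1)
    #define FEATURE_CONTROL_VMX_OUT_SMX (1 << 2)

    /* assumed to match the msr_t and helpers from U-Boot's msr.h below */
    typedef struct { unsigned int lo, hi; } msr_t;
    extern msr_t msr_read(unsigned int msr);
    extern void msr_write(unsigned int msr, msr_t m);

    static int try_enable_vmx(void)
    {
            msr_t msr = msr_read(MSR_IA32_FEATURE_CONTROL);

            /* locked by firmware: can only report what is already enabled */
            if (msr.lo & FEATURE_CONTROL_LOCK)
                    return (msr.lo & FEATURE_CONTROL_VMX_OUT_SMX) ? 0 : -1;

            msr.hi = 0;
            msr.lo = FEATURE_CONTROL_LOCK | FEATURE_CONTROL_VMX_IN_SMX |
                     FEATURE_CONTROL_VMX_OUT_SMX;
            msr_write(MSR_IA32_FEATURE_CONTROL, msr);
            return 0;
    }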

/openbmc/u-boot/arch/x86/cpu/broadwell/

cpu.c
  in set_max_freq():
   104          msr_t msr, perf_ctl, platform_info;
   111          msr = msr_read(MSR_CONFIG_TDP_NOMINAL);
   112          perf_ctl.lo = (msr.lo & 0xff) << 8;
   115          msr = msr_read(MSR_PLATFORM_INFO);
   116          perf_ctl.lo = msr.lo & 0xff00;
  in initialize_vr_config():
   232          msr_t msr;
   237          msr = msr_read(MSR_VR_CURRENT_CONFIG);
   242          msr.hi &= 0xc0000000;
   243          msr.hi |= (0x01 << (52 - 32)); /* PSI3 threshold - 1A */
   244          msr.hi |= (0x05 << (42 - 32)); /* PSI2 threshold - 5A */
    [all …]
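
The two reads in set_max_freq() differ only in where the ratio field lives: MSR_CONFIG_TDP_NOMINAL keeps it in bits 7:0 while MSR_PLATFORM_INFO keeps the maximum non-turbo ratio in bits 15:8, and IA32_PERF_CTL expects it in bits 15:8, hence the shift in one branch and the bare mask in the other. Worked out as a sketch:

    #include <stdint.h>

    /* IA32_PERF_CTL expects the target ratio in bits 15:8 */
    static uint32_t perf_ctl_from_tdp_nominal(uint32_t msr_lo)
    {
            return (msr_lo & 0xff) << 8;    /* ratio in bits 7:0, shift up */
    }

    static uint32_t perf_ctl_from_platform_info(uint32_t msr_lo)
    {
            return msr_lo & 0xff00;         /* ratio already in bits 15:8 */
    }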

/openbmc/linux/arch/x86/kernel/cpu/

perfctr-watchdog.c
    45  static inline unsigned int nmi_perfctr_msr_to_bit(unsigned int msr)
    51          if (msr >= MSR_F15H_PERF_CTR)
    52                  return (msr - MSR_F15H_PERF_CTR) >> 1;
    53          return msr - MSR_K7_PERFCTR0;
    56          return msr - MSR_ARCH_PERFMON_PERFCTR0;
    60          return msr - MSR_P6_PERFCTR0;
    62          return msr - MSR_KNC_PERFCTR0;
    64          return msr - MSR_P4_BPU_PERFCTR0;
    69          return msr - MSR_ARCH_PERFMON_PERFCTR0;
    78  static inline unsigned int nmi_evntsel_msr_to_bit(unsigned int msr)
    [all …]

/openbmc/u-boot/arch/x86/include/asm/

msr.h
    26  struct msr {
    38          struct msr reg;
    39          struct msr *msrs;
    75  unsigned long long native_read_msr(unsigned int msr)
    79          asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr));
    83  static inline void native_write_msr(unsigned int msr,
    86          asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high) : "memory");
   112  #define rdmsr(msr, val1, val2) \
   114          u64 __val = native_read_msr((msr)); \
   119  static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
    [all …]
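
The header splits every 64-bit MSR across two 32-bit halves because that is how the instructions see it. A sketch of the usual glue for whole-64-bit access, assuming native_read_msr()/native_write_msr() behave as shown above (the Linux kernel's equivalents are the rdmsrl()/wrmsrl() macros):

    #include <stdint.h>

    /* assumed to behave as in the header above */
    extern unsigned long long native_read_msr(unsigned int msr);
    extern void native_write_msr(unsigned int msr, unsigned low, unsigned high);

    static inline uint64_t rdmsrl(unsigned int msr)
    {
            return native_read_msr(msr);    /* already merged from EDX:EAX */
    }

    static inline void wrmsrl(unsigned int msr, uint64_t val)
    {
            /* split back into the low/high halves WRMSR expects */
            native_write_msr(msr, (unsigned)val, (unsigned)(val >> 32));
    }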

/openbmc/linux/arch/x86/include/asm/

msr.h
    17          struct msr reg;
    18          struct msr *msrs;
    64  extern void do_trace_write_msr(unsigned int msr, u64 val, int failed);
    65  extern void do_trace_read_msr(unsigned int msr, u64 val, int failed);
    66  extern void do_trace_rdpmc(unsigned int msr, u64 val, int failed);
    68  static inline void do_trace_write_msr(unsigned int msr, u64 val, int failed) {}
    69  static inline void do_trace_read_msr(unsigned int msr, u64 val, int failed) {}
    70  static inline void do_trace_rdpmc(unsigned int msr, u64 val, int failed) {}
    80  static __always_inline unsigned long long __rdmsr(unsigned int msr)
    87                  : EAX_EDX_RET(val, low, high) : "c" (msr));
    [all …]

msr-trace.h
     3  #define TRACE_SYSTEM msr
     6  #define TRACE_INCLUDE_FILE msr-trace
    22          TP_PROTO(unsigned msr, u64 val, int failed),
    23          TP_ARGS(msr, val, failed),
    25                  __field( unsigned, msr )
    30                  __entry->msr = msr;
    35                  __entry->msr,
    41          TP_PROTO(unsigned msr, u64 val, int failed),
    42          TP_ARGS(msr, val, failed)
    46          TP_PROTO(unsigned msr, u64 val, int failed),
    [all …]

/openbmc/linux/arch/x86/lib/

msr.c
     9  struct msr *msrs_alloc(void)
    11          struct msr *msrs = NULL;
    13          msrs = alloc_percpu(struct msr);
    23  void msrs_free(struct msr *msrs)
    39  static int msr_read(u32 msr, struct msr *m)
    44          err = rdmsrl_safe(msr, &val);
    59  static int msr_write(u32 msr, struct msr *m)
    61          return wrmsrl_safe(msr, m->q);
    64  static inline int __flip_bit(u32 msr, u8 bit, bool set)
    66          struct msr m, m1;
    [all …]
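
__flip_bit() is a guarded read-modify-write: read the MSR safely, toggle a single bit, and write back only if the value actually changed. A minimal sketch along the same lines; the return convention (negative on fault, 0 for no change, 1 after a write) follows the snippet's apparent contract:

    #include <asm/msr.h>        /* rdmsrl_safe()/wrmsrl_safe() */
    #include <linux/bits.h>
    #include <linux/errno.h>
    #include <linux/types.h>

    static int flip_bit(u32 msr, u8 bit, bool set)
    {
            u64 val, new_val;

            if (rdmsrl_safe(msr, &val))
                    return -EIO;

            new_val = set ? val | BIT_ULL(bit) : val & ~BIT_ULL(bit);
            if (new_val == val)
                    return 0;               /* already in the desired state */

            return wrmsrl_safe(msr, new_val) ? -EIO : 1;
    }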

/openbmc/linux/arch/powerpc/kvm/

book3s_hv_tm.c
  in emulate_tx_failure():
    19          u64 msr = vcpu->arch.shregs.msr;
    23          if (MSR_TM_SUSPENDED(vcpu->arch.shregs.msr))
    25          if (msr & MSR_PR) {
  in kvmhv_p9_tm_emulation():
    45          u64 msr = vcpu->arch.shregs.msr;
    74          WARN_ON_ONCE(!(MSR_TM_SUSPENDED(msr) &&
    78          vcpu->arch.shregs.msr = newmsr;
    84          if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206)) {
    96          if ((msr & MSR_PR) && !(vcpu->arch.fscr & FSCR_EBB)) {
   105          WARN_ON_ONCE(!(MSR_TM_SUSPENDED(msr) &&
   111          msr = (msr & ~MSR_TS_MASK) | MSR_TS_T;
    [all …]

book3s_hv_tm_builtin.c
  in kvmhv_p9_tm_emulation_early():
    23          u64 newmsr, msr, bescr;
    45          vcpu->arch.shregs.msr = newmsr;
    52          msr = vcpu->arch.shregs.msr;
    53          if ((msr & MSR_PR) && (vcpu->arch.vcore->pcr & PCR_ARCH_206))
    57              ((msr & MSR_PR) && !(mfspr(SPRN_FSCR) & FSCR_EBB)))
    67          msr = (msr & ~MSR_TS_MASK) | MSR_TS_T;
    68          vcpu->arch.shregs.msr = msr;
    77          msr = vcpu->arch.shregs.msr;
    82          newmsr = (newmsr & ~MSR_LE) | (msr & MSR_LE);
    84          vcpu->arch.shregs.msr = newmsr;
    [all …]

/openbmc/linux/drivers/powercap/

intel_rapl_msr.c
    37          .reg_unit.msr = MSR_RAPL_POWER_UNIT,
    38          .regs[RAPL_DOMAIN_PACKAGE][RAPL_DOMAIN_REG_LIMIT].msr = MSR_PKG_POWER_LIMIT,
    39          .regs[RAPL_DOMAIN_PACKAGE][RAPL_DOMAIN_REG_STATUS].msr = MSR_PKG_ENERGY_STATUS,
    40          .regs[RAPL_DOMAIN_PACKAGE][RAPL_DOMAIN_REG_PERF].msr = MSR_PKG_PERF_STATUS,
    41          .regs[RAPL_DOMAIN_PACKAGE][RAPL_DOMAIN_REG_INFO].msr = MSR_PKG_POWER_INFO,
    42          .regs[RAPL_DOMAIN_PP0][RAPL_DOMAIN_REG_LIMIT].msr = MSR_PP0_POWER_LIMIT,
    43          .regs[RAPL_DOMAIN_PP0][RAPL_DOMAIN_REG_STATUS].msr = MSR_PP0_ENERGY_STATUS,
    44          .regs[RAPL_DOMAIN_PP0][RAPL_DOMAIN_REG_POLICY].msr = MSR_PP0_POLICY,
    45          .regs[RAPL_DOMAIN_PP1][RAPL_DOMAIN_REG_LIMIT].msr = MSR_PP1_POWER_LIMIT,
    46          .regs[RAPL_DOMAIN_PP1][RAPL_DOMAIN_REG_STATUS].msr = MSR_PP1_ENERGY_STATUS,
    [all …]
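
The table above is plain C designated initialization: a [domain][register] matrix of MSR addresses filled in by name, so later lookups are a direct index rather than a switch. A reduced sketch with hypothetical enums (the four MSR addresses shown are the architectural package/PP0 ones):

    #include <stdint.h>

    enum domain { DOM_PACKAGE, DOM_PP0, DOM_MAX };
    enum reg    { REG_LIMIT, REG_STATUS, REG_MAX };

    struct reg_map {
            uint32_t regs[DOM_MAX][REG_MAX];
    };

    static const struct reg_map rapl_map = {
            .regs[DOM_PACKAGE][REG_LIMIT]  = 0x610,  /* MSR_PKG_POWER_LIMIT */
            .regs[DOM_PACKAGE][REG_STATUS] = 0x611,  /* MSR_PKG_ENERGY_STATUS */
            .regs[DOM_PP0][REG_LIMIT]      = 0x638,  /* MSR_PP0_POWER_LIMIT */
            .regs[DOM_PP0][REG_STATUS]     = 0x639,  /* MSR_PP0_ENERGY_STATUS */
    };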

/openbmc/qemu/target/ppc/

excp_helper.c
  in insn_need_byteswap():
   144          return !!(env->msr & ((target_ulong)1 << MSR_LE));
   193  static int powerpc_reset_wakeup(CPUPPCState *env, int excp, target_ulong *msr)
   199                  *msr |= SRR1_WS_NOLOSS;
   207                  *msr |= SRR1_WAKERESET;
   210                  *msr |= SRR1_WAKEEE;
   213                  *msr |= SRR1_WAKEDEC;
   216                  *msr |= SRR1_WAKEDBELL;
   219                  *msr |= SRR1_WAKEHDBELL;
   222                  *msr |= SRR1_WAKEHMI;
   225                  *msr |= SRR1_WAKEHVI;
    [all …]

/openbmc/u-boot/arch/arm/include/asm/

macro.h
   155          msr     cptr_el3, xzr           /* Disable coprocessor traps to EL3 */
   157          msr     cptr_el2, \tmp          /* Disable coprocessor traps to EL2 */
   160          msr     cntvoff_el2, xzr
   172          msr     sctlr_el2, \tmp
   175          msr     sp_el2, \tmp            /* Migrate SP */
   177          msr     vbar_el2, \tmp          /* Migrate VBAR */
   200          msr     scr_el3, \tmp
   206          msr     spsr_el3, \tmp
   207          msr     elr_el3, \ep
   218          msr     scr_el3, \tmp
    [all …]
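
Note that msr here is the AArch64 "move to system register" instruction, unrelated to the x86 model-specific registers elsewhere in these results. The same write can be issued from C with one inline-asm statement; the system register name must be spelled literally in the instruction. A sketch (meaningful only when executing at EL2 or above):

    #include <stdint.h>

    static inline void write_cntvoff_el2(uint64_t off)
    {
            /* same effect as "msr cntvoff_el2, xzr" above when off == 0 */
            __asm__ volatile("msr cntvoff_el2, %0" : : "r"(off));
    }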

/openbmc/linux/arch/m68k/bvme6000/

config.c
  in bvme6000_timer_int():
   169          unsigned char msr;
   172          msr = rtc->msr & 0xc0;
   173          rtc->msr = msr | 0x20;          /* Ack the interrupt */
  in bvme6000_sched_init():
   194          unsigned char msr = rtc->msr & 0xc0;
   196          rtc->msr = 0;                   /* Ensure timer registers accessible */
   206          rtc->msr = 0x40;                /* Access int.cntrl, etc */
   211          rtc->msr = 0;                   /* Access timer 1 control */
   214          rtc->msr = msr;
  in bvme6000_read_clk():
   236          unsigned char msr, msb;
   242          msr = rtc->msr & 0xc0;
    [all …]
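
The 0xc0 dance is bank switching: the top two bits of this RTC's mode/status register appear to select which bank the remaining addresses decode to, so each routine saves them, selects the bank it needs, and restores them on exit. A sketch of the idiom; struct rtc_regs is a hypothetical stand-in for the board's register layout:

    #include <stdint.h>

    /* hypothetical register layout; only msr matters for the idiom */
    struct rtc_regs {
            volatile uint8_t msr;   /* mode/status: bits 7:6 select a bank */
    };

    static void poke_timer_bank(struct rtc_regs *rtc)
    {
            uint8_t saved = rtc->msr & 0xc0;    /* remember the current bank */

            rtc->msr = 0x40;        /* select the interrupt-control bank */
            /* ... access the registers that live in that bank ... */
            rtc->msr = saved;       /* put the original bank back */
    }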

/openbmc/linux/tools/power/x86/turbostat/

turbostat.c
   440  int get_msr_sum(int cpu, off_t offset, unsigned long long *msr);
   693  int get_msr(int cpu, off_t offset, unsigned long long *msr)
   697          retval = pread(get_msr_fd(cpu), msr, sizeof(*msr), offset);
   699          if (retval != sizeof *msr)
  in get_epb():
  1982          unsigned long long msr;
  2001          get_msr(cpu, MSR_IA32_ENERGY_PERF_BIAS, &msr);
  2003          return msr & 0xf;
  in get_counters():
  2082          unsigned long long msr;
  2161          if (get_msr(cpu, MSR_SMI_COUNT, &msr))
  2163          t->smi_count = msr & 0xFFFFFFFF;
    [all …]
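
get_msr() needs no kernel changes: the msr driver exposes each CPU's MSR space as /dev/cpu/N/msr, and a pread() at a file offset equal to the MSR index returns its 64-bit value. A standalone sketch (requires root and the msr module loaded; 0x34 is MSR_SMI_COUNT, as sampled in get_counters() above):

    #include <fcntl.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <unistd.h>

    static int read_msr(int cpu, uint32_t msr, uint64_t *val)
    {
            char path[64];
            int fd;

            snprintf(path, sizeof(path), "/dev/cpu/%d/msr", cpu);
            fd = open(path, O_RDONLY);
            if (fd < 0)
                    return -1;

            if (pread(fd, val, sizeof(*val), msr) != sizeof(*val)) {
                    close(fd);
                    return -1;
            }
            close(fd);
            return 0;
    }

    int main(void)
    {
            uint64_t smi_count;

            if (read_msr(0, 0x34, &smi_count) == 0)
                    printf("SMI count: %llu\n",
                           (unsigned long long)(smi_count & 0xffffffff));
            return 0;
    }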

/openbmc/linux/arch/x86/kvm/

mtrr.c
    28  static bool is_mtrr_base_msr(unsigned int msr)
    31          return !(msr & 0x1);
  in var_mtrr_msr_to_range():
    35                                          unsigned int msr)
    37          int index = (msr - MTRRphysBase_MSR(0)) / 2;
    42  static bool msr_mtrr_valid(unsigned msr)
    44          switch (msr) {
    68  static bool kvm_mtrr_valid(struct kvm_vcpu *vcpu, u32 msr, u64 data)
    73          if (!msr_mtrr_valid(msr))
    76          if (msr == MSR_MTRRdefType) {
    80          } else if (msr >= MSR_MTRRfix64K_00000 && msr <= MSR_MTRRfix4K_F8000) {
    [all …]
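
Variable-range MTRRs live in base/mask pairs at consecutive MSRs starting at MTRRphysBase_MSR(0), i.e. 0x200: an even index is a PhysBase register, an odd one is PhysMask, and the pair number is half the distance from the base. That is all the two helpers above encode; condensed:

    #include <stdbool.h>
    #include <stdint.h>

    #define MTRR_PHYS_BASE_0 0x200   /* MTRRphysBase_MSR(0) */

    static bool is_mtrr_base_msr(uint32_t msr)
    {
            return !(msr & 0x1);     /* even MSR = base, odd = mask */
    }

    static unsigned int var_mtrr_index(uint32_t msr)
    {
            return (msr - MTRR_PHYS_BASE_0) / 2;   /* pair number */
    }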

/openbmc/linux/arch/x86/xen/

pmu.c
   132  static inline bool is_amd_pmu_msr(unsigned int msr)
   138          if ((msr >= MSR_F15H_PERF_CTL &&
   139               msr < MSR_F15H_PERF_CTR + (amd_num_counters * 2)) ||
   140              (msr >= MSR_K7_EVNTSEL0 &&
   141               msr < MSR_K7_PERFCTR0 + amd_num_counters))
   198  static bool xen_intel_pmu_emulate(unsigned int msr, u64 *val, int type,
   214          switch (msr) {
   252          if (msr == MSR_CORE_PERF_GLOBAL_OVF_CTRL)
   261  static bool xen_amd_pmu_emulate(unsigned int msr, u64 *val, bool is_read)
   274              ((msr >= MSR_K7_EVNTSEL0) && (msr <= MSR_K7_PERFCTR3)))
    [all …]

/openbmc/linux/arch/microblaze/kernel/

process.c
  in show_regs():
    45                  regs->msr, regs->ear, regs->esr, regs->fsr);
  in copy_thread():
    72          local_save_flags(childregs->msr);
    73          ti->cpu_context.msr = childregs->msr & ~MSR_IE;
    83          childregs->msr |= MSR_UMS;
    95          childregs->msr &= ~MSR_EIP;
    96          childregs->msr |= MSR_IE;
    97          childregs->msr &= ~MSR_VM;
    98          childregs->msr |= MSR_VMS;
    99          childregs->msr |= MSR_EE;       /* exceptions will be enabled */
   101          ti->cpu_context.msr = (childregs->msr | MSR_VM);
    [all …]

/openbmc/linux/arch/x86/kvm/svm/

pmu.c
    38  static inline struct kvm_pmc *get_gp_pmc_amd(struct kvm_pmu *pmu, u32 msr,
    47          switch (msr) {
    55                  idx = (unsigned int)((msr - MSR_F15H_PERF_CTL0) / 2);
    56                  if (!(msr & 0x1) != (type == PMU_TYPE_EVNTSEL))
    62                  idx = msr - MSR_K7_EVNTSEL0;
    67                  idx = msr - MSR_K7_PERFCTR0;
    97  static struct kvm_pmc *amd_msr_idx_to_pmc(struct kvm_vcpu *vcpu, u32 msr)
   102          pmc = get_gp_pmc_amd(pmu, msr, PMU_TYPE_COUNTER);
   103          pmc = pmc ? pmc : get_gp_pmc_amd(pmu, msr, PMU_TYPE_EVNTSEL);
   108  static bool amd_is_valid_msr(struct kvm_vcpu *vcpu, u32 msr)
    [all …]

/openbmc/linux/arch/x86/events/

probe.c
    19  perf_msr_probe(struct perf_msr *msr, int cnt, bool zero, void *data)
    29          if (!msr[bit].no_check) {
    30                  struct attribute_group *grp = msr[bit].grp;
    40          if (!msr[bit].msr)
    43          if (msr[bit].test && !msr[bit].test(bit, data))
    46          if (rdmsrl_safe(msr[bit].msr, &val))
    49          mask = msr[bit].mask;
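
perf_msr_probe() advertises an MSR-backed event only if a safe read succeeds and the value passes an optional mask/zero check, so absent hardware degrades to "event not present" instead of a #GP at runtime. A sketch of that loop; struct probe_msr and the exact checks are simplifications of the snippet:

    #include <asm/msr.h>        /* rdmsrl_safe() */
    #include <linux/types.h>

    struct probe_msr {
            u32  msr;
            u64  mask;
            bool (*test)(int bit, void *data);
    };

    static unsigned long probe_msrs(struct probe_msr *msr, int cnt,
                                    bool zero, void *data)
    {
            unsigned long avail = 0;
            u64 val;
            int bit;

            for (bit = 0; bit < cnt; bit++) {
                    if (!msr[bit].msr)
                            continue;               /* hole in the table */
                    if (msr[bit].test && !msr[bit].test(bit, data))
                            continue;               /* model-specific veto */
                    if (rdmsrl_safe(msr[bit].msr, &val))
                            continue;               /* faulted: not present */
                    if (msr[bit].mask)
                            val &= msr[bit].mask;
                    if (!val && zero)
                            continue;               /* present but all zero */
                    avail |= 1UL << bit;
            }
            return avail;
    }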

/openbmc/linux/arch/powerpc/kernel/

signal_64.c
  in __unsafe_setup_sigcontext():
   130          unsigned long msr = regs->msr;
   147          msr |= MSR_VEC;
   163          msr &= ~MSR_VSX;
   176          msr |= MSR_VSX;
   181          unsafe_put_user(msr, &sc->gp_regs[PT_MSR], efault_out);
  in setup_tm_sigcontexts():
   210                                  unsigned long msr)
   229          BUG_ON(!MSR_TM_ACTIVE(msr));
   237          msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);
   251          if (msr & MSR_VEC)
   263          msr |= MSR_VEC;
    [all …]

signal_32.c
  in __unsafe_save_user_regs():
   271          unsigned long msr = regs->msr;
   283          msr |= MSR_VEC;
   302          msr &= ~MSR_VSX;
   312          msr |= MSR_VSX;
   322          msr |= MSR_SPE;
   331          unsafe_put_user(msr, &frame->mc_gregs[PT_MSR], failed);
  in save_tm_user_regs_unsafe():
   370                          struct mcontext __user *tm_frame, unsigned long msr)
   382          unsafe_put_user((msr >> 32), &tm_frame->mc_gregs[PT_MSR], failed);
   388          if (msr & MSR_VEC)
   400          msr |= MSR_VEC;
    [all …]

/openbmc/linux/arch/arm64/kvm/hyp/nvhe/

hyp-init.S
    91          msr     mair_el2, x1
    94          msr     hcr_el2, x1
   103          msr     tpidr_el2, x0
   110          msr     tpidr_el2, x1
   113          msr     vttbr_el2, x1
   116          msr     vtcr_el2, x1
   123          msr     ttbr0_el2, x2
   130          msr     tcr_el2, x0
   152          msr     sctlr_el2, x0
   157          msr     vbar_el2, x0
    [all …]