
Search results for refs:cpu_isar_feature (results 1 – 25 of 32, sorted by relevance)

/openbmc/qemu/linux-user/aarch64/
target_prctl.h
    15:  if (cpu_isar_feature(aa64_sve, cpu)) {    in do_prctl_sve_get_vl()
    30:  if (cpu_isar_feature(aa64_sve, env_archcpu(env))    in do_prctl_sve_set_vl()
    60:  if (cpu_isar_feature(aa64_sme, cpu)) {    in do_prctl_sme_get_vl()
    74:  if (cpu_isar_feature(aa64_sme, env_archcpu(env))    in do_prctl_sme_set_vl()
   113:  if (cpu_isar_feature(aa64_pauth, cpu)) {    in do_prctl_reset_keys()
   166:  if (cpu_isar_feature(aa64_mte, cpu)) {    in do_prctl_set_tagged_addr_ctrl()
   176:  if (cpu_isar_feature(aa64_mte, cpu)) {    in do_prctl_set_tagged_addr_ctrl()
   200:  if (cpu_isar_feature(aa64_mte, cpu)) {    in do_prctl_get_tagged_addr_ctrl()
signal.c
   313:  ? !cpu_isar_feature(aa64_sme, env_archcpu(env))    in target_restore_sve_record()
   314:  : !cpu_isar_feature(aa64_sve, env_archcpu(env))) {    in target_restore_sve_record()
   373:  if (!cpu_isar_feature(aa64_sme, env_archcpu(env))) {    in target_restore_za_record()
   582:  if (cpu_isar_feature(aa64_sve, env_archcpu(env)) ||    in target_setup_frame()
   583:  cpu_isar_feature(aa64_sme, env_archcpu(env))) {    in target_setup_frame()
   587:  if (cpu_isar_feature(aa64_sme, env_archcpu(env))) {    in target_setup_frame()
   665:  if (cpu_isar_feature(aa64_bti, env_archcpu(env))) {    in target_setup_frame()
cpu_loop.c
   215:  if (cpu_isar_feature(aa64_pauth, cpu)) {    in target_cpu_copy_regs()
/openbmc/qemu/bsd-user/arm/
target_arch_elf.h
    41:  do { if (cpu_isar_feature(feat, cpu)) { hwcaps |= hwcap; } } while (0)
    98:  if (cpu_isar_feature(aa32_fpsp_v3, cpu) ||    in get_elf_hwcap()
    99:  cpu_isar_feature(aa32_fpdp_v3, cpu)) {    in get_elf_hwcap()
   101:  if (cpu_isar_feature(aa32_simd_r32, cpu)) {    in get_elf_hwcap()
/openbmc/qemu/target/arm/
cpu64.c
    72:  assert(!cpu_isar_feature(aa64_sve, cpu));    in arm_cpu_sve_finalize()
   111:  if (!cpu_isar_feature(aa64_sve, cpu)) {    in arm_cpu_sve_finalize()
   228:  if (!cpu_isar_feature(aa64_sve, cpu)) {    in arm_cpu_sve_finalize()
   257:  ? !cpu_isar_feature(aa64_sve, cpu)    in cpu_arm_get_vq()
   258:  : !cpu_isar_feature(aa64_sme, cpu)) {    in cpu_arm_get_vq()
   284:  return cpu_isar_feature(aa64_sve, cpu);    in cpu_arm_get_sve()
   310:  if (!cpu_isar_feature(aa64_sme, cpu)) {    in arm_cpu_sme_finalize()
   327:  if (!cpu_isar_feature(aa64_sme, cpu)) {    in arm_cpu_sme_finalize()
   344:  return cpu_isar_feature(aa64_sme, cpu);    in cpu_arm_get_sme()
   360:  return cpu_isar_feature(aa64_sme, cpu) &&    in cpu_arm_get_sme_fa64()
   [all …]
cpu.c
   270:  if (cpu_isar_feature(aa64_sve, cpu)) {    in arm_cpu_reset_hold()
   276:  if (cpu_isar_feature(aa64_sme, cpu)) {    in arm_cpu_reset_hold()
   294:  if (cpu_isar_feature(aa64_mte, cpu)) {    in arm_cpu_reset_hold()
   387:  if (cpu_isar_feature(aa32_lob, cpu)) {    in arm_cpu_reset_hold()
  1251:  if (cpu_isar_feature(aa64_sme, cpu)) {    in aarch64_cpu_dump_state()
  1257:  if (cpu_isar_feature(aa64_bti, cpu)) {    in aarch64_cpu_dump_state()
  1353:  if (cpu_isar_feature(aa64_sme, cpu) &&    in aarch64_cpu_dump_state()
  1464:  if (cpu_isar_feature(aa32_mve, cpu)) {    in arm_cpu_dump_state()
  1872:  cpu_isar_feature(aa64_mte, cpu)) {    in arm_cpu_post_init()
  1933:  if (cpu_isar_feature(aa64_sme, cpu) && !cpu_isar_feature(aa64_sve, cpu)) {    in arm_cpu_finalize_features()
   [all …]
gdbstub.c
   114:  int nregs = cpu_isar_feature(aa32_simd_r32, cpu) ? 32 : 16;    in vfp_gdb_get_reg()
   139:  int nregs = cpu_isar_feature(aa32_simd_r32, cpu) ? 32 : 16;    in vfp_gdb_set_reg()
   543:  if (cpu_isar_feature(aa64_mte, cpu)) {    in arm_cpu_register_gdb_regs_for_features()
   556:  } else if (cpu_isar_feature(aa32_simd_r32, cpu)) {    in arm_cpu_register_gdb_regs_for_features()
   560:  } else if (cpu_isar_feature(aa32_vfp_simd, cpu)) {    in arm_cpu_register_gdb_regs_for_features()
   574:  if (cpu_isar_feature(aa32_mve, cpu) && tcg_enabled()) {    in arm_cpu_register_gdb_regs_for_features()
helper.c
  3040:  if (cpu_isar_feature(aa64_rme, cpu)) {    in gt_cnthctl_write()
  3051:  if (cpu_isar_feature(aa64_ecv, cpu)) {    in gt_cnthctl_write()
  6030:  if (cpu_isar_feature(any_evt, cpu)) {    in do_hcr_write()
  6069:  if (cpu_isar_feature(aa64_nmi, cpu)) {    in do_hcr_write()
  6214:  if (cpu_isar_feature(aa64_nmi, cpu)) {    in hcrx_write()
  6232:  if (cpu_isar_feature(aa64_nmi, cpu)) {    in hcrx_write()
  9440:  if (cpu_isar_feature(aa64_ecv, cpu)) {    in register_cp_regs_for_features()
  9856:  if (cpu_isar_feature(aa64_lor, cpu)) {    in register_cp_regs_for_features()
  9880:  if (cpu_isar_feature(any_ras, cpu)) {    in register_cp_regs_for_features()
  9884:  if (cpu_isar_feature(aa64_vh, cpu) ||    in register_cp_regs_for_features()
   [all …]
arch_dump.c
   276:  if (cpu_isar_feature(aa64_sve, cpu)) {    in arm_cpu_write_elf64_note()
   369:  bool fpvalid = cpu_isar_feature(aa32_vfp_simd, cpu);    in arm_cpu_write_elf32_note()
   454:  if (cpu_isar_feature(aa64_sve, cpu)) {    in cpu_get_note_size()
   460:  if (cpu_isar_feature(aa32_vfp_simd, cpu)) {    in cpu_get_note_size()
debug_helper.c
   872:  bool mdcr_el2_tdcc = cpu_isar_feature(aa64_fgt, env_archcpu(env)) &&    in access_tdcc()
   874:  bool mdcr_el3_tdcc = cpu_isar_feature(aa64_fgt, env_archcpu(env)) &&    in access_tdcc()
   916:  ? cpu_isar_feature(aa64_doublelock, cpu)    in osdlr_write()
   917:  : cpu_isar_feature(aa32_doublelock, cpu)) {    in osdlr_write()
  1201:  if (cpu_isar_feature(aa32_debugv7p1, cpu)) {    in define_debug_regs()
  1225:  if (cpu_isar_feature(aa64_aa32_el1, cpu)) {    in define_debug_regs()
machine.c
    17:  ? cpu_isar_feature(aa64_fp_simd, cpu)    in vfp_needed()
    18:  : cpu_isar_feature(aa32_vfp_simd, cpu));    in vfp_needed()
   253:  return cpu_isar_feature(aa64_sve, cpu);    in sve_needed()
   497:  return cpu_isar_feature(aa32_mve, cpu);    in mve_needed()
  1014:  if (arm_feature(env, ARM_FEATURE_M) && cpu_isar_feature(aa32_lob, cpu)) {    in cpu_post_load()
ptw.c
  1151:  if (type == 0 || (type == 3 && !cpu_isar_feature(aa32_pxn, cpu))) {    in get_phys_addr_v6()
  1193:  if (cpu_isar_feature(aa32_pxn, cpu)) {    in get_phys_addr_v6()
  1308:  if (cpu_isar_feature(any_tts2uxn, env_archcpu(env))) {    in get_S2prot()
  1375:  } else if (cpu_isar_feature(aa64_pan3, cpu) && is_aa64 &&    in get_S1prot()
  1568:  if (!cpu_isar_feature(aa64_st, cpu)) {    in check_s2_mmu_setup()
  2027:  !cpu_isar_feature(aa64_sel2, cpu)) {    in get_phys_addr_lpae()
  2136:  if (aarch64 && cpu_isar_feature(aa64_bti, cpu)) {    in get_phys_addr_lpae()
  3612:  cpu_isar_feature(aa64_rme, env_archcpu(env))) {    in get_phys_addr()
kvm.c
  1894:  if (cpu_isar_feature(aa64_sve, cpu)) {    in kvm_arch_init_vcpu()
  1898:  if (cpu_isar_feature(aa64_pauth, cpu)) {    in kvm_arch_init_vcpu()
  1909:  if (cpu_isar_feature(aa64_sve, cpu)) {    in kvm_arch_init_vcpu()
  2127:  if (cpu_isar_feature(aa64_sve, cpu)) {    in kvm_arch_put_registers()
  2311:  if (cpu_isar_feature(aa64_sve, cpu)) {    in kvm_arch_get_registers()
vfp_helper.c
   222:  cpu_isar_feature(aa32_mve, cpu)) {    in vfp_set_fpsr()
   253:  if (!cpu_isar_feature(any_fp16, cpu)) {    in vfp_set_fpcr_masked()
   271:  } else if (cpu_isar_feature(aa32_mve, cpu)) {    in vfp_set_fpcr_masked()
gdbstub64.c
   601:  if (cpu_isar_feature(aa64_mte, cpu)) {    in aarch64_cpu_register_gdb_commands()
/openbmc/qemu/target/arm/tcg/
hflags.c
   133:  if (!cpu_isar_feature(aa64_sme_fa64, env_archcpu(env))) {    in sme_fa64()
   227:  if (cpu_isar_feature(aa64_sve, env_archcpu(env))) {    in rebuild_hflags_a64()
   245:  if (cpu_isar_feature(aa64_sme, env_archcpu(env))) {    in rebuild_hflags_a64()
   276:  if (cpu_isar_feature(aa64_pauth, env_archcpu(env))) {    in rebuild_hflags_a64()
   288:  if (cpu_isar_feature(aa64_bti, env_archcpu(env))) {    in rebuild_hflags_a64()
   295:  if (cpu_isar_feature(aa64_lse2, env_archcpu(env))) {    in rebuild_hflags_a64()
   361:  if (cpu_isar_feature(aa64_mte, env_archcpu(env))) {    in rebuild_hflags_a64()
m_helper.c
   414:  if (cpu_isar_feature(aa32_mve, cpu)) {    in HELPER()
   448:  if (cpu_isar_feature(aa32_mve, cpu)) {    in HELPER()
   749:  if (!cpu_isar_feature(aa32_vfp_simd, env_archcpu(env))    in v7m_integrity_sig()
   853:  if (!cpu_isar_feature(aa32_vfp_simd, cpu)) {    in v7m_exception_taken()
  1097:  if (cpu_isar_feature(aa32_mve, cpu)) {    in HELPER()
  1110:  if (cpu_isar_feature(aa32_mve, cpu)) {    in HELPER()
  1167:  if (cpu_isar_feature(aa32_mve, cpu)) {    in HELPER()
  1324:  if (cpu_isar_feature(aa32_mve, cpu)) {    in v7m_push_stack()
  1568:  if (cpu_isar_feature(aa32_mve, cpu)) {    in do_v7m_exception_exit()
  1816:  if (cpu_isar_feature(aa32_mve, cpu)) {    in do_v7m_exception_exit()
   [all …]
pauth_helper.c
   318:  if (cpu_isar_feature(aa64_pauth_qarma5, env_archcpu(env))) {    in pauth_computepac()
   320:  } else if (cpu_isar_feature(aa64_pauth_qarma3, env_archcpu(env))) {    in pauth_computepac()
   333:  ARMPauthFeature pauth_feature = cpu_isar_feature(pauth_feature, cpu);    in pauth_addpac()
   415:  ARMPauthFeature pauth_feature = cpu_isar_feature(pauth_feature, cpu);    in pauth_auth()
tlb_helper.c
   145:  assert(cpu_isar_feature(aa64_rme, cpu));    in report_as_gpc_exception()
/openbmc/qemu/hw/intc/
armv7m_nvic.c
  1344:  if (!cpu_isar_feature(aa32_vfp_simd, cpu)) {    in nvic_readl()
  1498:  if (!cpu_isar_feature(aa32_ras, cpu)) {    in nvic_readl()
  1504:  if (!cpu_isar_feature(aa32_vfp_simd, cpu)) {    in nvic_readl()
  1531:  if (!cpu_isar_feature(aa32_vfp_simd, cpu)) {    in nvic_readl()
  1817:  if (cpu_isar_feature(aa32_vfp_simd, cpu)) {    in nvic_writel()
  2057:  if (!cpu_isar_feature(aa32_ras, cpu)) {    in nvic_writel()
  2063:  if (cpu_isar_feature(aa32_vfp_simd, cpu)) {    in nvic_writel()
  2117:  if (cpu_isar_feature(aa32_vfp_simd, cpu)) {    in nvic_writel()
  2123:  if (cpu_isar_feature(aa32_vfp_simd, cpu)) {    in nvic_writel()
  2125:  if (cpu_isar_feature(any_fp16, cpu)) {    in nvic_writel()
   [all …]
/openbmc/qemu/bsd-user/aarch64/
target_arch_elf.h
    94:  do { if (cpu_isar_feature(feat, cpu)) { hwcaps |= hwcap; } } while (0)
/openbmc/qemu/linux-user/arm/
signal.c
   306:  if (cpu_isar_feature(aa32_vfp_simd, env_archcpu(env))) {    in setup_sigframe()
   479:  if (cpu_isar_feature(aa32_vfp_simd, env_archcpu(env))) {    in do_sigframe_return()
/openbmc/qemu/bsd-user/freebsd/
os-sys.c
   320:  *(abi_int *)holdp = cpu_isar_feature(aa32_vfp, cpu);    in do_freebsd_sysctl_oid()
/openbmc/qemu/linux-user/
elfload.c
   525:  do { if (cpu_isar_feature(feat, cpu)) { hwcaps |= hwcap; } } while (0)    in get_elf_hwcap()
   538:  if (cpu_isar_feature(aa32_fpsp_v3, cpu) ||    in get_elf_hwcap()
   539:  cpu_isar_feature(aa32_fpdp_v3, cpu)) {    in get_elf_hwcap()
   541:  if (cpu_isar_feature(aa32_simd_r32, cpu)) {    in get_elf_hwcap()
   786:  do { if (cpu_isar_feature(feat, cpu)) { hwcaps |= hwcap; } } while (0)
  3391:  && cpu_isar_feature(aa64_bti, ARM_CPU(thread_cpu))) {    in load_elf_image()
mmap.c
   133:  if ((prot & TARGET_PROT_BTI) && cpu_isar_feature(aa64_bti, cpu)) {    in validate_prot_to_pageflags()
   138:  if ((prot & TARGET_PROT_MTE) && cpu_isar_feature(aa64_mte, cpu)) {    in validate_prot_to_pageflags()
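
Every hit above shares one call shape: cpu_isar_feature(<feature>, <ARMCPU * or env_archcpu(env)>) is used as a boolean test for whether the emulated CPU implements a given ISA feature, gating things like register definitions, signal-frame layout, or ELF hwcap bits. The fragment below is a minimal sketch of that pattern, assuming only what the snippets show; the GET_FEATURE_ID helper name, the example_elf_hwcap function, and the hwcap bit values are illustrative, not taken from the tree.

    /*
     * Sketch of the pattern seen in the get_elf_hwcap() hits: map
     * "this CPU implements feature X" onto a hwcap bit.  Assumes QEMU's
     * target/arm internal headers; names and bit values are illustrative.
     */
    #include "cpu.h"                      /* ARMCPU, cpu_isar_feature() */

    #define GET_FEATURE_ID(feat, hwcap) \
        do { if (cpu_isar_feature(feat, cpu)) { hwcaps |= hwcap; } } while (0)

    static uint32_t example_elf_hwcap(ARMCPU *cpu)
    {
        uint32_t hwcaps = 0;

        GET_FEATURE_ID(aa64_sve, 1u << 0);   /* illustrative bit positions */
        GET_FEATURE_ID(aa64_mte, 1u << 1);
        GET_FEATURE_ID(aa64_bti, 1u << 2);

        return hwcaps;
    }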
