Searched refs:mmfr0 (Results 1 – 8 of 8) sorted by relevance
/openbmc/linux/arch/arm64/include/asm/
cpufeature.h

  588  static inline bool id_aa64mmfr0_mixed_endian_el0(u64 mmfr0)
  590          return cpuid_feature_extract_unsigned_field(mmfr0, ID_AA64MMFR0_EL1_BIGEND_SHIFT) == 0x1 ||
  591                 cpuid_feature_extract_unsigned_field(mmfr0, ID_AA64MMFR0_EL1_BIGENDEL0_SHIFT) == 0x1;

  in system_supports_4kb_granule():
  682          u64 mmfr0;
  685          mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);
  686          val = cpuid_feature_extract_unsigned_field(mmfr0,

  in system_supports_64kb_granule():
  695          u64 mmfr0;
  698          mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);
  699          val = cpuid_feature_extract_unsigned_field(mmfr0,

  in system_supports_16kb_granule():
  708          u64 mmfr0;

  [all …]

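All of the helpers above follow one pattern: read the sanitised system-wide copy of ID_AA64MMFR0_EL1, then extract a single 4-bit field. A minimal, self-contained sketch of that pattern in plain C (the numeric shifts are assumptions taken from the ID_AA64MMFR0_EL1 layout, BigEnd at bits [11:8] and BigEndEL0 at bits [19:16]; the kernel's own helper is cpuid_feature_extract_unsigned_field()):

  #include <stdbool.h>
  #include <stdint.h>

  /* Assumed field positions: BigEnd = bits [11:8], BigEndEL0 = bits [19:16]. */
  #define MMFR0_BIGEND_SHIFT     8
  #define MMFR0_BIGENDEL0_SHIFT  16

  /* The ID register fields used here are 4 bits wide. */
  static inline uint64_t extract_field(uint64_t reg, unsigned int shift)
  {
          return (reg >> shift) & 0xf;
  }

  /* Mixed-endian EL0 is advertised when either field reads 0b0001,
   * mirroring id_aa64mmfr0_mixed_endian_el0() above. */
  static bool mixed_endian_el0(uint64_t mmfr0)
  {
          return extract_field(mmfr0, MMFR0_BIGEND_SHIFT) == 0x1 ||
                 extract_field(mmfr0, MMFR0_BIGENDEL0_SHIFT) == 0x1;
  }

The system_supports_*kb_granule() helpers apply the same extraction to the TGran4/TGran64/TGran16 fields and compare against the architecture's "supported" encodings.
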
kvm_pgtable.h

   28  static inline u64 kvm_get_parange(u64 mmfr0)
   30          u64 parange = cpuid_feature_extract_unsigned_field(mmfr0,
  431  u64 kvm_get_vtcr(u64 mmfr0, u64 mmfr1, u32 phys_shift);

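kvm_get_parange() extracts the PARange field (bits [3:0]) of ID_AA64MMFR0_EL1, whose encoding describes the supported physical address size; kvm_get_vtcr() later folds that encoding into VTCR_EL2. A sketch of what the encoding means (hypothetical helper, not the kernel's; encoding values per the Arm architecture, clamped for anything newer than 52-bit):

  #include <stdint.h>

  /* Hypothetical decoder: ID_AA64MMFR0_EL1.PARange -> physical address bits.
   * Encodings 0b0000..0b0110 map to 32/36/40/42/44/48/52 bits. */
  static unsigned int parange_to_pa_bits(uint64_t mmfr0)
  {
          static const unsigned int pa_bits[] = { 32, 36, 40, 42, 44, 48, 52 };
          uint64_t parange = mmfr0 & 0xf;               /* PARange is bits [3:0] */

          return parange < 7 ? pa_bits[parange] : 52;   /* clamp unknown encodings */
  }
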
/openbmc/linux/arch/arm64/kvm/
reset.c

  in kvm_set_ipa_limit():
  313          u64 mmfr0;
  315          mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);
  316          parange = cpuid_feature_extract_unsigned_field(mmfr0,
  331          switch (cpuid_feature_extract_unsigned_field(mmfr0, ID_AA64MMFR0_EL1_TGRAN_2_SHIFT)) {

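kvm_set_ipa_limit() combines two pieces of ID_AA64MMFR0_EL1: PARange caps the IPA size KVM will offer, and the stage-2 granule field (TGranX_2 for the running page size) says whether that granule may be used for stage-2 translation at all. The TGranX_2 fields have an extra "defer to the stage-1 field" encoding; a sketch of the usual interpretation (hypothetical helper; encodings per the architecture):

  #include <stdbool.h>
  #include <stdint.h>

  /* tgran_2 is the 4-bit TGranX_2 field for the current page size;
   * stage1_ok says whether the matching stage-1 TGranX field advertises support. */
  static bool stage2_granule_supported(uint64_t tgran_2, bool stage1_ok)
  {
          switch (tgran_2) {
          case 0:  return stage1_ok;  /* 0b0000: same as the stage-1 field      */
          case 1:  return false;      /* 0b0001: not supported at stage 2       */
          case 2:  return true;       /* 0b0010: supported at stage 2           */
          case 3:  return true;       /* 0b0011: supported, 52-bit capable      */
          default: return false;      /* treat unknown encodings as unsupported */
          }
  }
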
mmu.c

  in kvm_init_stage2_mmu():
  871          u64 mmfr0, mmfr1;
  893          mmfr0 = read_sanitised_ftr_reg(SYS_ID_AA64MMFR0_EL1);
  895          kvm->arch.vtcr = kvm_get_vtcr(mmfr0, mmfr1, phys_shift);

/openbmc/linux/arch/arm/kernel/
setup.c

  in __get_cpu_architecture():
  254          unsigned int mmfr0 = read_cpuid_ext(CPUID_EXT_MMFR0);
  255          if ((mmfr0 & 0x0000000f) >= 0x00000003 ||
  256              (mmfr0 & 0x000000f0) >= 0x00000030)
  258          else if ((mmfr0 & 0x0000000f) == 0x00000002 ||
  259                   (mmfr0 & 0x000000f0) == 0x00000020)

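The 32-bit ARM code uses the AArch32 ID_MMFR0 register for a coarser job: bits [3:0] describe the VMSA (MMU) model and bits [7:4] the PMSA (MPU) model, and __get_cpu_architecture() uses those two nibbles to tell a v7-class implementation from a v6-class one. A standalone sketch of that classification (the enum labels are hypothetical; the kernel's CPU_ARCH_* values are not visible in the excerpt):

  /* Hypothetical classifier mirroring the tests above:
   * ID_MMFR0[3:0] = VMSA support, ID_MMFR0[7:4] = PMSA support. */
  enum cpu_arch { ARCH_UNKNOWN, ARCH_V6, ARCH_V7 };

  static enum cpu_arch classify_mmfr0(unsigned int mmfr0)
  {
          unsigned int vmsa = mmfr0 & 0xf;
          unsigned int pmsa = (mmfr0 >> 4) & 0xf;

          if (vmsa >= 3 || pmsa >= 3)
                  return ARCH_V7;     /* VMSAv7/PMSAv7 or later */
          if (vmsa == 2 || pmsa == 2)
                  return ARCH_V6;     /* VMSAv6/PMSAv6 */
          return ARCH_UNKNOWN;
  }
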
/openbmc/linux/arch/arm64/mm/
init.c

  in arm64_memblock_init():
  407          u64 mmfr0 = read_cpuid(ID_AA64MMFR0_EL1);
  409                          mmfr0, ID_AA64MMFR0_EL1_PARANGE_SHIFT);

/openbmc/linux/arch/arm64/kvm/hyp/
pgtable.c

  624  u64 kvm_get_vtcr(u64 mmfr0, u64 mmfr1, u32 phys_shift)
  629          vtcr |= kvm_get_parange(mmfr0) << VTCR_EL2_PS_SHIFT;

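kvm_get_vtcr() can use the PARange encoding unmodified because VTCR_EL2.PS takes the same encoding as ID_AA64MMFR0_EL1.PARange. A sketch of just that step (VTCR_EL2_PS_SHIFT assumed to be 16, i.e. PS at bits [18:16]; the rest of the real VTCR_EL2 computation, T0SZ, SL0, granule and shareability bits, is omitted):

  #include <stdint.h>

  #define VTCR_EL2_PS_SHIFT  16   /* assumed: VTCR_EL2.PS occupies bits [18:16] */

  /* Fold the (already clamped) PARange encoding into a VTCR_EL2 value. */
  static uint64_t vtcr_with_ps(uint64_t vtcr, uint64_t parange)
  {
          return vtcr | (parange << VTCR_EL2_PS_SHIFT);
  }
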
/openbmc/linux/arch/arm64/kernel/
cpufeature.c

  in verify_hyp_capabilities():
  3215         u64 safe_mmfr1, mmfr0, mmfr1;
  3223         mmfr0 = read_cpuid(ID_AA64MMFR0_EL1);
  3235         parange = cpuid_feature_extract_unsigned_field(mmfr0,

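verify_hyp_capabilities() reads this CPU's own ID_AA64MMFR0_EL1 with read_cpuid() rather than the sanitised system value, so a late-onlined CPU whose physical address range falls short of the system-wide configuration can be detected and rejected. A sketch of that kind of late-CPU check (hypothetical names; comparing raw PARange encodings works because larger encodings mean larger address sizes):

  #include <stdbool.h>
  #include <stdint.h>

  /* Hypothetical late-CPU check: this CPU's PARange must be at least the
   * encoding the system-wide configuration was built around. */
  static bool cpu_parange_ok(uint64_t cpu_mmfr0, uint64_t required_parange)
  {
          uint64_t parange = cpu_mmfr0 & 0xf;   /* PARange is bits [3:0] */

          return parange >= required_parange;
  }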