/openbmc/qemu/accel/tcg/

cputlb.c
    135  static inline uintptr_t tlb_index(CPUState *cpu, uintptr_t mmu_idx, in tlb_index() argument
    138  uintptr_t size_mask = cpu->neg.tlb.f[mmu_idx].mask >> CPU_TLB_ENTRY_BITS; in tlb_index()
    144  static inline CPUTLBEntry *tlb_entry(CPUState *cpu, uintptr_t mmu_idx, in tlb_entry() argument
    147  return &cpu->neg.tlb.f[mmu_idx].table[tlb_index(cpu, mmu_idx, addr)]; in tlb_entry()
    297  static void tlb_flush_one_mmuidx_locked(CPUState *cpu, int mmu_idx, in tlb_flush_one_mmuidx_locked() argument
    300  CPUTLBDesc *desc = &cpu->neg.tlb.d[mmu_idx]; in tlb_flush_one_mmuidx_locked()
    301  CPUTLBDescFast *fast = &cpu->neg.tlb.f[mmu_idx]; in tlb_flush_one_mmuidx_locked()
    319  static inline void tlb_n_used_entries_inc(CPUState *cpu, uintptr_t mmu_idx) in tlb_n_used_entries_inc() argument
    321  cpu->neg.tlb.d[mmu_idx].n_used_entries++; in tlb_n_used_entries_inc()
    324  static inline void tlb_n_used_entries_dec(CPUState *cpu, uintptr_t mmu_idx) in tlb_n_used_entries_dec() argument
    [all …]
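The cputlb.c hits above are the generic softmmu fast path: each mmu_idx owns its own direct-mapped table, and an address is hashed into that table by page number. Below is a minimal, self-contained C model of that indexing; the struct layout, the 4 KiB page size, the 32-byte entry size, and all *_model names are assumptions for illustration, not QEMU's real definitions.

/*
 * Simplified model of the per-mmu_idx fast-TLB lookup shown above.
 * Everything named *_model or MODEL_* here is an illustrative stand-in.
 */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define TARGET_PAGE_BITS   12   /* assumed 4 KiB target pages */
#define CPU_TLB_ENTRY_BITS 5    /* assumed 32-byte TLB entry size */

typedef struct {
    uint64_t addr;              /* tag: page-aligned virtual address */
    uint64_t paddr;             /* payload for this toy model */
} TLBEntryModel;

typedef struct {
    uint64_t mask;              /* (n_entries - 1) << CPU_TLB_ENTRY_BITS, as in cputlb.c */
    TLBEntryModel *table;
} TLBFastModel;

/* One fast table per mmu_idx, mirroring cpu->neg.tlb.f[mmu_idx]. */
#define NB_MMU_MODES 4
static TLBFastModel tlb_f[NB_MMU_MODES];

static uintptr_t tlb_index_model(int mmu_idx, uint64_t addr)
{
    uintptr_t size_mask = tlb_f[mmu_idx].mask >> CPU_TLB_ENTRY_BITS;
    return (addr >> TARGET_PAGE_BITS) & size_mask;
}

static TLBEntryModel *tlb_entry_model(int mmu_idx, uint64_t addr)
{
    return &tlb_f[mmu_idx].table[tlb_index_model(mmu_idx, addr)];
}

int main(void)
{
    int n_entries = 256;        /* power of two, as the mask encoding requires */
    tlb_f[1].mask = (uint64_t)(n_entries - 1) << CPU_TLB_ENTRY_BITS;
    tlb_f[1].table = calloc(n_entries, sizeof(TLBEntryModel));

    printf("index for 0x7f001234 in mmu_idx 1: %zu\n",
           (size_t)tlb_index_model(1, 0x7f001234));
    tlb_entry_model(1, 0x7f001234)->addr = 0x7f001000;
    free(tlb_f[1].table);
    return 0;
}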
ldst_common.c.inc
    252  int mmu_idx, uintptr_t ra)
    254  MemOpIdx oi = make_memop_idx(MO_UB, mmu_idx);
    259  int mmu_idx, uintptr_t ra)
    261  return (int8_t)cpu_ldub_mmuidx_ra(env, addr, mmu_idx, ra);
    265  int mmu_idx, uintptr_t ra)
    267  MemOpIdx oi = make_memop_idx(MO_BEUW | MO_UNALN, mmu_idx);
    272  int mmu_idx, uintptr_t ra)
    274  return (int16_t)cpu_lduw_be_mmuidx_ra(env, addr, mmu_idx, ra);
    278  int mmu_idx, uintptr_t ra)
    280  MemOpIdx oi = make_memop_idx(MO_BEUL | MO_UNALN, mmu_idx);
    [all …]
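The ldst_common.c.inc hits show each cpu_ld*/st*_mmuidx_ra helper packing its MemOp together with the caller's mmu_idx into a single MemOpIdx before dispatching. The sketch below illustrates that packing under the assumption that the mmu_idx occupies the low four bits (which is how I read include/exec/memopidx.h); the *_model names and the MO_UB_MODEL value are placeholders, not QEMU's definitions.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t MemOpIdxModel;

enum { MO_UB_MODEL = 0 };   /* illustrative stand-in for the MemOp MO_UB */

/* Pack a memory-op descriptor and an mmu_idx into one value. */
static inline MemOpIdxModel make_memop_idx_model(unsigned op, unsigned mmu_idx)
{
    assert(mmu_idx <= 15);              /* mmu_idx must fit in the low 4 bits */
    return (op << 4) | mmu_idx;
}

static inline unsigned get_memop_model(MemOpIdxModel oi)  { return oi >> 4; }
static inline unsigned get_mmuidx_model(MemOpIdxModel oi) { return oi & 15; }

int main(void)
{
    MemOpIdxModel oi = make_memop_idx_model(MO_UB_MODEL, 3);
    printf("op=%u mmu_idx=%u\n", get_memop_model(oi), get_mmuidx_model(oi));
    return 0;
}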
/openbmc/qemu/target/arm/tcg/

hflags.c
    67  ARMMMUIdx mmu_idx, in rebuild_hflags_common() argument
    71  DP_TBFLAG_ANY(flags, MMUIDX, arm_to_core_mmu_idx(mmu_idx)); in rebuild_hflags_common()
    81  ARMMMUIdx mmu_idx, in rebuild_hflags_common_32() argument
    94  return rebuild_hflags_common(env, fp_el, mmu_idx, flags); in rebuild_hflags_common_32()
    98  ARMMMUIdx mmu_idx) in rebuild_hflags_m32() argument
    118  !((mmu_idx & ARM_MMU_IDX_M_NEGPRI) && in rebuild_hflags_m32()
    127  return rebuild_hflags_common_32(env, fp_el, mmu_idx, flags); in rebuild_hflags_m32()
    157  ARMMMUIdx mmu_idx) in rebuild_hflags_a32() argument
    201  return rebuild_hflags_common_32(env, fp_el, mmu_idx, flags); in rebuild_hflags_a32()
    205  ARMMMUIdx mmu_idx) in rebuild_hflags_a64() argument
    [all …]
tlb_helper.c
    21  bool arm_s1_regime_using_lpae_format(CPUARMState *env, ARMMMUIdx mmu_idx) in arm_s1_regime_using_lpae_format() argument
    23  mmu_idx = stage_1_mmu_idx(mmu_idx); in arm_s1_regime_using_lpae_format()
    24  return regime_using_lpae_format(env, mmu_idx); in arm_s1_regime_using_lpae_format()
    82  int target_el, int mmu_idx, uint32_t *ret_fsc) in compute_fsr_fsc() argument
    84  ARMMMUIdx arm_mmu_idx = core_to_arm_mmu_idx(env, mmu_idx); in compute_fsr_fsc()
    173  int mmu_idx, ARMMMUFaultInfo *fi) in arm_deliver_fault() argument
    198  fsr = compute_fsr_fsc(env, fi, target_el, mmu_idx, &fsc); in arm_deliver_fault()
    243  fsr = compute_fsr_fsc(env, fi, target_el, mmu_idx, &fsc); in arm_deliver_fault()
    268  int mmu_idx, uintptr_t retaddr) in arm_cpu_do_unaligned_access() argument
    277  arm_deliver_fault(cpu, vaddr, access_type, mmu_idx, &fi); in arm_cpu_do_unaligned_access()
    [all …]
mte_helper.c
    267  int mmu_idx = arm_env_mmu_index(env); in HELPER() local
    272  mem = allocation_tag_mem(env, mmu_idx, ptr, MMU_DATA_LOAD, 1, in HELPER()
    320  int mmu_idx = arm_env_mmu_index(env); in do_stg() local
    326  mem = allocation_tag_mem(env, mmu_idx, ptr, MMU_DATA_STORE, TAG_GRANULE, in do_stg()
    347  int mmu_idx = arm_env_mmu_index(env); in HELPER() local
    351  probe_write(env, ptr, TAG_GRANULE, mmu_idx, ra); in HELPER()
    357  int mmu_idx = arm_env_mmu_index(env); in do_st2g() local
    369  mem1 = allocation_tag_mem(env, mmu_idx, ptr, MMU_DATA_STORE, in do_st2g()
    371  mem2 = allocation_tag_mem(env, mmu_idx, ptr + TAG_GRANULE, in do_st2g()
    384  mem1 = allocation_tag_mem(env, mmu_idx, ptr, MMU_DATA_STORE, in do_st2g()
    [all …]
m_helper.c
    169  ARMMMUIdx mmu_idx = ARM_MMU_IDX_M; in arm_v7m_mmu_idx_all() local
    172  mmu_idx |= ARM_MMU_IDX_M_PRIV; in arm_v7m_mmu_idx_all()
    176  mmu_idx |= ARM_MMU_IDX_M_NEGPRI; in arm_v7m_mmu_idx_all()
    180  mmu_idx |= ARM_MMU_IDX_M_S; in arm_v7m_mmu_idx_all()
    183  return mmu_idx; in arm_v7m_mmu_idx_all()
    214  ARMMMUIdx mmu_idx, StackingMode mode) in v7m_stack_write() argument
    221  bool secure = mmu_idx & ARM_MMU_IDX_M_S; in v7m_stack_write()
    225  if (get_phys_addr(env, addr, MMU_DATA_STORE, 0, mmu_idx, &res, &fi)) { in v7m_stack_write()
    302  ARMMMUIdx mmu_idx) in v7m_stack_read() argument
    309  bool secure = mmu_idx & ARM_MMU_IDX_M_S; in v7m_stack_read()
    [all …]
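In m_helper.c, arm_v7m_mmu_idx_all() builds an M-profile mmu_idx by starting from ARM_MMU_IDX_M and OR-ing in privilege, negative-priority and security bits, which later code such as v7m_stack_write()/v7m_stack_read() tests with a simple mask. The standalone sketch below mirrors that composition; the MODEL_* values are invented for illustration and should not be taken as QEMU's actual constants.

#include <stdbool.h>
#include <stdio.h>

enum {
    MODEL_MMU_IDX_M        = 0x40,   /* hypothetical base tag for M-profile */
    MODEL_MMU_IDX_M_PRIV   = 0x1,    /* privileged execution */
    MODEL_MMU_IDX_M_NEGPRI = 0x2,    /* "negative priority" handler state */
    MODEL_MMU_IDX_M_S      = 0x4,    /* Secure state */
};

/* Compose an M-profile mmu_idx from the current execution state. */
static int model_v7m_mmu_idx(bool priv, bool negpri, bool secure)
{
    int mmu_idx = MODEL_MMU_IDX_M;

    if (priv) {
        mmu_idx |= MODEL_MMU_IDX_M_PRIV;
    }
    if (negpri) {
        mmu_idx |= MODEL_MMU_IDX_M_NEGPRI;
    }
    if (secure) {
        mmu_idx |= MODEL_MMU_IDX_M_S;
    }
    return mmu_idx;
}

int main(void)
{
    /* e.g. a privileged Secure context, as seen during exception stacking */
    int idx = model_v7m_mmu_idx(true, false, true);
    printf("mmu_idx = 0x%x, secure = %d\n", idx, !!(idx & MODEL_MMU_IDX_M_S));
    return 0;
}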
/openbmc/qemu/target/arm/

ptw.c
    88  static int get_S1prot(CPUARMState *env, ARMMMUIdx mmu_idx, bool is_aa64,
    147  ARMMMUIdx stage_1_mmu_idx(ARMMMUIdx mmu_idx) in stage_1_mmu_idx() argument
    149  switch (mmu_idx) { in stage_1_mmu_idx()
    157  return mmu_idx; in stage_1_mmu_idx()
    207  static bool regime_translation_big_endian(CPUARMState *env, ARMMMUIdx mmu_idx) in regime_translation_big_endian() argument
    209  return (regime_sctlr(env, mmu_idx) & SCTLR_EE) != 0; in regime_translation_big_endian()
    213  static uint64_t regime_ttbr(CPUARMState *env, ARMMMUIdx mmu_idx, int ttbrn) in regime_ttbr() argument
    215  if (mmu_idx == ARMMMUIdx_Stage2) { in regime_ttbr()
    218  if (mmu_idx == ARMMMUIdx_Stage2_S) { in regime_ttbr()
    222  return env->cp15.ttbr0_el[regime_el(env, mmu_idx)]; in regime_ttbr()
    [all …]
internals.h
    806  MMUAccessType access_type, int mmu_idx,
    810  static inline int arm_to_core_mmu_idx(ARMMMUIdx mmu_idx) in arm_to_core_mmu_idx() argument
    812  return mmu_idx & ARM_MMU_IDX_COREIDX_MASK; in arm_to_core_mmu_idx()
    815  static inline ARMMMUIdx core_to_arm_mmu_idx(CPUARMState *env, int mmu_idx) in core_to_arm_mmu_idx() argument
    818  return mmu_idx | ARM_MMU_IDX_M; in core_to_arm_mmu_idx()
    820  return mmu_idx | ARM_MMU_IDX_A; in core_to_arm_mmu_idx()
    824  static inline ARMMMUIdx core_to_aa64_mmu_idx(int mmu_idx) in core_to_aa64_mmu_idx() argument
    827  return mmu_idx | ARM_MMU_IDX_A; in core_to_aa64_mmu_idx()
    830  int arm_mmu_idx_to_el(ARMMMUIdx mmu_idx);
    839  bool arm_s1_regime_using_lpae_format(CPUARMState *env, ARMMMUIdx mmu_idx);
    [all …]
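The internals.h hits show the two-way mapping between ARM's own ARMMMUIdx numbering and the core (TCG) mmu_idx: the core index is the low bits, and a profile tag marks whether the value belongs to the A-profile or M-profile namespace. Here is a small self-contained sketch of that round trip; the MODEL_* constants are placeholders chosen for illustration, not QEMU's real values.

#include <stdbool.h>
#include <stdio.h>

enum {
    MODEL_MMU_IDX_COREIDX_MASK = 0xf,    /* assumed width of the core index */
    MODEL_MMU_IDX_A            = 0x10,   /* hypothetical A-profile tag */
    MODEL_MMU_IDX_M            = 0x40,   /* hypothetical M-profile tag */
};

/* Strip the profile tag, keeping only the core TCG index. */
static inline int model_arm_to_core_mmu_idx(int arm_mmu_idx)
{
    return arm_mmu_idx & MODEL_MMU_IDX_COREIDX_MASK;
}

/* Re-attach the profile tag to a core index. */
static inline int model_core_to_arm_mmu_idx(bool m_profile, int core_idx)
{
    return core_idx | (m_profile ? MODEL_MMU_IDX_M : MODEL_MMU_IDX_A);
}

int main(void)
{
    int arm_idx = model_core_to_arm_mmu_idx(false, 2);
    printf("arm=0x%x core=%d\n", arm_idx, model_arm_to_core_mmu_idx(arm_idx));
    return 0;
}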
/openbmc/qemu/target/ppc/

mem_helper.c
    58  MMUAccessType access_type, int mmu_idx, in probe_contiguous() argument
    67  return probe_access(env, addr, nb, access_type, mmu_idx, raddr); in probe_contiguous()
    72  host1 = probe_access(env, addr, nb_pg1, access_type, mmu_idx, raddr); in probe_contiguous()
    74  host2 = probe_access(env, addr, nb_pg2, access_type, mmu_idx, raddr); in probe_contiguous()
    86  int mmu_idx = ppc_env_mmu_index(env, false); in helper_lmw() local
    88  MMU_DATA_LOAD, mmu_idx, raddr); in helper_lmw()
    99  env->gpr[reg] = cpu_ldl_mmuidx_ra(env, addr, mmu_idx, raddr); in helper_lmw()
    108  int mmu_idx = ppc_env_mmu_index(env, false); in helper_stmw() local
    110  MMU_DATA_STORE, mmu_idx, raddr); in helper_stmw()
    121  cpu_stl_mmuidx_ra(env, addr, env->gpr[reg], mmu_idx, raddr); in helper_stmw()
    [all …]
mmu-booke.c
    261  static bool is_epid_mmu(int mmu_idx) in is_epid_mmu() argument
    263  return mmu_idx == PPC_TLB_EPID_STORE || mmu_idx == PPC_TLB_EPID_LOAD; in is_epid_mmu()
    266  static uint32_t mmubooke206_esr(int mmu_idx, MMUAccessType access_type) in mmubooke206_esr() argument
    272  if (is_epid_mmu(mmu_idx)) { in mmubooke206_esr()
    287  int mmu_idx, uint32_t *epid_out, in mmubooke206_get_as() argument
    290  if (is_epid_mmu(mmu_idx)) { in mmubooke206_get_as()
    292  if (mmu_idx == PPC_TLB_EPID_STORE) { in mmubooke206_get_as()
    312  MMUAccessType access_type, int mmu_idx) in mmubooke206_check_tlb() argument
    316  bool use_epid = mmubooke206_get_as(env, mmu_idx, &epid, &as, &pr); in mmubooke206_check_tlb()
    393  int mmu_idx) in mmubooke206_get_physical_address() argument
    [all …]
mmu-radix64.c
    235  int mmu_idx, bool partition_scoped) in ppc_radix64_check_prot() argument
    253  } else if (mmuidx_pr(mmu_idx) || (pte & R_PTE_EAA_PRIV) || in ppc_radix64_check_prot()
    431  int mmu_idx, uint64_t lpid, in ppc_radix64_partition_scoped_xlate() argument
    450  eaddr, mmu_idx, g_raddr); in ppc_radix64_partition_scoped_xlate()
    458  &fault_cause, h_prot, mmu_idx, true)) { in ppc_radix64_partition_scoped_xlate()
    516  int mmu_idx, uint64_t lpid, in ppc_radix64_process_scoped_xlate() argument
    529  eaddr, mmu_idx, pid); in ppc_radix64_process_scoped_xlate()
    642  g_prot, mmu_idx, false)) { in ppc_radix64_process_scoped_xlate()
    680  int *psizep, int *protp, int mmu_idx, in ppc_radix64_xlate_impl() argument
    690  assert(!(mmuidx_hv(mmu_idx) && cpu->vhyp)); in ppc_radix64_xlate_impl()
    [all …]
mmu-hash32.c
    40  static target_ulong hash32_bat_size(int mmu_idx, in hash32_bat_size() argument
    43  if ((mmuidx_pr(mmu_idx) && !(batu & BATU32_VP)) in hash32_bat_size()
    44  || (!mmuidx_pr(mmu_idx) && !(batu & BATU32_VS))) { in hash32_bat_size()
    53  int mmu_idx) in ppc_hash32_bat_lookup() argument
    74  mask = hash32_bat_size(mmu_idx, batu, batl); in ppc_hash32_bat_lookup()
    116  hwaddr *raddr, int *prot, int mmu_idx, in ppc_hash32_direct_store() argument
    181  if (ppc_hash32_key(mmuidx_pr(mmu_idx), sr)) { in ppc_hash32_direct_store()
    295  hwaddr *raddrp, int *psizep, int *protp, int mmu_idx, in ppc_hash32_xlate() argument
    310  if (mmuidx_real(mmu_idx)) { in ppc_hash32_xlate()
    319  raddr = ppc_hash32_bat_lookup(cpu, eaddr, access_type, protp, mmu_idx); in ppc_hash32_xlate()
    [all …]
/openbmc/qemu/target/mips/sysemu/

physaddr.c
    25  static int is_seg_am_mapped(unsigned int am, bool eu, int mmu_idx) in is_seg_am_mapped() argument
    42  switch (mmu_idx) { in is_seg_am_mapped()
    80  MMUAccessType access_type, int mmu_idx, in get_seg_physical_address() argument
    85  int mapped = is_seg_am_mapped(am, eu, mmu_idx); in get_seg_physical_address()
    104  MMUAccessType access_type, int mmu_idx, in get_segctl_physical_address() argument
    112  access_type, mmu_idx, am, eu, segmask, in get_segctl_physical_address()
    118  MMUAccessType access_type, int mmu_idx) in get_physical_address() argument
    122  int user_mode = mmu_idx == MIPS_HFLAG_UM; in get_physical_address()
    123  int supervisor_mode = mmu_idx == MIPS_HFLAG_SM; in get_physical_address()
    144  mmu_idx, segctl, 0x3FFFFFFF); in get_physical_address()
    [all …]
/openbmc/qemu/target/i386/tcg/

access.c
    13  MMUAccessType type, int mmu_idx, uintptr_t ra) in access_prepare_mmu() argument
    27  ret->mmu_idx = mmu_idx; in access_prepare_mmu()
    31  haddr1 = probe_access(env, vaddr, size1, type, mmu_idx, ra); in access_prepare_mmu()
    35  haddr2 = probe_access(env, vaddr + size1, size2, type, mmu_idx, ra); in access_prepare_mmu()
    51  int mmu_idx = cpu_mmu_index(env_cpu(env), false); in access_prepare() local
    52  access_prepare_mmu(ret, env, vaddr, size, type, mmu_idx, ra); in access_prepare()
    93  return cpu_ldub_mmuidx_ra(ac->env, addr, ac->mmu_idx, ac->ra); in access_ldb()
    103  return cpu_lduw_le_mmuidx_ra(ac->env, addr, ac->mmu_idx, ac->ra); in access_ldw()
    113  return cpu_ldl_le_mmuidx_ra(ac->env, addr, ac->mmu_idx, ac->ra); in access_ldl()
    123  return cpu_ldq_le_mmuidx_ra(ac->env, addr, ac->mmu_idx, ac->ra); in access_ldq()
    [all …]
/openbmc/qemu/target/sparc/

mmu_helper.c
    70  int rw, int mmu_idx) in get_physical_address() argument
    80  is_user = mmu_idx == MMU_USER_IDX; in get_physical_address()
    82  if (mmu_idx == MMU_PHYS_IDX) { in get_physical_address()
    207  MMUAccessType access_type, int mmu_idx, in sparc_cpu_tlb_fill() argument
    225  address, access_type, mmu_idx); in sparc_cpu_tlb_fill()
    232  tlb_set_page_full(cs, mmu_idx, vaddr, &full); in sparc_cpu_tlb_fill()
    248  tlb_set_page_full(cs, mmu_idx, vaddr, &full); in sparc_cpu_tlb_fill()
    502  static uint64_t build_sfsr(CPUSPARCState *env, int mmu_idx, int rw) in build_sfsr() argument
    506  switch (mmu_idx) { in build_sfsr()
    545  target_ulong address, int rw, int mmu_idx) in get_physical_address_data() argument
    [all …]
trace-events
    4  …uint64_t address, uint64_t context, int mmu_idx, uint32_t tl) "DFAULT at 0x%"PRIx64" context 0x%"P…
    5  …(uint64_t address, uint64_t context, int mmu_idx, uint32_t tl) "DPROT at 0x%"PRIx64" context 0x%"P…
    9  …_phys_addr_code(uint32_t tl, int mmu_idx, uint64_t prim_context, uint64_t sec_context, uint64_t ad…
    10  …_phys_addr_data(uint32_t tl, int mmu_idx, uint64_t prim_context, uint64_t sec_context, uint64_t ad…
    11  …nt64_t paddr, int mmu_idx, uint32_t tl, uint64_t prim_context, uint64_t sec_context) "Translate at…
/openbmc/qemu/target/riscv/

op_helper.c
    158  int mmu_idx = riscv_env_mmu_index(env, false); in helper_cbo_zero() local
    171  mem = probe_write(env, address, cbozlen, mmu_idx, ra); in helper_cbo_zero()
    188  cpu_stb_mmuidx_ra(env, address + i, 0, mmu_idx, ra); in helper_cbo_zero()
    206  int mmu_idx = riscv_env_mmu_index(env, false); in check_zicbom_access() local
    227  mmu_idx, true, &phost, ra); in check_zicbom_access()
    239  probe_write(env, address, cbomlen, mmu_idx, ra); in check_zicbom_access()
    472  int mmu_idx = check_access_hlsv(env, false, ra); in helper_hyp_hlv_bu() local
    473  MemOpIdx oi = make_memop_idx(MO_UB, mmu_idx); in helper_hyp_hlv_bu()
    481  int mmu_idx = check_access_hlsv(env, false, ra); in helper_hyp_hlv_hu() local
    482  MemOpIdx oi = make_memop_idx(MO_TEUW, mmu_idx); in helper_hyp_hlv_hu()
    [all …]
internals.h
    43  static inline int mmuidx_priv(int mmu_idx) in mmuidx_priv() argument
    45  int ret = mmu_idx & 3; in mmuidx_priv()
    52  static inline bool mmuidx_sum(int mmu_idx) in mmuidx_sum() argument
    54  return (mmu_idx & 3) == MMUIdx_S_SUM; in mmuidx_sum()
    57  static inline bool mmuidx_2stage(int mmu_idx) in mmuidx_2stage() argument
    59  return mmu_idx & MMU_2STAGE_BIT; in mmuidx_2stage()
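The RISC-V internals.h helpers imply a small bit-field encoding for mmu_idx: the low two bits select the effective privilege bucket (with one value reserved for "S-mode with SSTATUS.SUM in effect") and a separate bit marks two-stage (guest) translation. The sketch below reproduces those accessors over assumed enumerator values; treat the exact numbers as illustrative, not as QEMU's definitions.

#include <stdbool.h>
#include <stdio.h>

enum {
    MODEL_MMUIdx_U       = 0,
    MODEL_MMUIdx_S       = 1,
    MODEL_MMUIdx_S_SUM   = 2,        /* S-mode access with SUM permission */
    MODEL_MMUIdx_M       = 3,
    MODEL_MMU_2STAGE_BIT = (1 << 2), /* hypervisor two-stage translation */
};

static inline int model_mmuidx_priv(int mmu_idx)
{
    int ret = mmu_idx & 3;
    if (ret == MODEL_MMUIdx_S_SUM) {
        ret = MODEL_MMUIdx_S;        /* the SUM variant still runs at S-mode */
    }
    return ret;
}

static inline bool model_mmuidx_sum(int mmu_idx)
{
    return (mmu_idx & 3) == MODEL_MMUIdx_S_SUM;
}

static inline bool model_mmuidx_2stage(int mmu_idx)
{
    return mmu_idx & MODEL_MMU_2STAGE_BIT;
}

int main(void)
{
    int idx = MODEL_MMUIdx_S_SUM | MODEL_MMU_2STAGE_BIT;
    printf("priv=%d sum=%d 2stage=%d\n",
           model_mmuidx_priv(idx), model_mmuidx_sum(idx),
           model_mmuidx_2stage(idx));
    return 0;
}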
/openbmc/qemu/target/i386/tcg/sysemu/

svm_helper.c
    30  static void svm_save_seg(CPUX86State *env, int mmu_idx, hwaddr addr, in svm_save_seg() argument
    34  sc->selector, mmu_idx, 0); in svm_save_seg()
    36  sc->base, mmu_idx, 0); in svm_save_seg()
    38  sc->limit, mmu_idx, 0); in svm_save_seg()
    42  mmu_idx, 0); in svm_save_seg()
    55  static void svm_load_seg(CPUX86State *env, int mmu_idx, hwaddr addr, in svm_load_seg() argument
    62  mmu_idx, 0); in svm_load_seg()
    65  mmu_idx, 0); in svm_load_seg()
    68  mmu_idx, 0); in svm_load_seg()
    71  mmu_idx, 0); in svm_load_seg()
    [all …]
excp_helper.c
    31  int mmu_idx; member
    145  const bool is_user = is_mmu_index_user(in->mmu_idx); in mmu_translate()
    376  if (!is_mmu_index_smap(in->mmu_idx) || !(ptep & PG_USER_MASK)) { in mmu_translate()
    544  MMUAccessType access_type, int mmu_idx, in get_physical_address() argument
    554  switch (mmu_idx) { in get_physical_address()
    562  in.mmu_idx = in get_physical_address()
    575  if (is_mmu_index_32(mmu_idx)) { in get_physical_address()
    581  in.mmu_idx = mmu_idx; in get_physical_address()
    610  MMUAccessType access_type, int mmu_idx, in x86_cpu_tlb_fill() argument
    617  if (get_physical_address(env, addr, access_type, mmu_idx, &out, &err, in x86_cpu_tlb_fill()
    [all …]
/openbmc/qemu/include/exec/

cpu_ldst.h
    169  int mmu_idx, uintptr_t ra);
    171  int mmu_idx, uintptr_t ra);
    173  int mmu_idx, uintptr_t ra);
    175  int mmu_idx, uintptr_t ra);
    177  int mmu_idx, uintptr_t ra);
    179  int mmu_idx, uintptr_t ra);
    181  int mmu_idx, uintptr_t ra);
    183  int mmu_idx, uintptr_t ra);
    185  int mmu_idx, uintptr_t ra);
    187  int mmu_idx, uintptr_t ra);
    [all …]
exec-all.h
    207  void tlb_set_page_full(CPUState *cpu, int mmu_idx, vaddr addr,
    234  int prot, int mmu_idx, vaddr size);
    243  int mmu_idx, vaddr size);
    325  MMUAccessType access_type, int mmu_idx, uintptr_t retaddr);
    328  int mmu_idx, uintptr_t retaddr) in probe_write() argument
    330  return probe_access(env, addr, size, MMU_DATA_STORE, mmu_idx, retaddr); in probe_write()
    334  int mmu_idx, uintptr_t retaddr) in probe_read() argument
    336  return probe_access(env, addr, size, MMU_DATA_LOAD, mmu_idx, retaddr); in probe_read()
    359  MMUAccessType access_type, int mmu_idx,
    380  MMUAccessType access_type, int mmu_idx,
    [all …]
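As the exec-all.h hits show, probe_write() and probe_read() are thin wrappers that forward to probe_access() with the access type filled in and the caller's mmu_idx passed straight through. The toy model below only mirrors that wrapper pattern: probe_access_model() merely logs its arguments, whereas the real probe_access() fills the softmmu TLB for that mmu_idx and returns a usable host pointer; all *_model names are placeholders.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

typedef enum { MMU_DATA_LOAD, MMU_DATA_STORE, MMU_INST_FETCH } MMUAccessTypeModel;

/* Stand-in for probe_access(): here it only logs the request. */
static void *probe_access_model(uint64_t addr, int size,
                                MMUAccessTypeModel type, int mmu_idx)
{
    printf("probe addr=0x%" PRIx64 " size=%d type=%d mmu_idx=%d\n",
           addr, size, (int)type, mmu_idx);
    return NULL;   /* a real implementation returns the host address */
}

static inline void *probe_write_model(uint64_t addr, int size, int mmu_idx)
{
    return probe_access_model(addr, size, MMU_DATA_STORE, mmu_idx);
}

static inline void *probe_read_model(uint64_t addr, int size, int mmu_idx)
{
    return probe_access_model(addr, size, MMU_DATA_LOAD, mmu_idx);
}

int main(void)
{
    probe_write_model(0x1000, 8, 1);
    probe_read_model(0x2000, 4, 0);
    return 0;
}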
/openbmc/qemu/target/microblaze/

helper.c
    40  MMUAccessType access_type, int mmu_idx, in mb_cpu_tlb_fill() argument
    52  if (mmu_idx == MMU_NOMMU_IDX) { in mb_cpu_tlb_fill()
    56  tlb_set_page_with_attrs(cs, address, address, attrs, prot, mmu_idx, in mb_cpu_tlb_fill()
    61  hit = mmu_translate(cpu, &lu, address, access_type, mmu_idx); in mb_cpu_tlb_fill()
    67  mmu_idx, vaddr, paddr, lu.prot); in mb_cpu_tlb_fill()
    68  tlb_set_page_with_attrs(cs, vaddr, paddr, attrs, lu.prot, mmu_idx, in mb_cpu_tlb_fill()
    79  mmu_idx, address); in mb_cpu_tlb_fill()
    234  int mmu_idx = cpu_mmu_index(cs, false); in mb_cpu_get_phys_page_attrs_debug() local
    241  if (mmu_idx != MMU_NOMMU_IDX) { in mb_cpu_get_phys_page_attrs_debug()
    273  int mmu_idx, uintptr_t retaddr) in mb_cpu_do_unaligned_access() argument
/openbmc/qemu/target/loongarch/

cpu_helper.c
    17  int access_type, int index, int mmu_idx) in loongarch_map_tlb_entry() argument
    20  uint64_t plv = mmu_idx; in loongarch_map_tlb_entry()
    146  MMUAccessType access_type, int mmu_idx) in loongarch_map_address() argument
    153  address, access_type, index, mmu_idx); in loongarch_map_address()
    161  MMUAccessType access_type, int mmu_idx) in loongarch_map_address() argument
    181  MMUAccessType access_type, int mmu_idx) in get_physical_address() argument
    183  int user_mode = mmu_idx == MMU_USER_IDX; in get_physical_address()
    184  int kernel_mode = mmu_idx == MMU_KERNEL_IDX; in get_physical_address()
    225  access_type, mmu_idx); in get_physical_address()
/openbmc/qemu/target/hppa/

mem_helper.c
    199  int hppa_get_physical_address(CPUHPPAState *env, vaddr addr, int mmu_idx, in hppa_get_physical_address() argument
    208  if (MMU_IDX_MMU_DISABLED(mmu_idx)) { in hppa_get_physical_address()
    209  switch (mmu_idx) { in hppa_get_physical_address()
    240  priv = MMU_IDX_TO_PRIV(mmu_idx); in hppa_get_physical_address()
    277  if (ent->access_id && MMU_IDX_TO_P(mmu_idx)) { in hppa_get_physical_address()
    342  int prot, excp, mmu_idx; in hppa_cpu_get_phys_page_debug() local
    347  mmu_idx = (cpu->env.psw & PSW_D ? MMU_KERNEL_IDX : in hppa_cpu_get_phys_page_debug()
    350  excp = hppa_get_physical_address(&cpu->env, addr, mmu_idx, 0, 0, in hppa_cpu_get_phys_page_debug()
    411  int mmu_idx, MemTxAttrs attrs, in hppa_cpu_do_transaction_failed() argument
    423  MMU_IDX_MMU_DISABLED(mmu_idx)); in hppa_cpu_do_transaction_failed()
    [all …]