/*
 * Helpers for TLBI insns
 *
 * This code is licensed under the GNU GPL v2 or later.
 *
 * SPDX-License-Identifier: GPL-2.0-or-later
 */
#include "qemu/osdep.h"
#include "qemu/log.h"
#include "exec/cputlb.h"
#include "cpu.h"
#include "internals.h"
#include "cpu-features.h"
#include "cpregs.h"

/* Check for traps from EL1 due to HCR_EL2.TTLB. */
static CPAccessResult access_ttlb(CPUARMState *env, const ARMCPRegInfo *ri,
                                  bool isread)
{
    if (arm_current_el(env) == 1 && (arm_hcr_el2_eff(env) & HCR_TTLB)) {
        return CP_ACCESS_TRAP_EL2;
    }
    return CP_ACCESS_OK;
}

/* Check for traps from EL1 due to HCR_EL2.TTLB or TTLBIS. */
static CPAccessResult access_ttlbis(CPUARMState *env, const ARMCPRegInfo *ri,
                                    bool isread)
{
    if (arm_current_el(env) == 1 &&
        (arm_hcr_el2_eff(env) & (HCR_TTLB | HCR_TTLBIS))) {
        return CP_ACCESS_TRAP_EL2;
    }
    return CP_ACCESS_OK;
}

#ifdef TARGET_AARCH64
/* Check for traps from EL1 due to HCR_EL2.TTLB or TTLBOS. */
static CPAccessResult access_ttlbos(CPUARMState *env, const ARMCPRegInfo *ri,
                                    bool isread)
{
    if (arm_current_el(env) == 1 &&
        (arm_hcr_el2_eff(env) & (HCR_TTLB | HCR_TTLBOS))) {
        return CP_ACCESS_TRAP_EL2;
    }
    return CP_ACCESS_OK;
}
#endif

/* IS variants of TLB operations must affect all cores */
static void tlbiall_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                             uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_all_cpus_synced(cs);
}

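/*
 * QEMU's TLB does not record the ASID, so the by-ASID and by-MVA+ASID
 * forms below are implemented as over-broad flushes: a full flush and a
 * flush-by-VA-for-all-ASIDs respectively.
 */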
static void tlbiasid_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_all_cpus_synced(cs);
}

static void tlbimva_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                             uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_page_all_cpus_synced(cs, value & TARGET_PAGE_MASK);
}

static void tlbimvaa_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_page_all_cpus_synced(cs, value & TARGET_PAGE_MASK);
}

/*
 * Non-IS variants of TLB operations are upgraded to
 * IS versions if we are at EL1 and HCR_EL2.FB is effectively set to
 * force broadcast of these operations.
 */
static bool tlb_force_broadcast(CPUARMState *env)
{
    return arm_current_el(env) == 1 && (arm_hcr_el2_eff(env) & HCR_FB);
}

static void tlbiall_write(CPUARMState *env, const ARMCPRegInfo *ri,
                          uint64_t value)
{
    /* Invalidate all (TLBIALL) */
    CPUState *cs = env_cpu(env);

    if (tlb_force_broadcast(env)) {
        tlb_flush_all_cpus_synced(cs);
    } else {
        tlb_flush(cs);
    }
}

static void tlbimva_write(CPUARMState *env, const ARMCPRegInfo *ri,
                          uint64_t value)
{
    /* Invalidate single TLB entry by MVA and ASID (TLBIMVA) */
    CPUState *cs = env_cpu(env);

    value &= TARGET_PAGE_MASK;
    if (tlb_force_broadcast(env)) {
        tlb_flush_page_all_cpus_synced(cs, value);
    } else {
        tlb_flush_page(cs, value);
    }
}

static void tlbiasid_write(CPUARMState *env, const ARMCPRegInfo *ri,
                           uint64_t value)
{
    /* Invalidate by ASID (TLBIASID) */
    CPUState *cs = env_cpu(env);

    if (tlb_force_broadcast(env)) {
        tlb_flush_all_cpus_synced(cs);
    } else {
        tlb_flush(cs);
    }
}

static void tlbimvaa_write(CPUARMState *env, const ARMCPRegInfo *ri,
                           uint64_t value)
{
    /* Invalidate single entry by MVA, all ASIDs (TLBIMVAA) */
    CPUState *cs = env_cpu(env);

    value &= TARGET_PAGE_MASK;
    if (tlb_force_broadcast(env)) {
        tlb_flush_page_all_cpus_synced(cs, value);
    } else {
        tlb_flush_page(cs, value);
    }
}

static void tlbimva_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = value & ~MAKE_64BIT_MASK(0, 12);

    tlb_flush_page_by_mmuidx(cs, pageaddr, ARMMMUIdxBit_E2);
}

static void tlbimva_hyp_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = value & ~MAKE_64BIT_MASK(0, 12);

    tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr,
                                             ARMMMUIdxBit_E2);
}

static void tlbiipas2_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = (value & MAKE_64BIT_MASK(0, 28)) << 12;

    tlb_flush_page_by_mmuidx(cs, pageaddr, ARMMMUIdxBit_Stage2);
}

static void tlbiipas2is_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = (value & MAKE_64BIT_MASK(0, 28)) << 12;

    tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr, ARMMMUIdxBit_Stage2);
}

static void tlbiall_nsnh_write(CPUARMState *env, const ARMCPRegInfo *ri,
                               uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx(cs, alle1_tlbmask(env));
}

static void tlbiall_nsnh_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, alle1_tlbmask(env));
}

static void tlbiall_hyp_write(CPUARMState *env, const ARMCPRegInfo *ri,
                              uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx(cs, ARMMMUIdxBit_E2);
}

static void tlbiall_hyp_is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, ARMMMUIdxBit_E2);
}

/*
 * See: D4.7.2 TLB maintenance requirements and the TLB maintenance instructions
 * Page D4-1736 (DDI0487A.b)
 */

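/*
 * When HCR_EL2.{E2H,TGE} == '11', EL0 runs under the EL2&0 translation
 * regime and the EL1&0 regime is not in use, so the TLBI *E1 operations
 * act on the E20_* mmu_idx values rather than the E10_* ones.
 */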
static int vae1_tlbmask(CPUARMState *env)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    uint16_t mask;

    assert(arm_feature(env, ARM_FEATURE_AARCH64));

    if ((hcr & (HCR_E2H | HCR_TGE)) == (HCR_E2H | HCR_TGE)) {
        mask = ARMMMUIdxBit_E20_2 |
               ARMMMUIdxBit_E20_2_PAN |
               ARMMMUIdxBit_E20_0;
    } else {
        /* This is AArch64 only, so we don't need to touch the EL30_x TLBs */
        mask = ARMMMUIdxBit_E10_1 |
               ARMMMUIdxBit_E10_1_PAN |
               ARMMMUIdxBit_E10_0;
    }
    return mask;
}

static int vae2_tlbmask(CPUARMState *env)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    uint16_t mask;

    if (hcr & HCR_E2H) {
        mask = ARMMMUIdxBit_E20_2 |
               ARMMMUIdxBit_E20_2_PAN |
               ARMMMUIdxBit_E20_0;
    } else {
        mask = ARMMMUIdxBit_E2;
    }
    return mask;
}

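/*
 * When top-byte-ignore applies to the half of the address space that the
 * given address lies in, bits [63:56] take no part in translation, so TLB
 * entries only need to be matched on the low 56 address bits.
 */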
/* Return 56 if TBI is enabled, 64 otherwise. */
static int tlbbits_for_regime(CPUARMState *env, ARMMMUIdx mmu_idx,
                              uint64_t addr)
{
    uint64_t tcr = regime_tcr(env, mmu_idx);
    int tbi = aa64_va_parameter_tbi(tcr, mmu_idx);
    int select = extract64(addr, 55, 1);

    return (tbi >> select) & 1 ? 56 : 64;
}

static int vae1_tlbbits(CPUARMState *env, uint64_t addr)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    ARMMMUIdx mmu_idx;

    assert(arm_feature(env, ARM_FEATURE_AARCH64));

    /* Only the regime of the mmu_idx below is significant. */
    if ((hcr & (HCR_E2H | HCR_TGE)) == (HCR_E2H | HCR_TGE)) {
        mmu_idx = ARMMMUIdx_E20_0;
    } else {
        mmu_idx = ARMMMUIdx_E10_0;
    }

    return tlbbits_for_regime(env, mmu_idx, addr);
}

static int vae2_tlbbits(CPUARMState *env, uint64_t addr)
{
    uint64_t hcr = arm_hcr_el2_eff(env);
    ARMMMUIdx mmu_idx;

    /*
     * Only the regime of the mmu_idx below is significant.
     * Regime EL2&0 has two ranges with separate TBI configuration, while EL2
     * only has one.
     */
    if (hcr & HCR_E2H) {
        mmu_idx = ARMMMUIdx_E20_2;
    } else {
        mmu_idx = ARMMMUIdx_E2;
    }

    return tlbbits_for_regime(env, mmu_idx, addr);
}

static void tlbi_aa64_vmalle1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                      uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
}

static void tlbi_aa64_vmalle1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);

    if (tlb_force_broadcast(env)) {
        tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
    } else {
        tlb_flush_by_mmuidx(cs, mask);
    }
}

static int e2_tlbmask(CPUARMState *env)
{
    return (ARMMMUIdxBit_E20_0 |
            ARMMMUIdxBit_E20_2 |
            ARMMMUIdxBit_E20_2_PAN |
            ARMMMUIdxBit_E2);
}

static void tlbi_aa64_alle1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = alle1_tlbmask(env);

    tlb_flush_by_mmuidx(cs, mask);
}

static void tlbi_aa64_alle2_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = e2_tlbmask(env);

    tlb_flush_by_mmuidx(cs, mask);
}

static void tlbi_aa64_alle3_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    ARMCPU *cpu = env_archcpu(env);
    CPUState *cs = CPU(cpu);

    tlb_flush_by_mmuidx(cs, ARMMMUIdxBit_E3);
}

static void tlbi_aa64_alle1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = alle1_tlbmask(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
}

static void tlbi_aa64_alle2is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = e2_tlbmask(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, mask);
}

static void tlbi_aa64_alle3is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_by_mmuidx_all_cpus_synced(cs, ARMMMUIdxBit_E3);
}

static void tlbi_aa64_vae2_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    /*
     * Invalidate by VA, EL2
     * Currently handles both VAE2 and VALE2, since we don't support
     * flush-last-level-only.
     */
    CPUState *cs = env_cpu(env);
    int mask = vae2_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae2_tlbbits(env, pageaddr);

    tlb_flush_page_bits_by_mmuidx(cs, pageaddr, mask, bits);
}

static void tlbi_aa64_vae3_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    /*
     * Invalidate by VA, EL3
     * Currently handles both VAE3 and VALE3, since we don't support
     * flush-last-level-only.
     */
    ARMCPU *cpu = env_archcpu(env);
    CPUState *cs = CPU(cpu);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);

    tlb_flush_page_by_mmuidx(cs, pageaddr, ARMMMUIdxBit_E3);
}

static void tlbi_aa64_vae1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                   uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae1_tlbbits(env, pageaddr);

    tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr, mask, bits);
}

static void tlbi_aa64_vae1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                 uint64_t value)
{
    /*
     * Invalidate by VA, EL1&0 (AArch64 version).
     * Currently handles all of VAE1, VAAE1, VAALE1 and VALE1,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */
    CPUState *cs = env_cpu(env);
    int mask = vae1_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae1_tlbbits(env, pageaddr);

    if (tlb_force_broadcast(env)) {
        tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr, mask, bits);
    } else {
        tlb_flush_page_bits_by_mmuidx(cs, pageaddr, mask, bits);
    }
}

static void tlbi_aa64_vae2is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                   uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = vae2_tlbmask(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = vae2_tlbbits(env, pageaddr);

    tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr, mask, bits);
}

static void tlbi_aa64_vae3is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                   uint64_t value)
{
    CPUState *cs = env_cpu(env);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);
    int bits = tlbbits_for_regime(env, ARMMMUIdx_E3, pageaddr);

    tlb_flush_page_bits_by_mmuidx_all_cpus_synced(cs, pageaddr,
                                                  ARMMMUIdxBit_E3, bits);
}

static int ipas2e1_tlbmask(CPUARMState *env, int64_t value)
{
    /*
     * The MSB of value is the NS field, which only applies if SEL2
     * is implemented and SCR_EL3.NS is not set (i.e. in secure mode).
     */
    return (value >= 0
            && cpu_isar_feature(aa64_sel2, env_archcpu(env))
            && arm_is_secure_below_el3(env)
            ? ARMMMUIdxBit_Stage2_S
            : ARMMMUIdxBit_Stage2);
}

static void tlbi_aa64_ipas2e1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = ipas2e1_tlbmask(env, value);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);

    if (tlb_force_broadcast(env)) {
        tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr, mask);
    } else {
        tlb_flush_page_by_mmuidx(cs, pageaddr, mask);
    }
}

static void tlbi_aa64_ipas2e1is_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                      uint64_t value)
{
    CPUState *cs = env_cpu(env);
    int mask = ipas2e1_tlbmask(env, value);
    uint64_t pageaddr = sextract64(value << 12, 0, 56);

    tlb_flush_page_by_mmuidx_all_cpus_synced(cs, pageaddr, mask);
}

static const ARMCPRegInfo tlbi_not_v7_cp_reginfo[] = {
    /*
     * MMU TLB control. Note that the wildcarding means we cover not just
     * the unified TLB ops but also the dside/iside/inner-shareable variants.
     */
    { .name = "TLBIALL", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 0, .access = PL1_W, .writefn = tlbiall_write,
      .type = ARM_CP_NO_RAW },
    { .name = "TLBIMVA", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 1, .access = PL1_W, .writefn = tlbimva_write,
      .type = ARM_CP_NO_RAW },
    { .name = "TLBIASID", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 2, .access = PL1_W, .writefn = tlbiasid_write,
      .type = ARM_CP_NO_RAW },
    { .name = "TLBIMVAA", .cp = 15, .crn = 8, .crm = CP_ANY,
      .opc1 = CP_ANY, .opc2 = 3, .access = PL1_W, .writefn = tlbimvaa_write,
      .type = ARM_CP_NO_RAW },
};

static const ARMCPRegInfo tlbi_v7_cp_reginfo[] = {
    /* 32 bit ITLB invalidates */
    { .name = "ITLBIALL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiall_write },
    { .name = "ITLBIMVA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "ITLBIASID", .cp = 15, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiasid_write },
    /* 32 bit DTLB invalidates */
    { .name = "DTLBIALL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiall_write },
    { .name = "DTLBIMVA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "DTLBIASID", .cp = 15, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiasid_write },
    /* 32 bit TLB invalidates */
    { .name = "TLBIALL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiall_write },
    { .name = "TLBIMVA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "TLBIASID", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbiasid_write },
    { .name = "TLBIMVAA", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 3,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimvaa_write },
};

static const ARMCPRegInfo tlbi_v7mp_cp_reginfo[] = {
    /* 32 bit TLB invalidates, Inner Shareable */
    { .name = "TLBIALLIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbiall_is_write },
    { .name = "TLBIMVAIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimva_is_write },
    { .name = "TLBIASIDIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 2,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbiasid_is_write },
    { .name = "TLBIMVAAIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 3,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimvaa_is_write },
};

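/*
 * The ARM_CP_ADD_TLBI_NXS type flag on the AArch64 ops below also registers
 * the corresponding FEAT_XS TLBI ..nXS encoding; see the handling of that
 * flag in the cpregs code.
 */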
static const ARMCPRegInfo tlbi_v8_cp_reginfo[] = {
    /* AArch32 TLB invalidate last level of translation table walk */
    { .name = "TLBIMVALIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimva_is_write },
    { .name = "TLBIMVAALIS", .cp = 15, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 7,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlbis,
      .writefn = tlbimvaa_is_write },
    { .name = "TLBIMVAL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimva_write },
    { .name = "TLBIMVAAL", .cp = 15, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 7,
      .type = ARM_CP_NO_RAW, .access = PL1_W, .accessfn = access_ttlb,
      .writefn = tlbimvaa_write },
    { .name = "TLBIMVALH", .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_write },
    { .name = "TLBIMVALHIS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_is_write },
    { .name = "TLBIIPAS2",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2_hyp_write },
    { .name = "TLBIIPAS2IS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2is_hyp_write },
    { .name = "TLBIIPAS2L",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2_hyp_write },
    { .name = "TLBIIPAS2LIS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 5,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiipas2is_hyp_write },
    /* AArch64 TLBI operations */
    { .name = "TLBI_VMALLE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 0,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVMALLE1IS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAE1IS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_ASIDE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 2,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIASIDE1IS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAAE1IS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVALE1IS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VAALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 3, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAALE1IS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VMALLE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 0,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVMALLE1,
      .writefn = tlbi_aa64_vmalle1_write },
    { .name = "TLBI_VAE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAE1,
      .writefn = tlbi_aa64_vae1_write },
    { .name = "TLBI_ASIDE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 2,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIASIDE1,
      .writefn = tlbi_aa64_vmalle1_write },
    { .name = "TLBI_VAAE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAAE1,
      .writefn = tlbi_aa64_vae1_write },
    { .name = "TLBI_VALE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVALE1,
      .writefn = tlbi_aa64_vae1_write },
    { .name = "TLBI_VAALE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 7, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAALE1,
      .writefn = tlbi_aa64_vae1_write },
    { .name = "TLBI_IPAS2E1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 1,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ipas2e1is_write },
    { .name = "TLBI_IPAS2LE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 5,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ipas2e1is_write },
    { .name = "TLBI_ALLE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 4,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
    { .name = "TLBI_VMALLS12E1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
    { .name = "TLBI_IPAS2E1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 1,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ipas2e1_write },
    { .name = "TLBI_IPAS2LE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 5,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ipas2e1_write },
    { .name = "TLBI_ALLE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 4,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1_write },
    { .name = "TLBI_VMALLS12E1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
};

static const ARMCPRegInfo tlbi_el2_cp_reginfo[] = {
    { .name = "TLBIALLNSNH",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 4,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_nsnh_write },
    { .name = "TLBIALLNSNHIS",
      .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 4,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_nsnh_is_write },
    { .name = "TLBIALLH", .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_hyp_write },
    { .name = "TLBIALLHIS", .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 0,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbiall_hyp_is_write },
    { .name = "TLBIMVAH", .cp = 15, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_write },
    { .name = "TLBIMVAHIS", .cp = 15, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 1,
      .type = ARM_CP_NO_RAW, .access = PL2_W,
      .writefn = tlbimva_hyp_is_write },
    { .name = "TLBI_ALLE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 0,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_alle2_write },
    { .name = "TLBI_VAE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2_write },
    { .name = "TLBI_VALE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 7, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2_write },
    { .name = "TLBI_ALLE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 0,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_alle2is_write },
    { .name = "TLBI_VAE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
    { .name = "TLBI_VALE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 3, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
};

static const ARMCPRegInfo tlbi_el3_cp_reginfo[] = {
    { .name = "TLBI_ALLE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 3, .opc2 = 0,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle3is_write },
    { .name = "TLBI_VAE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 3, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
    { .name = "TLBI_VALE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 3, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
    { .name = "TLBI_ALLE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 0,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle3_write },
    { .name = "TLBI_VAE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3_write },
    { .name = "TLBI_VALE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3_write },
};

#ifdef TARGET_AARCH64
typedef struct {
    uint64_t base;
    uint64_t length;
} TLBIRange;

static ARMGranuleSize tlbi_range_tg_to_gran_size(int tg)
{
    /*
     * Note that the TLBI range TG field encoding differs from both
     * TG0 and TG1 encodings.
     */
    switch (tg) {
    case 1:
        return Gran4K;
    case 2:
        return Gran16K;
    case 3:
        return Gran64K;
    default:
        return GranInvalid;
    }
}

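/*
 * Decode a TLBI-range payload (FEAT_TLBIRANGE) into a base address and
 * length.  The register value encodes BaseADDR in bits [36:0], NUM in
 * [43:39], SCALE in [45:44] and TG in [47:46]; the range covers
 * (NUM + 1) << (5 * SCALE + 1) translation granules starting at BaseADDR.
 * For example, with a 4K granule, SCALE = 1 and NUM = 7 the length is
 * 8 << (5 * 1 + 1 + 12) = 2MiB.
 */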
static TLBIRange tlbi_aa64_get_range(CPUARMState *env, ARMMMUIdx mmuidx,
                                     uint64_t value)
{
    unsigned int page_size_granule, page_shift, num, scale, exponent;
    /* Extract one bit to represent the va selector in use. */
    uint64_t select = sextract64(value, 36, 1);
    ARMVAParameters param = aa64_va_parameters(env, select, mmuidx, true, false);
    TLBIRange ret = { };
    ARMGranuleSize gran;

    page_size_granule = extract64(value, 46, 2);
    gran = tlbi_range_tg_to_gran_size(page_size_granule);

    /* The granule encoded in value must match the granule in use. */
    if (gran != param.gran) {
        qemu_log_mask(LOG_GUEST_ERROR, "Invalid tlbi page size granule %d\n",
                      page_size_granule);
        return ret;
    }

    page_shift = arm_granule_bits(gran);
    num = extract64(value, 39, 5);
    scale = extract64(value, 44, 2);
    exponent = (5 * scale) + 1;

    ret.length = (num + 1) << (exponent + page_shift);

    if (param.select) {
        ret.base = sextract64(value, 0, 37);
    } else {
        ret.base = extract64(value, 0, 37);
    }
    if (param.ds) {
        /*
         * With DS=1, BaseADDR is always shifted 16 so that it is able
         * to address all 52 va bits.  The input address is perforce
         * aligned on a 64k boundary regardless of translation granule.
         */
        page_shift = 16;
    }
    ret.base <<= page_shift;

    return ret;
}

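/*
 * All the mmu_idx values in idxmap share one translation regime, so the
 * lowest set index is enough to look up the regime's granule and TBI
 * parameters; the flush itself is then applied to every index in the map.
 */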
static void do_rvae_write(CPUARMState *env, uint64_t value,
                          int idxmap, bool synced)
{
    ARMMMUIdx one_idx = ARM_MMU_IDX_A | ctz32(idxmap);
    TLBIRange range;
    int bits;

    range = tlbi_aa64_get_range(env, one_idx, value);
    bits = tlbbits_for_regime(env, one_idx, range.base);

    if (synced) {
        tlb_flush_range_by_mmuidx_all_cpus_synced(env_cpu(env),
                                                  range.base,
                                                  range.length,
                                                  idxmap,
                                                  bits);
    } else {
        tlb_flush_range_by_mmuidx(env_cpu(env), range.base,
                                  range.length, idxmap, bits);
    }
}

static void tlbi_aa64_rvae1_write(CPUARMState *env,
                                  const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    /*
     * Invalidate by VA range, EL1&0.
     * Currently handles all of RVAE1, RVAAE1, RVAALE1 and RVALE1,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */

    do_rvae_write(env, value, vae1_tlbmask(env),
                  tlb_force_broadcast(env));
}

static void tlbi_aa64_rvae1is_write(CPUARMState *env,
                                    const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    /*
     * Invalidate by VA range, Inner/Outer Shareable EL1&0.
     * Currently handles all of RVAE1IS, RVAE1OS, RVAAE1IS, RVAAE1OS,
     * RVAALE1IS, RVAALE1OS, RVALE1IS and RVALE1OS, since we don't support
     * flush-for-specific-ASID-only, flush-last-level-only or inner/outer
     * shareable specific flushes.
     */

    do_rvae_write(env, value, vae1_tlbmask(env), true);
}

static void tlbi_aa64_rvae2_write(CPUARMState *env,
                                  const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    /*
     * Invalidate by VA range, EL2.
     * Currently handles all of RVAE2 and RVALE2,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */

    do_rvae_write(env, value, vae2_tlbmask(env),
                  tlb_force_broadcast(env));
}

static void tlbi_aa64_rvae2is_write(CPUARMState *env,
                                    const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    /*
     * Invalidate by VA range, Inner/Outer Shareable, EL2.
     * Currently handles all of RVAE2IS, RVAE2OS, RVALE2IS and RVALE2OS,
     * since we don't support flush-for-specific-ASID-only,
     * flush-last-level-only or inner/outer shareable specific flushes.
     */

    do_rvae_write(env, value, vae2_tlbmask(env), true);
}

static void tlbi_aa64_rvae3_write(CPUARMState *env,
                                  const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    /*
     * Invalidate by VA range, EL3.
     * Currently handles all of RVAE3 and RVALE3,
     * since we don't support flush-for-specific-ASID-only or
     * flush-last-level-only.
     */

    do_rvae_write(env, value, ARMMMUIdxBit_E3, tlb_force_broadcast(env));
}

static void tlbi_aa64_rvae3is_write(CPUARMState *env,
                                    const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    /*
     * Invalidate by VA range, EL3, Inner/Outer Shareable.
     * Currently handles all of RVAE3IS, RVAE3OS, RVALE3IS and RVALE3OS,
     * since we don't support flush-for-specific-ASID-only,
     * flush-last-level-only or inner/outer shareable specific flushes.
     */

    do_rvae_write(env, value, ARMMMUIdxBit_E3, true);
}

static void tlbi_aa64_ripas2e1_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                     uint64_t value)
{
    do_rvae_write(env, value, ipas2e1_tlbmask(env, value),
                  tlb_force_broadcast(env));
}

static void tlbi_aa64_ripas2e1is_write(CPUARMState *env,
                                       const ARMCPRegInfo *ri,
                                       uint64_t value)
{
    do_rvae_write(env, value, ipas2e1_tlbmask(env, value), true);
}

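/*
 * FEAT_TLBIRANGE ops.  QEMU does not distinguish Inner Shareable from
 * Outer Shareable invalidation, so the ...OS encodings reuse the ...IS
 * write functions.
 */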
static const ARMCPRegInfo tlbirange_reginfo[] = {
    { .name = "TLBI_RVAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAAE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAAE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVALE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAALE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 2, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbis,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAALE1IS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAAE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVALE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 5, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAALE1OS,
      .writefn = tlbi_aa64_rvae1is_write },
    { .name = "TLBI_RVAE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 1,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RVAAE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAAE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RVALE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVALE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RVAALE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 6, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlb,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIRVAALE1,
      .writefn = tlbi_aa64_rvae1_write },
    { .name = "TLBI_RIPAS2E1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 2,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1is_write },
    { .name = "TLBI_RIPAS2LE1IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 0, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1is_write },
    { .name = "TLBI_RVAE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 2, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RVALE2IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 2, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RIPAS2E1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 2,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1_write },
    { .name = "TLBI_RIPAS2LE1", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_ripas2e1_write },
    { .name = "TLBI_RVAE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 5, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RVALE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 5, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2is_write },
    { .name = "TLBI_RVAE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 6, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2_write },
    { .name = "TLBI_RVALE2", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 6, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_rvae2_write },
    { .name = "TLBI_RVAE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 2, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVALE3IS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 2, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVAE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 5, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVALE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 5, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3is_write },
    { .name = "TLBI_RVAE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 6, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3_write },
    { .name = "TLBI_RVALE3", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 6, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_rvae3_write },
};

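/*
 * FEAT_TLBIOS Outer Shareable ops: as with the range ops above, these are
 * implemented with the same broadcast flushes as the Inner Shareable
 * encodings.
 */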
static const ARMCPRegInfo tlbios_reginfo[] = {
    { .name = "TLBI_VMALLE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 0,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVMALLE1OS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 1,
      .fgt = FGT_TLBIVAE1OS,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_ASIDE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 2,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIASIDE1OS,
      .writefn = tlbi_aa64_vmalle1is_write },
    { .name = "TLBI_VAAE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 3,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAAE1OS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 5,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVALE1OS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_VAALE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 0, .crn = 8, .crm = 1, .opc2 = 7,
      .access = PL1_W, .accessfn = access_ttlbos,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .fgt = FGT_TLBIVAALE1OS,
      .writefn = tlbi_aa64_vae1is_write },
    { .name = "TLBI_ALLE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 0,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_alle2is_write },
    { .name = "TLBI_VAE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 1,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
    { .name = "TLBI_ALLE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 4,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
    { .name = "TLBI_VALE2OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 5,
      .access = PL2_W,
      .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS | ARM_CP_EL3_NO_EL2_UNDEF,
      .writefn = tlbi_aa64_vae2is_write },
    { .name = "TLBI_VMALLS12E1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 1, .opc2 = 6,
      .access = PL2_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle1is_write },
    { .name = "TLBI_IPAS2E1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 0,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_RIPAS2E1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 3,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_IPAS2LE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 4,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_RIPAS2LE1OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 4, .crn = 8, .crm = 4, .opc2 = 7,
      .access = PL2_W, .type = ARM_CP_NOP | ARM_CP_ADD_TLBI_NXS },
    { .name = "TLBI_ALLE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 0,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_alle3is_write },
    { .name = "TLBI_VAE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 1,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
    { .name = "TLBI_VALE3OS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 5,
      .access = PL3_W, .type = ARM_CP_NO_RAW | ARM_CP_ADD_TLBI_NXS,
      .writefn = tlbi_aa64_vae3is_write },
};

static void tlbi_aa64_paall_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                  uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush(cs);
}

static void tlbi_aa64_paallos_write(CPUARMState *env, const ARMCPRegInfo *ri,
                                    uint64_t value)
{
    CPUState *cs = env_cpu(env);

    tlb_flush_all_cpus_synced(cs);
}

static const ARMCPRegInfo tlbi_rme_reginfo[] = {
    { .name = "TLBI_PAALL", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 7, .opc2 = 4,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paall_write },
    { .name = "TLBI_PAALLOS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 1, .opc2 = 4,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paallos_write },
    /*
     * QEMU does not have a way to invalidate by physical address, thus
     * invalidating a range of physical addresses is accomplished by
     * flushing all tlb entries in the outer shareable domain,
     * just like PAALLOS.
     */
    { .name = "TLBI_RPALOS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 4, .opc2 = 7,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paallos_write },
    { .name = "TLBI_RPAOS", .state = ARM_CP_STATE_AA64,
      .opc0 = 1, .opc1 = 6, .crn = 8, .crm = 4, .opc2 = 3,
      .access = PL3_W, .type = ARM_CP_NO_RAW,
      .writefn = tlbi_aa64_paallos_write },
};

#endif

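/*
 * Register the TLBI cpregs appropriate to the CPU's architecture version
 * and features.
 */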
void define_tlb_insn_regs(ARMCPU *cpu)
{
    CPUARMState *env = &cpu->env;

    if (!arm_feature(env, ARM_FEATURE_V7)) {
        define_arm_cp_regs(cpu, tlbi_not_v7_cp_reginfo);
    } else {
        define_arm_cp_regs(cpu, tlbi_v7_cp_reginfo);
    }
    if (arm_feature(env, ARM_FEATURE_V7MP) &&
        !arm_feature(env, ARM_FEATURE_PMSA)) {
        define_arm_cp_regs(cpu, tlbi_v7mp_cp_reginfo);
    }
    if (arm_feature(env, ARM_FEATURE_V8)) {
        define_arm_cp_regs(cpu, tlbi_v8_cp_reginfo);
    }
    /*
     * We retain the existing logic for when to register these TLBI
     * ops (i.e. matching the condition for el2_cp_reginfo[] in
     * helper.c), but we will be able to simplify this later.
     */
    if (arm_feature(env, ARM_FEATURE_EL2)) {
        define_arm_cp_regs(cpu, tlbi_el2_cp_reginfo);
    }
    if (arm_feature(env, ARM_FEATURE_EL3)) {
        define_arm_cp_regs(cpu, tlbi_el3_cp_reginfo);
    }
#ifdef TARGET_AARCH64
    if (cpu_isar_feature(aa64_tlbirange, cpu)) {
        define_arm_cp_regs(cpu, tlbirange_reginfo);
    }
    if (cpu_isar_feature(aa64_tlbios, cpu)) {
        define_arm_cp_regs(cpu, tlbios_reginfo);
    }
    if (cpu_isar_feature(aa64_rme, cpu)) {
        define_arm_cp_regs(cpu, tlbi_rme_reginfo);
    }
#endif
}