Lines Matching +full:secure +full:- +full:reg +full:- +full:access
6 * SPDX-License-Identifier: GPL-2.0-or-later
12 #include "cpu-features.h"
14 #include "exec/helper-proto.h"
15 #include "qemu/main-loop.h"
18 #include "exec/exec-all.h"
19 #include "exec/page-protection.h"
22 #include "semihosting/common-semi.h"
29 uint32_t reg, uint32_t val) in v7m_msr_xpsr() argument
32 if (!(reg & 4)) { in v7m_msr_xpsr()
45 static uint32_t v7m_mrs_xpsr(CPUARMState *env, uint32_t reg, unsigned el) in v7m_mrs_xpsr() argument
49 if ((reg & 1) && el) { in v7m_mrs_xpsr()
52 if (!(reg & 4)) { in v7m_mrs_xpsr()
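
Editorial note: a minimal standalone sketch (not QEMU code) of what the (reg & 1) / (reg & 4) tests in the matched v7m_mrs_xpsr() lines are selecting. The SYSm values 0..7 name different views of the xPSR; the sample APSR/IPSR values are invented for the demo.

#include <stdint.h>
#include <stdio.h>

static uint32_t mrs_xpsr_sketch(uint32_t sysm, int privileged,
                                uint32_t apsr, uint32_t ipsr)
{
    uint32_t val = 0;

    if ((sysm & 1) && privileged) {
        val |= ipsr;               /* IPSR is visible only to privileged code */
    }
    if (!(sysm & 4)) {
        val |= apsr;               /* APSR flags included unless bit 2 is set */
    }
    /* EPSR (selected by bit 1) reads as zero via MRS, so nothing to add */
    return val;
}

int main(void)
{
    /* SYSm 0..7: APSR, IAPSR, EAPSR, XPSR, -, IPSR, EPSR, IEPSR */
    for (uint32_t sysm = 0; sysm < 8; sysm++) {
        printf("SYSm %u -> 0x%08x\n", sysm,
               mrs_xpsr_sketch(sysm, 1, 0xf0000000u, 0x16u));
    }
    return 0;
}
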
62 uint32_t arm_v7m_mrs_control(CPUARMState *env, uint32_t secure) in arm_v7m_mrs_control() argument
64 uint32_t value = env->v7m.control[secure]; in arm_v7m_mrs_control()
66 if (!secure) { in arm_v7m_mrs_control()
68 value |= env->v7m.control[M_REG_S] & R_V7M_CONTROL_FPCA_MASK; in arm_v7m_mrs_control()
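
Editorial note: a minimal standalone sketch of the banked-CONTROL read shown in the matched arm_v7m_mrs_control() lines. FPCA is stored only in the Secure bank but is merged into the Non-secure view. The names m_ctx and CTRL_FPCA are invented for the example.

#include <stdint.h>
#include <stdio.h>

enum { M_NS = 0, M_S = 1 };
#define CTRL_FPCA (1u << 2)        /* CONTROL.FPCA bit position */

struct m_ctx {
    uint32_t control[2];           /* [0] = Non-secure bank, [1] = Secure bank */
};

static uint32_t mrs_control(const struct m_ctx *c, int secure)
{
    uint32_t value = c->control[secure];

    if (!secure) {
        /* the NS view still reports FPCA, which lives in the Secure bank */
        value |= c->control[M_S] & CTRL_FPCA;
    }
    return value;
}

int main(void)
{
    struct m_ctx c = { .control = { 0x1 /* NS: nPRIV */, CTRL_FPCA } };

    printf("NS view: 0x%x, S view: 0x%x\n",
           mrs_control(&c, M_NS), mrs_control(&c, M_S));
    return 0;
}
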
78 uint32_t reg = extract32(maskreg, 0, 8); in HELPER() local
80 switch (reg) { in HELPER()
81 case 0 ... 7: /* xPSR sub-fields */ in HELPER()
82 v7m_msr_xpsr(env, mask, reg, val); in HELPER()
85 /* There are no sub-fields that are actually writable from EL0. */ in HELPER()
93 uint32_t HELPER(v7m_mrs)(CPUARMState *env, uint32_t reg) in HELPER()
95 switch (reg) { in HELPER()
96 case 0 ... 7: /* xPSR sub-fields */ in HELPER()
97 return v7m_mrs_xpsr(env, reg, 0); in HELPER()
108 /* translate.c should never generate calls here in user-only mode */ in HELPER()
114 /* translate.c should never generate calls here in user-only mode */ in HELPER()
120 /* translate.c should never generate calls here in user-only mode */ in HELPER()
126 /* translate.c should never generate calls here in user-only mode */ in HELPER()
132 /* translate.c should never generate calls here in user-only mode */ in HELPER()
140 * user-only emulation we don't have the MPU. in HELPER()
189 bool negpri = armv7m_nvic_neg_prio_requested(env->nvic, secstate); in arm_v7m_mmu_idx_for_secstate_and_priv()
198 !(env->v7m.control[secstate] & 1); in arm_v7m_mmu_idx_for_secstate()
217 CPUARMState *env = &cpu->env; in v7m_stack_write()
221 bool secure = mmu_idx & ARM_MMU_IDX_M_S; in v7m_stack_write() local
232 env->v7m.sfsr |= R_V7M_SFSR_LSPERR_MASK; in v7m_stack_write()
237 env->v7m.sfsr |= R_V7M_SFSR_AUVIOL_MASK; in v7m_stack_write()
239 env->v7m.sfsr |= R_V7M_SFSR_SFARVALID_MASK; in v7m_stack_write()
240 env->v7m.sfar = addr; in v7m_stack_write()
247 env->v7m.cfsr[secure] |= R_V7M_CFSR_MLSPERR_MASK; in v7m_stack_write()
251 env->v7m.cfsr[secure] |= R_V7M_CFSR_MSTKERR_MASK; in v7m_stack_write()
254 exc_secure = secure; in v7m_stack_write()
264 env->v7m.cfsr[M_REG_NS] |= R_V7M_CFSR_LSPERR_MASK; in v7m_stack_write()
267 env->v7m.cfsr[M_REG_NS] |= R_V7M_CFSR_STKERR_MASK; in v7m_stack_write()
290 armv7m_nvic_set_pending_derived(env->nvic, exc, exc_secure); in v7m_stack_write()
293 armv7m_nvic_set_pending_lazyfp(env->nvic, exc, exc_secure); in v7m_stack_write()
305 CPUARMState *env = &cpu->env; in v7m_stack_read()
309 bool secure = mmu_idx & ARM_MMU_IDX_M_S; in v7m_stack_read() local
319 env->v7m.sfsr |= R_V7M_SFSR_AUVIOL_MASK | R_V7M_SFSR_SFARVALID_MASK; in v7m_stack_read()
320 env->v7m.sfar = addr; in v7m_stack_read()
326 env->v7m.cfsr[secure] |= R_V7M_CFSR_MUNSTKERR_MASK; in v7m_stack_read()
328 exc_secure = secure; in v7m_stack_read()
338 env->v7m.cfsr[M_REG_NS] |= R_V7M_CFSR_UNSTKERR_MASK; in v7m_stack_read()
355 armv7m_nvic_set_pending(env->nvic, exc, exc_secure); in v7m_stack_read()
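
Editorial note: a minimal standalone sketch of the fault-bookkeeping pattern the matched v7m_stack_write()/v7m_stack_read() lines follow: a failed stacking or unstacking access records a syndrome bit in the banked CFSR (or in SFSR for SecureFault) and then pends the matching exception. Structure and bit names below are illustrative, not the QEMU definitions.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

enum stack_op { STACK_PUSH, STACK_POP };

#define CFSR_MUNSTKERR (1u << 3)   /* MemManage fault while unstacking */
#define CFSR_MSTKERR   (1u << 4)   /* MemManage fault while stacking */

struct m_faults {
    uint32_t cfsr[2];              /* [0] = Non-secure bank, [1] = Secure bank */
};

static void record_mpu_stack_fault(struct m_faults *f, bool secure,
                                   enum stack_op op)
{
    /* pick the syndrome bit by direction, then bank it by security state */
    f->cfsr[secure] |= (op == STACK_PUSH) ? CFSR_MSTKERR : CFSR_MUNSTKERR;
    /* ...after which the caller pends MemManage for that security state */
}

int main(void)
{
    struct m_faults f = { { 0, 0 } };

    record_mpu_stack_fault(&f, true, STACK_PUSH);
    record_mpu_stack_fault(&f, false, STACK_POP);
    printf("CFSR_S = 0x%x, CFSR_NS = 0x%x\n", f.cfsr[1], f.cfsr[0]);
    return 0;
}
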
368 bool is_secure = env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_S_MASK; in HELPER()
369 bool negpri = !(env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_HFRDY_MASK); in HELPER()
370 bool is_priv = !(env->v7m.fpccr[is_secure] & R_V7M_FPCCR_USER_MASK); in HELPER()
371 bool splimviol = env->v7m.fpccr[is_secure] & R_V7M_FPCCR_SPLIMVIOL_MASK; in HELPER()
372 uint32_t fpcar = env->v7m.fpcar[is_secure]; in HELPER()
374 bool ts = is_secure && (env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_TS_MASK); in HELPER()
380 /* Check the background context had access to the FPU */ in HELPER()
382 armv7m_nvic_set_pending_lazyfp(env->nvic, ARMV7M_EXCP_USAGE, is_secure); in HELPER()
383 env->v7m.cfsr[is_secure] |= R_V7M_CFSR_NOCP_MASK; in HELPER()
385 } else if (!is_secure && !extract32(env->v7m.nsacr, 10, 1)) { in HELPER()
386 armv7m_nvic_set_pending_lazyfp(env->nvic, ARMV7M_EXCP_USAGE, M_REG_S); in HELPER()
387 env->v7m.cfsr[M_REG_S] |= R_V7M_CFSR_NOCP_MASK; in HELPER()
417 env->v7m.vpr, mmu_idx, STACK_LAZYFP); in HELPER()
430 armv7m_nvic_can_take_pending_exception(env->nvic); in HELPER()
438 env->v7m.fpccr[is_secure] &= ~R_V7M_FPCCR_LSPACT_MASK; in HELPER()
449 env->v7m.vpr = 0; in HELPER()
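
Editorial note: a minimal standalone sketch of the two-stage FPU access check visible in the matched lazy-FP-preservation lines. CPACR decides whether the owning context may use the FPU at all; if the owner is Non-secure, NSACR.CP10 can additionally forbid it, and in that case the NOCP UsageFault targets the Secure state. All names here are invented for the example.

#include <stdbool.h>
#include <stdio.h>

enum fp_check { FP_OK, FP_NOCP_OWNER, FP_NOCP_SECURE };

static enum fp_check check_fp_access(bool cpacr_pass, bool owner_secure,
                                     bool nsacr_cp10)
{
    if (!cpacr_pass) {
        return FP_NOCP_OWNER;      /* UsageFault in the owner's state */
    }
    if (!owner_secure && !nsacr_cp10) {
        return FP_NOCP_SECURE;     /* UsageFault pended to the Secure state */
    }
    return FP_OK;
}

int main(void)
{
    printf("%d %d %d\n",
           check_fp_access(true, false, true),    /* FP_OK */
           check_fp_access(false, true, true),    /* FP_NOCP_OWNER */
           check_fp_access(true, false, false));  /* FP_NOCP_SECURE */
    return 0;
}
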
470 env->v7m.control[secstate] = in write_v7m_control_spsel_for_secstate()
471 deposit32(env->v7m.control[secstate], in write_v7m_control_spsel_for_secstate()
475 if (secstate == env->v7m.secure) { in write_v7m_control_spsel_for_secstate()
480 tmp = env->v7m.other_sp; in write_v7m_control_spsel_for_secstate()
481 env->v7m.other_sp = env->regs[13]; in write_v7m_control_spsel_for_secstate()
482 env->regs[13] = tmp; in write_v7m_control_spsel_for_secstate()
493 write_v7m_control_spsel_for_secstate(env, new_spsel, env->v7m.secure); in write_v7m_control_spsel()
505 env->v7m.exception = new_exc; in write_v7m_exception()
510 tmp = env->v7m.other_sp; in write_v7m_exception()
511 env->v7m.other_sp = env->regs[13]; in write_v7m_exception()
512 env->regs[13] = tmp; in write_v7m_exception()
521 if (env->v7m.secure == new_secstate) { in switch_v7m_security_state()
526 * All the banked state is accessed by looking at env->v7m.secure in switch_v7m_security_state()
529 new_ss_msp = env->v7m.other_ss_msp; in switch_v7m_security_state()
530 new_ss_psp = env->v7m.other_ss_psp; in switch_v7m_security_state()
533 env->v7m.other_ss_psp = env->regs[13]; in switch_v7m_security_state()
534 env->v7m.other_ss_msp = env->v7m.other_sp; in switch_v7m_security_state()
536 env->v7m.other_ss_msp = env->regs[13]; in switch_v7m_security_state()
537 env->v7m.other_ss_psp = env->v7m.other_sp; in switch_v7m_security_state()
540 env->v7m.secure = new_secstate; in switch_v7m_security_state()
543 env->regs[13] = new_ss_psp; in switch_v7m_security_state()
544 env->v7m.other_sp = new_ss_msp; in switch_v7m_security_state()
546 env->regs[13] = new_ss_msp; in switch_v7m_security_state()
547 env->v7m.other_sp = new_ss_psp; in switch_v7m_security_state()
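
Editorial note: a minimal standalone sketch of the stack-pointer swap performed by the matched write_v7m_control_spsel_for_secstate()/switch_v7m_security_state() lines. Only the active SP lives in r13; the de-selected one is parked in a shadow slot and the two are exchanged when the selection changes. Names are invented for the example.

#include <stdint.h>
#include <stdio.h>

struct sp_bank {
    uint32_t r13;                  /* currently selected stack pointer */
    uint32_t other_sp;             /* the de-selected MSP or PSP */
};

static void spsel_switch(struct sp_bank *b)
{
    uint32_t tmp = b->other_sp;    /* same three-way swap as the matched lines */
    b->other_sp = b->r13;
    b->r13 = tmp;
}

int main(void)
{
    struct sp_bank b = { .r13 = 0x20001000u, .other_sp = 0x20002000u };

    spsel_switch(&b);              /* e.g. CONTROL.SPSEL flipped in Thread mode */
    printf("r13=0x%x other=0x%x\n", b.r13, b.other_sp);
    return 0;
}
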
555 * - if the return value is a magic value, do exception return (like BX) in HELPER()
556 * - otherwise bit 0 of the return value is the target security state in HELPER()
576 env->regs[15] = dest & ~1; in HELPER()
577 env->thumb = dest & 1; in HELPER()
582 /* translate.c should have made BXNS UNDEF unless we're secure */ in HELPER()
583 assert(env->v7m.secure); in HELPER()
586 env->v7m.control[M_REG_S] &= ~R_V7M_CONTROL_SFPA_MASK; in HELPER()
589 env->thumb = true; in HELPER()
590 env->regs[15] = dest & ~1; in HELPER()
598 * - bit 0 of the destination address is the target security state in HELPER()
602 uint32_t nextinst = env->regs[15] | 1; in HELPER()
603 uint32_t sp = env->regs[13] - 8; in HELPER()
606 /* translate.c will have made BLXNS UNDEF unless we're secure */ in HELPER()
607 assert(env->v7m.secure); in HELPER()
611 * Target is Secure, so this is just a normal BLX, in HELPER()
614 env->regs[14] = nextinst; in HELPER()
615 env->thumb = true; in HELPER()
616 env->regs[15] = dest & ~1; in HELPER()
620 /* Target is non-secure: first push a stack frame */ in HELPER()
630 saved_psr = env->v7m.exception; in HELPER()
631 if (env->v7m.control[M_REG_S] & R_V7M_CONTROL_SFPA_MASK) { in HELPER()
639 env->regs[13] = sp; in HELPER()
640 env->regs[14] = 0xfeffffff; in HELPER()
643 * Write a dummy value to IPSR, to avoid leaking the current secure in HELPER()
644 * exception number to non-secure code. This is guaranteed not in HELPER()
649 env->v7m.control[M_REG_S] &= ~R_V7M_CONTROL_SFPA_MASK; in HELPER()
651 env->thumb = true; in HELPER()
652 env->regs[15] = dest; in HELPER()
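
Editorial note: a minimal standalone sketch (not the QEMU implementation) of the Non-secure branch of BLXNS as the matched lines describe it: a two-word frame (return address and partial PSR) is pushed to the Secure stack, LR gets the FNC_RETURN magic value, and execution continues at the NS target with a dummy IPSR so the real Secure exception number is not leaked. Names are invented for the example.

#include <stdint.h>
#include <stdio.h>

#define FNC_RETURN 0xfeffffffu     /* magic LR value, as on the matched lines */

struct blxns_frame {
    uint32_t ret_addr;             /* next instruction address, bit 0 set */
    uint32_t partial_psr;          /* saved exception number (plus SFPA) */
};

static void do_blxns_to_ns(uint32_t pc_next, uint32_t dest, uint32_t ipsr,
                           uint32_t *sp, struct blxns_frame *frame,
                           uint32_t *lr, uint32_t *pc)
{
    *sp -= 8;                      /* two-word frame pushed to the Secure stack */
    frame->ret_addr = pc_next | 1;
    frame->partial_psr = ipsr;
    *lr = FNC_RETURN;              /* later recognised as a function return */
    *pc = dest & ~1u;              /* bit 0 of dest was the target state (NS) */
}

int main(void)
{
    uint32_t sp = 0x30001000u, lr = 0, pc = 0;
    struct blxns_frame f;

    do_blxns_to_ns(0x10000204u, 0x00200000u, 3, &sp, &f, &lr, &pc);
    printf("sp=0x%08x lr=0x%08x pc=0x%08x frame={0x%08x, 0x%08x}\n",
           sp, lr, pc, f.ret_addr, f.partial_psr);
    return 0;
}
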
660 CPUARMState *env = &cpu->env; in arm_v7m_load_vector()
662 uint32_t addr = env->v7m.vecbase[targets_secure] + exc * 4; in arm_v7m_load_vector()
670 exc, targets_secure ? "secure" : "non-secure", addr); in arm_v7m_load_vector()
682 attrs.secure = targets_secure; in arm_v7m_load_vector()
691 attrs.secure = false; in arm_v7m_load_vector()
694 * NS access to S memory: the underlying exception which we escalate in arm_v7m_load_vector()
695 * to HardFault is SecureFault, which always targets Secure. in arm_v7m_load_vector()
709 exc_secure = !(cpu->env.v7m.aircr & R_V7M_AIRCR_BFHFNMINS_MASK); in arm_v7m_load_vector()
724 * The HardFault is Secure if BFHFNMINS is 0 (meaning that all HFs are in arm_v7m_load_vector()
725 * secure); otherwise it targets the same security state as the in arm_v7m_load_vector()
729 if (!(cpu->env.v7m.aircr & R_V7M_AIRCR_BFHFNMINS_MASK)) { in arm_v7m_load_vector()
732 env->v7m.hfsr |= R_V7M_HFSR_VECTTBL_MASK; in arm_v7m_load_vector()
734 env->v7m.hfsr |= R_V7M_HFSR_FORCED_MASK; in arm_v7m_load_vector()
736 armv7m_nvic_set_pending_derived(env->nvic, ARMV7M_EXCP_HARD, exc_secure); in arm_v7m_load_vector()
743 * Return the integrity signature value for the callee-saves in v7m_integrity_sig()
760 * For v8M, push the callee-saves register part of the stack frame. in v7m_push_callee_stack()
764 CPUARMState *env = &cpu->env; in v7m_push_callee_stack()
776 bool priv = !(env->v7m.control[M_REG_S] & R_V7M_CONTROL_NPRIV_MASK) || in v7m_push_callee_stack()
784 limit = env->v7m.psplim[M_REG_S]; in v7m_push_callee_stack()
786 limit = env->v7m.msplim[M_REG_S]; in v7m_push_callee_stack()
790 frame_sp_p = &env->regs[13]; in v7m_push_callee_stack()
794 frameptr = *frame_sp_p - 0x28; in v7m_push_callee_stack()
803 "...STKOF during callee-saves register stacking\n"); in v7m_push_callee_stack()
804 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_STKOF_MASK; in v7m_push_callee_stack()
805 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, in v7m_push_callee_stack()
806 env->v7m.secure); in v7m_push_callee_stack()
818 v7m_stack_write(cpu, frameptr + 0x8, env->regs[4], mmu_idx, smode) && in v7m_push_callee_stack()
819 v7m_stack_write(cpu, frameptr + 0xc, env->regs[5], mmu_idx, smode) && in v7m_push_callee_stack()
820 v7m_stack_write(cpu, frameptr + 0x10, env->regs[6], mmu_idx, smode) && in v7m_push_callee_stack()
821 v7m_stack_write(cpu, frameptr + 0x14, env->regs[7], mmu_idx, smode) && in v7m_push_callee_stack()
822 v7m_stack_write(cpu, frameptr + 0x18, env->regs[8], mmu_idx, smode) && in v7m_push_callee_stack()
823 v7m_stack_write(cpu, frameptr + 0x1c, env->regs[9], mmu_idx, smode) && in v7m_push_callee_stack()
824 v7m_stack_write(cpu, frameptr + 0x20, env->regs[10], mmu_idx, smode) && in v7m_push_callee_stack()
825 v7m_stack_write(cpu, frameptr + 0x24, env->regs[11], mmu_idx, smode); in v7m_push_callee_stack()
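
Editorial note: a minimal standalone sketch of the 0x28-byte callee-saves frame whose stores appear in the matched v7m_push_callee_stack() lines (r4..r11 at offsets 0x8..0x24, the integrity signature at offset 0, a reserved word at offset 4, and frameptr computed as *frame_sp_p - 0x28). Treat the layout as a reading aid, not a specification.

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

struct callee_frame {
    uint32_t integrity_sig;        /* 0x00: integrity signature */
    uint32_t reserved;             /* 0x04 */
    uint32_t r4_to_r11[8];         /* 0x08 .. 0x24 */
};

int main(void)
{
    static_assert(sizeof(struct callee_frame) == 0x28, "frame is 0x28 bytes");
    assert(offsetof(struct callee_frame, r4_to_r11) == 0x08);
    assert(offsetof(struct callee_frame, r4_to_r11[7]) == 0x24);
    return 0;
}
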
841 CPUARMState *env = &cpu->env; in v7m_exception_taken()
847 armv7m_nvic_get_pending_irq_info(env->nvic, &exc, &targets_secure); in v7m_exception_taken()
849 targets_secure ? "secure" : "nonsecure", exc); in v7m_exception_taken()
864 * exception frame) is Secure. This means it may either already in v7m_exception_taken()
865 * have or now needs to push callee-saves registers. in v7m_exception_taken()
870 * We took an exception from Secure to NonSecure in v7m_exception_taken()
871 * (which means the callee-saved registers got stacked) in v7m_exception_taken()
872 * and are now tailchaining to a Secure exception. in v7m_exception_taken()
873 * Clear DCRS so eventual return from this Secure in v7m_exception_taken()
874 * exception unstacks the callee-saved registers. in v7m_exception_taken()
880 * We're going to a non-secure exception; push the in v7m_exception_taken()
881 * callee-saves registers to the stack now, if they're in v7m_exception_taken()
898 if (env->v7m.control[targets_secure] & R_V7M_CONTROL_SPSEL_MASK) { in v7m_exception_taken()
903 * Clear registers if necessary to prevent non-secure exception in v7m_exception_taken()
904 * code being able to see register values from secure code. in v7m_exception_taken()
907 * here and always zeroes the caller-saved registers regardless in v7m_exception_taken()
913 * Always clear the caller-saved registers (they have been in v7m_exception_taken()
915 * Clear callee-saved registers if the background code is in v7m_exception_taken()
916 * Secure (in which case these regs were saved in in v7m_exception_taken()
921 * r4..r11 are callee-saves, zero only if background in v7m_exception_taken()
922 * state was Secure (EXCRET.S == 1) and exception in v7m_exception_taken()
923 * targets Non-secure state in v7m_exception_taken()
930 env->regs[i] = 0; in v7m_exception_taken()
941 * Derived exception on callee-saves register stacking: in v7m_exception_taken()
946 "...derived exception on callee-saves register stacking"); in v7m_exception_taken()
963 armv7m_nvic_acknowledge_irq(env->nvic); in v7m_exception_taken()
965 /* Switch to target security state -- must do this before writing SPSEL */ in v7m_exception_taken()
970 env->v7m.control[M_REG_S] &= in v7m_exception_taken()
973 env->condexec_bits = 0; in v7m_exception_taken()
974 env->regs[14] = lr; in v7m_exception_taken()
975 env->regs[15] = addr & 0xfffffffe; in v7m_exception_taken()
976 env->thumb = addr & 1; in v7m_exception_taken()
985 * that we will need later in order to do lazy FP reg stacking. in v7m_update_fpccr()
987 bool is_secure = env->v7m.secure; in v7m_update_fpccr()
988 NVICState *nvic = env->nvic; in v7m_update_fpccr()
993 * update the NS banked version of a bit even if we are secure. in v7m_update_fpccr()
995 uint32_t *fpccr_s = &env->v7m.fpccr[M_REG_S]; in v7m_update_fpccr()
996 uint32_t *fpccr_ns = &env->v7m.fpccr[M_REG_NS]; in v7m_update_fpccr()
997 uint32_t *fpccr = &env->v7m.fpccr[is_secure]; in v7m_update_fpccr()
1000 env->v7m.fpcar[is_secure] = frameptr & ~0x7; in v7m_update_fpccr()
1006 (env->v7m.ccr[is_secure] & R_V7M_CCR_STKOFHFNMIGN_MASK); in v7m_update_fpccr()
1049 bool s = env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_S_MASK; in HELPER()
1050 bool lspact = env->v7m.fpccr[s] & R_V7M_FPCCR_LSPACT_MASK; in HELPER()
1053 assert(env->v7m.secure); in HELPER()
1055 if (!(env->v7m.control[M_REG_S] & R_V7M_CONTROL_SFPA_MASK)) { in HELPER()
1059 /* Check access to the coprocessor is permitted */ in HELPER()
1080 if (!(env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_LSPEN_MASK)) { in HELPER()
1081 bool ts = env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_TS_MASK; in HELPER()
1098 cpu_stl_data_ra(env, fptr + 0x44, env->v7m.vpr, ra); in HELPER()
1111 env->v7m.vpr = 0; in HELPER()
1118 env->v7m.control[M_REG_S] &= ~R_V7M_CONTROL_FPCA_MASK; in HELPER()
1127 assert(env->v7m.secure); in HELPER()
1129 if (!(env->v7m.control[M_REG_S] & R_V7M_CONTROL_SFPA_MASK)) { in HELPER()
1133 /* Check access to the coprocessor is permitted */ in HELPER()
1138 if (env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_LSPACT_MASK) { in HELPER()
1140 env->v7m.fpccr[M_REG_S] &= ~R_V7M_FPCCR_LSPACT_MASK; in HELPER()
1142 bool ts = env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_TS_MASK; in HELPER()
1168 env->v7m.vpr = cpu_ldl_data_ra(env, fptr + 0x44, ra); in HELPER()
1172 env->v7m.control[M_REG_S] |= R_V7M_CONTROL_FPCA_MASK; in HELPER()
1185 CPUARMState *env = &cpu->env; in v7m_push_stack()
1187 uint32_t frameptr = env->regs[13]; in v7m_push_stack()
1190 bool nsacr_cp10 = extract32(env->v7m.nsacr, 10, 1); in v7m_push_stack()
1192 if ((env->v7m.control[M_REG_S] & R_V7M_CONTROL_FPCA_MASK) && in v7m_push_stack()
1193 (env->v7m.secure || nsacr_cp10)) { in v7m_push_stack()
1194 if (env->v7m.secure && in v7m_push_stack()
1195 env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_TS_MASK) { in v7m_push_stack()
1206 (env->v7m.ccr[env->v7m.secure] & R_V7M_CCR_STKALIGN_MASK)) { in v7m_push_stack()
1207 frameptr -= 4; in v7m_push_stack()
1212 if (env->v7m.secure && in v7m_push_stack()
1213 (env->v7m.control[M_REG_S] & R_V7M_CONTROL_SFPA_MASK)) { in v7m_push_stack()
1217 frameptr -= framesize; in v7m_push_stack()
1231 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_STKOF_MASK; in v7m_push_stack()
1232 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, in v7m_push_stack()
1233 env->v7m.secure); in v7m_push_stack()
1234 env->regs[13] = limit; in v7m_push_stack()
1253 v7m_stack_write(cpu, frameptr, env->regs[0], mmu_idx, STACK_NORMAL) && in v7m_push_stack()
1254 v7m_stack_write(cpu, frameptr + 4, env->regs[1], in v7m_push_stack()
1256 v7m_stack_write(cpu, frameptr + 8, env->regs[2], in v7m_push_stack()
1258 v7m_stack_write(cpu, frameptr + 12, env->regs[3], in v7m_push_stack()
1260 v7m_stack_write(cpu, frameptr + 16, env->regs[12], in v7m_push_stack()
1262 v7m_stack_write(cpu, frameptr + 20, env->regs[14], in v7m_push_stack()
1264 v7m_stack_write(cpu, frameptr + 24, env->regs[15], in v7m_push_stack()
1268 if (env->v7m.control[M_REG_S] & R_V7M_CONTROL_FPCA_MASK) { in v7m_push_stack()
1270 bool fpccr_s = env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_S_MASK; in v7m_push_stack()
1271 bool lspact = env->v7m.fpccr[fpccr_s] & R_V7M_FPCCR_LSPACT_MASK; in v7m_push_stack()
1276 env->v7m.sfsr |= R_V7M_SFSR_LSERR_MASK; in v7m_push_stack()
1277 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_SECURE, false); in v7m_push_stack()
1278 } else if (!env->v7m.secure && !nsacr_cp10) { in v7m_push_stack()
1280 "...Secure UsageFault with CFSR.NOCP because " in v7m_push_stack()
1282 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, M_REG_S); in v7m_push_stack()
1283 env->v7m.cfsr[M_REG_S] |= R_V7M_CFSR_NOCP_MASK; in v7m_push_stack()
1285 if (!(env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_LSPEN_MASK)) { in v7m_push_stack()
1288 bool cpacr_pass = v7m_cpacr_pass(env, env->v7m.secure, in v7m_push_stack()
1293 * Take UsageFault if CPACR forbids access. The pseudocode in v7m_push_stack()
1300 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, in v7m_push_stack()
1301 env->v7m.secure); in v7m_push_stack()
1302 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_NOCP_MASK; in v7m_push_stack()
1327 env->v7m.vpr, mmu_idx, STACK_NORMAL); in v7m_push_stack()
1335 env->v7m.vpr = 0; in v7m_push_stack()
1351 env->regs[13] = frameptr; in v7m_push_stack()
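
Editorial note: a minimal standalone sketch of the basic eight-word exception entry frame that the matched v7m_push_stack() stores build up (r0-r3, r12, lr, return address, then xPSR; the optional FP extension follows when FPCA is set). Offsets follow the stores shown above plus the standard M-profile layout; verify against the architecture manual before relying on them.

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

struct basic_frame {
    uint32_t r0, r1, r2, r3;       /* offsets 0x00 .. 0x0c */
    uint32_t r12;                  /* 0x10 */
    uint32_t lr;                   /* 0x14 */
    uint32_t return_address;       /* 0x18 */
    uint32_t xpsr;                 /* 0x1c; bit 9 records the alignment pad */
};

int main(void)
{
    static_assert(sizeof(struct basic_frame) == 0x20, "basic frame is 8 words");
    assert(offsetof(struct basic_frame, return_address) == 24);
    return 0;
}
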
1359 CPUARMState *env = &cpu->env; in do_v7m_exception_exit()
1373 * If we're not in Handler mode then jumps to magic exception-exit in do_v7m_exception_exit()
1375 * security extensions the magic secure-function-return has to in do_v7m_exception_exit()
1377 * the generated code we allow exception-exit magic to also cause the in do_v7m_exception_exit()
1391 * jump-to-register (which is then caught later on), and so split in do_v7m_exception_exit()
1392 * the target value up between env->regs[15] and env->thumb in in do_v7m_exception_exit()
1395 excret = env->regs[15]; in do_v7m_exception_exit()
1396 if (env->thumb) { in do_v7m_exception_exit()
1402 excret, env->v7m.exception); in do_v7m_exception_exit()
1425 if (!env->v7m.secure && in do_v7m_exception_exit()
1435 if (env->v7m.exception != ARMV7M_EXCP_NMI) { in do_v7m_exception_exit()
1437 * Auto-clear FAULTMASK on return from other than NMI. in do_v7m_exception_exit()
1444 if (armv7m_nvic_raw_execution_priority(env->nvic) >= 0) { in do_v7m_exception_exit()
1445 env->v7m.faultmask[exc_secure] = 0; in do_v7m_exception_exit()
1448 env->v7m.faultmask[M_REG_NS] = 0; in do_v7m_exception_exit()
1452 switch (armv7m_nvic_complete_irq(env->nvic, env->v7m.exception, in do_v7m_exception_exit()
1454 case -1: in do_v7m_exception_exit()
1505 !(env->v7m.ccr[env->v7m.secure] & in do_v7m_exception_exit()
1529 if ((env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_CLRONRET_MASK) && in do_v7m_exception_exit()
1530 (env->v7m.control[M_REG_S] & R_V7M_CONTROL_FPCA_MASK)) { in do_v7m_exception_exit()
1531 if (env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_LSPACT_MASK) { in do_v7m_exception_exit()
1532 env->v7m.sfsr |= R_V7M_SFSR_LSERR_MASK; in do_v7m_exception_exit()
1533 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_SECURE, false); in do_v7m_exception_exit()
1542 extract32(env->v7m.nsacr, 10, 1); in do_v7m_exception_exit()
1545 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, true); in do_v7m_exception_exit()
1546 env->v7m.cfsr[M_REG_S] |= R_V7M_CFSR_NOCP_MASK; in do_v7m_exception_exit()
1552 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, in do_v7m_exception_exit()
1554 env->v7m.cfsr[exc_secure] |= R_V7M_CFSR_NOCP_MASK; in do_v7m_exception_exit()
1569 env->v7m.vpr = 0; in do_v7m_exception_exit()
1575 env->v7m.sfsr |= R_V7M_SFSR_INVER_MASK; in do_v7m_exception_exit()
1576 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_SECURE, false); in do_v7m_exception_exit()
1588 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_INVPC_MASK; in do_v7m_exception_exit()
1589 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, env->v7m.secure); in do_v7m_exception_exit()
1600 * avoiding an unstack-and-then-stack. Note that now we have in do_v7m_exception_exit()
1603 * returning to -- none of the state we would unstack or set based on in do_v7m_exception_exit()
1606 if (armv7m_nvic_can_take_pending_exception(env->nvic)) { in do_v7m_exception_exit()
1625 bool spsel = env->v7m.control[return_to_secure] & R_V7M_CONTROL_SPSEL_MASK; in do_v7m_exception_exit()
1632 !(env->v7m.control[return_to_secure] & R_V7M_CONTROL_NPRIV_MASK); in do_v7m_exception_exit()
1640 "M profile exception return with non-8-aligned SP " in do_v7m_exception_exit()
1644 /* Do we need to pop callee-saved registers? */ in do_v7m_exception_exit()
1654 env->v7m.sfsr |= R_V7M_SFSR_INVIS_MASK; in do_v7m_exception_exit()
1655 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_SECURE, false); in do_v7m_exception_exit()
1664 v7m_stack_read(cpu, &env->regs[4], frameptr + 0x8, mmu_idx) && in do_v7m_exception_exit()
1665 v7m_stack_read(cpu, &env->regs[5], frameptr + 0xc, mmu_idx) && in do_v7m_exception_exit()
1666 v7m_stack_read(cpu, &env->regs[6], frameptr + 0x10, mmu_idx) && in do_v7m_exception_exit()
1667 v7m_stack_read(cpu, &env->regs[7], frameptr + 0x14, mmu_idx) && in do_v7m_exception_exit()
1668 v7m_stack_read(cpu, &env->regs[8], frameptr + 0x18, mmu_idx) && in do_v7m_exception_exit()
1669 v7m_stack_read(cpu, &env->regs[9], frameptr + 0x1c, mmu_idx) && in do_v7m_exception_exit()
1670 v7m_stack_read(cpu, &env->regs[10], frameptr + 0x20, mmu_idx) && in do_v7m_exception_exit()
1671 v7m_stack_read(cpu, &env->regs[11], frameptr + 0x24, mmu_idx); in do_v7m_exception_exit()
1678 v7m_stack_read(cpu, &env->regs[0], frameptr, mmu_idx) && in do_v7m_exception_exit()
1679 v7m_stack_read(cpu, &env->regs[1], frameptr + 0x4, mmu_idx) && in do_v7m_exception_exit()
1680 v7m_stack_read(cpu, &env->regs[2], frameptr + 0x8, mmu_idx) && in do_v7m_exception_exit()
1681 v7m_stack_read(cpu, &env->regs[3], frameptr + 0xc, mmu_idx) && in do_v7m_exception_exit()
1682 v7m_stack_read(cpu, &env->regs[12], frameptr + 0x10, mmu_idx) && in do_v7m_exception_exit()
1683 v7m_stack_read(cpu, &env->regs[14], frameptr + 0x14, mmu_idx) && in do_v7m_exception_exit()
1684 v7m_stack_read(cpu, &env->regs[15], frameptr + 0x18, mmu_idx) && in do_v7m_exception_exit()
1702 * assume the r15 in the stack frame should be a Thumb-style "lsbit in do_v7m_exception_exit()
1706 if (env->regs[15] & 1) { in do_v7m_exception_exit()
1707 env->regs[15] &= ~1U; in do_v7m_exception_exit()
1729 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, in do_v7m_exception_exit()
1730 env->v7m.secure); in do_v7m_exception_exit()
1731 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_INVPC_MASK; in do_v7m_exception_exit()
1743 (env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_LSPACT_MASK)) { in do_v7m_exception_exit()
1744 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_SECURE, false); in do_v7m_exception_exit()
1745 env->v7m.sfsr |= R_V7M_SFSR_LSERR_MASK; in do_v7m_exception_exit()
1748 "Secure LSPACT set but exception return is " in do_v7m_exception_exit()
1749 "not to secure state\n"); in do_v7m_exception_exit()
1755 (env->v7m.fpccr[M_REG_S] & R_V7M_FPCCR_TS_MASK); in do_v7m_exception_exit()
1757 if (env->v7m.fpccr[return_to_secure] & R_V7M_FPCCR_LSPACT_MASK) { in do_v7m_exception_exit()
1759 env->v7m.fpccr[return_to_secure] &= ~R_V7M_FPCCR_LSPACT_MASK; in do_v7m_exception_exit()
1768 extract32(env->v7m.nsacr, 10, 1); in do_v7m_exception_exit()
1771 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, in do_v7m_exception_exit()
1773 env->v7m.cfsr[return_to_secure] |= R_V7M_CFSR_NOCP_MASK; in do_v7m_exception_exit()
1781 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, true); in do_v7m_exception_exit()
1782 env->v7m.cfsr[M_REG_S] |= R_V7M_CFSR_INVPC_MASK; in do_v7m_exception_exit()
1784 "...taking Secure UsageFault on existing " in do_v7m_exception_exit()
1818 v7m_stack_read(cpu, &env->v7m.vpr, in do_v7m_exception_exit()
1831 env->v7m.vpr = 0; in do_v7m_exception_exit()
1836 env->v7m.control[M_REG_S] = FIELD_DP32(env->v7m.control[M_REG_S], in do_v7m_exception_exit()
1849 * pre-exception SP was not 8-aligned and we added a padding word to in do_v7m_exception_exit()
1851 * from the current 8-aligned value to the 8-unaligned value. (Adding 4 in do_v7m_exception_exit()
1867 if (env->v7m.secure) { in do_v7m_exception_exit()
1870 env->v7m.control[M_REG_S] = FIELD_DP32(env->v7m.control[M_REG_S], in do_v7m_exception_exit()
1883 * we know we're v7M so this is never a Secure UsageFault. in do_v7m_exception_exit()
1888 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, false); in do_v7m_exception_exit()
1889 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_INVPC_MASK; in do_v7m_exception_exit()
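
Editorial note: a minimal standalone sketch of decoding the EXC_RETURN magic value that the matched do_v7m_exception_exit() lines branch on (which stack to unstack from, Handler vs Thread mode, standard vs FP frame, and for v8M whether the callee-saves part is on the frame and which security state to return to). Bit assignments follow the ARMv8-M definition as I understand it; check the architecture manual before relying on them.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct exc_return {
    bool es;                       /* bit 0: exception was taken to Secure */
    bool spsel;                    /* bit 2: use PSP on return */
    bool thread_mode;              /* bit 3: return to Thread mode */
    bool standard_frame;           /* bit 4: no FP extension in the frame */
    bool dcrs;                     /* bit 5: callee regs not stacked by hw */
    bool return_to_secure;         /* bit 6: frame lives on the Secure stack */
};

static struct exc_return decode_excret(uint32_t excret)
{
    return (struct exc_return) {
        .es               = excret & (1u << 0),
        .spsel            = excret & (1u << 2),
        .thread_mode      = excret & (1u << 3),
        .standard_frame   = excret & (1u << 4),
        .dcrs             = excret & (1u << 5),
        .return_to_secure = excret & (1u << 6),
    };
}

int main(void)
{
    /* 0xffffffbc: return to Non-secure Thread mode on PSP, standard frame */
    struct exc_return r = decode_excret(0xffffffbcu);

    printf("thread=%d spsel=%d secure=%d\n",
           r.thread_mode, r.spsel, r.return_to_secure);
    return 0;
}
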
1913 * At this point the magic return value is split between env->regs[15] in do_v7m_function_return()
1914 * and env->thumb. We don't bother to reconstitute it because we don't in do_v7m_function_return()
1917 CPUARMState *env = &cpu->env; in do_v7m_function_return()
1920 qemu_log_mask(CPU_LOG_INT, "...really v7M secure function return\n"); in do_v7m_function_return()
1929 /* Pull the return address and IPSR from the Secure stack */ in do_v7m_function_return()
1931 spsel = env->v7m.control[M_REG_S] & R_V7M_CONTROL_SPSEL_MASK; in do_v7m_function_return()
1938 * do them as secure, so work out what MMU index that is. in do_v7m_function_return()
1947 if (!((env->v7m.exception == 0 && newpsr_exc == 0) || in do_v7m_function_return()
1948 (env->v7m.exception == 1 && newpsr_exc != 0))) { in do_v7m_function_return()
1950 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_INVPC_MASK; in do_v7m_function_return()
1951 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, in do_v7m_function_return()
1952 env->v7m.secure); in do_v7m_function_return()
1964 env->v7m.exception = newpsr_exc; in do_v7m_function_return()
1965 env->v7m.control[M_REG_S] &= ~R_V7M_CONTROL_SFPA_MASK; in do_v7m_function_return()
1967 env->v7m.control[M_REG_S] |= R_V7M_CONTROL_SFPA_MASK; in do_v7m_function_return()
1970 env->thumb = newpc & 1; in do_v7m_function_return()
1971 env->regs[15] = newpc & ~1; in do_v7m_function_return()
1978 static bool v7m_read_half_insn(ARMCPU *cpu, ARMMMUIdx mmu_idx, bool secure, in v7m_read_half_insn() argument
1982 * Load a 16-bit portion of a v7M instruction, returning true on success, in v7m_read_half_insn()
1990 * and then we do the load as a secure load (ie using the security in v7m_read_half_insn()
1994 CPUARMState *env = &cpu->env; in v7m_read_half_insn()
2000 v8m_security_lookup(env, addr, MMU_INST_FETCH, mmu_idx, secure, &sattrs); in v7m_read_half_insn()
2006 env->v7m.sfsr |= R_V7M_SFSR_INVEP_MASK; in v7m_read_half_insn()
2007 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_SECURE, false); in v7m_read_half_insn()
2014 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_IACCVIOL_MASK; in v7m_read_half_insn()
2015 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_MEM, env->v7m.secure); in v7m_read_half_insn()
2022 env->v7m.cfsr[M_REG_NS] |= R_V7M_CFSR_IBUSERR_MASK; in v7m_read_half_insn()
2023 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_BUS, false); in v7m_read_half_insn()
2042 CPUARMState *env = &cpu->env; in v7m_read_sg_stack_word()
2053 env->v7m.sfsr |= R_V7M_SFSR_AUVIOL_MASK | R_V7M_SFSR_SFARVALID_MASK; in v7m_read_sg_stack_word()
2054 env->v7m.sfar = addr; in v7m_read_sg_stack_word()
2055 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_SECURE, false); in v7m_read_sg_stack_word()
2059 env->v7m.cfsr[M_REG_S] |= R_V7M_CFSR_DACCVIOL_MASK | in v7m_read_sg_stack_word()
2061 env->v7m.mmfar[M_REG_S] = addr; in v7m_read_sg_stack_word()
2062 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_MEM, false); in v7m_read_sg_stack_word()
2072 env->v7m.cfsr[M_REG_NS] |= in v7m_read_sg_stack_word()
2074 env->v7m.bfar = addr; in v7m_read_sg_stack_word()
2075 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_BUS, false); in v7m_read_sg_stack_word()
2086 * Check whether this attempt to execute code in a Secure & NS-Callable in v7m_handle_execute_nsc()
2091 CPUARMState *env = &cpu->env; in v7m_handle_execute_nsc()
2099 assert(!env->v7m.secure); in v7m_handle_execute_nsc()
2102 /* We want to do the MPU lookup as secure; work out what mmu_idx that is */ in v7m_handle_execute_nsc()
2105 if (!v7m_read_half_insn(cpu, mmu_idx, true, env->regs[15], &insn)) { in v7m_handle_execute_nsc()
2109 if (!env->thumb) { in v7m_handle_execute_nsc()
2116 * early-SG-check option). in v7m_handle_execute_nsc()
2121 if (!v7m_read_half_insn(cpu, mmu_idx, true, env->regs[15] + 2, &insn)) { in v7m_handle_execute_nsc()
2138 ", executing it\n", env->regs[15]); in v7m_handle_execute_nsc()
2144 * must perform the memory access even if CCR_S.TRD is zero in v7m_handle_execute_nsc()
2153 sp = v7m_using_psp(env) ? env->v7m.other_ss_psp : env->v7m.other_ss_msp; in v7m_handle_execute_nsc()
2155 /* Stack access failed and an exception has been pended */ in v7m_handle_execute_nsc()
2159 if (env->v7m.ccr[M_REG_S] & R_V7M_CCR_TRD_MASK) { in v7m_handle_execute_nsc()
2161 !(env->v7m.control[M_REG_S] & 1)) { in v7m_handle_execute_nsc()
2167 env->regs[14] &= ~1; in v7m_handle_execute_nsc()
2168 env->v7m.control[M_REG_S] &= ~R_V7M_CONTROL_SFPA_MASK; in v7m_handle_execute_nsc()
2171 env->regs[15] += 4; in v7m_handle_execute_nsc()
2176 env->v7m.sfsr |= R_V7M_SFSR_INVEP_MASK; in v7m_handle_execute_nsc()
2177 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_SECURE, false); in v7m_handle_execute_nsc()
2186 CPUARMState *env = &cpu->env; in arm_v7m_cpu_do_interrupt()
2196 switch (cs->exception_index) { in arm_v7m_cpu_do_interrupt()
2198 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, env->v7m.secure); in arm_v7m_cpu_do_interrupt()
2199 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_UNDEFINSTR_MASK; in arm_v7m_cpu_do_interrupt()
2210 if (env->exception.target_el == 3) { in arm_v7m_cpu_do_interrupt()
2213 target_secstate = env->v7m.secure; in arm_v7m_cpu_do_interrupt()
2215 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, target_secstate); in arm_v7m_cpu_do_interrupt()
2216 env->v7m.cfsr[target_secstate] |= R_V7M_CFSR_NOCP_MASK; in arm_v7m_cpu_do_interrupt()
2220 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, env->v7m.secure); in arm_v7m_cpu_do_interrupt()
2221 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_INVSTATE_MASK; in arm_v7m_cpu_do_interrupt()
2224 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, env->v7m.secure); in arm_v7m_cpu_do_interrupt()
2225 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_STKOF_MASK; in arm_v7m_cpu_do_interrupt()
2228 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_SECURE, false); in arm_v7m_cpu_do_interrupt()
2229 env->v7m.sfsr |= R_V7M_SFSR_LSERR_MASK; in arm_v7m_cpu_do_interrupt()
2232 /* Unaligned faults reported by M-profile aware code */ in arm_v7m_cpu_do_interrupt()
2233 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, env->v7m.secure); in arm_v7m_cpu_do_interrupt()
2234 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_UNALIGNED_MASK; in arm_v7m_cpu_do_interrupt()
2237 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, env->v7m.secure); in arm_v7m_cpu_do_interrupt()
2238 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_DIVBYZERO_MASK; in arm_v7m_cpu_do_interrupt()
2242 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_SVC, env->v7m.secure); in arm_v7m_cpu_do_interrupt()
2248 * the env->exception.fsr will be populated by the code that in arm_v7m_cpu_do_interrupt()
2249 * raises the fault, in the A profile short-descriptor format. in arm_v7m_cpu_do_interrupt()
2256 (uint32_t)env->exception.vaddress); in arm_v7m_cpu_do_interrupt()
2257 switch (env->exception.fsr & 0xf) { in arm_v7m_cpu_do_interrupt()
2261 * which is marked as Secure & Non-Secure Callable and the CPU in arm_v7m_cpu_do_interrupt()
2262 * is in the Non-Secure state. The only instruction which can in arm_v7m_cpu_do_interrupt()
2275 * access data in the wrong security state. in arm_v7m_cpu_do_interrupt()
2277 switch (cs->exception_index) { in arm_v7m_cpu_do_interrupt()
2279 if (env->v7m.secure) { in arm_v7m_cpu_do_interrupt()
2280 env->v7m.sfsr |= R_V7M_SFSR_INVTRAN_MASK; in arm_v7m_cpu_do_interrupt()
2284 env->v7m.sfsr |= R_V7M_SFSR_INVEP_MASK; in arm_v7m_cpu_do_interrupt()
2290 /* This must be an NS access to S memory */ in arm_v7m_cpu_do_interrupt()
2291 env->v7m.sfsr |= R_V7M_SFSR_AUVIOL_MASK; in arm_v7m_cpu_do_interrupt()
2296 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_SECURE, false); in arm_v7m_cpu_do_interrupt()
2299 switch (cs->exception_index) { in arm_v7m_cpu_do_interrupt()
2301 env->v7m.cfsr[M_REG_NS] |= R_V7M_CFSR_IBUSERR_MASK; in arm_v7m_cpu_do_interrupt()
2305 env->v7m.cfsr[M_REG_NS] |= in arm_v7m_cpu_do_interrupt()
2307 env->v7m.bfar = env->exception.vaddress; in arm_v7m_cpu_do_interrupt()
2310 env->v7m.bfar); in arm_v7m_cpu_do_interrupt()
2313 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_BUS, false); in arm_v7m_cpu_do_interrupt()
2318 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_UNALIGNED_MASK; in arm_v7m_cpu_do_interrupt()
2319 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_USAGE, in arm_v7m_cpu_do_interrupt()
2320 env->v7m.secure); in arm_v7m_cpu_do_interrupt()
2327 switch (cs->exception_index) { in arm_v7m_cpu_do_interrupt()
2329 env->v7m.cfsr[env->v7m.secure] |= R_V7M_CFSR_IACCVIOL_MASK; in arm_v7m_cpu_do_interrupt()
2333 env->v7m.cfsr[env->v7m.secure] |= in arm_v7m_cpu_do_interrupt()
2335 env->v7m.mmfar[env->v7m.secure] = env->exception.vaddress; in arm_v7m_cpu_do_interrupt()
2338 env->v7m.mmfar[env->v7m.secure]); in arm_v7m_cpu_do_interrupt()
2341 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_MEM, in arm_v7m_cpu_do_interrupt()
2342 env->v7m.secure); in arm_v7m_cpu_do_interrupt()
2349 env->regs[0]); in arm_v7m_cpu_do_interrupt()
2355 env->regs[15] += env->thumb ? 2 : 4; in arm_v7m_cpu_do_interrupt()
2358 armv7m_nvic_set_pending(env->nvic, ARMV7M_EXCP_DEBUG, false); in arm_v7m_cpu_do_interrupt()
2363 if (env->regs[15] < EXC_RETURN_MIN_MAGIC) { in arm_v7m_cpu_do_interrupt()
2365 assert(env->regs[15] >= FNC_RETURN_MIN_MAGIC); in arm_v7m_cpu_do_interrupt()
2382 cpu_abort(cs, "Unhandled exception 0x%x\n", cs->exception_index); in arm_v7m_cpu_do_interrupt()
2390 * The S bit indicates whether we should return to Secure in arm_v7m_cpu_do_interrupt()
2393 * to Secure or NonSecure (ie our target state). We set it in arm_v7m_cpu_do_interrupt()
2401 if (env->v7m.secure) { in arm_v7m_cpu_do_interrupt()
2409 if (env->v7m.control[M_REG_NS] & R_V7M_CONTROL_SPSEL_MASK) { in arm_v7m_cpu_do_interrupt()
2413 if (!(env->v7m.control[M_REG_S] & R_V7M_CONTROL_FPCA_MASK)) { in arm_v7m_cpu_do_interrupt()
2424 uint32_t HELPER(v7m_mrs)(CPUARMState *env, uint32_t reg) in HELPER()
2429 switch (reg) { in HELPER()
2430 case 0 ... 7: /* xPSR sub-fields */ in HELPER()
2431 return v7m_mrs_xpsr(env, reg, el); in HELPER()
2433 return arm_v7m_mrs_control(env, env->v7m.secure); in HELPER()
2436 * We have to handle this here because unprivileged Secure code in HELPER()
2439 if (!env->v7m.secure) { in HELPER()
2442 return env->v7m.control[M_REG_NS] | in HELPER()
2443 (env->v7m.control[M_REG_S] & R_V7M_CONTROL_FPCA_MASK); in HELPER()
2451 switch (reg) { in HELPER()
2453 if (!env->v7m.secure) { in HELPER()
2456 return env->v7m.other_ss_msp; in HELPER()
2458 if (!env->v7m.secure) { in HELPER()
2461 return env->v7m.other_ss_psp; in HELPER()
2463 if (!env->v7m.secure) { in HELPER()
2466 return env->v7m.msplim[M_REG_NS]; in HELPER()
2468 if (!env->v7m.secure) { in HELPER()
2471 return env->v7m.psplim[M_REG_NS]; in HELPER()
2473 if (!env->v7m.secure) { in HELPER()
2476 return env->v7m.primask[M_REG_NS]; in HELPER()
2481 if (!env->v7m.secure) { in HELPER()
2484 return env->v7m.basepri[M_REG_NS]; in HELPER()
2489 if (!env->v7m.secure) { in HELPER()
2492 return env->v7m.faultmask[M_REG_NS]; in HELPER()
2496 * This gives the non-secure SP selected based on whether we're in HELPER()
2499 bool spsel = env->v7m.control[M_REG_NS] & R_V7M_CONTROL_SPSEL_MASK; in HELPER()
2501 if (!env->v7m.secure) { in HELPER()
2505 return env->v7m.other_ss_psp; in HELPER()
2507 return env->v7m.other_ss_msp; in HELPER()
2515 switch (reg) { in HELPER()
2517 return v7m_using_psp(env) ? env->v7m.other_sp : env->regs[13]; in HELPER()
2519 return v7m_using_psp(env) ? env->regs[13] : env->v7m.other_sp; in HELPER()
2524 return env->v7m.msplim[env->v7m.secure]; in HELPER()
2529 return env->v7m.psplim[env->v7m.secure]; in HELPER()
2531 return env->v7m.primask[env->v7m.secure]; in HELPER()
2537 return env->v7m.basepri[env->v7m.secure]; in HELPER()
2542 return env->v7m.faultmask[env->v7m.secure]; in HELPER()
2546 " register %d\n", reg); in HELPER()
2562 uint32_t reg = extract32(maskreg, 0, 8); in HELPER() local
2565 if (cur_el == 0 && reg > 7 && reg != 20) { in HELPER()
2567 * only xPSR sub-fields and CONTROL.SFPA may be written by in HELPER()
2574 switch (reg) { in HELPER()
2576 if (!env->v7m.secure) { in HELPER()
2579 env->v7m.other_ss_msp = val & ~3; in HELPER()
2582 if (!env->v7m.secure) { in HELPER()
2585 env->v7m.other_ss_psp = val & ~3; in HELPER()
2588 if (!env->v7m.secure) { in HELPER()
2591 env->v7m.msplim[M_REG_NS] = val & ~7; in HELPER()
2594 if (!env->v7m.secure) { in HELPER()
2597 env->v7m.psplim[M_REG_NS] = val & ~7; in HELPER()
2600 if (!env->v7m.secure) { in HELPER()
2603 env->v7m.primask[M_REG_NS] = val & 1; in HELPER()
2609 if (!env->v7m.secure) { in HELPER()
2612 env->v7m.basepri[M_REG_NS] = val & 0xff; in HELPER()
2618 if (!env->v7m.secure) { in HELPER()
2621 env->v7m.faultmask[M_REG_NS] = val & 1; in HELPER()
2624 if (!env->v7m.secure) { in HELPER()
2631 env->v7m.control[M_REG_NS] &= ~R_V7M_CONTROL_NPRIV_MASK; in HELPER()
2632 env->v7m.control[M_REG_NS] |= val & R_V7M_CONTROL_NPRIV_MASK; in HELPER()
2639 extract32(env->v7m.nsacr, 10, 1)) { in HELPER()
2640 env->v7m.control[M_REG_S] &= ~R_V7M_CONTROL_FPCA_MASK; in HELPER()
2641 env->v7m.control[M_REG_S] |= val & R_V7M_CONTROL_FPCA_MASK; in HELPER()
2647 * This gives the non-secure SP selected based on whether we're in HELPER()
2650 bool spsel = env->v7m.control[M_REG_NS] & R_V7M_CONTROL_SPSEL_MASK; in HELPER()
2654 if (!env->v7m.secure) { in HELPER()
2658 limit = is_psp ? env->v7m.psplim[false] : env->v7m.msplim[false]; in HELPER()
2667 env->v7m.other_ss_psp = val; in HELPER()
2669 env->v7m.other_ss_msp = val; in HELPER()
2678 switch (reg) { in HELPER()
2679 case 0 ... 7: /* xPSR sub-fields */ in HELPER()
2680 v7m_msr_xpsr(env, mask, reg, val); in HELPER()
2684 env->v7m.other_sp = val & ~3; in HELPER()
2686 env->regs[13] = val & ~3; in HELPER()
2691 env->regs[13] = val & ~3; in HELPER()
2693 env->v7m.other_sp = val & ~3; in HELPER()
2700 env->v7m.msplim[env->v7m.secure] = val & ~7; in HELPER()
2706 env->v7m.psplim[env->v7m.secure] = val & ~7; in HELPER()
2709 env->v7m.primask[env->v7m.secure] = val & 1; in HELPER()
2715 env->v7m.basepri[env->v7m.secure] = val & 0xff; in HELPER()
2722 if (val != 0 && (val < env->v7m.basepri[env->v7m.secure] in HELPER()
2723 || env->v7m.basepri[env->v7m.secure] == 0)) { in HELPER()
2724 env->v7m.basepri[env->v7m.secure] = val; in HELPER()
2731 env->v7m.faultmask[env->v7m.secure] = val & 1; in HELPER()
2738 * env->v7m.control, so we only need update the others. in HELPER()
2741 * All these bits are writes-ignored from non-privileged code, in HELPER()
2749 env->v7m.control[env->v7m.secure] &= ~R_V7M_CONTROL_NPRIV_MASK; in HELPER()
2750 env->v7m.control[env->v7m.secure] |= val & R_V7M_CONTROL_NPRIV_MASK; in HELPER()
2758 if (env->v7m.secure) { in HELPER()
2759 env->v7m.control[M_REG_S] &= ~R_V7M_CONTROL_SFPA_MASK; in HELPER()
2760 env->v7m.control[M_REG_S] |= val & R_V7M_CONTROL_SFPA_MASK; in HELPER()
2763 (env->v7m.secure || !arm_feature(env, ARM_FEATURE_M_SECURITY) || in HELPER()
2764 extract32(env->v7m.nsacr, 10, 1))) { in HELPER()
2765 env->v7m.control[M_REG_S] &= ~R_V7M_CONTROL_FPCA_MASK; in HELPER()
2766 env->v7m.control[M_REG_S] |= val & R_V7M_CONTROL_FPCA_MASK; in HELPER()
2773 " register %d\n", reg); in HELPER()
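
Editorial note: a minimal standalone sketch of the SYSm numbering the matched v7m_mrs/v7m_msr helpers switch on: 0-7 are xPSR views, 8-11 the stack pointers and limits, 16-20 the mask registers and CONTROL; from Secure code, setting bit 7 (0x80) selects the Non-secure alias (e.g. 0x88 for MSP_NS). The table is a reading aid assumed from the architectural encodings, not taken from the matched lines.

#include <stdint.h>
#include <stdio.h>

static const char *sysm_name(uint32_t sysm)
{
    switch (sysm & 0x7f) {         /* bit 7 selects the _NS alias */
    case 0 ... 7:  return "xPSR sub-field";
    case 8:        return "MSP";
    case 9:        return "PSP";
    case 10:       return "MSPLIM";
    case 11:       return "PSPLIM";
    case 16:       return "PRIMASK";
    case 17:       return "BASEPRI";
    case 18:       return "BASEPRI_MAX";
    case 19:       return "FAULTMASK";
    case 20:       return "CONTROL";
    case 24:       return "SP (non-secure alias only)";
    default:       return "UNPREDICTABLE";
    }
}

int main(void)
{
    printf("0x08 -> %s\n", sysm_name(0x08));
    printf("0x88 -> %s_NS\n", sysm_name(0x88));
    printf("0x14 -> %s\n", sysm_name(0x14));   /* 20 = CONTROL */
    return 0;
}
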
2789 bool targetsec = env->v7m.secure; in HELPER()
2803 !(env->v7m.control[targetsec] & R_V7M_CONTROL_NPRIV_MASK); in HELPER()
2810 * We know that the MPU and SAU don't care about the access type in HELPER()
2826 if (mregion == -1) { in HELPER()
2841 if (env->v7m.secure) { in HELPER()
2869 uint32_t *arm_v7m_get_sp_ptr(CPUARMState *env, bool secure, bool threadmode, in arm_v7m_get_sp_ptr() argument
2881 * function in handling of pushing of the callee-saves registers in arm_v7m_get_sp_ptr()
2885 * opencodes the stack-selection in PushCalleeStack(), but we prefer in arm_v7m_get_sp_ptr()
2890 if (secure == env->v7m.secure) { in arm_v7m_get_sp_ptr()
2892 return &env->regs[13]; in arm_v7m_get_sp_ptr()
2894 return &env->v7m.other_sp; in arm_v7m_get_sp_ptr()
2898 return &env->v7m.other_ss_psp; in arm_v7m_get_sp_ptr()
2900 return &env->v7m.other_ss_msp; in arm_v7m_get_sp_ptr()
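
Editorial note: a minimal standalone sketch of the four-way selection made by the matched arm_v7m_get_sp_ptr() lines. Of MSP_S/PSP_S/MSP_NS/PSP_NS exactly one is live in r13; the other SP of the current security state sits in a shadow slot, and both SPs of the opposite state sit in their own slots. Names are invented for the example.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct sp_slots {
    uint32_t r13;                  /* the active SP */
    uint32_t other_sp;             /* inactive SP of the current state */
    uint32_t other_ss_msp;         /* MSP of the other security state */
    uint32_t other_ss_psp;         /* PSP of the other security state */
    bool cur_secure;               /* current security state */
    bool cur_using_psp;            /* is r13 currently the PSP? */
};

static uint32_t *get_sp_ptr(struct sp_slots *s, bool secure,
                            bool threadmode, bool spsel)
{
    bool want_psp = threadmode && spsel;   /* Handler mode always uses MSP */

    if (secure == s->cur_secure) {
        return (want_psp == s->cur_using_psp) ? &s->r13 : &s->other_sp;
    }
    return want_psp ? &s->other_ss_psp : &s->other_ss_msp;
}

int main(void)
{
    struct sp_slots s = { .r13 = 0x1000, .other_sp = 0x2000,
                          .other_ss_msp = 0x3000, .other_ss_psp = 0x4000,
                          .cur_secure = true, .cur_using_psp = false };

    /* Secure Handler code asking for the Non-secure process stack pointer */
    printf("PSP_NS = 0x%x\n", *get_sp_ptr(&s, false, true, true));
    return 0;
}
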