#ifndef __ASM_ARM_SYSTEM_H
#define __ASM_ARM_SYSTEM_H

#ifdef __KERNEL__

#define CPU_ARCH_UNKNOWN	0
#define CPU_ARCH_ARMv3		1
#define CPU_ARCH_ARMv4		2
#define CPU_ARCH_ARMv4T		3
#define CPU_ARCH_ARMv5		4
#define CPU_ARCH_ARMv5T		5
#define CPU_ARCH_ARMv5TE	6
#define CPU_ARCH_ARMv5TEJ	7
#define CPU_ARCH_ARMv6		8
#define CPU_ARCH_ARMv7		9

/*
 * CR1 bits (CP#15 CR1)
 */
#define CR_M	(1 << 0)	/* MMU enable				*/
#define CR_A	(1 << 1)	/* Alignment abort enable		*/
#define CR_C	(1 << 2)	/* Dcache enable			*/
#define CR_W	(1 << 3)	/* Write buffer enable			*/
#define CR_P	(1 << 4)	/* 32-bit exception handler		*/
#define CR_D	(1 << 5)	/* 32-bit data address range		*/
#define CR_L	(1 << 6)	/* Implementation defined		*/
#define CR_B	(1 << 7)	/* Big endian				*/
#define CR_S	(1 << 8)	/* System MMU protection		*/
#define CR_R	(1 << 9)	/* ROM MMU protection			*/
#define CR_F	(1 << 10)	/* Implementation defined		*/
#define CR_Z	(1 << 11)	/* Implementation defined		*/
#define CR_I	(1 << 12)	/* Icache enable			*/
#define CR_V	(1 << 13)	/* Vectors relocated to 0xffff0000	*/
#define CR_RR	(1 << 14)	/* Round Robin cache replacement	*/
#define CR_L4	(1 << 15)	/* LDR pc can set T bit			*/
#define CR_DT	(1 << 16)
#define CR_IT	(1 << 18)
#define CR_ST	(1 << 19)
#define CR_FI	(1 << 21)	/* Fast interrupt (lower latency mode)	*/
#define CR_U	(1 << 22)	/* Unaligned access operation		*/
#define CR_XP	(1 << 23)	/* Extended page tables			*/
#define CR_VE	(1 << 24)	/* Vectored interrupts			*/
#define CR_EE	(1 << 25)	/* Exception (Big) Endian		*/
#define CR_TRE	(1 << 28)	/* TEX remap enable			*/
#define CR_AFE	(1 << 29)	/* Access flag enable			*/
#define CR_TE	(1 << 30)	/* Thumb exception enable		*/

/*
 * This is used to ensure the compiler did actually allocate the register we
 * asked it for in some inline assembly sequences.  Apparently we can't trust
 * the compiler from one version to another so a bit of paranoia won't hurt.
 * This string is meant to be concatenated with the inline asm string and
 * will cause compilation to stop on mismatch.
 * (for details, see gcc PR 15089)
 */
#define __asmeq(x, y)  ".ifnc " x "," y " ; .err ; .endif\n\t"

#ifndef __ASSEMBLY__

/* Compiler barrier only: no ISB instruction is actually issued here. */
#define isb() __asm__ __volatile__ ("" : : : "memory")

#define nop() __asm__ __volatile__("mov\tr0,r0\t@ nop\n\t");

static inline unsigned int get_cr(void)
{
	unsigned int val;
	asm("mrc p15, 0, %0, c1, c0, 0	@ get CR" : "=r" (val) : : "cc");
	return val;
}

static inline void set_cr(unsigned int val)
{
	asm volatile("mcr p15, 0, %0, c1, c0, 0	@ set CR"
	  : : "r" (val) : "cc");
	isb();
}
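
/*
 * Illustrative sketches, not part of the original header; the helper
 * names below are hypothetical and only demonstrate the intended usage
 * patterns.
 *
 * __asmeq() is pasted in front of an inline asm template so that the
 * assembler errors out if gcc did not actually place the operand in the
 * register we named.
 */
static inline unsigned int __example_pass_through_r0(unsigned int val)
{
	register unsigned int r0 asm("r0") = val;

	asm volatile(
		__asmeq("%0", "r0")
		"mov	%0, %0	@ operand is known to be in r0 here"
		: "+r" (r0));
	return r0;
}

/*
 * The CR_* bits above are meant for read-modify-write sequences on the
 * CP15 control register via get_cr()/set_cr(); this sketch enables the
 * instruction cache.
 */
static inline void __example_icache_enable(void)
{
	unsigned int cr = get_cr();	/* read the current control register */

	cr |= CR_I;			/* set the Icache enable bit */
	set_cr(cr);			/* write it back; set_cr() ends with isb() */
}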
#endif /* __ASSEMBLY__ */

#define arch_align_stack(x) (x)

#endif /* __KERNEL__ */

#endif