xref: /openbmc/u-boot/arch/arm/include/asm/armv7.h (revision bf7ab1e7)
1 /*
2  * (C) Copyright 2010
3  * Texas Instruments, <www.ti.com>
4  * Aneesh V <aneesh@ti.com>
5  *
6  * SPDX-License-Identifier:	GPL-2.0+
7  */
8 #ifndef ARMV7_H
9 #define ARMV7_H
10 
/*
 * Cortex-A9 revisions, as MIDR values.
 * Pattern visible in the values: bits [23:20] hold the variant (rN) and
 * bits [3:0] the revision (pN), e.g. 0x411FC092 == r1p2.
 */
#define MIDR_CORTEX_A9_R0P1	0x410FC091
#define MIDR_CORTEX_A9_R1P2	0x411FC092
#define MIDR_CORTEX_A9_R1P3	0x411FC093
#define MIDR_CORTEX_A9_R2P10	0x412FC09A

/* Cortex-A15 revisions */
#define MIDR_CORTEX_A15_R0P0	0x410FC0F0
#define MIDR_CORTEX_A15_R2P2	0x412FC0F2

/* Cortex-A7 revisions */
#define MIDR_CORTEX_A7_R0P0	0x410FC070

/* Keeps every MIDR field except variant [23:20] and revision [3:0] */
#define MIDR_PRIMARY_PART_MASK	0xFF0FFFF0
25 
/*
 * ID_PFR1 feature fields — each is a 4-bit field reporting support for
 * the Security Extensions, Virtualization Extensions and Generic Timer.
 */
#define CPUID_ARM_SEC_SHIFT		4
#define CPUID_ARM_SEC_MASK		(0xF << CPUID_ARM_SEC_SHIFT)
#define CPUID_ARM_VIRT_SHIFT		12
#define CPUID_ARM_VIRT_MASK		(0xF << CPUID_ARM_VIRT_SHIFT)
#define CPUID_ARM_GENTIMER_SHIFT	16
#define CPUID_ARM_GENTIMER_MASK		(0xF << CPUID_ARM_GENTIMER_SHIFT)

/* valid bits in CBAR register / PERIPHBASE value */
#define CBAR_MASK			0xFFFF8000
36 
/*
 * CCSIDR (Cache Size ID Register) field layout.
 * Masks are expressed via the matching *_OFFSET macros so the offset and
 * mask of each field cannot drift apart (values are unchanged).
 */
#define CCSIDR_LINE_SIZE_OFFSET		0
#define CCSIDR_LINE_SIZE_MASK		(0x7 << CCSIDR_LINE_SIZE_OFFSET)
#define CCSIDR_ASSOCIATIVITY_OFFSET	3
#define CCSIDR_ASSOCIATIVITY_MASK	(0x3FF << CCSIDR_ASSOCIATIVITY_OFFSET)
#define CCSIDR_NUM_SETS_OFFSET		13
#define CCSIDR_NUM_SETS_MASK		(0x7FFF << CCSIDR_NUM_SETS_OFFSET)
44 
/*
 * Values for InD field in CSSELR
 * Selects the type of cache (data/unified vs. instruction) that a
 * subsequent CCSIDR read describes.
 */
#define ARMV7_CSSELR_IND_DATA_UNIFIED	0
#define ARMV7_CSSELR_IND_INSTRUCTION	1

/*
 * Values for Ctype fields in CLIDR — the kind of cache implemented at
 * each level: none, I-only, D-only, separate I+D, or unified.
 */
#define ARMV7_CLIDR_CTYPE_NO_CACHE		0
#define ARMV7_CLIDR_CTYPE_INSTRUCTION_ONLY	1
#define ARMV7_CLIDR_CTYPE_DATA_ONLY		2
#define ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA	3
#define ARMV7_CLIDR_CTYPE_UNIFIED		4
58 
59 #ifndef __ASSEMBLY__
60 #include <linux/types.h>
61 #include <asm/io.h>
62 #include <asm/barriers.h>
63 
64 /*
65  * Workaround for ARM errata # 798870
66  * Set L2ACTLR[7] to reissue any memory transaction in the L2 that has been
67  * stalled for 1024 cycles to verify that its hazard condition still exists.
68  */
69 static inline void v7_enable_l2_hazard_detect(void)
70 {
71 	uint32_t val;
72 
73 	/* L2ACTLR[7]: Enable hazard detect timeout */
74 	asm volatile ("mrc     p15, 1, %0, c15, c0, 0\n\t" : "=r"(val));
75 	val |= (1 << 7);
76 	asm volatile ("mcr     p15, 1, %0, c15, c0, 0\n\t" : : "r"(val));
77 }
78 
/*
 * Workaround for ARM errata # 799270
 * Ensure that the L2 logic has been used within the previous 256 cycles
 * before modifying the ACTLR.SMP bit. This is required during boot before
 * MMU has been enabled, or during a specified reset or power down sequence.
 *
 * @address: location read once as a dummy access. NOTE(review): presumably
 * this must be an L2-backed address so the read exercises the L2 logic —
 * confirm against callers.
 */
static inline void v7_enable_smp(uint32_t address)
{
	uint32_t temp, val;

	/* Read auxiliary control register (ACTLR) */
	asm volatile ("mrc     p15, 0, %0, c1, c0, 1\n\t" : "=r"(val));

	/* Enable SMP: set ACTLR bit 6 */
	val |= (1 << 6);

	/* Dummy read to assure L2 access */
	temp = readl(address);
	/*
	 * Zero the read result and fold it into val. NOTE(review): this looks
	 * intended to keep the dummy read live and ordered before the ACTLR
	 * write below without changing the value written — confirm.
	 */
	temp &= 0;
	val |= temp;

	/* Write auxiliary control register */
	asm volatile ("mcr     p15, 0, %0, c1, c0, 1\n\t" : : "r"(val));

	/* Barriers so the ACTLR update takes effect before continuing */
	CP15DSB;
	CP15ISB;
}
106 
/* Errata 798870 workaround entry point; implemented outside this header */
void v7_en_l2_hazard_detect(void);

/*
 * Outer (L2) cache maintenance hooks, implemented elsewhere.
 * NOTE(review): whether the range end is inclusive, and whether addresses
 * are physical or virtual, is not visible here — confirm against the
 * implementation before relying on it.
 */
void v7_outer_cache_enable(void);
void v7_outer_cache_disable(void);
void v7_outer_cache_flush_all(void);
void v7_outer_cache_inval_all(void);
void v7_outer_cache_flush_range(u32 start, u32 end);
void v7_outer_cache_inval_range(u32 start, u32 end);
114 
#ifdef CONFIG_ARMV7_NONSEC

/* Non-secure world setup and entry helpers */
int armv7_init_nonsec(void);
int armv7_apply_memory_carveout(u64 *start, u64 *size);
bool armv7_boot_nonsec(void);

/* defined in assembly file */
unsigned int _nonsec_init(void);
void _do_nonsec_entry(void *target_pc, unsigned long r0,
		      unsigned long r1, unsigned long r2);
void _smp_pen(void);

/*
 * Bounds of the secure section and its stack; presumably provided by the
 * linker script — confirm there before use.
 */
extern char __secure_start[];
extern char __secure_end[];
extern char __secure_stack_start[];
extern char __secure_stack_end[];

#endif /* CONFIG_ARMV7_NONSEC */
133 
/*
 * Arch hooks for applying L2 auxiliary control / auxiliary control
 * register values. NOTE(review): the MIDR/variant/revision parameters
 * presumably let the implementation filter by core and revision —
 * semantics inferred from names, confirm against the implementation.
 */
void v7_arch_cp15_set_l2aux_ctrl(u32 l2auxctrl, u32 cpu_midr,
				 u32 cpu_rev_comb, u32 cpu_variant,
				 u32 cpu_rev);
void v7_arch_cp15_set_acr(u32 acr, u32 cpu_midr, u32 cpu_rev_comb,
			  u32 cpu_variant, u32 cpu_rev);
139 #endif /* ! __ASSEMBLY__ */
140 
141 #endif
142