1b2441318SGreg Kroah-Hartman /* SPDX-License-Identifier: GPL-2.0 */
2f159f4edSTony Lindgren #ifndef __ASMARM_TLS_H
3f159f4edSTony Lindgren #define __ASMARM_TLS_H
4f159f4edSTony Lindgren
5fbfb872fSNathan Lynch #include <linux/compiler.h>
6fbfb872fSNathan Lynch #include <asm/thread_info.h>
7fbfb872fSNathan Lynch
8f159f4edSTony Lindgren #ifdef __ASSEMBLY__
9a4780adeSAndré Hentschel #include <asm/asm-offsets.h>
@ Used when CONFIG_TLS_REG_EMUL is selected (see tls_emu below):
@ TLS register accesses are emulated, so there is no per-thread TLS
@ state to save or restore at context-switch time.
.macro switch_tls_none, base, tp, tpuser, tmp1, tmp2
.endm
12f159f4edSTony Lindgren
@ V6K-class CPUs: the read-only TLS register (TPIDRURO, c13/c0/3) is
@ not written here - its update is deferred until the return to user
@ space (see defer_tls_reg_update below). Only the user read/write
@ register (TPIDRURW, c13/c0/2) is swapped: the outgoing thread's
@ value is saved into thread_info and the incoming thread's value is
@ installed.
.macro switch_tls_v6k, base, tp, tpuser, tmp1, tmp2
	mrc	p15, 0, \tmp2, c13, c0, 2	@ get the user r/w register
	@ TLS register update is deferred until return to user space
	mcr	p15, 0, \tpuser, c13, c0, 2	@ set the user r/w register
	str	\tmp2, [\base, #TI_TP_VALUE + 4]	@ save it
.endm
19f159f4edSTony Lindgren
20a4780adeSAndré Hentschel .macro switch_tls_v6, base, tp, tpuser, tmp1, tmp2
21*75fa4adcSArd Biesheuvel #ifdef CONFIG_SMP
22*75fa4adcSArd Biesheuvel ALT_SMP(nop)
23*75fa4adcSArd Biesheuvel ALT_UP_B(.L0_\@)
24*75fa4adcSArd Biesheuvel .subsection 1
25*75fa4adcSArd Biesheuvel #endif
26*75fa4adcSArd Biesheuvel .L0_\@:
27c2755910SArd Biesheuvel ldr_va \tmp1, elf_hwcap
28f159f4edSTony Lindgren mov \tmp2, #0xffff0fff
29f159f4edSTony Lindgren tst \tmp1, #HWCAP_TLS @ hardware TLS available?
30f159f4edSTony Lindgren streq \tp, [\tmp2, #-15] @ set TLS value at 0xffff0ff0
31*75fa4adcSArd Biesheuvel beq .L2_\@
32*75fa4adcSArd Biesheuvel mcr p15, 0, \tp, c13, c0, 3 @ yes, set TLS register
33*75fa4adcSArd Biesheuvel #ifdef CONFIG_SMP
34*75fa4adcSArd Biesheuvel b .L1_\@
35*75fa4adcSArd Biesheuvel .previous
36c2755910SArd Biesheuvel #endif
37*75fa4adcSArd Biesheuvel .L1_\@: switch_tls_v6k \base, \tp, \tpuser, \tmp1, \tmp2
38*75fa4adcSArd Biesheuvel .L2_\@:
39f159f4edSTony Lindgren .endm
40f159f4edSTony Lindgren
@ CPUs with no TLS register at all: publish the TLS value at the fixed
@ kuser helper slot 0xffff0ff0, where user space retrieves it via the
@ kuser get_tls helper (see the comment in set_tls() and entry-armv.S).
.macro switch_tls_software, base, tp, tpuser, tmp1, tmp2
	mov	\tmp1, #0xffff0fff
	str	\tp, [\tmp1, #-15]		@ set TLS value at 0xffff0ff0
.endm
#else
#include <asm/smp_plat.h>
#endif
48f159f4edSTony Lindgren
/*
 * Per-configuration TLS capability flags used by the C code below:
 *
 *  tls_emu              - TLS register accesses are emulated, no real
 *                         per-thread TLS state to maintain
 *  has_tls_reg          - the CPU provides the hardware TLS register
 *                         (may be a runtime test on plain V6)
 *  defer_tls_reg_update - the TLS register is written on the return
 *                         to user space path rather than immediately
 *  switch_tls           - asm macro used by the context switch code
 */
#ifdef CONFIG_TLS_REG_EMUL
#define tls_emu		1
#define has_tls_reg		1
#define defer_tls_reg_update	0
#define switch_tls	switch_tls_none
#elif defined(CONFIG_CPU_V6)
/* Plain V6: hardware TLS presence probed via elf_hwcap at run time */
#define tls_emu		0
#define has_tls_reg		(elf_hwcap & HWCAP_TLS)
#define defer_tls_reg_update	is_smp()
#define switch_tls	switch_tls_v6
#elif defined(CONFIG_CPU_32v6K)
/* V6K and later always have the TLS register */
#define tls_emu		0
#define has_tls_reg		1
#define defer_tls_reg_update	1
#define switch_tls	switch_tls_v6k
#else
/* No hardware support: software TLS via the kuser page */
#define tls_emu		0
#define has_tls_reg		0
#define defer_tls_reg_update	0
#define switch_tls	switch_tls_software
#endif
70f159f4edSTony Lindgren
71a4780adeSAndré Hentschel #ifndef __ASSEMBLY__
72fbfb872fSNathan Lynch
/*
 * set_tls() - record and install a new TLS value for the current thread.
 * @val: the new TLS pointer value
 *
 * The shadow copy in thread_info is updated first, then the hardware
 * TLS register (or, on CPUs without one, the kuser page slot) is
 * updated. Called with preemption enabled; the barrier() below is what
 * keeps this safe against a context switch in the middle.
 */
static inline void set_tls(unsigned long val)
{
	struct thread_info *thread;

	thread = current_thread_info();

	/* Shadow state must be written before the hardware state. */
	thread->tp_value[0] = val;

	/*
	 * This code runs with preemption enabled and therefore must
	 * be reentrant with respect to switch_tls.
	 *
	 * We need to ensure ordering between the shadow state and the
	 * hardware state, so that we don't corrupt the hardware state
	 * with a stale shadow state during context switch.
	 *
	 * If we're preempted here, switch_tls will load TPIDRURO from
	 * thread_info upon resuming execution and the following mcr
	 * is merely redundant.
	 */
	barrier();

	if (!tls_emu) {
		if (has_tls_reg && !defer_tls_reg_update) {
			/* Write TPIDRURO directly. */
			asm("mcr p15, 0, %0, c13, c0, 3"
			    : : "r" (val));
		} else if (!has_tls_reg) {
#ifdef CONFIG_KUSER_HELPERS
			/*
			 * User space must never try to access this
			 * directly. Expect your app to break
			 * eventually if you do so. The user helper
			 * at 0xffff0fe0 must be used instead. (see
			 * entry-armv.S for details)
			 */
			*((unsigned int *)0xffff0ff0) = val;
#endif
		}

	}
}
114fbfb872fSNathan Lynch
get_tpuser(void)115a4780adeSAndré Hentschel static inline unsigned long get_tpuser(void)
116a4780adeSAndré Hentschel {
117a4780adeSAndré Hentschel unsigned long reg = 0;
118a4780adeSAndré Hentschel
119a4780adeSAndré Hentschel if (has_tls_reg && !tls_emu)
120a4780adeSAndré Hentschel __asm__("mrc p15, 0, %0, c13, c0, 2" : "=r" (reg));
121a4780adeSAndré Hentschel
122a4780adeSAndré Hentschel return reg;
123a4780adeSAndré Hentschel }
124fbfb872fSNathan Lynch
set_tpuser(unsigned long val)125fbfb872fSNathan Lynch static inline void set_tpuser(unsigned long val)
126fbfb872fSNathan Lynch {
127fbfb872fSNathan Lynch /* Since TPIDRURW is fully context-switched (unlike TPIDRURO),
128fbfb872fSNathan Lynch * we need not update thread_info.
129fbfb872fSNathan Lynch */
130fbfb872fSNathan Lynch if (has_tls_reg && !tls_emu) {
131fbfb872fSNathan Lynch asm("mcr p15, 0, %0, c13, c0, 2"
132fbfb872fSNathan Lynch : : "r" (val));
133fbfb872fSNathan Lynch }
134fbfb872fSNathan Lynch }
135fbfb872fSNathan Lynch
/* Clear both TLS values (set_tls: TPIDRURO shadow+hw, set_tpuser:
 * TPIDRURW) to zero for the current thread. */
static inline void flush_tls(void)
{
	set_tls(0);
	set_tpuser(0);
}
141fbfb872fSNathan Lynch
142a4780adeSAndré Hentschel #endif
143f159f4edSTony Lindgren #endif /* __ASMARM_TLS_H */
144