1d26b3817SChristophe Leroy /* SPDX-License-Identifier: GPL-2.0-or-later */
2d26b3817SChristophe Leroy /*
3d26b3817SChristophe Leroy * Common timebase prototypes and such for all ppc machines.
4d26b3817SChristophe Leroy */
5d26b3817SChristophe Leroy
6d26b3817SChristophe Leroy #ifndef _ASM_POWERPC_VDSO_TIMEBASE_H
7d26b3817SChristophe Leroy #define _ASM_POWERPC_VDSO_TIMEBASE_H
8d26b3817SChristophe Leroy
9d26b3817SChristophe Leroy #include <asm/reg.h>
10d26b3817SChristophe Leroy
/*
 * We use __powerpc64__ here because we want the compat VDSO to use the 32-bit
 * version below in the else case of the ifdef.
 */
#if defined(__powerpc64__) && (defined(CONFIG_PPC_CELL) || defined(CONFIG_PPC_E500))
/*
 * Cell/e500 timebase-bug workaround path: a TBL read can spuriously
 * return zero, so re-read until non-zero.  The compare-and-branch is
 * emitted through ASM_FTR_IFSET keyed on CPU_FTR_CELL_TB_BUG, so it is
 * patched out on CPUs without the erratum.
 */
#define mftb()		({unsigned long rval;				\
			asm volatile(					\
				"90:	mfspr %0, %2;\n"		\
				ASM_FTR_IFSET(				\
					"97:	cmpwi %0,0;\n"		\
					"	beq- 90b;\n", "", %1)	\
			: "=r" (rval)					\
			: "i" (CPU_FTR_CELL_TB_BUG), "i" (SPRN_TBRL) : "cr0"); \
			rval;})
#elif defined(CONFIG_PPC_8xx)
/* 8xx reads the timebase with the dedicated mftbl instruction. */
#define mftb()		({unsigned long rval;				\
			asm volatile("mftbl %0" : "=r" (rval)); rval;})
#else
/* Generic path: read the (lower) timebase via SPRN_TBRL. */
#define mftb()		({unsigned long rval;				\
			asm volatile("mfspr %0, %1" :			\
				     "=r" (rval) : "i" (SPRN_TBRL)); rval;})
#endif /* 64-bit && (CONFIG_PPC_CELL || CONFIG_PPC_E500) */
33d26b3817SChristophe Leroy
/* mftbu(): read the upper 32 bits of the timebase (used on 32-bit). */
#if defined(CONFIG_PPC_8xx)
/* 8xx has a dedicated mftbu instruction rather than an SPR read. */
#define mftbu()		({unsigned long rval;				\
			asm volatile("mftbu %0" : "=r" (rval)); rval;})
#else
#define mftbu()		({unsigned long rval;				\
			asm volatile("mfspr %0, %1" : "=r" (rval) :	\
				"i" (SPRN_TBRU)); rval;})
#endif
42d26b3817SChristophe Leroy
/* Write the lower/upper 32 bits of the timebase. */
#define mttbl(v)	asm volatile("mttbl %0":: "r"(v))
#define mttbu(v)	asm volatile("mttbu %0":: "r"(v))
45d26b3817SChristophe Leroy
get_tb(void)460faa22f0SChristophe Leroy static __always_inline u64 get_tb(void)
47d26b3817SChristophe Leroy {
48d26b3817SChristophe Leroy unsigned int tbhi, tblo, tbhi2;
49d26b3817SChristophe Leroy
505c189c52SMichael Ellerman /*
515c189c52SMichael Ellerman * We use __powerpc64__ here not CONFIG_PPC64 because we want the compat
525c189c52SMichael Ellerman * VDSO to use the 32-bit compatible version in the while loop below.
535c189c52SMichael Ellerman */
545c189c52SMichael Ellerman if (__is_defined(__powerpc64__))
55d26b3817SChristophe Leroy return mftb();
56d26b3817SChristophe Leroy
57d26b3817SChristophe Leroy do {
58d26b3817SChristophe Leroy tbhi = mftbu();
59d26b3817SChristophe Leroy tblo = mftb();
60d26b3817SChristophe Leroy tbhi2 = mftbu();
61d26b3817SChristophe Leroy } while (tbhi != tbhi2);
62d26b3817SChristophe Leroy
63d26b3817SChristophe Leroy return ((u64)tbhi << 32) | tblo;
64d26b3817SChristophe Leroy }
65d26b3817SChristophe Leroy
/*
 * set_tb() - write the 64-bit timebase from two 32-bit halves.
 *
 * The write order is significant and must not be changed: TBL is
 * zeroed first so the timebase cannot carry into TBU between the
 * write of the upper half and the final write of the lower half.
 */
static inline void set_tb(unsigned int upper, unsigned int lower)
{
	/* Zero TBL first so no carry into TBU occurs mid-update. */
	mtspr(SPRN_TBWL, 0);
	mtspr(SPRN_TBWU, upper);
	mtspr(SPRN_TBWL, lower);
}
72d26b3817SChristophe Leroy
73d26b3817SChristophe Leroy #endif /* _ASM_POWERPC_VDSO_TIMEBASE_H */
74