/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_VDSO_GETTIMEOFDAY_H
#define _ASM_POWERPC_VDSO_GETTIMEOFDAY_H

#ifndef __ASSEMBLY__

#include <asm/page.h>
#include <asm/vdso/timebase.h>
#include <asm/barrier.h>
#include <asm/unistd.h>
#include <uapi/linux/time.h>

#define VDSO_HAS_CLOCK_GETRES		1

#define VDSO_HAS_TIME			1

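/*
 * Invoke a system call with two arguments: the syscall number goes in
 * r0 and the arguments in r3/r4, then the "sc" instruction traps into
 * the kernel. On powerpc the kernel signals failure by setting CR0.SO,
 * so the result is negated on that path to yield the usual negative
 * errno convention.
 */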
static __always_inline int do_syscall_2(const unsigned long _r0, const unsigned long _r3,
					const unsigned long _r4)
{
	register long r0 asm("r0") = _r0;
	register unsigned long r3 asm("r3") = _r3;
	register unsigned long r4 asm("r4") = _r4;
	register int ret asm ("r3");

	asm volatile(
		"       sc\n"
		"	bns+	1f\n"
		"	neg	%0, %0\n"
		"1:\n"
	: "=r" (ret), "+r" (r4), "+r" (r0)
	: "r" (r3)
	: "memory", "r5", "r6", "r7", "r8", "r9", "r10", "r11", "r12", "cr0", "ctr");

	return ret;
}

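/*
 * The fallbacks below issue the real system call whenever the generic
 * VDSO code cannot service a request from the vdso data page alone.
 */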
static __always_inline
int gettimeofday_fallback(struct __kernel_old_timeval *_tv, struct timezone *_tz)
{
	return do_syscall_2(__NR_gettimeofday, (unsigned long)_tv, (unsigned long)_tz);
}

#ifdef __powerpc64__

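/* On 64-bit the native syscalls already take a 64-bit struct __kernel_timespec. */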
static __always_inline
int clock_gettime_fallback(clockid_t _clkid, struct __kernel_timespec *_ts)
{
	return do_syscall_2(__NR_clock_gettime, _clkid, (unsigned long)_ts);
}

static __always_inline
int clock_getres_fallback(clockid_t _clkid, struct __kernel_timespec *_ts)
{
	return do_syscall_2(__NR_clock_getres, _clkid, (unsigned long)_ts);
}

#else

#define BUILD_VDSO32		1

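/*
 * On 32-bit the time64 syscalls back the 64-bit time_t fallbacks,
 * while the legacy syscalls back the old_timespec32 variants below.
 */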
static __always_inline
int clock_gettime_fallback(clockid_t _clkid, struct __kernel_timespec *_ts)
{
	return do_syscall_2(__NR_clock_gettime64, _clkid, (unsigned long)_ts);
}

static __always_inline
int clock_getres_fallback(clockid_t _clkid, struct __kernel_timespec *_ts)
{
	return do_syscall_2(__NR_clock_getres_time64, _clkid, (unsigned long)_ts);
}

static __always_inline
int clock_gettime32_fallback(clockid_t _clkid, struct old_timespec32 *_ts)
{
	return do_syscall_2(__NR_clock_gettime, _clkid, (unsigned long)_ts);
}

static __always_inline
int clock_getres32_fallback(clockid_t _clkid, struct old_timespec32 *_ts)
{
	return do_syscall_2(__NR_clock_getres, _clkid, (unsigned long)_ts);
}
#endif

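/*
 * Read the timebase directly; clock_mode and vd are unused because the
 * timebase backs every VDSO-capable clocksource on powerpc.
 */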
static __always_inline u64 __arch_get_hw_counter(s32 clock_mode,
						 const struct vdso_data *vd)
{
	return get_tb();
}

const struct vdso_data *__arch_get_vdso_data(void);

#ifdef CONFIG_TIME_NS
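/* The time namespace data page is mapped right after the vdso data page. */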
static __always_inline
const struct vdso_data *__arch_get_timens_vdso_data(const struct vdso_data *vd)
{
	return (void *)vd + PAGE_SIZE;
}
#endif

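/*
 * The timebase is always usable, so the generic clocksource validity
 * check is not needed; report the clocksource as good unconditionally.
 */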
static inline bool vdso_clocksource_ok(const struct vdso_data *vd)
{
	return true;
}
#define vdso_clocksource_ok vdso_clocksource_ok

/*
 * powerpc specific delta calculation.
 *
 * This variant removes the masking of the subtraction because the
 * clocksource mask of all VDSO capable clocksources on powerpc is U64_MAX
 * which would result in a pointless operation. The compiler cannot
 * optimize it away as the mask comes from the vdso data and is not a
 * compile time constant.
 */
static __always_inline u64 vdso_calc_delta(u64 cycles, u64 last, u64 mask, u32 mult)
{
	return (cycles - last) * mult;
}
#define vdso_calc_delta vdso_calc_delta

#ifndef __powerpc64__
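/*
 * Open-coded 64-bit right shift done in 32-bit halves so the common
 * case, where the result fits in 32 bits, stays cheap on 32-bit
 * powerpc. Only correct for 0 < shift < 32.
 */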
static __always_inline u64 vdso_shift_ns(u64 ns, unsigned long shift)
{
	u32 hi = ns >> 32;
	u32 lo = ns;

	lo >>= shift;
	lo |= hi << (32 - shift);
	hi >>= shift;

	if (likely(hi == 0))
		return lo;

	return ((u64)hi << 32) | lo;
}
#define vdso_shift_ns vdso_shift_ns
#endif

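/* Prototypes for the C implementations behind the VDSO time functions. */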
#ifdef __powerpc64__
int __c_kernel_clock_gettime(clockid_t clock, struct __kernel_timespec *ts,
			     const struct vdso_data *vd);
int __c_kernel_clock_getres(clockid_t clock_id, struct __kernel_timespec *res,
			    const struct vdso_data *vd);
#else
int __c_kernel_clock_gettime(clockid_t clock, struct old_timespec32 *ts,
			     const struct vdso_data *vd);
int __c_kernel_clock_gettime64(clockid_t clock, struct __kernel_timespec *ts,
			       const struct vdso_data *vd);
int __c_kernel_clock_getres(clockid_t clock_id, struct old_timespec32 *res,
			    const struct vdso_data *vd);
#endif
int __c_kernel_gettimeofday(struct __kernel_old_timeval *tv, struct timezone *tz,
			    const struct vdso_data *vd);
__kernel_old_time_t __c_kernel_time(__kernel_old_time_t *time,
				    const struct vdso_data *vd);
#endif /* __ASSEMBLY__ */

#endif /* _ASM_POWERPC_VDSO_GETTIMEOFDAY_H */