preempt.h: 9095bf25ea08135a5b74875dd0e3eeaddc4218a0 -> f5caf621ee357279e759c0911daf6d55c7d36f03
 #ifndef __ASM_PREEMPT_H
 #define __ASM_PREEMPT_H

 #include <asm/rmwcc.h>
 #include <asm/percpu.h>
 #include <linux/thread_info.h>

 DECLARE_PER_CPU(int, __preempt_count);

--- 86 unchanged lines hidden ---

  */
 static __always_inline bool should_resched(int preempt_offset)
 {
 	return unlikely(raw_cpu_read_4(__preempt_count) == preempt_offset);
 }

 #ifdef CONFIG_PREEMPT
   extern asmlinkage void ___preempt_schedule(void);
-# define __preempt_schedule() \
-({ \
-	register void *__sp asm(_ASM_SP); \
-	asm volatile ("call ___preempt_schedule" : "+r"(__sp)); \
-})
+# define __preempt_schedule() \
+	asm volatile ("call ___preempt_schedule" : ASM_CALL_CONSTRAINT)

   extern asmlinkage void preempt_schedule(void);
   extern asmlinkage void ___preempt_schedule_notrace(void);
-# define __preempt_schedule_notrace() \
-({ \
-	register void *__sp asm(_ASM_SP); \
-	asm volatile ("call ___preempt_schedule_notrace" : "+r"(__sp)); \
-})
+# define __preempt_schedule_notrace() \
+	asm volatile ("call ___preempt_schedule_notrace" : ASM_CALL_CONSTRAINT)

   extern asmlinkage void preempt_schedule_notrace(void);
 #endif

 #endif /* __ASM_PREEMPT_H */
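Note on the change above: both macros drop the per-call-site local register variable (register void *__sp asm(_ASM_SP); ... "+r"(__sp)) in favour of the shared ASM_CALL_CONSTRAINT output constraint, which ties every inline asm containing a "call" to the stack pointer so the compiler knows the asm uses the stack and cannot schedule the call before the frame is set up. The stand-alone sketch below illustrates that pattern in user space; every demo_* name is hypothetical, the build flags are an assumption of this sketch, and only the constraint technique mirrors the kernel change.

/*
 * Hypothetical user-space sketch (x86-64 Linux, GCC/Clang) of the
 * constraint pattern the diff converts to.  All demo_* names are made
 * up; only the technique mirrors the kernel change: instead of each
 * call site declaring its own "register void *__sp asm(_ASM_SP)" and
 * passing "+r"(__sp), one global register variable is bound to the
 * stack pointer and a shared output constraint is added to every
 * inline asm that contains a "call".
 *
 * Build:  gcc -O2 -mno-red-zone demo.c
 * (The bare "call" pushes a return address below the stack pointer,
 *  so the red zone must be off; the kernel always builds that way.)
 */
#include <stdio.h>

/* Rough analogue of current_stack_pointer / ASM_CALL_CONSTRAINT. */
register unsigned long demo_stack_pointer asm("rsp");
#define DEMO_CALL_CONSTRAINT "+r" (demo_stack_pointer)

int demo_hits;

void demo_target(void)		/* stand-in for ___preempt_schedule */
{
	demo_hits++;
}

/*
 * Rough analogue of __preempt_schedule().  demo_target() is ordinary
 * compiled C, so the caller-saved registers it may clobber are listed;
 * the kernel calls an assembly thunk that preserves all registers and
 * therefore needs no clobber list.  "memory" forces the later read of
 * demo_hits to reload from memory.
 */
#define demo_call()						\
	asm volatile ("call demo_target"			\
		      : DEMO_CALL_CONSTRAINT			\
		      : /* no inputs */				\
		      : "rax", "rcx", "rdx", "rsi", "rdi",	\
			"r8", "r9", "r10", "r11", "memory", "cc")

int main(void)
{
	demo_call();
	printf("demo_target ran %d time(s)\n", demo_hits);
	return 0;
}

In the kernel, the global register variable is current_stack_pointer and the constraint macro is ASM_CALL_CONSTRAINT, defined in arch/x86/include/asm/asm.h as of this change; that is why the converted macros shrink to a single asm statement with no local __sp declaration and no explicit clobbers.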