Searched refs: __my_cpu_offset (Results 1 – 9 of 9) sorted by relevance

/openbmc/linux/arch/arm/include/asm/
percpu.h 28 static __always_inline unsigned long __my_cpu_offset(void) in __my_cpu_offset() function
62 #define __my_cpu_offset __my_cpu_offset() macro
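The ARM header pairs an always-inline function with a same-named macro, so generic code can use __my_cpu_offset as a plain expression while every use still expands to a call of the function. A minimal userspace sketch of that naming idiom, with hypothetical names and a dummy constant in place of the CPU register read:

#include <stdio.h>

/* Does the real work; the kernel version reads a CPU register instead. */
static inline unsigned long my_cpu_offset(void)
{
	return 0x40;	/* dummy offset for illustration */
}

/* Same-named macro: a bare use of the name becomes a call. The inner
 * occurrence is not re-expanded (self-referential macro rule). */
#define my_cpu_offset my_cpu_offset()

int main(void)
{
	/* Reads like a variable, compiles to a call of the inline function. */
	printf("offset = %#lx\n", my_cpu_offset);
	return 0;
}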
/openbmc/linux/arch/loongarch/include/asm/
percpu.h 25 register unsigned long __my_cpu_offset __asm__("$r21");
29 __my_cpu_offset = off; in set_my_cpu_offset()
33 #define __my_cpu_offset \ macro
35 __asm__ __volatile__("":"+r"(__my_cpu_offset)); \
36 __my_cpu_offset; \
/openbmc/linux/arch/sparc/include/asm/
percpu_64.h 19 #define __my_cpu_offset __local_per_cpu_offset macro
/openbmc/linux/arch/powerpc/include/asm/
percpu.h 13 #define __my_cpu_offset local_paca->data_offset macro
/openbmc/linux/arch/ia64/include/asm/
percpu.h 23 #define __my_cpu_offset __ia64_per_cpu_var(local_per_cpu_offset) macro
/openbmc/linux/include/asm-generic/
percpu.h 30 #ifndef __my_cpu_offset
31 #define __my_cpu_offset per_cpu_offset(raw_smp_processor_id()) macro
36 #define my_cpu_offset __my_cpu_offset
44 #define arch_raw_cpu_ptr(ptr) SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
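If an architecture does not override it, the generic header falls back to per_cpu_offset(raw_smp_processor_id()), and arch_raw_cpu_ptr() shifts a per-CPU symbol's address by that offset. A minimal userspace sketch of the shifting scheme, with hypothetical names, a fixed fake CPU id, and a plain array standing in for the real per-CPU areas:

#include <stdio.h>

#define NR_CPUS 4

/* One copy of a "per-CPU" counter per CPU, laid out back to back. */
static long counter[NR_CPUS];

/* Byte offset of each CPU's copy from CPU 0's copy;
 * plays the role of per_cpu_offset(cpu). */
static long cpu_offset[NR_CPUS];

/* Stand-in for raw_smp_processor_id(). */
static int current_cpu = 2;

/* Stand-in for SHIFT_PERCPU_PTR(ptr, __my_cpu_offset): add the current
 * CPU's offset to the address of the CPU-0 copy. */
#define shift_percpu_ptr(ptr) \
	((typeof(ptr))((char *)(ptr) + cpu_offset[current_cpu]))

int main(void)
{
	for (int cpu = 0; cpu < NR_CPUS; cpu++)
		cpu_offset[cpu] = (char *)&counter[cpu] - (char *)&counter[0];

	*shift_percpu_ptr(&counter[0]) = 42;	/* writes CPU 2's copy */
	printf("counter[2] = %ld\n", counter[2]);
	return 0;
}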
/openbmc/linux/arch/arm64/include/asm/
percpu.h 50 #define __my_cpu_offset __hyp_my_cpu_offset() macro
52 #define __my_cpu_offset __kern_my_cpu_offset() macro
/openbmc/linux/arch/s390/include/asm/
percpu.h 12 #define __my_cpu_offset S390_lowcore.percpu_offset macro
/openbmc/linux/arch/x86/include/asm/
percpu.h 32 #define __my_cpu_offset this_cpu_read(this_cpu_off) macro
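x86 keeps the offset itself in a per-CPU variable (this_cpu_off) and reaches it through the segment base, so reading the current CPU's offset is a single relative load. A loose userspace analogue, using a thread-local variable where each thread stands in for a CPU (hypothetical names, not kernel code):

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

/* Analogue of this_cpu_off: one instance per thread, reached through the
 * thread's TLS base much as the kernel variable is reached through %gs. */
static __thread unsigned long my_off;

static void *cpu_thread(void *arg)
{
	unsigned long cpu = (uintptr_t)arg;

	my_off = cpu * 0x100;	/* pretend per-CPU area offset */
	printf("cpu %lu: my_off = %#lx\n", cpu, my_off);
	return NULL;
}

int main(void)
{
	pthread_t t[2];

	for (uintptr_t i = 0; i < 2; i++)
		pthread_create(&t[i], NULL, cpu_thread, (void *)i);
	for (int i = 0; i < 2; i++)
		pthread_join(t[i], NULL);
	return 0;
}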