xref: /openbmc/linux/arch/riscv/include/asm/asm.h (revision d5a9588c)
150acfb2bSThomas Gleixner /* SPDX-License-Identifier: GPL-2.0-only */
25d8544e2SPalmer Dabbelt /*
35d8544e2SPalmer Dabbelt  * Copyright (C) 2015 Regents of the University of California
45d8544e2SPalmer Dabbelt  */
55d8544e2SPalmer Dabbelt 
65d8544e2SPalmer Dabbelt #ifndef _ASM_RISCV_ASM_H
75d8544e2SPalmer Dabbelt #define _ASM_RISCV_ASM_H
85d8544e2SPalmer Dabbelt 
/*
 * __ASM_STR(x): emit the token as-is in assembly sources, but stringize it
 * in C sources so it can be pasted into inline-asm template strings.
 */
95d8544e2SPalmer Dabbelt #ifdef __ASSEMBLY__
105d8544e2SPalmer Dabbelt #define __ASM_STR(x)	x
115d8544e2SPalmer Dabbelt #else
125d8544e2SPalmer Dabbelt #define __ASM_STR(x)	#x
135d8544e2SPalmer Dabbelt #endif
145d8544e2SPalmer Dabbelt 
/*
 * __REG_SEL(a, b): select the 64-bit form (a) or 32-bit form (b) of a
 * register-width-dependent token, based on the compiler's __riscv_xlen.
 */
155d8544e2SPalmer Dabbelt #if __riscv_xlen == 64
165d8544e2SPalmer Dabbelt #define __REG_SEL(a, b)	__ASM_STR(a)
175d8544e2SPalmer Dabbelt #elif __riscv_xlen == 32
185d8544e2SPalmer Dabbelt #define __REG_SEL(a, b)	__ASM_STR(b)
195d8544e2SPalmer Dabbelt #else
205d8544e2SPalmer Dabbelt #error "Unexpected __riscv_xlen"
215d8544e2SPalmer Dabbelt #endif
225d8544e2SPalmer Dabbelt 
/*
 * XLEN-sized (native register width) memory and atomic operations:
 * REG_L/REG_S load/store one register, REG_SC is store-conditional,
 * REG_AMOSWAP_AQ is an acquire-ordered atomic swap, REG_ASM emits a
 * register-sized datum.  SZREG is the register size in bytes and
 * LGREG its log2 (useful as a shift amount for reg-array indexing).
 */
235d8544e2SPalmer Dabbelt #define REG_L		__REG_SEL(ld, lw)
245d8544e2SPalmer Dabbelt #define REG_S		__REG_SEL(sd, sw)
2518856604SPalmer Dabbelt #define REG_SC		__REG_SEL(sc.d, sc.w)
267e186433SJisheng Zhang #define REG_AMOSWAP_AQ	__REG_SEL(amoswap.d.aq, amoswap.w.aq)
276f4eea90SVincent Chen #define REG_ASM		__REG_SEL(.dword, .word)
285d8544e2SPalmer Dabbelt #define SZREG		__REG_SEL(8, 4)
295d8544e2SPalmer Dabbelt #define LGREG		__REG_SEL(3, 2)
305d8544e2SPalmer Dabbelt 
/*
 * Pointer-sized data helpers: RISCV_PTR is the assembler directive that
 * emits one pointer-sized datum, RISCV_SZPTR the pointer size in bytes,
 * RISCV_LGPTR its log2.  Bare tokens under __ASSEMBLY__, quoted strings
 * otherwise (for splicing into C inline-asm).
 */
315d8544e2SPalmer Dabbelt #if __SIZEOF_POINTER__ == 8
325d8544e2SPalmer Dabbelt #ifdef __ASSEMBLY__
335d8544e2SPalmer Dabbelt #define RISCV_PTR		.dword
345d8544e2SPalmer Dabbelt #define RISCV_SZPTR		8
355d8544e2SPalmer Dabbelt #define RISCV_LGPTR		3
365d8544e2SPalmer Dabbelt #else
375d8544e2SPalmer Dabbelt #define RISCV_PTR		".dword"
385d8544e2SPalmer Dabbelt #define RISCV_SZPTR		"8"
395d8544e2SPalmer Dabbelt #define RISCV_LGPTR		"3"
405d8544e2SPalmer Dabbelt #endif
415d8544e2SPalmer Dabbelt #elif __SIZEOF_POINTER__ == 4
425d8544e2SPalmer Dabbelt #ifdef __ASSEMBLY__
435d8544e2SPalmer Dabbelt #define RISCV_PTR		.word
445d8544e2SPalmer Dabbelt #define RISCV_SZPTR		4
455d8544e2SPalmer Dabbelt #define RISCV_LGPTR		2
465d8544e2SPalmer Dabbelt #else
475d8544e2SPalmer Dabbelt #define RISCV_PTR		".word"
485d8544e2SPalmer Dabbelt #define RISCV_SZPTR		"4"
495d8544e2SPalmer Dabbelt #define RISCV_LGPTR		"2"
505d8544e2SPalmer Dabbelt #endif
515d8544e2SPalmer Dabbelt #else
525d8544e2SPalmer Dabbelt #error "Unexpected __SIZEOF_POINTER__"
535d8544e2SPalmer Dabbelt #endif
545d8544e2SPalmer Dabbelt 
/* Native int: emitted with .word; 4 bytes, log2 = 2. */
555d8544e2SPalmer Dabbelt #if (__SIZEOF_INT__ == 4)
565e6f82b0SOlof Johansson #define RISCV_INT		__ASM_STR(.word)
575e6f82b0SOlof Johansson #define RISCV_SZINT		__ASM_STR(4)
585e6f82b0SOlof Johansson #define RISCV_LGINT		__ASM_STR(2)
595d8544e2SPalmer Dabbelt #else
605d8544e2SPalmer Dabbelt #error "Unexpected __SIZEOF_INT__"
615d8544e2SPalmer Dabbelt #endif
625d8544e2SPalmer Dabbelt 
/* Native short: emitted with .half; 2 bytes, log2 = 1. */
635d8544e2SPalmer Dabbelt #if (__SIZEOF_SHORT__ == 2)
645e6f82b0SOlof Johansson #define RISCV_SHORT		__ASM_STR(.half)
655e6f82b0SOlof Johansson #define RISCV_SZSHORT		__ASM_STR(2)
665e6f82b0SOlof Johansson #define RISCV_LGSHORT		__ASM_STR(1)
675d8544e2SPalmer Dabbelt #else
685d8544e2SPalmer Dabbelt #error "Unexpected __SIZEOF_SHORT__"
695d8544e2SPalmer Dabbelt #endif
705d8544e2SPalmer Dabbelt 
71c295bc34SHeiko Stuebner #ifdef __ASSEMBLY__
7245b32b94SJisheng Zhang #include <asm/asm-offsets.h>
73c295bc34SHeiko Stuebner 
74c295bc34SHeiko Stuebner /* Common assembly source macros */
75c295bc34SHeiko Stuebner 
76c295bc34SHeiko Stuebner /*
77c295bc34SHeiko Stuebner  * nops num: emit a straight-line sequence of \num nop instructions
78c295bc34SHeiko Stuebner  */
79c295bc34SHeiko Stuebner .macro	nops, num
80c295bc34SHeiko Stuebner 	.rept	\num
81c295bc34SHeiko Stuebner 	nop
82c295bc34SHeiko Stuebner 	.endr
83c295bc34SHeiko Stuebner .endm
84c295bc34SHeiko Stuebner 
85eff53aeaSDeepak Gupta #ifdef CONFIG_SMP
/*
 * PER_CPU_OFFSET_SHIFT: log2 of the element size of the
 * __per_cpu_offset[] array (one native-word entry per CPU),
 * used below to scale the CPU number into an array index.
 */
86eff53aeaSDeepak Gupta #ifdef CONFIG_32BIT
87eff53aeaSDeepak Gupta #define PER_CPU_OFFSET_SHIFT 2
88eff53aeaSDeepak Gupta #else
89eff53aeaSDeepak Gupta #define PER_CPU_OFFSET_SHIFT 3
90eff53aeaSDeepak Gupta #endif
91eff53aeaSDeepak Gupta 
/*
 * asm_per_cpu dst sym tmp:
 * compute this CPU's address of per-CPU symbol \sym into \dst:
 *   \dst = &\sym + __per_cpu_offset[cpu]
 * The CPU number is loaded from the TASK_TI_CPU_NUM offset off tp
 * (asm-offsets.h).  Clobbers \tmp.
 */
92eff53aeaSDeepak Gupta .macro asm_per_cpu dst sym tmp
93eff53aeaSDeepak Gupta 	REG_L \tmp, TASK_TI_CPU_NUM(tp)
94eff53aeaSDeepak Gupta 	slli  \tmp, \tmp, PER_CPU_OFFSET_SHIFT
95eff53aeaSDeepak Gupta 	la    \dst, __per_cpu_offset
96eff53aeaSDeepak Gupta 	add   \dst, \dst, \tmp
97eff53aeaSDeepak Gupta 	REG_L \tmp, 0(\dst)
98eff53aeaSDeepak Gupta 	la    \dst, \sym
99eff53aeaSDeepak Gupta 	add   \dst, \dst, \tmp
100eff53aeaSDeepak Gupta .endm
101eff53aeaSDeepak Gupta #else /* CONFIG_SMP */
/* UP build: there is a single instance of \sym, so no offset is applied. */
102eff53aeaSDeepak Gupta .macro asm_per_cpu dst sym tmp
103eff53aeaSDeepak Gupta 	la    \dst, \sym
104eff53aeaSDeepak Gupta .endm
105eff53aeaSDeepak Gupta #endif /* CONFIG_SMP */
106eff53aeaSDeepak Gupta 
10745b32b94SJisheng Zhang 	/*
	 * save_from_x6_to_x31: store x6-x31 (t1-t2, s0-s1, a0-a7, s2-s11,
	 * t3-t6) at their PT_* slots relative to sp (offsets generated in
	 * asm-offsets.h).  x1-x5 (ra/sp/gp/tp/t0) are intentionally not
	 * touched here — the caller handles them separately.
	 */
10845b32b94SJisheng Zhang 	.macro save_from_x6_to_x31
10945b32b94SJisheng Zhang 	REG_S x6,  PT_T1(sp)
11045b32b94SJisheng Zhang 	REG_S x7,  PT_T2(sp)
11145b32b94SJisheng Zhang 	REG_S x8,  PT_S0(sp)
11245b32b94SJisheng Zhang 	REG_S x9,  PT_S1(sp)
11345b32b94SJisheng Zhang 	REG_S x10, PT_A0(sp)
11445b32b94SJisheng Zhang 	REG_S x11, PT_A1(sp)
11545b32b94SJisheng Zhang 	REG_S x12, PT_A2(sp)
11645b32b94SJisheng Zhang 	REG_S x13, PT_A3(sp)
11745b32b94SJisheng Zhang 	REG_S x14, PT_A4(sp)
11845b32b94SJisheng Zhang 	REG_S x15, PT_A5(sp)
11945b32b94SJisheng Zhang 	REG_S x16, PT_A6(sp)
12045b32b94SJisheng Zhang 	REG_S x17, PT_A7(sp)
12145b32b94SJisheng Zhang 	REG_S x18, PT_S2(sp)
12245b32b94SJisheng Zhang 	REG_S x19, PT_S3(sp)
12345b32b94SJisheng Zhang 	REG_S x20, PT_S4(sp)
12445b32b94SJisheng Zhang 	REG_S x21, PT_S5(sp)
12545b32b94SJisheng Zhang 	REG_S x22, PT_S6(sp)
12645b32b94SJisheng Zhang 	REG_S x23, PT_S7(sp)
12745b32b94SJisheng Zhang 	REG_S x24, PT_S8(sp)
12845b32b94SJisheng Zhang 	REG_S x25, PT_S9(sp)
12945b32b94SJisheng Zhang 	REG_S x26, PT_S10(sp)
13045b32b94SJisheng Zhang 	REG_S x27, PT_S11(sp)
13145b32b94SJisheng Zhang 	REG_S x28, PT_T3(sp)
13245b32b94SJisheng Zhang 	REG_S x29, PT_T4(sp)
13345b32b94SJisheng Zhang 	REG_S x30, PT_T5(sp)
13445b32b94SJisheng Zhang 	REG_S x31, PT_T6(sp)
13545b32b94SJisheng Zhang 	.endm
13645b32b94SJisheng Zhang 
13745b32b94SJisheng Zhang 	/*
	 * restore_from_x6_to_x31: reload x6-x31 from their PT_* slots
	 * relative to sp — the exact inverse of save_from_x6_to_x31.
	 * x1-x5 (ra/sp/gp/tp/t0) are restored separately by the caller.
	 */
13845b32b94SJisheng Zhang 	.macro restore_from_x6_to_x31
13945b32b94SJisheng Zhang 	REG_L x6,  PT_T1(sp)
14045b32b94SJisheng Zhang 	REG_L x7,  PT_T2(sp)
14145b32b94SJisheng Zhang 	REG_L x8,  PT_S0(sp)
14245b32b94SJisheng Zhang 	REG_L x9,  PT_S1(sp)
14345b32b94SJisheng Zhang 	REG_L x10, PT_A0(sp)
14445b32b94SJisheng Zhang 	REG_L x11, PT_A1(sp)
14545b32b94SJisheng Zhang 	REG_L x12, PT_A2(sp)
14645b32b94SJisheng Zhang 	REG_L x13, PT_A3(sp)
14745b32b94SJisheng Zhang 	REG_L x14, PT_A4(sp)
14845b32b94SJisheng Zhang 	REG_L x15, PT_A5(sp)
14945b32b94SJisheng Zhang 	REG_L x16, PT_A6(sp)
15045b32b94SJisheng Zhang 	REG_L x17, PT_A7(sp)
15145b32b94SJisheng Zhang 	REG_L x18, PT_S2(sp)
15245b32b94SJisheng Zhang 	REG_L x19, PT_S3(sp)
15345b32b94SJisheng Zhang 	REG_L x20, PT_S4(sp)
15445b32b94SJisheng Zhang 	REG_L x21, PT_S5(sp)
15545b32b94SJisheng Zhang 	REG_L x22, PT_S6(sp)
15645b32b94SJisheng Zhang 	REG_L x23, PT_S7(sp)
15745b32b94SJisheng Zhang 	REG_L x24, PT_S8(sp)
15845b32b94SJisheng Zhang 	REG_L x25, PT_S9(sp)
15945b32b94SJisheng Zhang 	REG_L x26, PT_S10(sp)
16045b32b94SJisheng Zhang 	REG_L x27, PT_S11(sp)
16145b32b94SJisheng Zhang 	REG_L x28, PT_T3(sp)
16245b32b94SJisheng Zhang 	REG_L x29, PT_T4(sp)
16345b32b94SJisheng Zhang 	REG_L x30, PT_T5(sp)
16445b32b94SJisheng Zhang 	REG_L x31, PT_T6(sp)
16545b32b94SJisheng Zhang 	.endm
16645b32b94SJisheng Zhang 
167*d5a9588cSClément Léger /* Annotate a function as being unsuitable for kprobes. */
/*
 * ASM_NOKPROBE(name): emit the symbol's address (pointer-sized, via
 * RISCV_PTR) into the _kprobe_blacklist section so that probing it is
 * refused.  Expands to nothing when CONFIG_KPROBES is disabled.
 */
168*d5a9588cSClément Léger #ifdef CONFIG_KPROBES
169*d5a9588cSClément Léger #define ASM_NOKPROBE(name)				\
170*d5a9588cSClément Léger 	.pushsection "_kprobe_blacklist", "aw";		\
171*d5a9588cSClément Léger 	RISCV_PTR name;					\
172*d5a9588cSClément Léger 	.popsection
173*d5a9588cSClément Léger #else
174*d5a9588cSClément Léger #define ASM_NOKPROBE(name)
175*d5a9588cSClément Léger #endif
176*d5a9588cSClément Léger 
177c295bc34SHeiko Stuebner #endif /* __ASSEMBLY__ */
178c295bc34SHeiko Stuebner 
1795d8544e2SPalmer Dabbelt #endif /* _ASM_RISCV_ASM_H */
180