/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
/*
 * LoongArch specific definitions for NOLIBC
 * Copyright (C) 2023 Loongson Technology Corporation Limited
 */

#ifndef _NOLIBC_ARCH_LOONGARCH_H
#define _NOLIBC_ARCH_LOONGARCH_H

#include "compiler.h"
#include "crt.h"

/* Syscalls for LoongArch:
 *   - stack is 16-byte aligned
 *   - syscall number is passed in a7
 *   - arguments are in a0, a1, a2, a3, a4, a5
 *   - the system call is performed by calling "syscall 0"
 *   - syscall return comes in a0
 *   - the arguments are cast to long and assigned into the target
 *     registers, which are then simply passed as registers to the asm code,
 *     so that we don't have to deal with register constraint issues.
 *
 * On LoongArch, select() is not implemented, so we have to use pselect6().
 */
#define __ARCH_WANT_SYS_PSELECT6
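/*
 * The clobber list below tells the compiler that the caller-saved temporary
 * registers $t0-$t8 may be clobbered across the "syscall 0" instruction (the
 * kernel may not preserve them), and "memory" keeps it from caching memory
 * contents across the call.
 */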
#define _NOLIBC_SYSCALL_CLOBBERLIST \
	"memory", "$t0", "$t1", "$t2", "$t3", "$t4", "$t5", "$t6", "$t7", "$t8"

#define my_syscall0(num) \
({ \
	register long _num  __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0"); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "=r"(_arg1) \
		: "r"(_num) \
		: _NOLIBC_SYSCALL_CLOBBERLIST \
	); \
	_arg1; \
})
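
/*
 * Note on constraints: my_syscall0() has no first argument, so a0 is a pure
 * output and uses "=r"; the variants below preload a0 with arg1 and use "+r"
 * so that the same register serves as both input and output.
 */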

#define my_syscall1(num, arg1) \
({ \
	register long _num  __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_num) \
		: _NOLIBC_SYSCALL_CLOBBERLIST \
	); \
	_arg1; \
})

#define my_syscall2(num, arg1, arg2) \
({ \
	register long _num  __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	register long _arg2 __asm__ ("a1") = (long)(arg2); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_arg2), \
		  "r"(_num) \
		: _NOLIBC_SYSCALL_CLOBBERLIST \
	); \
	_arg1; \
})

#define my_syscall3(num, arg1, arg2, arg3) \
({ \
	register long _num  __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	register long _arg2 __asm__ ("a1") = (long)(arg2); \
	register long _arg3 __asm__ ("a2") = (long)(arg3); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_arg2), "r"(_arg3), \
		  "r"(_num) \
		: _NOLIBC_SYSCALL_CLOBBERLIST \
	); \
	_arg1; \
})

#define my_syscall4(num, arg1, arg2, arg3, arg4) \
({ \
	register long _num  __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	register long _arg2 __asm__ ("a1") = (long)(arg2); \
	register long _arg3 __asm__ ("a2") = (long)(arg3); \
	register long _arg4 __asm__ ("a3") = (long)(arg4); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4), \
		  "r"(_num) \
		: _NOLIBC_SYSCALL_CLOBBERLIST \
	); \
	_arg1; \
})

#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5) \
({ \
	register long _num  __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	register long _arg2 __asm__ ("a1") = (long)(arg2); \
	register long _arg3 __asm__ ("a2") = (long)(arg3); \
	register long _arg4 __asm__ ("a3") = (long)(arg4); \
	register long _arg5 __asm__ ("a4") = (long)(arg5); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_num) \
		: _NOLIBC_SYSCALL_CLOBBERLIST \
	); \
	_arg1; \
})

#define my_syscall6(num, arg1, arg2, arg3, arg4, arg5, arg6) \
({ \
	register long _num  __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	register long _arg2 __asm__ ("a1") = (long)(arg2); \
	register long _arg3 __asm__ ("a2") = (long)(arg3); \
	register long _arg4 __asm__ ("a3") = (long)(arg4); \
	register long _arg5 __asm__ ("a4") = (long)(arg5); \
	register long _arg6 __asm__ ("a5") = (long)(arg6); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), "r"(_arg6), \
		  "r"(_num) \
		: _NOLIBC_SYSCALL_CLOBBERLIST \
	); \
	_arg1; \
})
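
/*
 * Usage sketch (illustrative only, not part of this header): higher-level
 * nolibc code builds thin wrappers on top of the my_syscallN() macros,
 * roughly as in the hypothetical example below, which assumes __NR_write
 * is visible from <asm/unistd.h>. On failure the kernel returns a negative
 * errno value in a0, which the wrapper passes straight back to its caller.
 */
#if 0 /* example only */
static long example_write(int fd, const void *buf, unsigned long count)
{
	/* fd, buf and count are cast to long and placed in a0..a2,
	 * __NR_write goes into a7, and the result comes back in a0.
	 */
	return my_syscall3(__NR_write, fd, buf, count);
}
#endif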

#if __loongarch_grlen == 32
#define LONG_BSTRINS "bstrins.w"
#else /* __loongarch_grlen == 64 */
#define LONG_BSTRINS "bstrins.d"
#endif

/* startup code */
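/*
 * On entry the kernel leaves argc at the top of the stack, followed by argv,
 * a NULL terminator and envp, and crt.h's _start_c() expects a pointer to
 * that block as its only argument. The original $sp is therefore copied to
 * $a0 before realigning the stack: LONG_BSTRINS (bstrins.w or bstrins.d,
 * selected above from __loongarch_grlen, the register width in bits) writes
 * zero into bits [3:0] of $sp, rounding it down to a 16-byte boundary as
 * required by the ABI before calling into C code.
 */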
void __attribute__((weak, noreturn, optimize("Os", "omit-frame-pointer"))) __no_stack_protector _start(void)
{
	__asm__ volatile (
		"move $a0, $sp\n"          /* save stack pointer to $a0, as arg1 of _start_c */
		LONG_BSTRINS " $sp, $zero, 3, 0\n" /* $sp must be 16-byte aligned */
		"bl _start_c\n"            /* transfer to c runtime */
	);
	__builtin_unreachable();
}

#endif /* _NOLIBC_ARCH_LOONGARCH_H */