/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
/*
 * AARCH64 specific definitions for NOLIBC
 * Copyright (C) 2017-2022 Willy Tarreau <w@1wt.eu>
 */

#ifndef _NOLIBC_ARCH_AARCH64_H
#define _NOLIBC_ARCH_AARCH64_H

#include "compiler.h"
#include "crt.h"

/* Syscalls for AARCH64 :
 * - registers are 64-bit
 * - stack is 16-byte aligned
 * - syscall number is passed in x8
 * - arguments are in x0, x1, x2, x3, x4, x5
 * - the system call is performed by calling svc 0
 * - syscall return comes in x0.
 * - the arguments are cast to long and assigned into the target registers
 *   which are then simply passed as registers to the asm code, so that we
 *   don't have to experience issues with register constraints.
 *
 * On aarch64, select() is not implemented so we have to use pselect6().
 */
#define __ARCH_WANT_SYS_PSELECT6

/* Zero-argument syscall: x8 holds the number; x0 is output-only here and
 * receives the kernel's return value (or -errno).
 */
#define my_syscall0(num)                                                      \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0");                                   \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})

/* One-argument syscall; note _arg1 (x0) is both input arg1 and the return
 * value, hence it appears in both the output and input constraint lists.
 * The same pattern is used by all the wider variants below.
 */
#define my_syscall1(num, arg1)                                                \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1),                                                 \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})

/* Two-argument syscall: args in x0, x1. */
#define my_syscall2(num, arg1, arg2)                                          \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("x1") = (long)(arg2);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1), "r"(_arg2),                                     \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})

/* Three-argument syscall: args in x0, x1, x2. */
#define my_syscall3(num, arg1, arg2, arg3)                                    \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("x1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("x2") = (long)(arg3);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3),                         \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})

/* Four-argument syscall: args in x0..x3. */
#define my_syscall4(num, arg1, arg2, arg3, arg4)                              \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("x1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("x2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("x3") = (long)(arg4);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4),             \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})

/* Five-argument syscall: args in x0..x4. */
#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5)                        \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("x1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("x2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("x3") = (long)(arg4);                    \
	register long _arg5 __asm__ ("x4") = (long)(arg5);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r" (_arg1)                                                \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_num)                                                   \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})

/* Six-argument syscall: args in x0..x5 (the maximum on Linux/aarch64). */
#define my_syscall6(num, arg1, arg2, arg3, arg4, arg5, arg6)                  \
({                                                                            \
	register long _num  __asm__ ("x8") = (num);                           \
	register long _arg1 __asm__ ("x0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("x1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("x2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("x3") = (long)(arg4);                    \
	register long _arg5 __asm__ ("x4") = (long)(arg5);                    \
	register long _arg6 __asm__ ("x5") = (long)(arg6);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r" (_arg1)                                                \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_arg6), "r"(_num)                                       \
		: "memory", "cc"                                              \
	);                                                                    \
	_arg1;                                                                \
})

/* startup code
 *
 * Entry point reached directly from the kernel: grabs the initial stack
 * pointer (which points at argc/argv/envp) as the first argument of
 * _start_c, realigns sp as required by the AAPCS64 ABI, and never returns.
 *
 * NOTE(review): the "optimize" function attribute is GCC-specific and may
 * draw warnings or be ignored under clang — confirm against the supported
 * toolchains for this tree.
 */
void __attribute__((weak, noreturn, optimize("Os", "omit-frame-pointer"))) __no_stack_protector _start(void)
{
	__asm__ volatile (
		"mov x0, sp\n"          /* save stack pointer to x0, as arg1 of _start_c */
		"and sp, x0, -16\n"     /* sp must be 16-byte aligned in the callee */
		"bl _start_c\n"         /* transfer to c runtime */
	);
	__builtin_unreachable();
}
#endif /* _NOLIBC_ARCH_AARCH64_H */