/*
 * Loongarch64 linux replacement vdso.
 *
 * Copyright 2023 Linaro, Ltd.
 *
 * SPDX-License-Identifier: GPL-2.0-or-later
 */

#include <asm/unistd.h>
#include <asm/errno.h>
#include "vdso-asmoffset.h"


	.text

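/*
 * Close out a function symbol: mark NAME global, give it function type,
 * and set its size to everything emitted since the NAME label.
 */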
.macro endf name
	.globl	\name
	.type	\name, @function
	.size	\name, . - \name
.endm

.macro vdso_syscall name, nr
\name:
	li.w	$a7, \nr
	syscall	0
	jr	$ra
endf	\name
.endm
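/*
 * For illustration, "vdso_syscall __vdso_clock_gettime, __NR_clock_gettime"
 * expands (apart from the endf symbol directives) to:
 *
 *     __vdso_clock_gettime:
 *         li.w    $a7, __NR_clock_gettime
 *         syscall 0
 *         jr      $ra
 *
 * The stubs are leaf functions that neither adjust $sp nor clobber $ra,
 * so the single default CFI region below covers all of them.
 */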

	.cfi_startproc

vdso_syscall __vdso_gettimeofday, __NR_gettimeofday
vdso_syscall __vdso_clock_gettime, __NR_clock_gettime
vdso_syscall __vdso_clock_getres, __NR_clock_getres
vdso_syscall __vdso_getcpu, __NR_getcpu
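/*
 * These entry points carry the usual vdso prototypes, roughly (a sketch,
 * not taken from this tree):
 *
 *     int __vdso_gettimeofday(struct timeval *tv, struct timezone *tz);
 *     int __vdso_clock_gettime(clockid_t clock, struct timespec *ts);
 *     int __vdso_clock_getres(clockid_t clock, struct timespec *res);
 *     int __vdso_getcpu(unsigned *cpu, unsigned *node, void *unused);
 *
 * The C argument registers $a0..$a3 are also the syscall argument
 * registers, so nothing needs to be marshalled before "syscall 0", and
 * the result comes back to the caller in $a0.
 */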

	.cfi_endproc

/*
 * Start the unwind info at least one instruction before the signal
 * trampoline, because the unwinder will assume we are returning
 * after a call site.
 */

	.cfi_startproc simple
	.cfi_signal_frame

#define B_GR	offsetof_sigcontext_gr
#define B_FR	sizeof_sigcontext + sizeof_sctx_info + offsetof_fpucontext_fr
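/*
 * B_GR and B_FR are the offsets, relative to the CFA defined below, of
 * the saved general and floating point register arrays in the signal
 * frame; the FP registers live in the extended context that follows the
 * base sigcontext (hence sizeof_sigcontext + sizeof_sctx_info).  All of
 * the constants come from vdso-asmoffset.h.
 */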

	.cfi_def_cfa	2, offsetof_sigcontext

	/* Return address */
	.cfi_return_column 64
	.cfi_offset	64, offsetof_sigcontext_pc	/* pc */
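	/*
	 * Columns 0-31 are the general registers and 32-63 the FP registers,
	 * so column 64 is free to serve as a fake return column holding the
	 * interrupted pc.
	 */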

	/* Integer registers */
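	/* Column 0 is the hard-wired zero register $r0 and needs no slot. */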
	.cfi_offset	1, B_GR + 1 * 8
	.cfi_offset	2, B_GR + 2 * 8
	.cfi_offset	3, B_GR + 3 * 8
	.cfi_offset	4, B_GR + 4 * 8
	.cfi_offset	5, B_GR + 5 * 8
	.cfi_offset	6, B_GR + 6 * 8
	.cfi_offset	7, B_GR + 7 * 8
	.cfi_offset	8, B_GR + 8 * 8
	.cfi_offset	9, B_GR + 9 * 8
	.cfi_offset	10, B_GR + 10 * 8
	.cfi_offset	11, B_GR + 11 * 8
	.cfi_offset	12, B_GR + 12 * 8
	.cfi_offset	13, B_GR + 13 * 8
	.cfi_offset	14, B_GR + 14 * 8
	.cfi_offset	15, B_GR + 15 * 8
	.cfi_offset	16, B_GR + 16 * 8
	.cfi_offset	17, B_GR + 17 * 8
	.cfi_offset	18, B_GR + 18 * 8
	.cfi_offset	19, B_GR + 19 * 8
	.cfi_offset	20, B_GR + 20 * 8
	.cfi_offset	21, B_GR + 21 * 8
	.cfi_offset	22, B_GR + 22 * 8
	.cfi_offset	23, B_GR + 23 * 8
	.cfi_offset	24, B_GR + 24 * 8
	.cfi_offset	25, B_GR + 25 * 8
	.cfi_offset	26, B_GR + 26 * 8
	.cfi_offset	27, B_GR + 27 * 8
	.cfi_offset	28, B_GR + 28 * 8
	.cfi_offset	29, B_GR + 29 * 8
	.cfi_offset	30, B_GR + 30 * 8
	.cfi_offset	31, B_GR + 31 * 8

	/* Floating point registers */
	.cfi_offset	32, B_FR + 0
	.cfi_offset	33, B_FR + 1 * 8
	.cfi_offset	34, B_FR + 2 * 8
	.cfi_offset	35, B_FR + 3 * 8
	.cfi_offset	36, B_FR + 4 * 8
	.cfi_offset	37, B_FR + 5 * 8
	.cfi_offset	38, B_FR + 6 * 8
	.cfi_offset	39, B_FR + 7 * 8
	.cfi_offset	40, B_FR + 8 * 8
	.cfi_offset	41, B_FR + 9 * 8
	.cfi_offset	42, B_FR + 10 * 8
	.cfi_offset	43, B_FR + 11 * 8
	.cfi_offset	44, B_FR + 12 * 8
	.cfi_offset	45, B_FR + 13 * 8
	.cfi_offset	46, B_FR + 14 * 8
	.cfi_offset	47, B_FR + 15 * 8
	.cfi_offset	48, B_FR + 16 * 8
	.cfi_offset	49, B_FR + 17 * 8
	.cfi_offset	50, B_FR + 18 * 8
	.cfi_offset	51, B_FR + 19 * 8
	.cfi_offset	52, B_FR + 20 * 8
	.cfi_offset	53, B_FR + 21 * 8
	.cfi_offset	54, B_FR + 22 * 8
	.cfi_offset	55, B_FR + 23 * 8
	.cfi_offset	56, B_FR + 24 * 8
	.cfi_offset	57, B_FR + 25 * 8
	.cfi_offset	58, B_FR + 26 * 8
	.cfi_offset	59, B_FR + 27 * 8
	.cfi_offset	60, B_FR + 28 * 8
	.cfi_offset	61, B_FR + 29 * 8
	.cfi_offset	62, B_FR + 30 * 8
	.cfi_offset	63, B_FR + 31 * 8

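/*
 * This nop is the instruction referred to above: unwinders look up
 * "return address - 1 insn", which must still fall inside this region.
 */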
	nop

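/*
 * Signal delivery points the handler's return address here, so that
 * returning from a signal handler falls through into rt_sigreturn.
 */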
__vdso_rt_sigreturn:
	li.w	$a7, __NR_rt_sigreturn
	syscall	0
	.cfi_endproc
endf __vdso_rt_sigreturn