xref: /openbmc/linux/arch/arm64/kernel/entry-fpsimd.S (revision 2dd6532e)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * FP/SIMD state saving and restoring
 *
 * Copyright (C) 2012 ARM Ltd.
 * Author: Catalin Marinas <catalin.marinas@arm.com>
 */

#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/fpsimdmacros.h>

/*
 * Save the FP registers.
 *
 * x0 - pointer to struct user_fpsimd_state
 */
SYM_FUNC_START(fpsimd_save_state)
	fpsimd_save x0, 8
	ret
SYM_FUNC_END(fpsimd_save_state)
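
/*
 * Usage sketch (assumed, for reference only): the second argument of the
 * fpsimd_save macro ("8") names the scratch register (x8/w8) it uses while
 * storing q0-q31 and FPSR/FPCR into the buffer. The C-side declaration is
 * expected to look roughly like:
 *
 *	void fpsimd_save_state(struct user_fpsimd_state *state);
 */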

/*
 * Load the FP registers.
 *
 * x0 - pointer to struct user_fpsimd_state
 */
SYM_FUNC_START(fpsimd_load_state)
	fpsimd_restore x0, 8
	ret
SYM_FUNC_END(fpsimd_load_state)
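
/*
 * As with fpsimd_save_state, a sketch of the assumed C-side view:
 *
 *	void fpsimd_load_state(struct user_fpsimd_state *state);
 *
 * fpsimd_restore reloads q0-q31 and then FPSR/FPCR from the buffer, again
 * using x8 as its scratch register.
 */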

#ifdef CONFIG_ARM64_SVE

/*
 * Save the SVE state
 *
 * x0 - pointer to buffer for state
 * x1 - pointer to storage for FPSR
 * x2 - Save FFR if non-zero
 */
SYM_FUNC_START(sve_save_state)
	sve_save 0, x1, x2, 3
	ret
SYM_FUNC_END(sve_save_state)
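
/*
 * A rough sketch of how this is driven from C (declaration assumed):
 *
 *	void sve_save_state(void *state, u32 *pfpsr, int save_ffr);
 *
 * In the sve_save macro arguments, "0" is the number of the base register
 * (x0) holding the buffer and "3" selects the scratch register (x3); FFR is
 * only stored when x2 is non-zero.
 */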

/*
 * Load the SVE state
 *
 * x0 - pointer to buffer for state
 * x1 - pointer to storage for FPSR
 * x2 - Restore FFR if non-zero
 */
SYM_FUNC_START(sve_load_state)
	sve_load 0, x1, x2, 4
	ret
SYM_FUNC_END(sve_load_state)
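
/*
 * Corresponding assumed C-side declaration for the load path:
 *
 *	void sve_load_state(const void *state, const u32 *pfpsr, int restore_ffr);
 *
 * The macro arguments mirror sve_save above: x0 is the buffer, x4 is the
 * scratch register, and FFR is only reloaded when x2 is non-zero.
 */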

/*
 * Return the current SVE vector length, in bytes.
 */
SYM_FUNC_START(sve_get_vl)
	_sve_rdvl	0, 1
	ret
SYM_FUNC_END(sve_get_vl)
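
/*
 * _sve_rdvl hand-encodes "rdvl x0, #1" (so the file still assembles with
 * toolchains that lack SVE support), leaving the vector length in bytes in
 * x0. Assumed C prototype:
 *
 *	unsigned int sve_get_vl(void);
 */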

/*
 * Set the SVE vector length, given as VQ - 1 in x0 (x1 and x2 are clobbered
 * as scratch registers).
 */
SYM_FUNC_START(sve_set_vq)
	sve_load_vq x0, x1, x2
	ret
SYM_FUNC_END(sve_set_vq)
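
/*
 * Assumed C-side declaration; the VQ - 1 value matches the encoding of the
 * vector length field that sve_load_vq programs:
 *
 *	void sve_set_vq(unsigned long vq_minus_1);
 */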

/*
 * Zero all SVE registers except the first 128 bits of each vector.
 *
 * VQ must already be configured by the caller; any further updates of VQ
 * will need to ensure that the register state remains valid.
 *
 * x0 = include FFR?
 * x1 = VQ - 1
 */
SYM_FUNC_START(sve_flush_live)
	cbz		x1, 1f	// A VQ-1 of 0 is 128 bits so no extra Z state
	sve_flush_z
1:	sve_flush_p
	tbz		x0, #0, 2f
	sve_flush_ffr
2:	ret
SYM_FUNC_END(sve_flush_live)
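
/*
 * Assumed C-side declaration:
 *
 *	void sve_flush_live(bool flush_ffr, unsigned long vq_minus_1);
 *
 * The early exit for VQ - 1 == 0 works because a 128-bit vector length means
 * the Z registers hold no state beyond their bottom 128-bit (V-register)
 * portions, so only the predicate registers, and optionally FFR, need
 * zeroing.
 */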

#endif /* CONFIG_ARM64_SVE */

#ifdef CONFIG_ARM64_SME

/*
 * Return the current SME streaming vector length, in bytes.
 */
SYM_FUNC_START(sme_get_vl)
	_sme_rdsvl	0, 1
	ret
SYM_FUNC_END(sme_get_vl)
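
/*
 * As with sve_get_vl, _sme_rdsvl hand-encodes the instruction ("rdsvl x0, #1"
 * here). Assumed C prototype:
 *
 *	unsigned int sme_get_vl(void);
 */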

/*
 * Set the SME streaming vector length, given as VQ - 1 in x0 (x1 and x2 are
 * clobbered as scratch registers).
 */
SYM_FUNC_START(sme_set_vq)
	sme_load_vq x0, x1, x2
	ret
SYM_FUNC_END(sme_set_vq)
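
/*
 * Assumed C-side declaration, mirroring sve_set_vq:
 *
 *	void sme_set_vq(unsigned long vq_minus_1);
 */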

/*
 * Save the SME ZA state
 *
 * x0 - pointer to buffer for state
 */
SYM_FUNC_START(za_save_state)
	_sme_rdsvl	1, 1		// x1 = VL/8
	sme_save_za 0, x1, 12
	ret
SYM_FUNC_END(za_save_state)
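
/*
 * Assumed C-side declaration:
 *
 *	void za_save_state(void *state);
 *
 * RDSVL with a multiplier of 1 leaves the streaming vector length in bytes
 * (VL/8, with VL in bits) in x1; ZA is an SVL-by-SVL byte array, so that
 * value gives both the number of rows to store and the size of each row. The
 * final macro argument ("12") selects a scratch register (w12) that
 * sme_save_za uses as its row counter.
 */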

/*
 * Load the SME ZA state
 *
 * x0 - pointer to buffer for state
 */
SYM_FUNC_START(za_load_state)
	_sme_rdsvl	1, 1		// x1 = VL/8
	sme_load_za 0, x1, 12
	ret
SYM_FUNC_END(za_load_state)
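
/*
 * Assumed C-side declaration for the load path:
 *
 *	void za_load_state(const void *state);
 *
 * The structure mirrors za_save_state: x1 is loaded with the streaming
 * vector length in bytes and sme_load_za walks the ZA array row by row,
 * again using w12 as its counter.
 */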

#endif /* CONFIG_ARM64_SME */