1/*
2 * RISC-V translation routines for the Zk[nd,ne,nh,sed,sh] Standard Extension.
3 *
4 * Copyright (c) 2021 Ruibo Lu, luruibo2000@163.com
5 * Copyright (c) 2021 Zewen Ye, lustrew@foxmail.com
6 *
7 * This program is free software; you can redistribute it and/or modify it
8 * under the terms and conditions of the GNU General Public License,
9 * version 2 or later, as published by the Free Software Foundation.
10 *
11 * This program is distributed in the hope it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
14 * more details.
15 *
16 * You should have received a copy of the GNU General Public License along with
17 * this program.  If not, see <http://www.gnu.org/licenses/>.
18 */
19
/* Abort translation (illegal instruction) unless Zknd (NIST AES decrypt) is enabled. */
#define REQUIRE_ZKND(ctx) do {                  \
    if (!ctx->cfg_ptr->ext_zknd) {              \
        return false;                           \
    }                                           \
} while (0)
25
/* Abort translation (illegal instruction) unless Zkne (NIST AES encrypt) is enabled. */
#define REQUIRE_ZKNE(ctx) do {                  \
    if (!ctx->cfg_ptr->ext_zkne) {              \
        return false;                           \
    }                                           \
} while (0)
31
/* Abort translation (illegal instruction) unless Zknh (NIST SHA-2 hash) is enabled. */
#define REQUIRE_ZKNH(ctx) do {                  \
    if (!ctx->cfg_ptr->ext_zknh) {              \
        return false;                           \
    }                                           \
} while (0)
37
38static bool gen_aes32_sm4(DisasContext *ctx, arg_k_aes *a,
39                          void (*func)(TCGv, TCGv, TCGv, TCGv))
40{
41    TCGv shamt = tcg_constant_tl(a->shamt);
42    TCGv dest = dest_gpr(ctx, a->rd);
43    TCGv src1 = get_gpr(ctx, a->rs1, EXT_NONE);
44    TCGv src2 = get_gpr(ctx, a->rs2, EXT_NONE);
45
46    func(dest, src1, src2, shamt);
47    gen_set_gpr(ctx, a->rd, dest);
48    return true;
49}
50
/* aes32esmi: RV32 AES middle-round encrypt step (SubBytes + MixColumns on one byte). */
static bool trans_aes32esmi(DisasContext *ctx, arg_aes32esmi *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNE(ctx);
    return gen_aes32_sm4(ctx, a, gen_helper_aes32esmi);
}
57
/* aes32esi: RV32 AES final-round encrypt step (SubBytes only, no MixColumns). */
static bool trans_aes32esi(DisasContext *ctx, arg_aes32esi *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNE(ctx);
    return gen_aes32_sm4(ctx, a, gen_helper_aes32esi);
}
64
/* aes32dsmi: RV32 AES middle-round decrypt step (InvSubBytes + InvMixColumns). */
static bool trans_aes32dsmi(DisasContext *ctx, arg_aes32dsmi *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKND(ctx);
    return gen_aes32_sm4(ctx, a, gen_helper_aes32dsmi);
}
71
/* aes32dsi: RV32 AES final-round decrypt step (InvSubBytes only). */
static bool trans_aes32dsi(DisasContext *ctx, arg_aes32dsi *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKND(ctx);
    return gen_aes32_sm4(ctx, a, gen_helper_aes32dsi);
}
78
/* aes64es: RV64 AES final-round encryption; work is done in the helper. */
static bool trans_aes64es(DisasContext *ctx, arg_aes64es *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKNE(ctx);
    return gen_arith(ctx, a, EXT_NONE, gen_helper_aes64es, NULL);
}
85
/* aes64esm: RV64 AES middle-round encryption (includes MixColumns). */
static bool trans_aes64esm(DisasContext *ctx, arg_aes64esm *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKNE(ctx);
    return gen_arith(ctx, a, EXT_NONE, gen_helper_aes64esm, NULL);
}
92
/* aes64ds: RV64 AES final-round decryption; work is done in the helper. */
static bool trans_aes64ds(DisasContext *ctx, arg_aes64ds *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKND(ctx);
    return gen_arith(ctx, a, EXT_NONE, gen_helper_aes64ds, NULL);
}
99
/* aes64dsm: RV64 AES middle-round decryption (includes InvMixColumns). */
static bool trans_aes64dsm(DisasContext *ctx, arg_aes64dsm *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKND(ctx);
    return gen_arith(ctx, a, EXT_NONE, gen_helper_aes64dsm, NULL);
}
106
/*
 * aes64ks2: RV64 AES key-schedule step 2. Shared by encrypt and decrypt
 * key expansion, so either Zknd or Zkne is sufficient.
 */
static bool trans_aes64ks2(DisasContext *ctx, arg_aes64ks2 *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_EITHER_EXT(ctx, zknd, zkne);
    return gen_arith(ctx, a, EXT_NONE, gen_helper_aes64ks2, NULL);
}
113
/*
 * aes64ks1i: RV64 AES key-schedule step 1 with round-number immediate.
 * Shared by encrypt and decrypt key expansion (Zknd or Zkne).
 */
static bool trans_aes64ks1i(DisasContext *ctx, arg_aes64ks1i *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_EITHER_EXT(ctx, zknd, zkne);

    /* Round numbers above 0xA are reserved encodings: illegal instruction. */
    if (a->imm > 0xA) {
        return false;
    }

    return gen_arith_imm_tl(ctx, a, EXT_NONE, gen_helper_aes64ks1i, NULL);
}
125
/* aes64im: RV64 AES InvMixColumns on rs1, used when inverting the key schedule. */
static bool trans_aes64im(DisasContext *ctx, arg_aes64im *a)
{
    REQUIRE_64BIT(ctx);
    REQUIRE_ZKND(ctx);
    return gen_unary(ctx, a, EXT_NONE, gen_helper_aes64im);
}
132
133static bool gen_sha256(DisasContext *ctx, arg_r2 *a, DisasExtend ext,
134                       void (*func)(TCGv_i32, TCGv_i32, int32_t),
135                       int32_t num1, int32_t num2, int32_t num3)
136{
137    TCGv dest = dest_gpr(ctx, a->rd);
138    TCGv src1 = get_gpr(ctx, a->rs1, ext);
139    TCGv_i32 t0 = tcg_temp_new_i32();
140    TCGv_i32 t1 = tcg_temp_new_i32();
141    TCGv_i32 t2 = tcg_temp_new_i32();
142
143    tcg_gen_trunc_tl_i32(t0, src1);
144    tcg_gen_rotri_i32(t1, t0, num1);
145    tcg_gen_rotri_i32(t2, t0, num2);
146    tcg_gen_xor_i32(t1, t1, t2);
147    func(t2, t0, num3);
148    tcg_gen_xor_i32(t1, t1, t2);
149    tcg_gen_ext_i32_tl(dest, t1);
150
151    gen_set_gpr(ctx, a->rd, dest);
152    tcg_temp_free_i32(t0);
153    tcg_temp_free_i32(t1);
154    tcg_temp_free_i32(t2);
155    return true;
156}
157
/* sha256sig0: sigma0(x) = ror(x, 7) ^ ror(x, 18) ^ (x >> 3) (FIPS 180-4). */
static bool trans_sha256sig0(DisasContext *ctx, arg_sha256sig0 *a)
{
    REQUIRE_ZKNH(ctx);
    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_shri_i32, 7, 18, 3);
}
163
/* sha256sig1: sigma1(x) = ror(x, 17) ^ ror(x, 19) ^ (x >> 10) (FIPS 180-4). */
static bool trans_sha256sig1(DisasContext *ctx, arg_sha256sig1 *a)
{
    REQUIRE_ZKNH(ctx);
    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_shri_i32, 17, 19, 10);
}
169
/* sha256sum0: Sum0(x) = ror(x, 2) ^ ror(x, 13) ^ ror(x, 22) (FIPS 180-4). */
static bool trans_sha256sum0(DisasContext *ctx, arg_sha256sum0 *a)
{
    REQUIRE_ZKNH(ctx);
    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_rotri_i32, 2, 13, 22);
}
175
/* sha256sum1: Sum1(x) = ror(x, 6) ^ ror(x, 11) ^ ror(x, 25) (FIPS 180-4). */
static bool trans_sha256sum1(DisasContext *ctx, arg_sha256sum1 *a)
{
    REQUIRE_ZKNH(ctx);
    return gen_sha256(ctx, a, EXT_NONE, tcg_gen_rotri_i32, 6, 11, 25);
}
181
182static bool gen_sha512_rv32(DisasContext *ctx, arg_r *a, DisasExtend ext,
183                            void (*func1)(TCGv_i64, TCGv_i64, int64_t),
184                            void (*func2)(TCGv_i64, TCGv_i64, int64_t),
185                            int64_t num1, int64_t num2, int64_t num3)
186{
187    TCGv dest = dest_gpr(ctx, a->rd);
188    TCGv src1 = get_gpr(ctx, a->rs1, ext);
189    TCGv src2 = get_gpr(ctx, a->rs2, ext);
190    TCGv_i64 t0 = tcg_temp_new_i64();
191    TCGv_i64 t1 = tcg_temp_new_i64();
192    TCGv_i64 t2 = tcg_temp_new_i64();
193
194    tcg_gen_concat_tl_i64(t0, src1, src2);
195    func1(t1, t0, num1);
196    func2(t2, t0, num2);
197    tcg_gen_xor_i64(t1, t1, t2);
198    tcg_gen_rotri_i64(t2, t0, num3);
199    tcg_gen_xor_i64(t1, t1, t2);
200    tcg_gen_trunc_i64_tl(dest, t1);
201
202    gen_set_gpr(ctx, a->rd, dest);
203    tcg_temp_free_i64(t0);
204    tcg_temp_free_i64(t1);
205    tcg_temp_free_i64(t2);
206    return true;
207}
208
/*
 * sha512sum0r: RV32 SHA-512 Sum0. rol 25 / rol 30 / ror 28 are
 * equivalent to the ror 39 / ror 34 / ror 28 of FIPS 180-4 Sum0.
 */
static bool trans_sha512sum0r(DisasContext *ctx, arg_sha512sum0r *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv32(ctx, a, EXT_NONE, tcg_gen_rotli_i64,
                           tcg_gen_rotli_i64, 25, 30, 28);
}
216
/*
 * sha512sum1r: RV32 SHA-512 Sum1. rol 23 / ror 14 / ror 18 are
 * equivalent to the ror 41 / ror 14 / ror 18 of FIPS 180-4 Sum1.
 */
static bool trans_sha512sum1r(DisasContext *ctx, arg_sha512sum1r *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv32(ctx, a, EXT_NONE, tcg_gen_rotli_i64,
                           tcg_gen_rotri_i64, 23, 14, 18);
}
224
/* sha512sig0l: low half of the SHA-512 sigma0 transform (RV32 pairing). */
static bool trans_sha512sig0l(DisasContext *ctx, arg_sha512sig0l *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv32(ctx, a, EXT_NONE, tcg_gen_rotri_i64,
                           tcg_gen_rotri_i64, 1, 7, 8);
}
232
/* sha512sig1l: low half of the SHA-512 sigma1 transform (RV32 pairing). */
static bool trans_sha512sig1l(DisasContext *ctx, arg_sha512sig1l *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512_rv32(ctx, a, EXT_NONE, tcg_gen_rotli_i64,
                           tcg_gen_rotri_i64, 3, 6, 19);
}
240
241static bool gen_sha512h_rv32(DisasContext *ctx, arg_r *a, DisasExtend ext,
242                             void (*func)(TCGv_i64, TCGv_i64, int64_t),
243                             int64_t num1, int64_t num2, int64_t num3)
244{
245    TCGv dest = dest_gpr(ctx, a->rd);
246    TCGv src1 = get_gpr(ctx, a->rs1, ext);
247    TCGv src2 = get_gpr(ctx, a->rs2, ext);
248    TCGv_i64 t0 = tcg_temp_new_i64();
249    TCGv_i64 t1 = tcg_temp_new_i64();
250    TCGv_i64 t2 = tcg_temp_new_i64();
251
252    tcg_gen_concat_tl_i64(t0, src1, src2);
253    func(t1, t0, num1);
254    tcg_gen_ext32u_i64(t2, t0);
255    tcg_gen_shri_i64(t2, t2, num2);
256    tcg_gen_xor_i64(t1, t1, t2);
257    tcg_gen_rotri_i64(t2, t0, num3);
258    tcg_gen_xor_i64(t1, t1, t2);
259    tcg_gen_trunc_i64_tl(dest, t1);
260
261    gen_set_gpr(ctx, a->rd, dest);
262    tcg_temp_free_i64(t0);
263    tcg_temp_free_i64(t1);
264    tcg_temp_free_i64(t2);
265    return true;
266}
267
/* sha512sig0h: high half of the SHA-512 sigma0 transform (RV32 pairing). */
static bool trans_sha512sig0h(DisasContext *ctx, arg_sha512sig0h *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512h_rv32(ctx, a, EXT_NONE, tcg_gen_rotri_i64, 1, 7, 8);
}
274
/* sha512sig1h: high half of the SHA-512 sigma1 transform (RV32 pairing). */
static bool trans_sha512sig1h(DisasContext *ctx, arg_sha512sig1h *a)
{
    REQUIRE_32BIT(ctx);
    REQUIRE_ZKNH(ctx);
    return gen_sha512h_rv32(ctx, a, EXT_NONE, tcg_gen_rotli_i64, 3, 6, 19);
}
281