1/*
2 * RISC-V translation routines for the RV64Zfh Standard Extension.
3 *
4 * Copyright (c) 2020 Chih-Min Chao, chihmin.chao@sifive.com
5 *
6 * This program is free software; you can redistribute it and/or modify it
7 * under the terms and conditions of the GNU General Public License,
8 * version 2 or later, as published by the Free Software Foundation.
9 *
10 * This program is distributed in the hope it will be useful, but WITHOUT
11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
13 * more details.
14 *
15 * You should have received a copy of the GNU General Public License along with
16 * this program.  If not, see <http://www.gnu.org/licenses/>.
17 */
18
/*
 * Require the Zfh extension (full half-precision support in the F
 * register file); otherwise reject the instruction by returning false
 * from the enclosing trans_* function.
 */
#define REQUIRE_ZFH(ctx) do { \
    if (!ctx->cfg_ptr->ext_zfh) {      \
        return false;         \
    }                         \
} while (0)

/* Accept either Zhinx (half-precision in x-registers) or full Zfh. */
#define REQUIRE_ZHINX_OR_ZFH(ctx) do { \
    if (!ctx->cfg_ptr->ext_zhinx && !ctx->cfg_ptr->ext_zfh) { \
        return false;                  \
    }                                  \
} while (0)

/* Require Zfhmin (half-precision load/store/convert/move subset). */
#define REQUIRE_ZFHMIN(ctx) do {              \
    if (!ctx->cfg_ptr->ext_zfhmin) {          \
        return false;                         \
    }                                         \
} while (0)

/* Accept either the Zfhmin subset or its x-register twin Zhinxmin. */
#define REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx) do {                 \
    if (!(ctx->cfg_ptr->ext_zfhmin || ctx->cfg_ptr->ext_zhinxmin)) { \
        return false;                                        \
    }                                                        \
} while (0)
42
43static bool trans_flh(DisasContext *ctx, arg_flh *a)
44{
45    TCGv_i64 dest;
46    TCGv t0;
47
48    REQUIRE_FPU;
49    REQUIRE_ZFHMIN(ctx);
50
51    decode_save_opc(ctx);
52    t0 = get_gpr(ctx, a->rs1, EXT_NONE);
53    if (a->imm) {
54        TCGv temp = temp_new(ctx);
55        tcg_gen_addi_tl(temp, t0, a->imm);
56        t0 = temp;
57    }
58
59    dest = cpu_fpr[a->rd];
60    tcg_gen_qemu_ld_i64(dest, t0, ctx->mem_idx, MO_TEUW);
61    gen_nanbox_h(dest, dest);
62
63    mark_fs_dirty(ctx);
64    return true;
65}
66
67static bool trans_fsh(DisasContext *ctx, arg_fsh *a)
68{
69    TCGv t0;
70
71    REQUIRE_FPU;
72    REQUIRE_ZFHMIN(ctx);
73
74    decode_save_opc(ctx);
75    t0 = get_gpr(ctx, a->rs1, EXT_NONE);
76    if (a->imm) {
77        TCGv temp = tcg_temp_new();
78        tcg_gen_addi_tl(temp, t0, a->imm);
79        t0 = temp;
80    }
81
82    tcg_gen_qemu_st_i64(cpu_fpr[a->rs2], t0, ctx->mem_idx, MO_TEUW);
83
84    return true;
85}
86
87static bool trans_fmadd_h(DisasContext *ctx, arg_fmadd_h *a)
88{
89    REQUIRE_FPU;
90    REQUIRE_ZHINX_OR_ZFH(ctx);
91
92    TCGv_i64 dest = dest_fpr(ctx, a->rd);
93    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
94    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
95    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);
96
97    gen_set_rm(ctx, a->rm);
98    gen_helper_fmadd_h(dest, cpu_env, src1, src2, src3);
99    gen_set_fpr_hs(ctx, a->rd, dest);
100    mark_fs_dirty(ctx);
101    return true;
102}
103
104static bool trans_fmsub_h(DisasContext *ctx, arg_fmsub_h *a)
105{
106    REQUIRE_FPU;
107    REQUIRE_ZHINX_OR_ZFH(ctx);
108
109    TCGv_i64 dest = dest_fpr(ctx, a->rd);
110    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
111    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
112    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);
113
114    gen_set_rm(ctx, a->rm);
115    gen_helper_fmsub_h(dest, cpu_env, src1, src2, src3);
116    gen_set_fpr_hs(ctx, a->rd, dest);
117    mark_fs_dirty(ctx);
118    return true;
119}
120
121static bool trans_fnmsub_h(DisasContext *ctx, arg_fnmsub_h *a)
122{
123    REQUIRE_FPU;
124    REQUIRE_ZHINX_OR_ZFH(ctx);
125
126    TCGv_i64 dest = dest_fpr(ctx, a->rd);
127    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
128    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
129    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);
130
131    gen_set_rm(ctx, a->rm);
132    gen_helper_fnmsub_h(dest, cpu_env, src1, src2, src3);
133    gen_set_fpr_hs(ctx, a->rd, dest);
134    mark_fs_dirty(ctx);
135    return true;
136}
137
138static bool trans_fnmadd_h(DisasContext *ctx, arg_fnmadd_h *a)
139{
140    REQUIRE_FPU;
141    REQUIRE_ZHINX_OR_ZFH(ctx);
142
143    TCGv_i64 dest = dest_fpr(ctx, a->rd);
144    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
145    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
146    TCGv_i64 src3 = get_fpr_hs(ctx, a->rs3);
147
148    gen_set_rm(ctx, a->rm);
149    gen_helper_fnmadd_h(dest, cpu_env, src1, src2, src3);
150    gen_set_fpr_hs(ctx, a->rd, dest);
151    mark_fs_dirty(ctx);
152    return true;
153}
154
155static bool trans_fadd_h(DisasContext *ctx, arg_fadd_h *a)
156{
157    REQUIRE_FPU;
158    REQUIRE_ZHINX_OR_ZFH(ctx);
159
160    TCGv_i64 dest = dest_fpr(ctx, a->rd);
161    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
162    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
163
164    gen_set_rm(ctx, a->rm);
165    gen_helper_fadd_h(dest, cpu_env, src1, src2);
166    gen_set_fpr_hs(ctx, a->rd, dest);
167    mark_fs_dirty(ctx);
168    return true;
169}
170
171static bool trans_fsub_h(DisasContext *ctx, arg_fsub_h *a)
172{
173    REQUIRE_FPU;
174    REQUIRE_ZHINX_OR_ZFH(ctx);
175
176    TCGv_i64 dest = dest_fpr(ctx, a->rd);
177    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
178    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
179
180    gen_set_rm(ctx, a->rm);
181    gen_helper_fsub_h(dest, cpu_env, src1, src2);
182    gen_set_fpr_hs(ctx, a->rd, dest);
183    mark_fs_dirty(ctx);
184    return true;
185}
186
187static bool trans_fmul_h(DisasContext *ctx, arg_fmul_h *a)
188{
189    REQUIRE_FPU;
190    REQUIRE_ZHINX_OR_ZFH(ctx);
191
192    TCGv_i64 dest = dest_fpr(ctx, a->rd);
193    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
194    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
195
196    gen_set_rm(ctx, a->rm);
197    gen_helper_fmul_h(dest, cpu_env, src1, src2);
198    gen_set_fpr_hs(ctx, a->rd, dest);
199    mark_fs_dirty(ctx);
200    return true;
201}
202
203static bool trans_fdiv_h(DisasContext *ctx, arg_fdiv_h *a)
204{
205    REQUIRE_FPU;
206    REQUIRE_ZHINX_OR_ZFH(ctx);
207
208    TCGv_i64 dest = dest_fpr(ctx, a->rd);
209    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
210    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
211
212    gen_set_rm(ctx, a->rm);
213    gen_helper_fdiv_h(dest, cpu_env, src1, src2);
214    gen_set_fpr_hs(ctx, a->rd, dest);
215    mark_fs_dirty(ctx);
216    return true;
217}
218
219static bool trans_fsqrt_h(DisasContext *ctx, arg_fsqrt_h *a)
220{
221    REQUIRE_FPU;
222    REQUIRE_ZHINX_OR_ZFH(ctx);
223
224    TCGv_i64 dest = dest_fpr(ctx, a->rd);
225    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
226
227    gen_set_rm(ctx, a->rm);
228    gen_helper_fsqrt_h(dest, cpu_env, src1);
229    gen_set_fpr_hs(ctx, a->rd, dest);
230    mark_fs_dirty(ctx);
231    return true;
232}
233
/*
 * FSGNJ.H: copy rs1's magnitude with rs2's sign bit (bit 15).
 * rs1 == rs2 encodes FMOV.H.
 */
static bool trans_fsgnj_h(DisasContext *ctx, arg_fsgnj_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    if (a->rs1 == a->rs2) { /* FMOV */
        if (!ctx->cfg_ptr->ext_zfinx) {
            /* Canonicalize improperly nanboxed inputs to qNaN. */
            gen_check_nanbox_h(dest, src1);
        } else {
            /* Zfinx keeps values sign-extended in x-registers. */
            tcg_gen_ext16s_i64(dest, src1);
        }
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);

        if (!ctx->cfg_ptr->ext_zfinx) {
            TCGv_i64 rs1 = tcg_temp_new_i64();
            TCGv_i64 rs2 = tcg_temp_new_i64();
            gen_check_nanbox_h(rs1, src1);
            gen_check_nanbox_h(rs2, src2);

            /* This formulation retains the nanboxing of rs2 in normal 'Zfh'. */
            tcg_gen_deposit_i64(dest, rs2, rs1, 0, 15);

            tcg_temp_free_i64(rs1);
            tcg_temp_free_i64(rs2);
        } else {
            /* Take bits [14:0] from rs1, bit 15 from rs2, then sign-extend. */
            tcg_gen_deposit_i64(dest, src2, src1, 0, 15);
            tcg_gen_ext16s_i64(dest, dest);
        }
    }
    gen_set_fpr_hs(ctx, a->rd, dest);
    mark_fs_dirty(ctx);
    return true;
}
271
/*
 * FSGNJN.H: copy rs1's magnitude with the INVERTED sign bit of rs2.
 * rs1 == rs2 encodes FNEG.H.
 */
static bool trans_fsgnjn_h(DisasContext *ctx, arg_fsgnjn_h *a)
{
    TCGv_i64 rs1, rs2, mask;

    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    rs1 = tcg_temp_new_i64();
    if (!ctx->cfg_ptr->ext_zfinx) {
        /* Canonicalize an improperly nanboxed rs1 before use. */
        gen_check_nanbox_h(rs1, src1);
    } else {
        tcg_gen_mov_i64(rs1, src1);
    }

    if (a->rs1 == a->rs2) { /* FNEG */
        /* Flip bit 15, the half-precision sign bit. */
        tcg_gen_xori_i64(dest, rs1, MAKE_64BIT_MASK(15, 1));
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();

        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(rs2, src2);
        } else {
            tcg_gen_mov_i64(rs2, src2);
        }

        /*
         * Replace bit 15 in rs1 with inverse in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        mask = tcg_const_i64(~MAKE_64BIT_MASK(15, 1));
        tcg_gen_not_i64(rs2, rs2);
        /* rs2 = ~rs2 & ~mask, i.e. only the inverted sign bit survives. */
        tcg_gen_andc_i64(rs2, rs2, mask);
        /* dest = rs1 with its sign bit cleared, then OR in rs2's bit. */
        tcg_gen_and_i64(dest, mask, rs1);
        tcg_gen_or_i64(dest, dest, rs2);

        tcg_temp_free_i64(mask);
        tcg_temp_free_i64(rs2);
    }
    /* sign-extended instead of nanboxing for result if zfinx is enabled */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext16s_i64(dest, dest);
    }
    tcg_temp_free_i64(rs1);
    mark_fs_dirty(ctx);
    return true;
}
322
/*
 * FSGNJX.H: copy rs1's magnitude with its sign bit XORed with rs2's.
 * rs1 == rs2 encodes FABS.H.
 */
static bool trans_fsgnjx_h(DisasContext *ctx, arg_fsgnjx_h *a)
{
    TCGv_i64 rs1, rs2;

    REQUIRE_FPU;
    REQUIRE_ZHINX_OR_ZFH(ctx);

    TCGv_i64 dest = dest_fpr(ctx, a->rd);
    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);

    rs1 = tcg_temp_new_i64();
    if (!ctx->cfg_ptr->ext_zfinx) {
        /* Canonicalize an improperly nanboxed rs1 before use. */
        gen_check_nanbox_h(rs1, src1);
    } else {
        tcg_gen_mov_i64(rs1, src1);
    }

    if (a->rs1 == a->rs2) { /* FABS */
        /* Clear bit 15, the half-precision sign bit. */
        tcg_gen_andi_i64(dest, rs1, ~MAKE_64BIT_MASK(15, 1));
    } else {
        TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
        rs2 = tcg_temp_new_i64();

        if (!ctx->cfg_ptr->ext_zfinx) {
            gen_check_nanbox_h(rs2, src2);
        } else {
            tcg_gen_mov_i64(rs2, src2);
        }

        /*
         * Xor bit 15 in rs1 with that in rs2.
         * This formulation retains the nanboxing of rs1.
         */
        tcg_gen_andi_i64(dest, rs2, MAKE_64BIT_MASK(15, 1));
        tcg_gen_xor_i64(dest, rs1, dest);

        tcg_temp_free_i64(rs2);
    }
    /* sign-extended instead of nanboxing for result if zfinx is enabled */
    if (ctx->cfg_ptr->ext_zfinx) {
        tcg_gen_ext16s_i64(dest, dest);
    }
    tcg_temp_free_i64(rs1);
    mark_fs_dirty(ctx);
    return true;
}
369
370static bool trans_fmin_h(DisasContext *ctx, arg_fmin_h *a)
371{
372    REQUIRE_FPU;
373    REQUIRE_ZHINX_OR_ZFH(ctx);
374
375    TCGv_i64 dest = dest_fpr(ctx, a->rd);
376    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
377    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
378
379    gen_helper_fmin_h(dest, cpu_env, src1, src2);
380    gen_set_fpr_hs(ctx, a->rd, dest);
381    mark_fs_dirty(ctx);
382    return true;
383}
384
385static bool trans_fmax_h(DisasContext *ctx, arg_fmax_h *a)
386{
387    REQUIRE_FPU;
388    REQUIRE_ZHINX_OR_ZFH(ctx);
389
390    TCGv_i64 dest = dest_fpr(ctx, a->rd);
391    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
392    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
393
394    gen_helper_fmax_h(dest, cpu_env, src1, src2);
395    gen_set_fpr_hs(ctx, a->rd, dest);
396    mark_fs_dirty(ctx);
397    return true;
398}
399
400static bool trans_fcvt_s_h(DisasContext *ctx, arg_fcvt_s_h *a)
401{
402    REQUIRE_FPU;
403    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);
404
405    TCGv_i64 dest = dest_fpr(ctx, a->rd);
406    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
407
408    gen_set_rm(ctx, a->rm);
409    gen_helper_fcvt_s_h(dest, cpu_env, src1);
410    gen_set_fpr_hs(ctx, a->rd, dest);
411
412    mark_fs_dirty(ctx);
413
414    return true;
415}
416
417static bool trans_fcvt_d_h(DisasContext *ctx, arg_fcvt_d_h *a)
418{
419    REQUIRE_FPU;
420    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);
421    REQUIRE_ZDINX_OR_D(ctx);
422
423    TCGv_i64 dest = dest_fpr(ctx, a->rd);
424    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
425
426    gen_set_rm(ctx, a->rm);
427    gen_helper_fcvt_d_h(dest, cpu_env, src1);
428    gen_set_fpr_d(ctx, a->rd, dest);
429
430    mark_fs_dirty(ctx);
431
432    return true;
433}
434
435static bool trans_fcvt_h_s(DisasContext *ctx, arg_fcvt_h_s *a)
436{
437    REQUIRE_FPU;
438    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);
439
440    TCGv_i64 dest = dest_fpr(ctx, a->rd);
441    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
442
443    gen_set_rm(ctx, a->rm);
444    gen_helper_fcvt_h_s(dest, cpu_env, src1);
445    gen_set_fpr_hs(ctx, a->rd, dest);
446    mark_fs_dirty(ctx);
447
448    return true;
449}
450
451static bool trans_fcvt_h_d(DisasContext *ctx, arg_fcvt_h_d *a)
452{
453    REQUIRE_FPU;
454    REQUIRE_ZFHMIN_OR_ZHINXMIN(ctx);
455    REQUIRE_ZDINX_OR_D(ctx);
456
457    TCGv_i64 dest = dest_fpr(ctx, a->rd);
458    TCGv_i64 src1 = get_fpr_d(ctx, a->rs1);
459
460    gen_set_rm(ctx, a->rm);
461    gen_helper_fcvt_h_d(dest, cpu_env, src1);
462    gen_set_fpr_hs(ctx, a->rd, dest);
463    mark_fs_dirty(ctx);
464
465    return true;
466}
467
468static bool trans_feq_h(DisasContext *ctx, arg_feq_h *a)
469{
470    REQUIRE_FPU;
471    REQUIRE_ZHINX_OR_ZFH(ctx);
472
473    TCGv dest = dest_gpr(ctx, a->rd);
474    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
475    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
476
477    gen_helper_feq_h(dest, cpu_env, src1, src2);
478    gen_set_gpr(ctx, a->rd, dest);
479    return true;
480}
481
482static bool trans_flt_h(DisasContext *ctx, arg_flt_h *a)
483{
484    REQUIRE_FPU;
485    REQUIRE_ZHINX_OR_ZFH(ctx);
486
487    TCGv dest = dest_gpr(ctx, a->rd);
488    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
489    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
490
491    gen_helper_flt_h(dest, cpu_env, src1, src2);
492    gen_set_gpr(ctx, a->rd, dest);
493
494    return true;
495}
496
497static bool trans_fle_h(DisasContext *ctx, arg_fle_h *a)
498{
499    REQUIRE_FPU;
500    REQUIRE_ZHINX_OR_ZFH(ctx);
501
502    TCGv dest = dest_gpr(ctx, a->rd);
503    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
504    TCGv_i64 src2 = get_fpr_hs(ctx, a->rs2);
505
506    gen_helper_fle_h(dest, cpu_env, src1, src2);
507    gen_set_gpr(ctx, a->rd, dest);
508    return true;
509}
510
511static bool trans_fclass_h(DisasContext *ctx, arg_fclass_h *a)
512{
513    REQUIRE_FPU;
514    REQUIRE_ZHINX_OR_ZFH(ctx);
515
516    TCGv dest = dest_gpr(ctx, a->rd);
517    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
518
519    gen_helper_fclass_h(dest, cpu_env, src1);
520    gen_set_gpr(ctx, a->rd, dest);
521    return true;
522}
523
524static bool trans_fcvt_w_h(DisasContext *ctx, arg_fcvt_w_h *a)
525{
526    REQUIRE_FPU;
527    REQUIRE_ZHINX_OR_ZFH(ctx);
528
529    TCGv dest = dest_gpr(ctx, a->rd);
530    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
531
532    gen_set_rm(ctx, a->rm);
533    gen_helper_fcvt_w_h(dest, cpu_env, src1);
534    gen_set_gpr(ctx, a->rd, dest);
535    return true;
536}
537
538static bool trans_fcvt_wu_h(DisasContext *ctx, arg_fcvt_wu_h *a)
539{
540    REQUIRE_FPU;
541    REQUIRE_ZHINX_OR_ZFH(ctx);
542
543    TCGv dest = dest_gpr(ctx, a->rd);
544    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
545
546    gen_set_rm(ctx, a->rm);
547    gen_helper_fcvt_wu_h(dest, cpu_env, src1);
548    gen_set_gpr(ctx, a->rd, dest);
549    return true;
550}
551
552static bool trans_fcvt_h_w(DisasContext *ctx, arg_fcvt_h_w *a)
553{
554    REQUIRE_FPU;
555    REQUIRE_ZHINX_OR_ZFH(ctx);
556
557    TCGv_i64 dest = dest_fpr(ctx, a->rd);
558    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);
559
560    gen_set_rm(ctx, a->rm);
561    gen_helper_fcvt_h_w(dest, cpu_env, t0);
562    gen_set_fpr_hs(ctx, a->rd, dest);
563
564    mark_fs_dirty(ctx);
565    return true;
566}
567
568static bool trans_fcvt_h_wu(DisasContext *ctx, arg_fcvt_h_wu *a)
569{
570    REQUIRE_FPU;
571    REQUIRE_ZHINX_OR_ZFH(ctx);
572
573    TCGv_i64 dest = dest_fpr(ctx, a->rd);
574    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);
575
576    gen_set_rm(ctx, a->rm);
577    gen_helper_fcvt_h_wu(dest, cpu_env, t0);
578    gen_set_fpr_hs(ctx, a->rd, dest);
579
580    mark_fs_dirty(ctx);
581    return true;
582}
583
/*
 * FMV.X.H: move the raw 16-bit FP register contents to x-register rd,
 * sign-extended to XLEN.
 */
static bool trans_fmv_x_h(DisasContext *ctx, arg_fmv_x_h *a)
{
    REQUIRE_FPU;
    REQUIRE_ZFHMIN(ctx);

    TCGv dest = dest_gpr(ctx, a->rd);

#if defined(TARGET_RISCV64)
    /* 16 bits -> 64 bits */
    tcg_gen_ext16s_tl(dest, cpu_fpr[a->rs1]);
#else
    /* 16 bits -> 32 bits: truncate to 32 bits first, then sign-extend. */
    tcg_gen_extrl_i64_i32(dest, cpu_fpr[a->rs1]);
    tcg_gen_ext16s_tl(dest, dest);
#endif

    gen_set_gpr(ctx, a->rd, dest);
    return true;
}
603
604static bool trans_fmv_h_x(DisasContext *ctx, arg_fmv_h_x *a)
605{
606    REQUIRE_FPU;
607    REQUIRE_ZFHMIN(ctx);
608
609    TCGv t0 = get_gpr(ctx, a->rs1, EXT_ZERO);
610
611    tcg_gen_extu_tl_i64(cpu_fpr[a->rd], t0);
612    gen_nanbox_h(cpu_fpr[a->rd], cpu_fpr[a->rd]);
613
614    mark_fs_dirty(ctx);
615    return true;
616}
617
618static bool trans_fcvt_l_h(DisasContext *ctx, arg_fcvt_l_h *a)
619{
620    REQUIRE_64BIT(ctx);
621    REQUIRE_FPU;
622    REQUIRE_ZHINX_OR_ZFH(ctx);
623
624    TCGv dest = dest_gpr(ctx, a->rd);
625    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
626
627    gen_set_rm(ctx, a->rm);
628    gen_helper_fcvt_l_h(dest, cpu_env, src1);
629    gen_set_gpr(ctx, a->rd, dest);
630    return true;
631}
632
633static bool trans_fcvt_lu_h(DisasContext *ctx, arg_fcvt_lu_h *a)
634{
635    REQUIRE_64BIT(ctx);
636    REQUIRE_FPU;
637    REQUIRE_ZHINX_OR_ZFH(ctx);
638
639    TCGv dest = dest_gpr(ctx, a->rd);
640    TCGv_i64 src1 = get_fpr_hs(ctx, a->rs1);
641
642    gen_set_rm(ctx, a->rm);
643    gen_helper_fcvt_lu_h(dest, cpu_env, src1);
644    gen_set_gpr(ctx, a->rd, dest);
645    return true;
646}
647
648static bool trans_fcvt_h_l(DisasContext *ctx, arg_fcvt_h_l *a)
649{
650    REQUIRE_64BIT(ctx);
651    REQUIRE_FPU;
652    REQUIRE_ZHINX_OR_ZFH(ctx);
653
654    TCGv_i64 dest = dest_fpr(ctx, a->rd);
655    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);
656
657    gen_set_rm(ctx, a->rm);
658    gen_helper_fcvt_h_l(dest, cpu_env, t0);
659    gen_set_fpr_hs(ctx, a->rd, dest);
660
661    mark_fs_dirty(ctx);
662    return true;
663}
664
665static bool trans_fcvt_h_lu(DisasContext *ctx, arg_fcvt_h_lu *a)
666{
667    REQUIRE_64BIT(ctx);
668    REQUIRE_FPU;
669    REQUIRE_ZHINX_OR_ZFH(ctx);
670
671    TCGv_i64 dest = dest_fpr(ctx, a->rd);
672    TCGv t0 = get_gpr(ctx, a->rs1, EXT_SIGN);
673
674    gen_set_rm(ctx, a->rm);
675    gen_helper_fcvt_h_lu(dest, cpu_env, t0);
676    gen_set_fpr_hs(ctx, a->rd, dest);
677
678    mark_fs_dirty(ctx);
679    return true;
680}
681