xref: /openbmc/qemu/tcg/tcg-op.c (revision 6edfca9e)
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "tcg/tcg.h"
#include "tcg/tcg-temp-internal.h"
#include "tcg/tcg-op-common.h"
#include "exec/translation-block.h"
#include "exec/plugin-gen.h"
#include "tcg-internal.h"


void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc, 1);
    op->args[0] = a1;
}

void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc, 2);
    op->args[0] = a1;
    op->args[1] = a2;
}

void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc, 3);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc, 4);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc, 5);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc, 6);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}

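/*
 * The typed opcode helpers used throughout this file are thin wrappers
 * around the raw emitters above.  As a sketch (the actual static inline
 * definitions live in the TCG headers, not here), a two-operand i32 op
 * is emitted roughly as:
 *
 *     tcg_gen_op2(opc, tcgv_i32_arg(a1), tcgv_i32_arg(a2));
 *
 * i.e. each TCGv_* handle is converted to a generic TCGArg first.
 */
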
/* Generic ops.  */

static void add_last_as_label_use(TCGLabel *l)
{
    TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));

    u->op = tcg_last_op();
    QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
}

void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}

void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}

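/*
 * For example, a front end that needs a full sequentially consistent
 * fence would emit (illustrative; the TCG_MO_* and TCG_BAR_* flags are
 * defined in tcg.h):
 *
 *     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
 *
 * and the op is dropped entirely for non-parallel user-mode code.
 */
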
/* 32-bit ops */

void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}

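/*
 * A worked example of the folding above (operands are illustrative):
 *
 *     tcg_gen_andi_i32(r, a, 0xff);
 *
 * emits a single ext8u_i32 when the backend supports it, instead of
 * materializing the mask and emitting an and_i32.
 */
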
void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
        add_last_as_label_use(l);
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

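/*
 * Strength-reduction example: a power-of-two multiplier becomes a
 * shift, so (values illustrative)
 *
 *     tcg_gen_muli_i32(r, a, 8);
 *
 * is emitted as r = a << 3, since ctz32(8) == 3.
 */
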
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

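/*
 * The div-based fallback above relies on the truncated-division
 * identity rem = arg1 - (arg1 / arg2) * arg2; for example,
 * -7 rem 2 = -7 - (-3 * 2) = -1.
 */
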
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}

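/*
 * The ctpop-based path above uses the standard trick that
 * (x - 1) & ~x isolates the trailing zeros as a mask of ones.
 * For x = 0b01100: x - 1 = 0b01011, & ~x = 0b00011, ctpop = 2 = ctz(x).
 * The final movcond substitutes arg2 for the x == 0 case, where the
 * mask would be all ones and ctpop would return 32.
 */
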
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}

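/*
 * clrsb counts the redundant sign bits below the sign bit, e.g.
 * clrsb(0) == 31, clrsb(-1) == 31, clrsb(1) == 30.  Xoring with the
 * replicated sign bit folds positive and negative inputs onto the same
 * clz computation, with clzi(t, t, 32) covering the all-bits-equal case.
 */
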
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}

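/*
 * The mask-based fallback above computes the usual bitfield insert
 *
 *     ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs)
 *
 * e.g. for ofs = 8, len = 8: arg1 = 0xaabbccdd, arg2 = 0xee gives
 * ret = 0xaabbeedd (values illustrative).
 */
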
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}

void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}

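/*
 * An unsigned extract is shift-plus-mask; for instance (operands
 * illustrative)
 *
 *     tcg_gen_extract_i32(r, a, 8, 4);
 *
 * computes r = (a >> 8) & 0xf, or a single extract_i32 op when the
 * backend provides one.
 */
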
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}

/*
 * Extract 32 bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}

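/*
 * Viewed as a funnel shift, extract2 returns bits [ofs, ofs + 31] of
 * the 64-bit concatenation ah:al.  With al = 0x11223344,
 * ah = 0x55667788 and ofs = 8 the result is 0x88112233
 * (values illustrative).
 */
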
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

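/*
 * The fallback above is branch-free: setcond plus negation turns the
 * condition into an all-ones or all-zero mask t0, and the select is
 * then ret = (v1 & t0) | (v2 & ~t0).
 */
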
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        qemu_build_not_reached();
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

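/*
 * The 32-bit-host path above corrects the unsigned product: writing
 * each signed operand as sa = a - 2^32 * sign(a), the cross terms give
 *
 *     hi(sa * sb) = hi(a * b) - (a < 0 ? b : 0) - (b < 0 ? a : 0)
 *
 * which is rh = t1 - t2 - t3, with t2/t3 formed by and-ing each operand
 * against the other's replicated sign bit.
 */
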
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

        tcg_gen_shri_i32(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);
            tcg_gen_sari_i32(t1, t1, 16);
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);
            tcg_gen_shli_i32(t1, t1, 8);
        } else {
            tcg_gen_shli_i32(t1, arg, 8);
        }

        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}

void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}

void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}

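/*
 * Branchless abs: with t = a >> 31 (all ones for negative a, else 0),
 * (a ^ t) - t yields a for t == 0 and ~a + 1 == -a for t == -1.
 * For a = -5: t = -1, a ^ t = 4, 4 - (-1) = 5.
 */
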
/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#if HOST_BIG_ENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}

void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}

void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#if HOST_BIG_ENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}

void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}

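/*
 * The decomposition above is schoolbook multiplication modulo 2^64:
 *
 *     (2^32 * ah + al) * (2^32 * bh + bl)
 *         == al * bl + 2^32 * (al * bh + ah * bl)   (mod 2^64)
 *
 * so only al * bl needs a full 32 x 32 -> 64 product (mulu2); the two
 * cross terms are plain 32-bit multiplies added into the high half.
 */
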
#else

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}

#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}

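/*
 * Double-word shift sketch: for a logical right shift by c < 32 the
 * halves above combine as
 *
 *     low  = (low >> c) | (high << (32 - c))
 *     high =  high >> c
 *
 * while shifts of 32 or more reduce to a move plus a shift by c - 32,
 * which is exactly the c >= 32 branch.
 */
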
1464 void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1465 {
1466     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
1467     if (TCG_TARGET_REG_BITS == 32) {
1468         tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
1469     } else if (arg2 == 0) {
1470         tcg_gen_mov_i64(ret, arg1);
1471     } else {
1472         tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
1473     }
1474 }
1475 
1476 void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1477 {
1478     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
1479     if (TCG_TARGET_REG_BITS == 32) {
1480         tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
1481     } else if (arg2 == 0) {
1482         tcg_gen_mov_i64(ret, arg1);
1483     } else {
1484         tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
1485     }
1486 }
1487 
1488 void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1489 {
1490     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
1491     if (TCG_TARGET_REG_BITS == 32) {
1492         tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
1493     } else if (arg2 == 0) {
1494         tcg_gen_mov_i64(ret, arg1);
1495     } else {
1496         tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
1497     }
1498 }
1499 
1500 void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
1501 {
1502     if (cond == TCG_COND_ALWAYS) {
1503         tcg_gen_br(l);
1504     } else if (cond != TCG_COND_NEVER) {
1505         if (TCG_TARGET_REG_BITS == 32) {
1506             tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
1507                               TCGV_HIGH(arg1), TCGV_LOW(arg2),
1508                               TCGV_HIGH(arg2), cond, label_arg(l));
1509         } else {
1510             tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
1511                               label_arg(l));
1512         }
1513         add_last_as_label_use(l);
1514     }
1515 }
1516 
1517 void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
1518 {
1519     if (TCG_TARGET_REG_BITS == 64) {
1520         tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
1521     } else if (cond == TCG_COND_ALWAYS) {
1522         tcg_gen_br(l);
1523     } else if (cond != TCG_COND_NEVER) {
1524         tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
1525                           TCGV_LOW(arg1), TCGV_HIGH(arg1),
1526                           tcg_constant_i32(arg2),
1527                           tcg_constant_i32(arg2 >> 32),
1528                           cond, label_arg(l));
1529         add_last_as_label_use(l);
1530     }
1531 }
1532 
1533 void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
1534                          TCGv_i64 arg1, TCGv_i64 arg2)
1535 {
1536     if (cond == TCG_COND_ALWAYS) {
1537         tcg_gen_movi_i64(ret, 1);
1538     } else if (cond == TCG_COND_NEVER) {
1539         tcg_gen_movi_i64(ret, 0);
1540     } else {
1541         if (TCG_TARGET_REG_BITS == 32) {
1542             tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
1543                              TCGV_LOW(arg1), TCGV_HIGH(arg1),
1544                              TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
1545             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1546         } else {
1547             tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
1548         }
1549     }
1550 }
1551 
1552 void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
1553                           TCGv_i64 arg1, int64_t arg2)
1554 {
1555     if (TCG_TARGET_REG_BITS == 64) {
1556         tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
1557     } else if (cond == TCG_COND_ALWAYS) {
1558         tcg_gen_movi_i64(ret, 1);
1559     } else if (cond == TCG_COND_NEVER) {
1560         tcg_gen_movi_i64(ret, 0);
1561     } else {
1562         tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
1563                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
1564                          tcg_constant_i32(arg2),
1565                          tcg_constant_i32(arg2 >> 32), cond);
1566         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1567     }
1568 }
1569 
1570 void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1571 {
1572     if (arg2 == 0) {
1573         tcg_gen_movi_i64(ret, 0);
1574     } else if (is_power_of_2(arg2)) {
1575         tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
1576     } else {
1577         tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
1578     }
1579 }
1580 
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

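/*
 * Without a dedicated extension opcode, sign extension uses the
 * classic shift pair: e.g. for 8 bits, (x << 56) >> 56 arithmetically
 * replicates bit 7 of x across bits 8..63.
 */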
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

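/*
 * For the bswap expansions, TCG_BSWAP_IZ promises that the input is
 * already zero-extended from the swapped width, while TCG_BSWAP_OZ
 * and TCG_BSWAP_OS request a zero- or sign-extended result.  E.g.
 * bswap16 of 0x80ff with TCG_BSWAP_OS gives 0xff80, sign-extended to
 * 0xffffffffffffff80.
 */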
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

        tcg_gen_shri_i64(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i64(t0, t0);
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i64(t1, arg, 56);
            tcg_gen_sari_i64(t1, t1, 48);
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);
            tcg_gen_shli_i64(t1, t1, 8);
        } else {
            tcg_gen_shli_i64(t1, arg, 8);
        }

        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}

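/*
 * hswap reverses the four 16-bit units of a word:
 * 0x1111222233334444 becomes 0x4444333322221111.
 */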
void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    uint64_t m = 0x0000ffff0000ffffull;
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();

    /* See include/qemu/bitops.h, hswap64. */
    tcg_gen_rotli_i64(t1, arg, 32);
    tcg_gen_andi_i64(t0, t1, m);
    tcg_gen_shli_i64(t0, t0, 16);
    tcg_gen_shri_i64(t1, t1, 16);
    tcg_gen_andi_i64(t1, t1, m);
    tcg_gen_or_i64(ret, t0, t1);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Swapping 2 32-bit elements is a rotate. */
    tcg_gen_rotli_i64(ret, arg, 32);
}

void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        tcg_gen_xori_i64(ret, arg, -1);
    }
}

void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}

void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

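/*
 * The clz/ctz opcodes take a second operand that supplies the result
 * for a zero input; the 32-bit-host expansions below chain the two
 * halves through it: clz64(x) = high ? clz32(high) : 32 + clz32(low).
 */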
void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_clz_i64) {
        tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
    } else {
        gen_helper_clz_i64(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

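/*
 * With only ctpop available, ctz(x) = ctpop((x - 1) & ~x), since that
 * mask selects exactly the bits below the least significant set bit:
 * e.g. x = 0b101000 gives (x - 1) & ~x = 0b000111, whose population
 * count is 3.  For x = 0 the mask is all ones, yielding 64.
 */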
void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_ctz_i64) {
        tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 z, t = tcg_temp_ebb_new_i64();

        if (TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i64(t, arg1, 1);
            tcg_gen_andc_i64(t, t, arg1);
            tcg_gen_ctpop_i64(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
            tcg_gen_neg_i64(t, arg1);
            tcg_gen_and_i64(t, t, arg1);
            tcg_gen_clzi_i64(t, t, 64);
            tcg_gen_xori_i64(t, t, 63);
        }
        z = tcg_constant_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctz_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

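/*
 * clrsb counts the leading redundant sign bits.  It reduces to
 * clz(x ^ (x >> 63)) - 1: the xor maps a negative value onto its
 * one's-complement positive image, e.g. clrsb(-1) = clz(0) - 1 = 63.
 */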
void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t, arg, 63);
        tcg_gen_xor_i64(t, t, arg);
        tcg_gen_clzi_i64(t, t, 64);
        tcg_gen_subi_i64(ret, t, 1);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}

void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}

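/*
 * Hosts without a rotate compose it from shifts:
 * rotl(x, c) = (x << c) | (x >> (64 - c)).  The immediate forms
 * special-case c == 0, where the expression above would shift by 64.
 */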
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
    }
}

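/*
 * Example: tcg_gen_deposit_i64(ret, a, b, 8, 16) copies a to ret with
 * bits 8..23 replaced by the low 16 bits of b.
 */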
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}

void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}

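/*
 * Example: tcg_gen_extract_i64(ret, a, 8, 16) computes
 * (a >> 8) & 0xffff, i.e. bits 8..23 of a, zero-extended.
 */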
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}

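/*
 * Example: tcg_gen_sextract_i64(ret, a, 8, 16) produces bits 8..23
 * of a sign-extended from bit 23, matching the final shift-pair
 * fallback ((a << 40) >> 48 with an arithmetic right shift).
 */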
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally; the field
               starts at bit OFS of the high word, since OFS < 32.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}

/*
 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}

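/*
 * Without a movcond opcode, select through a mask:
 * m = -(c1 cond c2); ret = (v1 & m) | (v2 & ~m).
 */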
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

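/*
 * The generic double-word add computes the carry-out as
 * (al + bl) <u al: the low sum wraps below its addend exactly when a
 * carry occurred, e.g. 0xffffffffffffffff + 1 wraps to 0.
 */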
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

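/*
 * The signed high part can be derived from the unsigned product:
 * muls2_hi(a, b) = mulu2_hi(a, b) - (a < 0 ? b : 0) - (b < 0 ? a : 0),
 * which is what the sign-mask adjustment below computes.
 */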
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();
    TCGv_i64 t2 = tcg_temp_ebb_new_i64();
    tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
    /* Adjust for negative input for the signed arg1.  */
    tcg_gen_sari_i64(t2, arg1, 63);
    tcg_gen_and_i64(t2, t2, arg2);
    tcg_gen_sub_i64(rh, t1, t2);
    tcg_gen_mov_i64(rl, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}

void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
{
    TCGv_i64 t = tcg_temp_ebb_new_i64();

    tcg_gen_sari_i64(t, a, 63);
    tcg_gen_xor_i64(ret, a, t);
    tcg_gen_sub_i64(ret, ret, t);
    tcg_temp_free_i64(t);
}

/* Size changing operations.  */

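/*
 * On a 64-bit host an i32 value lives in the low half of a 64-bit
 * register, so extrl can degenerate into a plain mov (note the type
 * pun below), while extrh needs a real shift when the host has no
 * dedicated opcode for it.
 */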
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}

void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}

void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}

void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}

void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}

void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}

void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}

void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}

void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
{
    if (dst != src) {
        tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
        tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
    }
}

/* QEMU specific operations.  */

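/*
 * The single argument of exit_tb encodes both the TB pointer and the
 * exit index: TranslationBlocks are sufficiently aligned that the low
 * two bits are free to carry idx (0 or 1 for goto_tb chains,
 * TB_EXIT_REQUESTED for the exitreq path).
 */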
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}

void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}

void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    gen_helper_lookup_tb_ptr(ptr, cpu_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}
2855