xref: /openbmc/qemu/tcg/tcg-op.c (revision afb81fe8)
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24 
25 #include "qemu/osdep.h"
26 #include "tcg/tcg.h"
27 #include "tcg/tcg-temp-internal.h"
28 #include "tcg/tcg-op-common.h"
29 #include "exec/translation-block.h"
30 #include "exec/plugin-gen.h"
31 #include "tcg-internal.h"
32 
33 
34 void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
35 {
36     TCGOp *op = tcg_emit_op(opc, 1);
37     op->args[0] = a1;
38 }
39 
40 void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
41 {
42     TCGOp *op = tcg_emit_op(opc, 2);
43     op->args[0] = a1;
44     op->args[1] = a2;
45 }
46 
47 void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
48 {
49     TCGOp *op = tcg_emit_op(opc, 3);
50     op->args[0] = a1;
51     op->args[1] = a2;
52     op->args[2] = a3;
53 }
54 
55 void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
56 {
57     TCGOp *op = tcg_emit_op(opc, 4);
58     op->args[0] = a1;
59     op->args[1] = a2;
60     op->args[2] = a3;
61     op->args[3] = a4;
62 }
63 
64 void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
65                  TCGArg a4, TCGArg a5)
66 {
67     TCGOp *op = tcg_emit_op(opc, 5);
68     op->args[0] = a1;
69     op->args[1] = a2;
70     op->args[2] = a3;
71     op->args[3] = a4;
72     op->args[4] = a5;
73 }
74 
75 void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
76                  TCGArg a4, TCGArg a5, TCGArg a6)
77 {
78     TCGOp *op = tcg_emit_op(opc, 6);
79     op->args[0] = a1;
80     op->args[1] = a2;
81     op->args[2] = a3;
82     op->args[3] = a4;
83     op->args[4] = a5;
84     op->args[5] = a6;
85 }
86 
87 /* Generic ops.  */
88 
/*
 * Record the most recently emitted op as a use (branch) of label @l,
 * appending it to the label's list of branches so the backend can
 * later patch all jump targets when the label is resolved.
 */
static void add_last_as_label_use(TCGLabel *l)
{
    TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));

    u->op = tcg_last_op();
    QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
}
96 
/* Emit an unconditional branch to label @l and register it as a use. */
void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}
102 
/*
 * Emit a memory barrier of kind @mb_type, unless it can be proven
 * unnecessary (user-mode, non-parallel translation block).
 */
void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    /* In user mode the barrier is only needed when the TB was
       translated for parallel execution.  */
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
121 
122 /* 32 bit ops */
123 
/* Load the 32-bit constant @arg into @ret via a constant temporary. */
void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}
128 
129 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
130 {
131     /* some cases can be optimized here */
132     if (arg2 == 0) {
133         tcg_gen_mov_i32(ret, arg1);
134     } else {
135         tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
136     }
137 }
138 
139 void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
140 {
141     if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
142         /* Don't recurse with tcg_gen_neg_i32.  */
143         tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
144     } else {
145         tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
146     }
147 }
148 
149 void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
150 {
151     /* some cases can be optimized here */
152     if (arg2 == 0) {
153         tcg_gen_mov_i32(ret, arg1);
154     } else {
155         tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
156     }
157 }
158 
159 void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
160 {
161     /* Some cases can be optimized here.  */
162     switch (arg2) {
163     case 0:
164         tcg_gen_movi_i32(ret, 0);
165         return;
166     case -1:
167         tcg_gen_mov_i32(ret, arg1);
168         return;
169     case 0xff:
170         /* Don't recurse with tcg_gen_ext8u_i32.  */
171         if (TCG_TARGET_HAS_ext8u_i32) {
172             tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
173             return;
174         }
175         break;
176     case 0xffff:
177         if (TCG_TARGET_HAS_ext16u_i32) {
178             tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
179             return;
180         }
181         break;
182     }
183 
184     tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
185 }
186 
187 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
188 {
189     /* Some cases can be optimized here.  */
190     if (arg2 == -1) {
191         tcg_gen_movi_i32(ret, -1);
192     } else if (arg2 == 0) {
193         tcg_gen_mov_i32(ret, arg1);
194     } else {
195         tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
196     }
197 }
198 
199 void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
200 {
201     /* Some cases can be optimized here.  */
202     if (arg2 == 0) {
203         tcg_gen_mov_i32(ret, arg1);
204     } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
205         /* Don't recurse with tcg_gen_not_i32.  */
206         tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
207     } else {
208         tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
209     }
210 }
211 
212 void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
213 {
214     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
215     if (arg2 == 0) {
216         tcg_gen_mov_i32(ret, arg1);
217     } else {
218         tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
219     }
220 }
221 
222 void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
223 {
224     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
225     if (arg2 == 0) {
226         tcg_gen_mov_i32(ret, arg1);
227     } else {
228         tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
229     }
230 }
231 
232 void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
233 {
234     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
235     if (arg2 == 0) {
236         tcg_gen_mov_i32(ret, arg1);
237     } else {
238         tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
239     }
240 }
241 
242 void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
243 {
244     if (cond == TCG_COND_ALWAYS) {
245         tcg_gen_br(l);
246     } else if (cond != TCG_COND_NEVER) {
247         tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
248         add_last_as_label_use(l);
249     }
250 }
251 
252 void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
253 {
254     if (cond == TCG_COND_ALWAYS) {
255         tcg_gen_br(l);
256     } else if (cond != TCG_COND_NEVER) {
257         tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
258     }
259 }
260 
261 void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
262                          TCGv_i32 arg1, TCGv_i32 arg2)
263 {
264     if (cond == TCG_COND_ALWAYS) {
265         tcg_gen_movi_i32(ret, 1);
266     } else if (cond == TCG_COND_NEVER) {
267         tcg_gen_movi_i32(ret, 0);
268     } else {
269         tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
270     }
271 }
272 
/* Setcond with a constant right-hand operand. */
void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
278 
279 void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
280                             TCGv_i32 arg1, TCGv_i32 arg2)
281 {
282     if (cond == TCG_COND_ALWAYS) {
283         tcg_gen_movi_i32(ret, -1);
284     } else if (cond == TCG_COND_NEVER) {
285         tcg_gen_movi_i32(ret, 0);
286     } else if (TCG_TARGET_HAS_negsetcond_i32) {
287         tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
288     } else {
289         tcg_gen_setcond_i32(cond, ret, arg1, arg2);
290         tcg_gen_neg_i32(ret, ret);
291     }
292 }
293 
294 void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
295 {
296     if (arg2 == 0) {
297         tcg_gen_movi_i32(ret, 0);
298     } else if (is_power_of_2(arg2)) {
299         tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
300     } else {
301         tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
302     }
303 }
304 
/* Signed 32-bit division: ret = arg1 / arg2. */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* div2 takes a sign-extended high word; build it from arg1.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        /* No host support at all: call the out-of-line helper.  */
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
318 
/* Signed 32-bit remainder: ret = arg1 % arg2. */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* rem = arg1 - (arg1 / arg2) * arg2.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* div2 produces quotient and remainder; here the remainder
           goes to ret (second output).  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
338 
/* Unsigned 32-bit division: ret = arg1 / arg2. */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* Unsigned div2 takes a zero high word.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
352 
/* Unsigned 32-bit remainder: ret = arg1 % arg2. */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* rem = arg1 - (arg1 / arg2) * arg2.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* Unsigned div2 with zero high word; remainder to ret.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
372 
373 void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
374 {
375     if (TCG_TARGET_HAS_andc_i32) {
376         tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
377     } else {
378         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
379         tcg_gen_not_i32(t0, arg2);
380         tcg_gen_and_i32(ret, arg1, t0);
381         tcg_temp_free_i32(t0);
382     }
383 }
384 
385 void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
386 {
387     if (TCG_TARGET_HAS_eqv_i32) {
388         tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
389     } else {
390         tcg_gen_xor_i32(ret, arg1, arg2);
391         tcg_gen_not_i32(ret, ret);
392     }
393 }
394 
395 void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
396 {
397     if (TCG_TARGET_HAS_nand_i32) {
398         tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
399     } else {
400         tcg_gen_and_i32(ret, arg1, arg2);
401         tcg_gen_not_i32(ret, ret);
402     }
403 }
404 
405 void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
406 {
407     if (TCG_TARGET_HAS_nor_i32) {
408         tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
409     } else {
410         tcg_gen_or_i32(ret, arg1, arg2);
411         tcg_gen_not_i32(ret, ret);
412     }
413 }
414 
415 void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
416 {
417     if (TCG_TARGET_HAS_orc_i32) {
418         tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
419     } else {
420         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
421         tcg_gen_not_i32(t0, arg2);
422         tcg_gen_or_i32(ret, arg1, t0);
423         tcg_temp_free_i32(t0);
424     }
425 }
426 
/*
 * Count leading zeros of arg1; if arg1 == 0, ret = arg2.
 */
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        /* Widen to 64 bits: a zero-extended value has 32 extra leading
           zeros, so bias the zero-input default by +32 up front and
           subtract 32 from the result afterwards.  */
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}
446 
/* Count leading zeros with a constant zero-input default. */
void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}
451 
/*
 * Count trailing zeros of arg1; if arg1 == 0, ret = arg2.
 */
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        /* Widen and use the 64-bit op; zero-extension leaves the low
           32 bits (and therefore the trailing-zero count) unchanged.  */
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            /* ctz(x) == popcount((x - 1) & ~x): isolate the low set
               bit's trailing zeros as a mask and count them.  */
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        /* Both fallbacks mishandle arg1 == 0; select arg2 for that case.  */
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
489 
/* Count trailing zeros with a constant zero-input default. */
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
503 
/*
 * Count leading redundant sign bits: the number of bits following the
 * sign bit that equal it.
 */
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        /* XOR with the broadcast sign turns redundant sign bits into
           leading zeros; clz then counts sign bit + copies, so -1.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
517 
/* Population count: ret = number of set bits in arg1. */
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        /* Zero-extend and use the 64-bit op; the count is unchanged.  */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}
532 
/* Rotate arg1 left by arg2 bits (count taken modulo 32 by the target). */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        /* Fallback: (arg1 << n) | (arg1 >> (32 - n)).  */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
550 
/* Rotate arg1 left by a constant count in [0, 31]. */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        /* Fallback: (arg1 << n) | (arg1 >> (32 - n)) with constant shifts.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
570 
/* Rotate arg1 right by arg2 bits (count taken modulo 32 by the target). */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        /* Fallback: (arg1 >> n) | (arg1 << (32 - n)).  */
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
588 
589 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
590 {
591     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
592     /* some cases can be optimized here */
593     if (arg2 == 0) {
594         tcg_gen_mov_i32(ret, arg1);
595     } else {
596         tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
597     }
598 }
599 
/*
 * Deposit the low @len bits of arg2 into arg1 at bit offset @ofs,
 * leaving the remaining bits of arg1 unchanged.
 */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* A full-width deposit replaces arg1 entirely.  */
    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        /* Depositing at the top: shift arg1 up and take the boundary
           of the (arg1 << len):arg2 pair.  */
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        /* Depositing at the bottom: extract, then rotate the field
           back into position.  */
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    /* Generic mask-and-merge fallback.  */
    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches the top bit: the shift discards the excess,
           so no pre-masking is needed.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
647 
/*
 * Deposit the low @len bits of @arg at offset @ofs into a zero
 * background: ret = (arg & ((1 << len) - 1)) << ofs.
 */
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        /* Field reaches the top: the shift alone discards the excess.  */
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at the bottom: a plain mask suffices.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        /* Deposit into a zero constant.  */
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: mask then shift.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
704 
/*
 * Extract @len bits from @arg starting at bit @ofs, zero-extended:
 * ret = (arg >> ofs) & ((1 << len) - 1).
 */
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Top field: a logical right shift does it all.  */
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Bottom field: a plain mask does it all.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        /* Shift the field to the top, then logically back down,
           letting the shifts do both isolation and zero-extension.  */
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
761 
/*
 * Extract @len bits from @arg starting at bit @ofs, sign-extended
 * from the field's top bit.
 */
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Top field: an arithmetic right shift does it all.  */
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Bottom byte/halfword fields map to plain sign-extensions.  */
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    /* Last resort: shift the field to the top, then arithmetically
       back down so the shifts do both isolation and sign-extension.  */
    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
829 
830 /*
831  * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
832  * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
833  */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        /* Offset 0: the result is entirely the low half.  */
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        /* Offset 32: the result is entirely the high half.  */
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        /* Same register for both halves: this is just a rotate.  */
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        /* Fallback: low bits from al, then deposit the bits of ah
           into the freed top portion.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
853 
/*
 * Conditional move: ret = (c1 cond c2) ? v1 : v2.
 */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        /* Branchless fallback: build an all-ones/all-zeros mask from
           the condition and blend: (v1 & mask) | (v2 & ~mask).  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_negsetcond_i32(cond, t0, c1, c2);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
874 
875 void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
876                       TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
877 {
878     if (TCG_TARGET_HAS_add2_i32) {
879         tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
880     } else {
881         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
882         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
883         tcg_gen_concat_i32_i64(t0, al, ah);
884         tcg_gen_concat_i32_i64(t1, bl, bh);
885         tcg_gen_add_i64(t0, t0, t1);
886         tcg_gen_extr_i64_i32(rl, rh, t0);
887         tcg_temp_free_i64(t0);
888         tcg_temp_free_i64(t1);
889     }
890 }
891 
892 void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
893                       TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
894 {
895     if (TCG_TARGET_HAS_sub2_i32) {
896         tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
897     } else {
898         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
899         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
900         tcg_gen_concat_i32_i64(t0, al, ah);
901         tcg_gen_concat_i32_i64(t1, bl, bh);
902         tcg_gen_sub_i64(t0, t0, t1);
903         tcg_gen_extr_i64_i32(rl, rh, t0);
904         tcg_temp_free_i64(t0);
905         tcg_temp_free_i64(t1);
906     }
907 }
908 
/* Unsigned widening multiply: (rh:rl) = arg1 * arg2. */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        /* Compute the low half into a temp first so rl may alias the
           inputs of the high-half multiply.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        /* Zero-extend to 64 bits, multiply, split the product.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        /* 32-bit hosts are required to implement mulu2 or muluh.  */
        qemu_build_not_reached();
    }
}
932 
/* Signed widening multiply: (rh:rl) = arg1 * arg2. */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        /* Low half into a temp first so rl may alias the inputs.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Derive the signed product from the unsigned one: for each
           negative input, the unsigned high half is too large by the
           other operand; subtract those corrections.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: sign-extend, multiply, split the product.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
972 
/*
 * mulsu2_i32: 32x32->64 multiply with arg1 signed and arg2 unsigned;
 * low half into rl, high into rh.
 */
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /*
         * As for muls2, but only arg1 is signed:
         *   high = unsigned_high - (arg1 < 0 ? arg2 : 0)
         */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        /* Sign-extend arg1, zero-extend arg2, multiply, split the product. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
999 
/* Sign-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        /* Shift the byte to the top, then arithmetic-shift it back down. */
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}
1009 
/* Sign-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        /* Shift the halfword to the top, then arithmetic-shift back down. */
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}
1019 
/* Zero-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}
1028 
/* Zero-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
1037 
1038 /*
1039  * bswap16_i32: 16-bit byte swap on the low bits of a 32-bit value.
1040  *
1041  * Byte pattern: xxab -> yyba
1042  *
1043  * With TCG_BSWAP_IZ, x == zero, else undefined.
1044  * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1045  */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        /* Swap the two low bytes with shifts; t0 holds byte 'a' moved
           down, t1 holds byte 'b' moved up (with the flag-selected
           treatment of the upper 16 bits). */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

                                            /* arg = ..ab (IZ) xxab (!IZ) */
        tcg_gen_shri_i32(t0, arg, 8);       /*  t0 = ...a (IZ) .xxa (!IZ) */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);      /*  t0 = ...a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);  /*  t1 = b... */
            tcg_gen_sari_i32(t1, t1, 16);   /*  t1 = ssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);     /*  t1 = ...b */
            tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = ..b. */
        } else {
            tcg_gen_shli_i32(t1, arg, 8);   /*  t1 = xab. */
        }

        tcg_gen_or_i32(ret, t0, t1);        /* ret = ..ba (OZ) */
                                            /*     = ssba (OS) */
                                            /*     = xaba (no flag) */
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1080 
1081 /*
1082  * bswap32_i32: 32-bit byte swap on a 32-bit value.
1083  *
1084  * Byte pattern: abcd -> dcba
1085  */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        /* Two-step swap: first exchange bytes within each halfword
           using the 0x00ff00ff mask, then exchange the halfwords. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1110 
1111 /*
1112  * hswap_i32: Swap 16-bit halfwords within a 32-bit value.
1113  *
1114  * Byte pattern: abcd -> cdab
1115  */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}
1121 
/* ret = signed min(a, b), via movcond. */
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}
1126 
/* ret = unsigned min(a, b), via movcond. */
void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}
1131 
/* ret = signed max(a, b), via movcond. */
void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}
1136 
/* ret = unsigned max(a, b), via movcond. */
void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
1141 
/*
 * ret = |a|, branchlessly: with t = a >> 31 (all-ones if a is negative,
 * else zero), (a ^ t) - t conditionally negates a.
 */
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
1151 
1152 /* 64-bit ops */
1153 
1154 #if TCG_TARGET_REG_BITS == 32
1155 /* These are all inline for TCG_TARGET_REG_BITS == 64.  */
1156 
/* Discard both 32-bit halves of the i64 temp. */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}
1162 
/* Copy arg into ret, half by half on a 32-bit host. */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}
1175 
/* Load a 64-bit constant as two 32-bit halves. */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
1181 
/* Load 8 bits zero-extended to 64: low half loads, high half is zero. */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
1187 
/* Load 8 bits sign-extended to 64: high half replicates the sign bit. */
void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
1193 
/* Load 16 bits zero-extended to 64: low half loads, high half is zero. */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
1199 
/* Load 16 bits sign-extended to 64: high half replicates the sign bit. */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
1205 
/* Load 32 bits zero-extended to 64. */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
1211 
/* Load 32 bits sign-extended to 64. */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
1217 
/* Load a full 64-bit value as two 32-bit loads, honoring host endianness. */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#if HOST_BIG_ENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
1230 
/* Store the low 8 bits; only the low half of the i64 is needed. */
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}
1235 
/* Store the low 16 bits; only the low half of the i64 is needed. */
void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}
1240 
/* Store the low 32 bits of the i64. */
void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}
1245 
/* Store a full 64-bit value as two 32-bit stores, honoring host endianness. */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#if HOST_BIG_ENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
1256 
/* 64-bit add via add2 (add with carry across the two halves). */
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}
1262 
/* 64-bit subtract via sub2 (subtract with borrow across the two halves). */
void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}
1268 
/* Bitwise AND, applied independently to each half. */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
1274 
/* Bitwise OR, applied independently to each half. */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
1280 
/* Bitwise XOR, applied independently to each half. */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
1286 
/* Variable 64-bit shift left on a 32-bit host: punt to a helper call. */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}
1291 
/* Variable 64-bit logical shift right on a 32-bit host: helper call. */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}
1296 
/* Variable 64-bit arithmetic shift right on a 32-bit host: helper call. */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
1301 
/*
 * 64-bit multiply on a 32-bit host, schoolbook style:
 *   lo(a) * lo(b) gives the 64-bit base product; the cross terms
 *   lo(a)*hi(b) and hi(a)*lo(b) only affect the high half (mod 2^64).
 * Accumulate into a temp so ret may alias the inputs.
 */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
1322 
1323 #else
1324 
/* 64-bit host: a constant load is a move from a constant temp. */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}
1329 
#endif /* TCG_TARGET_REG_BITS == 32 */
1331 
/* ret = arg1 + constant, with the trivial case folded away. */
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* 32-bit host: add-with-carry against the split constant. */
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1345 
/* ret = constant - arg2 (reverse subtract); 0 - x folds to negation. */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        /* 32-bit host: subtract-with-borrow from the split constant. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}
1359 
/* ret = arg1 - constant, with the trivial case folded away. */
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* 32-bit host: subtract-with-borrow against the split constant. */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1373 
/*
 * ret = arg1 & constant.  Masks that correspond to zero-extensions are
 * strength-reduced to the matching ext*u op when the backend has one.
 */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}
1413 
/* ret = arg1 | constant, folding the all-ones and zero identities. */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1430 
/* ret = arg1 ^ constant; ^0 is identity, ^-1 becomes a NOT when available. */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1448 
/*
 * Constant 64-bit shift on a 32-bit host, expanded over the two halves.
 * right selects right vs left shift; arith selects arithmetic vs logical
 * for right shifts.  Three cases: c == 0 is a move, c >= 32 moves one
 * half into the other, otherwise bits are carried between the halves
 * (via extract2 when the backend has it).
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* Whole-word shift: one half is the other half shifted by c - 32,
           the vacated half is zero or sign fill. */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        /* Low result takes the low bits of the high half; the deposit
           fallback writes TCGV_LOW(ret) only after reading TCGV_LOW(arg1),
           so aliasing ret == arg1 is safe. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        /* Left shift: high result takes the top bits of the low half.
           The high half is computed first so that writing TCGV_LOW(ret)
           last cannot clobber an input still needed. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
1498 
/* ret = arg1 << constant (0 <= arg2 < 64). */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1510 
/* ret = arg1 >> constant, logical (0 <= arg2 < 64). */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1522 
/* ret = arg1 >> constant, arithmetic (0 <= arg2 < 64). */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1534 
/*
 * Branch to l if the 64-bit comparison holds.  ALWAYS becomes an
 * unconditional branch, NEVER emits nothing; 32-bit hosts use the
 * double-word brcond2 op.
 */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
        /* Record the just-emitted op as a use of the label. */
        add_last_as_label_use(l);
    }
}
1551 
/* Branch on comparison against a 64-bit constant. */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* 32-bit host: compare against the constant split into halves. */
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
        add_last_as_label_use(l);
    }
}
1567 
/* ret = (arg1 cond arg2) ? 1 : 0.  Constant conditions fold to movi. */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* setcond2 yields a 32-bit 0/1; zero the high half. */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1586 
/* ret = (arg1 cond constant) ? 1 : 0. */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        /* 32-bit host: setcond2 against the split constant; zero high. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1604 
/* ret = (arg1 cond arg2) ? -1 : 0, i.e. setcond followed by negate. */
void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
                            TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i64) {
        tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* setcond2 yields 0/1 in the low half; negate it to 0/-1 and
           replicate into the high half. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
        tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
    } else {
        tcg_gen_setcond_i64(cond, ret, arg1, arg2);
        tcg_gen_neg_i64(ret, ret);
    }
}
1625 
/* ret = arg1 * constant; powers of two are strength-reduced to shifts. */
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1636 
/* Signed 64-bit divide, falling back from div to div2 to a helper. */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 takes a 128-bit dividend: t0 is the sign-extension of
           arg1 forming its high 64 bits. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1650 
/* Signed 64-bit remainder, with rem/div/div2/helper fallbacks. */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 yields quotient (into t0, discarded) and remainder. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1670 
/* Unsigned 64-bit divide, falling back from divu to divu2 to a helper. */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* Unsigned dividend: the high 64 bits are zero. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1684 
/* Unsigned 64-bit remainder, with remu/divu/divu2/helper fallbacks. */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* rem = arg1 - (arg1 / arg2) * arg2 */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 yields quotient (into t0, discarded) and remainder. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1704 
/* Sign-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        /* Shift the byte to the top, then arithmetic-shift back down. */
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
1717 
/* Sign-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        /* Shift the halfword to the top, then arithmetic-shift back down. */
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
1730 
/* Sign-extend the low 32 bits of arg into ret. */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        /* Shift the word to the top, then arithmetic-shift back down. */
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1743 
/* Zero-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}
1755 
/* Zero-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}
1767 
/* Zero-extend the low 32 bits of arg into ret. */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1779 
1780 /*
1781  * bswap16_i64: 16-bit byte swap on the low bits of a 64-bit value.
1782  *
1783  * Byte pattern: xxxxxxxxab -> yyyyyyyyba
1784  *
1785  * With TCG_BSWAP_IZ, x == zero, else undefined.
1786  * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1787  */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap in the low half; the high half becomes sign or zero fill. */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        /* Same shift construction as the 32-bit variant, widened. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

                                            /* arg = ......ab or xxxxxxab */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .......a or .xxxxxxa */
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i64(t0, t0);      /*  t0 = .......a */
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i64(t1, arg, 56);  /*  t1 = b....... */
            tcg_gen_sari_i64(t1, t1, 48);   /*  t1 = ssssssb. */
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);     /*  t1 = .......b */
            tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = ......b. */
        } else {
            tcg_gen_shli_i64(t1, arg, 8);   /*  t1 = xxxxxab. */
        }

        tcg_gen_or_i64(ret, t0, t1);        /* ret = ......ba (OZ) */
                                            /*       ssssssba (OS) */
                                            /*       xxxxxaba (no flag) */
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1829 
1830 /*
1831  * bswap32_i64: 32-bit byte swap on the low bits of a 64-bit value.
1832  *
1833  * Byte pattern: xxxxabcd -> yyyydcba
1834  *
1835  * With TCG_BSWAP_IZ, x == zero, else undefined.
1836  * With TCG_BSWAP_OZ, y == zero, with TCG_BSWAP_OS y == sign, else undefined.
1837  */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* The swapped word is the low half; the high half is sign or zero. */
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

        /* Stage 1: swap adjacent bytes within each halfword. */
                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        /* Stage 2: swap the two halfwords, extending per flags. */
        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba (OS) */
                                            /*       ....dcba (else) */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1878 
1879 /*
1880  * bswap64_i64: 64-bit byte swap on a 64-bit value.
1881  *
1882  * Byte pattern: abcdefgh -> hgfedcba
1883  */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap bytes within each 32-bit word, then exchange the words. */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        /* Butterfly exchange: bytes, then halfwords, then words. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}
1928 
1929 /*
1930  * hswap_i64: Swap 16-bit halfwords within a 64-bit value.
1931  * See also include/qemu/bitops.h, hswap64.
1932  *
1933  * Byte pattern: abcdefgh -> ghefcdab
1934  */
1935 void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
1936 {
1937     uint64_t m = 0x0000ffff0000ffffull;
1938     TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1939     TCGv_i64 t1 = tcg_temp_ebb_new_i64();
1940 
1941                                         /* arg = abcdefgh */
1942     tcg_gen_rotli_i64(t1, arg, 32);     /*  t1 = efghabcd */
1943     tcg_gen_andi_i64(t0, t1, m);        /*  t0 = ..gh..cd */
1944     tcg_gen_shli_i64(t0, t0, 16);       /*  t0 = gh..cd.. */
1945     tcg_gen_shri_i64(t1, t1, 16);       /*  t1 = ..efghab */
1946     tcg_gen_andi_i64(t1, t1, m);        /*  t1 = ..ef..ab */
1947     tcg_gen_or_i64(ret, t0, t1);        /* ret = ghefcdab */
1948 
1949     tcg_temp_free_i64(t0);
1950     tcg_temp_free_i64(t1);
1951 }
1952 
1953 /*
1954  * wswap_i64: Swap 32-bit words within a 64-bit value.
1955  *
1956  * Byte pattern: abcdefgh -> efghabcd
1957  */
1958 void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
1959 {
1960     /* Swapping 2 32-bit elements is a rotate. */
1961     tcg_gen_rotli_i64(ret, arg, 32);
1962 }
1963 
1964 void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1965 {
1966     if (TCG_TARGET_REG_BITS == 32) {
1967         tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1968         tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1969     } else if (TCG_TARGET_HAS_not_i64) {
1970         tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1971     } else {
1972         tcg_gen_xori_i64(ret, arg, -1);
1973     }
1974 }
1975 
1976 void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1977 {
1978     if (TCG_TARGET_REG_BITS == 32) {
1979         tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1980         tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1981     } else if (TCG_TARGET_HAS_andc_i64) {
1982         tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1983     } else {
1984         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1985         tcg_gen_not_i64(t0, arg2);
1986         tcg_gen_and_i64(ret, arg1, t0);
1987         tcg_temp_free_i64(t0);
1988     }
1989 }
1990 
1991 void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1992 {
1993     if (TCG_TARGET_REG_BITS == 32) {
1994         tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1995         tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1996     } else if (TCG_TARGET_HAS_eqv_i64) {
1997         tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1998     } else {
1999         tcg_gen_xor_i64(ret, arg1, arg2);
2000         tcg_gen_not_i64(ret, ret);
2001     }
2002 }
2003 
2004 void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2005 {
2006     if (TCG_TARGET_REG_BITS == 32) {
2007         tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2008         tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2009     } else if (TCG_TARGET_HAS_nand_i64) {
2010         tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
2011     } else {
2012         tcg_gen_and_i64(ret, arg1, arg2);
2013         tcg_gen_not_i64(ret, ret);
2014     }
2015 }
2016 
2017 void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2018 {
2019     if (TCG_TARGET_REG_BITS == 32) {
2020         tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2021         tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2022     } else if (TCG_TARGET_HAS_nor_i64) {
2023         tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
2024     } else {
2025         tcg_gen_or_i64(ret, arg1, arg2);
2026         tcg_gen_not_i64(ret, ret);
2027     }
2028 }
2029 
2030 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2031 {
2032     if (TCG_TARGET_REG_BITS == 32) {
2033         tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
2034         tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
2035     } else if (TCG_TARGET_HAS_orc_i64) {
2036         tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
2037     } else {
2038         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2039         tcg_gen_not_i64(t0, arg2);
2040         tcg_gen_or_i64(ret, arg1, t0);
2041         tcg_temp_free_i64(t0);
2042     }
2043 }
2044 
2045 void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2046 {
2047     if (TCG_TARGET_HAS_clz_i64) {
2048         tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
2049     } else {
2050         gen_helper_clz_i64(ret, arg1, arg2);
2051     }
2052 }
2053 
void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    /*
     * On a 32-bit host, chain two 32-bit clz ops instead of using the
     * out-of-line helper, provided the fallback value arg2 (returned
     * when arg1 == 0) fits in 32 bits.
     */
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        /*
         * t = clz32(low) with fallback arg2 - 32; after adding 32 this
         * is 32 + clz32(low), or arg2 when the low word is also zero
         * (the subtraction wraps modulo 2**32, the addition wraps back).
         */
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        /* High word non-zero: clz32(high); otherwise fall back to t. */
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2069 
2070 void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2071 {
2072     if (TCG_TARGET_HAS_ctz_i64) {
2073         tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
2074     } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
2075         TCGv_i64 z, t = tcg_temp_ebb_new_i64();
2076 
2077         if (TCG_TARGET_HAS_ctpop_i64) {
2078             tcg_gen_subi_i64(t, arg1, 1);
2079             tcg_gen_andc_i64(t, t, arg1);
2080             tcg_gen_ctpop_i64(t, t);
2081         } else {
2082             /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
2083             tcg_gen_neg_i64(t, arg1);
2084             tcg_gen_and_i64(t, t, arg1);
2085             tcg_gen_clzi_i64(t, t, 64);
2086             tcg_gen_xori_i64(t, t, 63);
2087         }
2088         z = tcg_constant_i64(0);
2089         tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
2090         tcg_temp_free_i64(t);
2091         tcg_temp_free_i64(z);
2092     } else {
2093         gen_helper_ctz_i64(ret, arg1, arg2);
2094     }
2095 }
2096 
void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    /*
     * On a 32-bit host, chain two 32-bit ctz ops when the fallback
     * value arg2 (returned when arg1 == 0) fits in 32 bits.
     */
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        /*
         * t32 = ctz32(high) with fallback arg2 - 32; after adding 32
         * this is 32 + ctz32(high), or arg2 when the high word is also
         * zero (the subtraction wraps modulo 2**32, the addition wraps
         * back).
         */
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        /* Low word non-zero: ctz32(low); otherwise fall back to t32. */
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        /* ctz(x) == ctpop((x - 1) & ~x), which is 64 for x == 0. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2121 
2122 void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
2123 {
2124     if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
2125         TCGv_i64 t = tcg_temp_ebb_new_i64();
2126         tcg_gen_sari_i64(t, arg, 63);
2127         tcg_gen_xor_i64(t, t, arg);
2128         tcg_gen_clzi_i64(t, t, 64);
2129         tcg_gen_subi_i64(ret, t, 1);
2130         tcg_temp_free_i64(t);
2131     } else {
2132         gen_helper_clrsb_i64(ret, arg);
2133     }
2134 }
2135 
2136 void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
2137 {
2138     if (TCG_TARGET_HAS_ctpop_i64) {
2139         tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
2140     } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
2141         tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2142         tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2143         tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
2144         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2145     } else {
2146         gen_helper_ctpop_i64(ret, arg1);
2147     }
2148 }
2149 
2150 void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2151 {
2152     if (TCG_TARGET_HAS_rot_i64) {
2153         tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
2154     } else {
2155         TCGv_i64 t0, t1;
2156         t0 = tcg_temp_ebb_new_i64();
2157         t1 = tcg_temp_ebb_new_i64();
2158         tcg_gen_shl_i64(t0, arg1, arg2);
2159         tcg_gen_subfi_i64(t1, 64, arg2);
2160         tcg_gen_shr_i64(t1, arg1, t1);
2161         tcg_gen_or_i64(ret, t0, t1);
2162         tcg_temp_free_i64(t0);
2163         tcg_temp_free_i64(t1);
2164     }
2165 }
2166 
2167 void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2168 {
2169     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2170     /* some cases can be optimized here */
2171     if (arg2 == 0) {
2172         tcg_gen_mov_i64(ret, arg1);
2173     } else if (TCG_TARGET_HAS_rot_i64) {
2174         tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
2175     } else {
2176         TCGv_i64 t0, t1;
2177         t0 = tcg_temp_ebb_new_i64();
2178         t1 = tcg_temp_ebb_new_i64();
2179         tcg_gen_shli_i64(t0, arg1, arg2);
2180         tcg_gen_shri_i64(t1, arg1, 64 - arg2);
2181         tcg_gen_or_i64(ret, t0, t1);
2182         tcg_temp_free_i64(t0);
2183         tcg_temp_free_i64(t1);
2184     }
2185 }
2186 
2187 void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2188 {
2189     if (TCG_TARGET_HAS_rot_i64) {
2190         tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
2191     } else {
2192         TCGv_i64 t0, t1;
2193         t0 = tcg_temp_ebb_new_i64();
2194         t1 = tcg_temp_ebb_new_i64();
2195         tcg_gen_shr_i64(t0, arg1, arg2);
2196         tcg_gen_subfi_i64(t1, 64, arg2);
2197         tcg_gen_shl_i64(t1, arg1, t1);
2198         tcg_gen_or_i64(ret, t0, t1);
2199         tcg_temp_free_i64(t0);
2200         tcg_temp_free_i64(t1);
2201     }
2202 }
2203 
2204 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2205 {
2206     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2207     /* some cases can be optimized here */
2208     if (arg2 == 0) {
2209         tcg_gen_mov_i64(ret, arg1);
2210     } else {
2211         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2212     }
2213 }
2214 
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit position OFS;
 * all other bits of RET are taken from ARG1.
 */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* A full-width deposit replaces the value entirely. */
    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Field entirely within the high word? */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        /* Field entirely within the low word? */
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
        /* Otherwise fall through to the generic expansion below. */
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        /* Field reaches bit 63: concatenate shifted arg1 with arg2. */
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        /* Field starts at bit 0: extract2, then rotate into place. */
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    /* Generic expansion: mask and shift the field, clear, then merge. */
    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* The shift itself discards the unwanted high bits of arg2. */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
2277 
/*
 * Deposit ARG into zero: ret = (arg & ((1 << len) - 1)) << ofs.
 * Equivalent to tcg_gen_deposit_i64 with a zero first operand.
 */
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        /* Field reaches bit 63: the shift discards everything else. */
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at bit 0: a mask suffices. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        /* Use the host deposit op with a constant-zero background. */
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* Field entirely within the high word? */
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            /* Field entirely within the low word? */
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: explicit mask-and-shift. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
2361 
/* Unsigned field extract: ret = (arg >> ofs) & ((1 << len) - 1). */
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        /* Field reaches bit 63: a plain shift suffices. */
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field at bit 0: a mask suffices. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        /* Shift down, then mask the field. */
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        /* Otherwise: shift the field to the top, then back down. */
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
2443 
2444 void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
2445                           unsigned int ofs, unsigned int len)
2446 {
2447     tcg_debug_assert(ofs < 64);
2448     tcg_debug_assert(len > 0);
2449     tcg_debug_assert(len <= 64);
2450     tcg_debug_assert(ofs + len <= 64);
2451 
2452     /* Canonicalize certain special cases, even if sextract is supported.  */
2453     if (ofs + len == 64) {
2454         tcg_gen_sari_i64(ret, arg, 64 - len);
2455         return;
2456     }
2457     if (ofs == 0) {
2458         switch (len) {
2459         case 32:
2460             tcg_gen_ext32s_i64(ret, arg);
2461             return;
2462         case 16:
2463             tcg_gen_ext16s_i64(ret, arg);
2464             return;
2465         case 8:
2466             tcg_gen_ext8s_i64(ret, arg);
2467             return;
2468         }
2469     }
2470 
2471     if (TCG_TARGET_REG_BITS == 32) {
2472         /* Look for a 32-bit extract within one of the two words.  */
2473         if (ofs >= 32) {
2474             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2475         } else if (ofs + len <= 32) {
2476             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2477         } else if (ofs == 0) {
2478             tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2479             tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
2480             return;
2481         } else if (len > 32) {
2482             TCGv_i32 t = tcg_temp_ebb_new_i32();
2483             /* Extract the bits for the high word normally.  */
2484             tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
2485             /* Shift the field down for the low part.  */
2486             tcg_gen_shri_i64(ret, arg, ofs);
2487             /* Overwrite the shift into the high part.  */
2488             tcg_gen_mov_i32(TCGV_HIGH(ret), t);
2489             tcg_temp_free_i32(t);
2490             return;
2491         } else {
2492             /* Shift the field down for the low part, such that the
2493                field sits at the MSB.  */
2494             tcg_gen_shri_i64(ret, arg, ofs + len - 32);
2495             /* Shift the field down from the MSB, sign extending.  */
2496             tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
2497         }
2498         /* Sign-extend the field from 32 bits.  */
2499         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2500         return;
2501     }
2502 
2503     if (TCG_TARGET_HAS_sextract_i64
2504         && TCG_TARGET_extract_i64_valid(ofs, len)) {
2505         tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
2506         return;
2507     }
2508 
2509     /* Assume that sign-extension, if available, is cheaper than a shift.  */
2510     switch (ofs + len) {
2511     case 32:
2512         if (TCG_TARGET_HAS_ext32s_i64) {
2513             tcg_gen_ext32s_i64(ret, arg);
2514             tcg_gen_sari_i64(ret, ret, ofs);
2515             return;
2516         }
2517         break;
2518     case 16:
2519         if (TCG_TARGET_HAS_ext16s_i64) {
2520             tcg_gen_ext16s_i64(ret, arg);
2521             tcg_gen_sari_i64(ret, ret, ofs);
2522             return;
2523         }
2524         break;
2525     case 8:
2526         if (TCG_TARGET_HAS_ext8s_i64) {
2527             tcg_gen_ext8s_i64(ret, arg);
2528             tcg_gen_sari_i64(ret, ret, ofs);
2529             return;
2530         }
2531         break;
2532     }
2533     switch (len) {
2534     case 32:
2535         if (TCG_TARGET_HAS_ext32s_i64) {
2536             tcg_gen_shri_i64(ret, arg, ofs);
2537             tcg_gen_ext32s_i64(ret, ret);
2538             return;
2539         }
2540         break;
2541     case 16:
2542         if (TCG_TARGET_HAS_ext16s_i64) {
2543             tcg_gen_shri_i64(ret, arg, ofs);
2544             tcg_gen_ext16s_i64(ret, ret);
2545             return;
2546         }
2547         break;
2548     case 8:
2549         if (TCG_TARGET_HAS_ext8s_i64) {
2550             tcg_gen_shri_i64(ret, arg, ofs);
2551             tcg_gen_ext8s_i64(ret, ret);
2552             return;
2553         }
2554         break;
2555     }
2556     tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2557     tcg_gen_sari_i64(ret, ret, 64 - len);
2558 }
2559 
2560 /*
2561  * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2562  * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2563  */
2564 void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
2565                           unsigned int ofs)
2566 {
2567     tcg_debug_assert(ofs <= 64);
2568     if (ofs == 0) {
2569         tcg_gen_mov_i64(ret, al);
2570     } else if (ofs == 64) {
2571         tcg_gen_mov_i64(ret, ah);
2572     } else if (al == ah) {
2573         tcg_gen_rotri_i64(ret, al, ofs);
2574     } else if (TCG_TARGET_HAS_extract2_i64) {
2575         tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
2576     } else {
2577         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2578         tcg_gen_shri_i64(t0, al, ofs);
2579         tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
2580         tcg_temp_free_i64(t0);
2581     }
2582 }
2583 
/* Conditional move: ret = (c1 cond c2) ? v1 : v2. */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        /* Evaluate the 64-bit comparison into the 0/1 value t0. */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half with movcond on t0 != 0. */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Negate 0/1 into an all-zeros/all-ones mask, then blend
               each half as (v1 & mask) | (v2 & ~mask). */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Blend via an all-zeros/all-ones mask from negsetcond. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_negsetcond_i64(cond, t0, c1, c2);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2630 
2631 void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2632                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2633 {
2634     if (TCG_TARGET_HAS_add2_i64) {
2635         tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
2636     } else {
2637         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2638         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2639         tcg_gen_add_i64(t0, al, bl);
2640         tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
2641         tcg_gen_add_i64(rh, ah, bh);
2642         tcg_gen_add_i64(rh, rh, t1);
2643         tcg_gen_mov_i64(rl, t0);
2644         tcg_temp_free_i64(t0);
2645         tcg_temp_free_i64(t1);
2646     }
2647 }
2648 
/*
 * Emit a 128-bit subtraction: {rh,rl} = {ah,al} - {bh,bl}.
 * Uses the backend's sub2 opcode when available; otherwise expands to
 * a subtract plus an unsigned-compare to recover the borrow.
 */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        /* Borrow out of the low half iff al < bl (unsigned).  */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        /* Write rl last so that it may alias any of the inputs.  */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2666 
/*
 * Emit an unsigned widening multiply: {rh,rl} = arg1 * arg2 (64x64 -> 128).
 * Prefers the backend's mulu2 opcode, then a mul/muluh pair, and finally
 * falls back to a helper call for the high half.
 */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        /* Low half into a temp first, so rl may alias arg1/arg2.  */
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        /* No suitable opcode: compute the high half out of line.  */
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
2685 
/*
 * Emit a signed widening multiply: {rh,rl} = arg1 * arg2 (64x64 -> 128).
 * Prefers the backend's muls2 opcode, then mul/mulsh; otherwise derives
 * the signed result from an unsigned multiply, and as a last resort
 * calls a helper for the high half.
 */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        /* Low half into a temp first, so rl may alias arg1/arg2.  */
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        /* signed_hi = unsigned_hi - (arg1 < 0 ? arg2 : 0)
         *                         - (arg2 < 0 ? arg1 : 0).
         * The sari produces 0 or -1, making the following and a select. */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        /* Write rl last so that it may alias arg1/arg2.  */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        /* No suitable opcode: compute the high half out of line.  */
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
2722 
2723 void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2724 {
2725     TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2726     TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2727     TCGv_i64 t2 = tcg_temp_ebb_new_i64();
2728     tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2729     /* Adjust for negative input for the signed arg1.  */
2730     tcg_gen_sari_i64(t2, arg1, 63);
2731     tcg_gen_and_i64(t2, t2, arg2);
2732     tcg_gen_sub_i64(rh, t1, t2);
2733     tcg_gen_mov_i64(rl, t0);
2734     tcg_temp_free_i64(t0);
2735     tcg_temp_free_i64(t1);
2736     tcg_temp_free_i64(t2);
2737 }
2738 
/* Emit signed minimum: ret = (int64_t)a < (int64_t)b ? a : b.  */
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
2743 
/* Emit unsigned minimum: ret = (uint64_t)a < (uint64_t)b ? a : b.  */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
2748 
/* Emit signed maximum: ret = (int64_t)a < (int64_t)b ? b : a.  */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
2753 
/* Emit unsigned maximum: ret = (uint64_t)a < (uint64_t)b ? b : a.  */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
2758 
2759 void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
2760 {
2761     TCGv_i64 t = tcg_temp_ebb_new_i64();
2762 
2763     tcg_gen_sari_i64(t, a, 63);
2764     tcg_gen_xor_i64(ret, a, t);
2765     tcg_gen_sub_i64(ret, ret, t);
2766     tcg_temp_free_i64(t);
2767 }
2768 
2769 /* Size changing operations.  */
2770 
/* Extract the low 32 bits of a 64-bit value into an i32 temp.  */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* On 32-bit hosts an i64 is a register pair; copy the low half.  */
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* Reinterpret the i64 temp as i32 for the move.  */
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
2782 
2783 void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2784 {
2785     if (TCG_TARGET_REG_BITS == 32) {
2786         tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
2787     } else if (TCG_TARGET_HAS_extr_i64_i32) {
2788         tcg_gen_op2(INDEX_op_extrh_i64_i32,
2789                     tcgv_i32_arg(ret), tcgv_i64_arg(arg));
2790     } else {
2791         TCGv_i64 t = tcg_temp_ebb_new_i64();
2792         tcg_gen_shri_i64(t, arg, 32);
2793         tcg_gen_mov_i32(ret, (TCGv_i32)t);
2794         tcg_temp_free_i64(t);
2795     }
2796 }
2797 
/* Zero-extend a 32-bit value to 64 bits: ret = (uint64_t)(uint32_t)arg.  */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: copy into the low half, clear the high half.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
2808 
/* Sign-extend a 32-bit value to 64 bits: ret = (int64_t)(int32_t)arg.  */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: low half is the value, high half is its sign.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
2819 
/*
 * Build a 64-bit value from two 32-bit halves:
 * dest = (uint64_t)high << 32 | (uint32_t)low.
 */
void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
{
    TCGv_i64 tmp;

    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: the halves map directly onto the pair.  */
        tcg_gen_mov_i32(TCGV_LOW(dest), low);
        tcg_gen_mov_i32(TCGV_HIGH(dest), high);
        return;
    }

    tmp = tcg_temp_ebb_new_i64();
    /* These extensions are only needed for type correctness.
       We may be able to do better given target specific information.  */
    tcg_gen_extu_i32_i64(tmp, high);
    tcg_gen_extu_i32_i64(dest, low);
    /* If deposit is available, use it.  Otherwise use the extra
       knowledge that we have of the zero-extensions above.  */
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
        tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
    } else {
        /* The shift/or relies on both inputs being zero-extended.  */
        tcg_gen_shli_i64(tmp, tmp, 32);
        tcg_gen_or_i64(dest, dest, tmp);
    }
    tcg_temp_free_i64(tmp);
}
2845 
/* Split a 64-bit value into its 32-bit halves: lo/hi from arg.  */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Register pair: the halves map directly.  */
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
2856 
/*
 * Split a 64-bit value into two zero-extended 32-bit halves, each held
 * in an i64 temp: lo = arg & 0xffffffff, hi = arg >> 32.
 * NOTE(review): lo is written before hi is derived from arg — presumably
 * callers never pass lo aliasing arg; confirm against call sites.
 */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
2862 
/* Split a 128-bit value into its 64-bit halves.  */
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}
2868 
/* Build a 128-bit value from two 64-bit halves.  */
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}
2874 
2875 void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
2876 {
2877     if (dst != src) {
2878         tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
2879         tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
2880     }
2881 }
2882 
2883 /* QEMU specific operations.  */
2884 
/*
 * Emit an exit from the current TB, returning (tb-pointer | idx) to the
 * main loop.  tb == NULL (which requires idx == 0) means exit with no
 * chaining; idx <= TB_EXIT_IDXMAX pairs with a prior tcg_gen_goto_tb;
 * otherwise idx must be TB_EXIT_REQUESTED (the exitreq path).
 */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
2915 
/*
 * Emit a direct-chaining jump slot numbered idx for the current TB.
 * Must be followed by tcg_gen_exit_tb with the same idx; each idx may
 * be issued at most once per TB (checked under CONFIG_DEBUG_TCG).
 */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    /* Control leaves the TB here; plugin memory helpers must not be live.  */
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
2930 
/*
 * Emit an indirect jump to the next TB: look up the target TB by the
 * current CPU state via helper, then goto_ptr into it.  Degrades to a
 * plain exit_tb(NULL) when the TB was compiled with CF_NO_GOTO_PTR.
 */
void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    /* Control leaves the TB here; plugin memory helpers must not be live.  */
    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    gen_helper_lookup_tb_ptr(ptr, tcg_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}
2946