xref: /openbmc/qemu/tcg/tcg-op.c (revision 3635502d)
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24 
25 #include "qemu/osdep.h"
26 #include "tcg/tcg.h"
27 #include "tcg/tcg-temp-internal.h"
28 #include "tcg/tcg-op-common.h"
29 #include "exec/translation-block.h"
30 #include "exec/plugin-gen.h"
31 #include "tcg-internal.h"
32 
33 
34 void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
35 {
36     TCGOp *op = tcg_emit_op(opc, 1);
37     op->args[0] = a1;
38 }
39 
40 void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
41 {
42     TCGOp *op = tcg_emit_op(opc, 2);
43     op->args[0] = a1;
44     op->args[1] = a2;
45 }
46 
47 void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
48 {
49     TCGOp *op = tcg_emit_op(opc, 3);
50     op->args[0] = a1;
51     op->args[1] = a2;
52     op->args[2] = a3;
53 }
54 
55 void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
56 {
57     TCGOp *op = tcg_emit_op(opc, 4);
58     op->args[0] = a1;
59     op->args[1] = a2;
60     op->args[2] = a3;
61     op->args[3] = a4;
62 }
63 
64 void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
65                  TCGArg a4, TCGArg a5)
66 {
67     TCGOp *op = tcg_emit_op(opc, 5);
68     op->args[0] = a1;
69     op->args[1] = a2;
70     op->args[2] = a3;
71     op->args[3] = a4;
72     op->args[4] = a5;
73 }
74 
75 void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
76                  TCGArg a4, TCGArg a5, TCGArg a6)
77 {
78     TCGOp *op = tcg_emit_op(opc, 6);
79     op->args[0] = a1;
80     op->args[1] = a2;
81     op->args[2] = a3;
82     op->args[3] = a4;
83     op->args[4] = a5;
84     op->args[5] = a6;
85 }
86 
87 /* Generic ops.  */
88 
89 static void add_last_as_label_use(TCGLabel *l)
90 {
91     TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));
92 
93     u->op = tcg_last_op();
94     QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
95 }
96 
/* Emit an unconditional branch to L and record the op as a user of L. */
void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}
102 
/*
 * Emit a memory barrier of type MB_TYPE, unless the translation is
 * known not to run in parallel with other threads.
 */
void tcg_gen_mb(TCGBar mb_type)
{
#ifdef CONFIG_USER_ONLY
    /* User-mode: the barrier is only needed for multi-threaded guests.  */
    bool parallel = tcg_ctx->gen_tb->cflags & CF_PARALLEL;
#else
    /*
     * It is tempting to elide the barrier in a uniprocessor context.
     * However, even with a single cpu we have i/o threads running in
     * parallel, and lack of memory order can result in e.g. virtio
     * queue entries being read incorrectly.
     */
    bool parallel = true;
#endif

    if (parallel) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
121 
122 /* 32 bit ops */
123 
/* Load the 32-bit constant ARG into RET. */
void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}
128 
129 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
130 {
131     /* some cases can be optimized here */
132     if (arg2 == 0) {
133         tcg_gen_mov_i32(ret, arg1);
134     } else {
135         tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
136     }
137 }
138 
139 void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
140 {
141     if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
142         /* Don't recurse with tcg_gen_neg_i32.  */
143         tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
144     } else {
145         tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
146     }
147 }
148 
149 void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
150 {
151     /* some cases can be optimized here */
152     if (arg2 == 0) {
153         tcg_gen_mov_i32(ret, arg1);
154     } else {
155         tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
156     }
157 }
158 
159 void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
160 {
161     /* Some cases can be optimized here.  */
162     switch (arg2) {
163     case 0:
164         tcg_gen_movi_i32(ret, 0);
165         return;
166     case -1:
167         tcg_gen_mov_i32(ret, arg1);
168         return;
169     case 0xff:
170         /* Don't recurse with tcg_gen_ext8u_i32.  */
171         if (TCG_TARGET_HAS_ext8u_i32) {
172             tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
173             return;
174         }
175         break;
176     case 0xffff:
177         if (TCG_TARGET_HAS_ext16u_i32) {
178             tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
179             return;
180         }
181         break;
182     }
183 
184     tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
185 }
186 
187 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
188 {
189     /* Some cases can be optimized here.  */
190     if (arg2 == -1) {
191         tcg_gen_movi_i32(ret, -1);
192     } else if (arg2 == 0) {
193         tcg_gen_mov_i32(ret, arg1);
194     } else {
195         tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
196     }
197 }
198 
199 void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
200 {
201     /* Some cases can be optimized here.  */
202     if (arg2 == 0) {
203         tcg_gen_mov_i32(ret, arg1);
204     } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
205         /* Don't recurse with tcg_gen_not_i32.  */
206         tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
207     } else {
208         tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
209     }
210 }
211 
212 void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
213 {
214     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
215     if (arg2 == 0) {
216         tcg_gen_mov_i32(ret, arg1);
217     } else {
218         tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
219     }
220 }
221 
222 void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
223 {
224     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
225     if (arg2 == 0) {
226         tcg_gen_mov_i32(ret, arg1);
227     } else {
228         tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
229     }
230 }
231 
232 void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
233 {
234     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
235     if (arg2 == 0) {
236         tcg_gen_mov_i32(ret, arg1);
237     } else {
238         tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
239     }
240 }
241 
/*
 * Emit a conditional branch to L, taken when ARG1 <cond> ARG2.
 * ALWAYS becomes an unconditional branch; NEVER emits nothing.
 */
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
        /* Record the branch op in the label's list of users.  */
        add_last_as_label_use(l);
    }
}
251 
252 void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
253 {
254     if (cond == TCG_COND_ALWAYS) {
255         tcg_gen_br(l);
256     } else if (cond != TCG_COND_NEVER) {
257         tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
258     }
259 }
260 
261 void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
262                          TCGv_i32 arg1, TCGv_i32 arg2)
263 {
264     if (cond == TCG_COND_ALWAYS) {
265         tcg_gen_movi_i32(ret, 1);
266     } else if (cond == TCG_COND_NEVER) {
267         tcg_gen_movi_i32(ret, 0);
268     } else {
269         tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
270     }
271 }
272 
/* Set RET to 1 if ARG1 <cond> ARG2 (constant comparand), else 0. */
void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
278 
/* Set RET to -1 if ARG1 <cond> ARG2, else 0. */
void tcg_gen_negsetcond_i32(TCGCond cond, TCGv_i32 ret,
                            TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i32) {
        tcg_gen_op4i_i32(INDEX_op_negsetcond_i32, ret, arg1, arg2, cond);
    } else {
        /* Fall back to a 0/1 setcond followed by negation.  */
        tcg_gen_setcond_i32(cond, ret, arg1, arg2);
        tcg_gen_neg_i32(ret, ret);
    }
}
293 
294 void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
295 {
296     if (arg2 == 0) {
297         tcg_gen_movi_i32(ret, 0);
298     } else if (is_power_of_2(arg2)) {
299         tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
300     } else {
301         tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
302     }
303 }
304 
/* Signed 32-bit division: RET = ARG1 / ARG2. */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* Sign-extend arg1 into t0 to form the high half for div2.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        /* No host support: fall back to the out-of-line helper.  */
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
318 
/* Signed 32-bit remainder: RET = ARG1 % ARG2. */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* Compute arg1 - (arg1 / arg2) * arg2.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* Sign-extend arg1 into t0 for the high half; note the output
           order places the remainder (second output) into ret.  */
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        /* No host support: fall back to the out-of-line helper.  */
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
338 
/* Unsigned 32-bit division: RET = ARG1 / ARG2. */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* Zero the high half for the unsigned double-width divide.  */
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        /* No host support: fall back to the out-of-line helper.  */
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
352 
/* Unsigned 32-bit remainder: RET = ARG1 % ARG2. */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* Compute arg1 - (arg1 / arg2) * arg2.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        /* Zero the high half; the second output of divu2 (the
           remainder) goes into ret.  */
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        /* No host support: fall back to the out-of-line helper.  */
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
372 
373 void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
374 {
375     if (TCG_TARGET_HAS_andc_i32) {
376         tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
377     } else {
378         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
379         tcg_gen_not_i32(t0, arg2);
380         tcg_gen_and_i32(ret, arg1, t0);
381         tcg_temp_free_i32(t0);
382     }
383 }
384 
385 void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
386 {
387     if (TCG_TARGET_HAS_eqv_i32) {
388         tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
389     } else {
390         tcg_gen_xor_i32(ret, arg1, arg2);
391         tcg_gen_not_i32(ret, ret);
392     }
393 }
394 
395 void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
396 {
397     if (TCG_TARGET_HAS_nand_i32) {
398         tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
399     } else {
400         tcg_gen_and_i32(ret, arg1, arg2);
401         tcg_gen_not_i32(ret, ret);
402     }
403 }
404 
405 void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
406 {
407     if (TCG_TARGET_HAS_nor_i32) {
408         tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
409     } else {
410         tcg_gen_or_i32(ret, arg1, arg2);
411         tcg_gen_not_i32(ret, ret);
412     }
413 }
414 
415 void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
416 {
417     if (TCG_TARGET_HAS_orc_i32) {
418         tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
419     } else {
420         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
421         tcg_gen_not_i32(t0, arg2);
422         tcg_gen_or_i32(ret, arg1, t0);
423         tcg_temp_free_i32(t0);
424     }
425 }
426 
/* Count leading zeros of ARG1; when ARG1 == 0, RET = ARG2. */
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        /* Widen to 64 bits: the zero-extended value has 32 extra
           leading zeros, so bias the default by 32 and subtract
           32 from the 64-bit result afterwards.  */
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        /* No host support: fall back to the out-of-line helper.  */
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}
446 
/* Count leading zeros of ARG1 with a constant default for ARG1 == 0. */
void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}
451 
/* Count trailing zeros of ARG1; when ARG1 == 0, RET = ARG2. */
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        /* Widen to 64 bits; zero-extension leaves the low 32 bits,
           and hence the trailing-zero count, unchanged.  */
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            /* (arg1 - 1) & ~arg1 sets exactly the trailing zero bits,
               so their population count equals ctz(arg1).  */
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            /* -arg1 & arg1 isolates the lowest set bit, whose position
               is 31 - clz; the xor with 31 performs that subtraction.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        /* Select the caller's default when arg1 == 0.  */
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        /* No host support: fall back to the out-of-line helper.  */
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
489 
/* Count trailing zeros of ARG1 with a constant default for ARG1 == 0. */
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        /* popcount((arg1 - 1) & ~arg1) == ctz(arg1), yielding 32 when
           arg1 == 0 — exactly the requested default.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
503 
/* Count leading redundant sign bits of ARG (sign bit not counted). */
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        /* XOR with the broadcast sign clears the redundant sign bits,
           so clz of the result counts them plus the sign bit itself;
           subtract one to exclude the sign bit.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        /* No host support: fall back to the out-of-line helper.  */
        gen_helper_clrsb_i32(ret, arg);
    }
}
517 
/* Population count: RET = number of set bits in ARG1. */
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        /* Zero-extend and use the 64-bit popcount; the bit count
           is unaffected by the widening.  */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        /* No host support: fall back to the out-of-line helper.  */
        gen_helper_ctpop_i32(ret, arg1);
    }
}
532 
533 void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
534 {
535     if (TCG_TARGET_HAS_rot_i32) {
536         tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
537     } else {
538         TCGv_i32 t0, t1;
539 
540         t0 = tcg_temp_ebb_new_i32();
541         t1 = tcg_temp_ebb_new_i32();
542         tcg_gen_shl_i32(t0, arg1, arg2);
543         tcg_gen_subfi_i32(t1, 32, arg2);
544         tcg_gen_shr_i32(t1, arg1, t1);
545         tcg_gen_or_i32(ret, t0, t1);
546         tcg_temp_free_i32(t0);
547         tcg_temp_free_i32(t1);
548     }
549 }
550 
551 void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
552 {
553     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
554     /* some cases can be optimized here */
555     if (arg2 == 0) {
556         tcg_gen_mov_i32(ret, arg1);
557     } else if (TCG_TARGET_HAS_rot_i32) {
558         tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
559     } else {
560         TCGv_i32 t0, t1;
561         t0 = tcg_temp_ebb_new_i32();
562         t1 = tcg_temp_ebb_new_i32();
563         tcg_gen_shli_i32(t0, arg1, arg2);
564         tcg_gen_shri_i32(t1, arg1, 32 - arg2);
565         tcg_gen_or_i32(ret, t0, t1);
566         tcg_temp_free_i32(t0);
567         tcg_temp_free_i32(t1);
568     }
569 }
570 
571 void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
572 {
573     if (TCG_TARGET_HAS_rot_i32) {
574         tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
575     } else {
576         TCGv_i32 t0, t1;
577 
578         t0 = tcg_temp_ebb_new_i32();
579         t1 = tcg_temp_ebb_new_i32();
580         tcg_gen_shr_i32(t0, arg1, arg2);
581         tcg_gen_subfi_i32(t1, 32, arg2);
582         tcg_gen_shl_i32(t1, arg1, t1);
583         tcg_gen_or_i32(ret, t0, t1);
584         tcg_temp_free_i32(t0);
585         tcg_temp_free_i32(t1);
586     }
587 }
588 
589 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
590 {
591     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
592     /* some cases can be optimized here */
593     if (arg2 == 0) {
594         tcg_gen_mov_i32(ret, arg1);
595     } else {
596         tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
597     }
598 }
599 
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at bit offset OFS,
 * leaving the other bits of ARG1 unchanged in RET.
 */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* A full-width deposit replaces arg1 entirely.  */
    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        /* Deposits at either end of the word can be built from
           extract2, which concatenates and shifts two inputs.  */
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    /* Generic fallback: mask the field, shift into place, and merge.  */
    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* Field reaches the top bit: the shift discards the excess.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
647 
/*
 * Deposit the low LEN bits of ARG at bit offset OFS into a zero
 * background: RET = (ARG & ((1 << LEN) - 1)) << OFS.
 */
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        /* Field reaches the top: a single shift discards the excess.  */
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at the bottom: a single mask suffices.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        /* Deposit into a zero constant background.  */
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: explicit mask then shift.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
704 
/*
 * Extract an unsigned LEN-bit field from ARG starting at bit OFS:
 * RET = (ARG >> OFS) & ((1 << LEN) - 1).
 */
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field at the top: a single logical shift suffices.  */
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Field at the bottom: a single mask suffices.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        /* Otherwise shift the field to the top and back down.  */
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
761 
/*
 * Extract a signed LEN-bit field from ARG starting at bit OFS,
 * sign-extending the result to 32 bits.
 */
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field at the top: a single arithmetic shift suffices.  */
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Field at the bottom: byte/halfword sign-extension ops.  */
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    /* Fallback: shift the field to the top and arithmetic-shift down.  */
    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
829 
830 /*
831  * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
832  * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
833  */
/* Extract 32 bits from the 64-bit pair ah:al starting at bit OFS. */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        /* Entirely the low word.  */
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        /* Entirely the high word.  */
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        /* Identical halves make this a rotation of al.  */
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        /* Fallback: take the top of al, deposit the bottom of ah.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
853 
/* Set RET = (C1 <cond> C2) ? V1 : V2. */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        /* Expand via a 0/1 setcond widened to a 0/-1 mask, then
           ret = (v1 & mask) | (v2 & ~mask).  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
875 
876 void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
877                       TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
878 {
879     if (TCG_TARGET_HAS_add2_i32) {
880         tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
881     } else {
882         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
883         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
884         tcg_gen_concat_i32_i64(t0, al, ah);
885         tcg_gen_concat_i32_i64(t1, bl, bh);
886         tcg_gen_add_i64(t0, t0, t1);
887         tcg_gen_extr_i64_i32(rl, rh, t0);
888         tcg_temp_free_i64(t0);
889         tcg_temp_free_i64(t1);
890     }
891 }
892 
893 void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
894                       TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
895 {
896     if (TCG_TARGET_HAS_sub2_i32) {
897         tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
898     } else {
899         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
900         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
901         tcg_gen_concat_i32_i64(t0, al, ah);
902         tcg_gen_concat_i32_i64(t1, bl, bh);
903         tcg_gen_sub_i64(t0, t0, t1);
904         tcg_gen_extr_i64_i32(rl, rh, t0);
905         tcg_temp_free_i64(t0);
906         tcg_temp_free_i64(t1);
907     }
908 }
909 
/* Unsigned widening multiply: RH:RL = ARG1 * ARG2. */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        /* Separate low/high multiplies; compute the low part into a
           temp so that rl may alias an input.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        /* Widen to 64 bits and split the product.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        /* 32-bit hosts are expected to provide one of the above.  */
        qemu_build_not_reached();
    }
}
933 
/* Signed widening multiply: RH:RL = ARG1 * ARG2. */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        /* Separate low/high multiplies; compute the low part into a
           temp so that rl may alias an input.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Start from the unsigned product and correct the high part
           for each negative input.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: widen with sign extension and split.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
973 
/*
 * Mixed widening multiply: rl:rh = (signed)arg1 * (unsigned)arg2.
 * No native op exists for this; build it from mulu2 or 64-bit arithmetic.
 */
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        /* Low part of the product is the same regardless of signedness.  */
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        /* 64-bit host: sign-extend arg1, zero-extend arg2, multiply, split. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1000 
/* Sign-extend the low 8 bits of arg into ret.  */
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        /* Shift the byte to the top, then arithmetic-shift back down.  */
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}
1010 
/* Sign-extend the low 16 bits of arg into ret.  */
void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        /* Shift the halfword to the top, then arithmetic-shift back down.  */
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}
1020 
/* Zero-extend the low 8 bits of arg into ret.  */
void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}
1029 
/* Zero-extend the low 16 bits of arg into ret.  */
void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
1038 
/*
 * Byte-swap the low 16 bits of arg into ret.
 * flags selects how the result's upper bits are produced:
 *   TCG_BSWAP_IZ - caller guarantees the input's high 16 bits are zero;
 *   TCG_BSWAP_OS - sign-extend the swapped halfword to 32 bits;
 *   TCG_BSWAP_OZ - zero-extend the swapped halfword to 32 bits;
 *   neither OS/OZ - high bits of the result are unspecified.
 */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

        /* t0 = high byte moved down; mask unless input high bits known zero. */
        tcg_gen_shri_i32(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);
        }

        /* t1 = low byte moved up, with the requested output extension.  */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);
            tcg_gen_sari_i32(t1, t1, 16);
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);
            tcg_gen_shli_i32(t1, t1, 8);
        } else {
            tcg_gen_shli_i32(t1, arg, 8);
        }

        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1070 
/*
 * Byte-swap all 32 bits of arg into ret.  The fallback swaps adjacent
 * bytes with a mask, then swaps the two halfwords.
 */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1095 
/* Swap the two 16-bit halves of arg into ret.  */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}
1101 
/* ret = signed minimum of a and b.  */
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}
1106 
/* ret = unsigned minimum of a and b.  */
void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}
1111 
/* ret = signed maximum of a and b.  */
void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}
1116 
/* ret = unsigned maximum of a and b.  */
void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}
1121 
1122 void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
1123 {
1124     TCGv_i32 t = tcg_temp_ebb_new_i32();
1125 
1126     tcg_gen_sari_i32(t, a, 31);
1127     tcg_gen_xor_i32(ret, a, t);
1128     tcg_gen_sub_i32(ret, ret, t);
1129     tcg_temp_free_i32(t);
1130 }
1131 
1132 /* 64-bit ops */
1133 
1134 #if TCG_TARGET_REG_BITS == 32
1135 /* These are all inline for TCG_TARGET_REG_BITS == 64.  */
1136 
/* Mark both 32-bit halves of a 64-bit temp as dead.  */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}
1142 
/* Copy a 64-bit value, one 32-bit half at a time on a 32-bit host.  */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}
1155 
/* Load a 64-bit immediate as two 32-bit immediates.  */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
1161 
/* Load a byte, zero-extended to 64 bits.  */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
1167 
/* Load a byte, sign-extended to 64 bits.  */
void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    /* High half replicates the sign bit of the loaded byte.  */
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
1173 
/* Load a halfword, zero-extended to 64 bits.  */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
1179 
/* Load a halfword, sign-extended to 64 bits.  */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
1185 
/* Load a word, zero-extended to 64 bits.  */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}
1191 
/* Load a word, sign-extended to 64 bits.  */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}
1197 
/* Load a full 64-bit value as two 32-bit loads, in host byte order.  */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#if HOST_BIG_ENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
1210 
/* Store the low byte of a 64-bit value.  */
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}
1215 
/* Store the low halfword of a 64-bit value.  */
void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}
1220 
/* Store the low word of a 64-bit value.  */
void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}
1225 
/* Store a full 64-bit value as two 32-bit stores, in host byte order.  */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#if HOST_BIG_ENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
1236 
/* 64-bit add via a 32-bit add-with-carry pair.  */
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}
1242 
/* 64-bit subtract via a 32-bit subtract-with-borrow pair.  */
void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}
1248 
/* Bitwise AND, applied independently to each 32-bit half.  */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
1254 
/* Bitwise OR, applied independently to each 32-bit half.  */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
1260 
/* Bitwise XOR, applied independently to each 32-bit half.  */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}
1266 
/* Variable 64-bit left shift: delegate to a runtime helper on 32-bit hosts. */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}
1271 
/* Variable 64-bit logical right shift via runtime helper.  */
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}
1276 
/* Variable 64-bit arithmetic right shift via runtime helper.  */
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
1281 
/*
 * 64-bit multiply on a 32-bit host, schoolbook style:
 * low 64 bits of (aH:aL) * (bH:bL) = mulu2(aL,bL) + ((aL*bH + aH*bL) << 32).
 */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    /* Accumulate into a temp so that ret may alias arg1/arg2.  */
    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    /* Cross products only affect the high half.  */
    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
1302 
1303 #else
1304 
/* 64-bit host: load an immediate via the constant-temp pool.  */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}
1309 
#endif /* TCG_TARGET_REG_BITS == 32 */
1311 
/* ret = arg1 + constant arg2.  */
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* 32-bit host: add the constant halves with carry.  */
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1325 
/* ret = constant arg1 - arg2 (reverse subtract).  */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        /* 32-bit host: subtract halves with borrow.  */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}
1339 
/* ret = arg1 - constant arg2.  */
void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /* 32-bit host: subtract halves with borrow.  */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}
1353 
/*
 * ret = arg1 & constant arg2.  Recognizes masks that are really
 * zero-extensions and uses the dedicated ext*u ops where available.
 */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* AND each half with the corresponding half of the constant.  */
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}
1393 
/* ret = arg1 | constant arg2.  */
void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        /* OR with all-ones yields all-ones.  */
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1410 
/* ret = arg1 ^ constant arg2.  */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1428 
/*
 * 64-bit shift by constant c on a 32-bit host, built from 32-bit ops.
 * right selects right vs left shift; arith selects arithmetic vs logical
 * for right shifts.  Handles three regimes: c == 0 (plain move),
 * c >= 32 (one half shifts entirely into the other), and 0 < c < 32
 * (bits cross the half boundary; use extract2/deposit to merge them).
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        /* The whole result comes from the other half.  */
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                /* High half fills with the sign bit.  */
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        /* Low result = low bits shifted down, topped up from the high half. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        /* High result = high bits shifted up, topped up from the low half. */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
1478 
/* ret = arg1 << arg2, with 0 <= arg2 < 64.  */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1490 
/* ret = arg1 >> arg2 (logical), with 0 <= arg2 < 64.  */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1502 
/* ret = arg1 >> arg2 (arithmetic), with 0 <= arg2 < 64.  */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1514 
/*
 * Branch to l if (arg1 cond arg2).  COND_ALWAYS becomes an unconditional
 * branch; COND_NEVER emits nothing.  On 32-bit hosts a brcond2 compares
 * both halves in one op.
 */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
        /* Record that the op just emitted targets this label.  */
        add_last_as_label_use(l);
    }
}
1531 
/* Branch to l if (arg1 cond constant arg2).  */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* 32-bit host: compare against the constant's two halves.  */
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
        add_last_as_label_use(l);
    }
}
1547 
/* ret = (arg1 cond arg2) ? 1 : 0.  */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            /* setcond2 produces a 32-bit 0/1; clear the high half.  */
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1566 
/* ret = (arg1 cond constant arg2) ? 1 : 0.  */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        /* 32-bit host: compare against the constant's two halves.  */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1584 
/* ret = (arg1 cond arg2) ? -1 : 0 (all-ones mask form of setcond).  */
void tcg_gen_negsetcond_i64(TCGCond cond, TCGv_i64 ret,
                            TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, -1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else if (TCG_TARGET_HAS_negsetcond_i64) {
        tcg_gen_op4i_i64(INDEX_op_negsetcond_i64, ret, arg1, arg2, cond);
    } else if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
        /* Turn the 0/1 result into 0/-1 and replicate into the high half.  */
        tcg_gen_neg_i32(TCGV_LOW(ret), TCGV_LOW(ret));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_LOW(ret));
    } else {
        tcg_gen_setcond_i64(cond, ret, arg1, arg2);
        tcg_gen_neg_i64(ret, ret);
    }
}
1605 
/* ret = arg1 * constant arg2; powers of two become shifts.  */
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1616 
/* Signed 64-bit division: native div, div2 (128/64), or helper call.  */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        /* div2 wants the sign-extended high half of the dividend.  */
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1630 
/* Signed 64-bit remainder: native rem, div + mul-sub, div2, or helper.  */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* r = a - (a / b) * b  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        /* div2 delivers the remainder in its second output.  */
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1650 
/* Unsigned 64-bit division: native divu, divu2 (128/64), or helper.  */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        /* divu2 wants a zero high half of the dividend.  */
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1664 
/* Unsigned 64-bit remainder: native remu, divu + mul-sub, divu2, or helper. */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* r = a - (a / b) * b  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_movi_i64(t0, 0);
        /* divu2 delivers the remainder in its second output.  */
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1684 
/* Sign-extend the low 8 bits of arg to 64 bits.  */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}
1697 
/* Sign-extend the low 16 bits of arg to 64 bits.  */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}
1710 
/* Sign-extend the low 32 bits of arg to 64 bits.  */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1723 
/* Zero-extend the low 8 bits of arg to 64 bits.  */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}
1735 
/* Zero-extend the low 16 bits of arg to 64 bits.  */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}
1747 
/* Zero-extend the low 32 bits of arg to 64 bits.  */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1759 
/*
 * Byte-swap the low 16 bits of arg into ret (64-bit variant).
 * flags as for tcg_gen_bswap16_i32: TCG_BSWAP_IZ asserts the input's
 * high bits are zero; TCG_BSWAP_OS/OZ sign-/zero-extend the result.
 */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap in the low half, then extend into the high half.  */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

        /* t0 = high byte moved down; mask unless input high bits known zero. */
        tcg_gen_shri_i64(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i64(t0, t0);
        }

        /* t1 = low byte moved up, with the requested output extension.  */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i64(t1, arg, 56);
            tcg_gen_sari_i64(t1, t1, 48);
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);
            tcg_gen_shli_i64(t1, t1, 8);
        } else {
            tcg_gen_shli_i64(t1, arg, 8);
        }

        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1798 
/*
 * Byte-swap the low 32 bits of arg into ret (64-bit variant).
 * flags as for bswap16: TCG_BSWAP_OS/OZ sign-/zero-extend the result.
 */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap in the low half, then extend into the high half.  */
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        /* Swap the halfwords; the left shift also discards the x bytes.  */
        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1838 
/* Byte-swap a full 64-bit value. */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap each 32-bit half, then exchange the halves. */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        /* Host supports the operation directly. */
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        /*
         * Generic expansion: three swap stages, doubling the element
         * size each time (bytes, then 16-bit, then 32-bit units).
         */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}
1883 
/*
 * Reverse the order of the four 16-bit halfwords of a 64-bit value:
 * abcd -> dcba (each letter a halfword).
 */
void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    uint64_t m = 0x0000ffff0000ffffull;
    TCGv_i64 t0 = tcg_temp_ebb_new_i64();
    TCGv_i64 t1 = tcg_temp_ebb_new_i64();

    /* See include/qemu/bitops.h, hswap64. */
    tcg_gen_rotli_i64(t1, arg, 32);     /* t1 = cdab */
    tcg_gen_andi_i64(t0, t1, m);        /* t0 = .d.b */
    tcg_gen_shli_i64(t0, t0, 16);       /* t0 = d.b. */
    tcg_gen_shri_i64(t1, t1, 16);       /* t1 = .cda */
    tcg_gen_andi_i64(t1, t1, m);        /* t1 = .c.a */
    tcg_gen_or_i64(ret, t0, t1);        /* ret = dcba */

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
1901 
/* Exchange the two 32-bit words of a 64-bit value: ab -> ba. */
void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Swapping 2 32-bit elements is a rotate. */
    tcg_gen_rotli_i64(ret, arg, 32);
}
1907 
/* Bitwise NOT: ret = ~arg. */
void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Invert each half independently. */
        tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_not_i64) {
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
    } else {
        /* ~x == x ^ -1 */
        tcg_gen_xori_i64(ret, arg, -1);
    }
}
1919 
/* AND with complement: ret = arg1 & ~arg2. */
void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_andc_i64) {
        tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
    } else {
        /* Expand as not + and. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_and_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1934 
/* Equivalence (XNOR): ret = ~(arg1 ^ arg2). */
void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_eqv_i64) {
        tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
    } else {
        /* Expand as xor + not. */
        tcg_gen_xor_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1947 
/* NAND: ret = ~(arg1 & arg2). */
void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nand_i64) {
        tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
    } else {
        /* Expand as and + not. */
        tcg_gen_and_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1960 
/* NOR: ret = ~(arg1 | arg2). */
void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_nor_i64) {
        tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
    } else {
        /* Expand as or + not. */
        tcg_gen_or_i64(ret, arg1, arg2);
        tcg_gen_not_i64(ret, ret);
    }
}
1973 
/* OR with complement: ret = arg1 | ~arg2. */
void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
        tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
    } else if (TCG_TARGET_HAS_orc_i64) {
        tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
    } else {
        /* Expand as not + or. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_not_i64(t0, arg2);
        tcg_gen_or_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}
1988 
/*
 * Count leading zeros: ret = clz(arg1), or arg2 if arg1 == 0.
 * Falls back to an out-of-line helper when the host lacks the opcode.
 */
void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_clz_i64) {
        tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
    } else {
        gen_helper_clz_i64(ret, arg1, arg2);
    }
}
1997 
/*
 * Count leading zeros with an immediate zero-case value:
 * ret = clz(arg1), or arg2 if arg1 == 0.
 */
void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        /*
         * Compose from two 32-bit clz ops: first compute the answer
         * assuming the high word is zero (clz of low word, biased by 32;
         * the subtract/add pair cancels modulo 2^32, yielding arg2 when
         * the low word is also zero), then let a clz of the high word
         * override it when the high word is non-zero.
         */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2013 
2014 void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2015 {
2016     if (TCG_TARGET_HAS_ctz_i64) {
2017         tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
2018     } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
2019         TCGv_i64 z, t = tcg_temp_ebb_new_i64();
2020 
2021         if (TCG_TARGET_HAS_ctpop_i64) {
2022             tcg_gen_subi_i64(t, arg1, 1);
2023             tcg_gen_andc_i64(t, t, arg1);
2024             tcg_gen_ctpop_i64(t, t);
2025         } else {
2026             /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
2027             tcg_gen_neg_i64(t, arg1);
2028             tcg_gen_and_i64(t, t, arg1);
2029             tcg_gen_clzi_i64(t, t, 64);
2030             tcg_gen_xori_i64(t, t, 63);
2031         }
2032         z = tcg_constant_i64(0);
2033         tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
2034         tcg_temp_free_i64(t);
2035         tcg_temp_free_i64(z);
2036     } else {
2037         gen_helper_ctz_i64(ret, arg1, arg2);
2038     }
2039 }
2040 
/*
 * Count trailing zeros with an immediate zero-case value:
 * ret = ctz(arg1), or arg2 if arg1 == 0.
 */
void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        /*
         * Mirror of the clzi expansion: compute the answer assuming the
         * low word is zero (ctz of high word, biased by 32; the bias
         * cancels modulo 2^32, yielding arg2 when both words are zero),
         * then let a ctz of the low word override it when non-zero.
         */
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        /* ctz(0) == ctpop(0xff...ff) == 64, exactly the requested arg2. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2065 
/*
 * Count leading redundant sign bits: the number of bits following the
 * sign bit that equal it.
 */
void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
        /*
         * XOR with the replicated sign bit turns copies of the sign into
         * zeros, so clz counts sign bit + copies; subtract 1 to exclude
         * the sign bit itself.  clz input here is never all-zeros-free:
         * an input of 0 or -1 gives t == 0, clz == 64, result 63.
         */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t, arg, 63);
        tcg_gen_xor_i64(t, t, arg);
        tcg_gen_clzi_i64(t, t, 64);
        tcg_gen_subi_i64(ret, t, 1);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_clrsb_i64(ret, arg);
    }
}
2079 
/* Population count: ret = number of set bits in arg1. */
void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i64) {
        tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
        /*
         * Sum the popcounts of the two halves.  The high half of arg1 is
         * consumed before the low half of ret is written, so ret may
         * alias arg1.
         */
        tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
        tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        gen_helper_ctpop_i64(ret, arg1);
    }
}
2093 
/* Rotate left by a variable amount: ret = arg1 rol arg2. */
void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
    } else {
        /* Expand as (arg1 << arg2) | (arg1 >> (64 - arg2)). */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shl_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shr_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2110 
/* Rotate left by a constant amount, 0 <= arg2 < 64. */
void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        /* Rotation by zero is a plain move. */
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        /*
         * Expand with immediate shifts; arg2 != 0 here, so neither
         * shift count can reach the undefined value 64.
         */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shli_i64(t0, arg1, arg2);
        tcg_gen_shri_i64(t1, arg1, 64 - arg2);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2130 
/* Rotate right by a variable amount: ret = arg1 ror arg2. */
void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rot_i64) {
        tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
    } else {
        /* Expand as (arg1 >> arg2) | (arg1 << (64 - arg2)). */
        TCGv_i64 t0, t1;
        t0 = tcg_temp_ebb_new_i64();
        t1 = tcg_temp_ebb_new_i64();
        tcg_gen_shr_i64(t0, arg1, arg2);
        tcg_gen_subfi_i64(t1, 64, arg2);
        tcg_gen_shl_i64(t1, arg1, t1);
        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2147 
2148 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2149 {
2150     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2151     /* some cases can be optimized here */
2152     if (arg2 == 0) {
2153         tcg_gen_mov_i64(ret, arg1);
2154     } else {
2155         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2156     }
2157 }
2158 
/*
 * Deposit: insert the low LEN bits of ARG2 into ARG1 at bit offset OFS;
 * all other bits of RET come from ARG1.
 */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* A full-width deposit replaces the whole value. */
    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* If the field lies entirely in one half, deposit into that half. */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        /* Fields touching either end can be built with one extract2. */
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    /* Generic mask-and-merge expansion. */
    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* The field reaches the top: the shift discards the excess bits. */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
2221 
/*
 * Deposit into zero: RET = (ARG's low LEN bits) << OFS, with all other
 * bits of RET cleared.  Equivalent to deposit with a zero first operand.
 */
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        /* Field reaches the top: a single shift clears the low bits. */
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at bit 0: a single mask suffices. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* If the field lies entirely in one half, use a 32-bit op. */
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        /* Here the extension runs after the shift, trimming OFS+LEN bits. */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: mask then shift. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
2305 
/*
 * Unsigned field extract: RET = (ARG >> OFS) & ((1 << LEN) - 1),
 * zero-extended to 64 bits.
 */
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        /* Field reaches the top: a single shift extracts it. */
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field at bit 0: a single mask extracts it. */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        /* Shift the field down, then mask it off. */
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        /* Shift the field up to the MSB, then down with zero fill. */
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
2387 
/*
 * Signed field extract: RET = (ARG >> OFS) & ((1 << LEN) - 1),
 * sign-extended from bit LEN-1 to 64 bits.
 */
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        /* Field reaches the top: a single arithmetic shift extracts it. */
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field at bit 0 with a standard width: use a sign-extension op. */
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            /* ofs == 0 and len > 32: low word passes through unchanged. */
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    /* Or shift the field down first, then sign-extend its width. */
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    /* Last resort: shift the field up to the MSB, then down with sign fill. */
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
2503 
/*
 * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
 */
void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 64);
    if (ofs == 0) {
        /* The low half verbatim. */
        tcg_gen_mov_i64(ret, al);
    } else if (ofs == 64) {
        /* The high half verbatim. */
        tcg_gen_mov_i64(ret, ah);
    } else if (al == ah) {
        /* Identical halves: the extract degenerates to a rotate. */
        tcg_gen_rotri_i64(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i64) {
        tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
    } else {
        /* Combine the low bits of ah over the high bits of al >> ofs. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t0, al, ofs);
        tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
        tcg_temp_free_i64(t0);
    }
}
2527 
/* Conditional move: ret = (c1 cond c2) ? v1 : v2. */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Evaluate the 64-bit comparison once into a 0/1 flag t0. */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half on t0 != 0. */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Turn the flag into an all-ones/all-zeros mask ... */
            tcg_gen_neg_i32(t0, t0);

            /* ... and blend each half: (v1 & mask) | (v2 & ~mask). */
            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* Same mask-and-blend expansion, on full 64-bit values. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2575 
/* 128-bit add: rh:rl = ah:al + bh:bl. */
void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_add2_i64) {
        tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_add_i64(t0, al, bl);
        /* Carry out of the low add: sum < al iff the add wrapped. */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
        tcg_gen_add_i64(rh, ah, bh);
        tcg_gen_add_i64(rh, rh, t1);
        /* Write rl last so that rl may alias any input. */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2593 
/* 128-bit subtract: rh:rl = ah:al - bh:bl. */
void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                      TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
{
    if (TCG_TARGET_HAS_sub2_i64) {
        tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_sub_i64(t0, al, bl);
        /* Borrow out of the low subtract: al < bl. */
        tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
        tcg_gen_sub_i64(rh, ah, bh);
        tcg_gen_sub_i64(rh, rh, t1);
        /* Write rl last so that rl may alias any input. */
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2611 
/* Unsigned 64x64 -> 128 multiply: rh:rl = arg1 * arg2. */
void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i64) {
        tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i64) {
        /* Separate low/high ops; stage the low result so rl may alias. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else {
        /* High half via out-of-line helper. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_muluh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
2630 
/* Signed 64x64 -> 128 multiply: rh:rl = arg1 * arg2. */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Separate low/high ops; stage the low result so rl may alias. */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        /*
         * Derive the signed product from the unsigned one:
         * signed_high = unsigned_high - (arg1 < 0 ? arg2 : 0)
         *                             - (arg2 < 0 ? arg1 : 0).
         * The low 64 bits are identical for signed and unsigned.
         */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* High half via out-of-line helper. */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
2667 
2668 void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2669 {
2670     TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2671     TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2672     TCGv_i64 t2 = tcg_temp_ebb_new_i64();
2673     tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2674     /* Adjust for negative input for the signed arg1.  */
2675     tcg_gen_sari_i64(t2, arg1, 63);
2676     tcg_gen_and_i64(t2, t2, arg2);
2677     tcg_gen_sub_i64(rh, t1, t2);
2678     tcg_gen_mov_i64(rl, t0);
2679     tcg_temp_free_i64(t0);
2680     tcg_temp_free_i64(t1);
2681     tcg_temp_free_i64(t2);
2682 }
2683 
/* Signed minimum: ret = (a < b) ? a : b, via movcond.  */
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
2688 
/* Unsigned minimum: ret = (a <u b) ? a : b, via movcond.  */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
2693 
/* Signed maximum: ret = (a < b) ? b : a, via movcond.  */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
2698 
/* Unsigned maximum: ret = (a <u b) ? b : a, via movcond.  */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
2703 
2704 void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
2705 {
2706     TCGv_i64 t = tcg_temp_ebb_new_i64();
2707 
2708     tcg_gen_sari_i64(t, a, 63);
2709     tcg_gen_xor_i64(ret, a, t);
2710     tcg_gen_sub_i64(ret, ret, t);
2711     tcg_temp_free_i64(t);
2712 }
2713 
2714 /* Size changing operations.  */
2715 
/* Extract the low 32 bits of a 64-bit value into an i32.  */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* On a 32-bit host an i64 is a register pair; take the low half.  */
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extr_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* Otherwise reinterpret the 64-bit temp as an i32 and copy.  */
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
2727 
2728 void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2729 {
2730     if (TCG_TARGET_REG_BITS == 32) {
2731         tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
2732     } else if (TCG_TARGET_HAS_extr_i64_i32) {
2733         tcg_gen_op2(INDEX_op_extrh_i64_i32,
2734                     tcgv_i32_arg(ret), tcgv_i64_arg(arg));
2735     } else {
2736         TCGv_i64 t = tcg_temp_ebb_new_i64();
2737         tcg_gen_shri_i64(t, arg, 32);
2738         tcg_gen_mov_i32(ret, (TCGv_i32)t);
2739         tcg_temp_free_i64(t);
2740     }
2741 }
2742 
/* Zero-extend a 32-bit value into a 64-bit destination.  */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Low half is the source, high half is zero.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
2753 
/* Sign-extend a 32-bit value into a 64-bit destination.  */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Low half is the source; high half replicates its sign bit.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
2764 
2765 void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
2766 {
2767     TCGv_i64 tmp;
2768 
2769     if (TCG_TARGET_REG_BITS == 32) {
2770         tcg_gen_mov_i32(TCGV_LOW(dest), low);
2771         tcg_gen_mov_i32(TCGV_HIGH(dest), high);
2772         return;
2773     }
2774 
2775     tmp = tcg_temp_ebb_new_i64();
2776     /* These extensions are only needed for type correctness.
2777        We may be able to do better given target specific information.  */
2778     tcg_gen_extu_i32_i64(tmp, high);
2779     tcg_gen_extu_i32_i64(dest, low);
2780     /* If deposit is available, use it.  Otherwise use the extra
2781        knowledge that we have of the zero-extensions above.  */
2782     if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
2783         tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
2784     } else {
2785         tcg_gen_shli_i64(tmp, tmp, 32);
2786         tcg_gen_or_i64(dest, dest, tmp);
2787     }
2788     tcg_temp_free_i64(tmp);
2789 }
2790 
/* Split a 64-bit value into two i32s: lo = low half, hi = high half.  */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The halves already exist as separate i32 registers.  */
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
2801 
/* Split a 64-bit value into two i64s holding its zero-extended halves.  */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
2807 
/* Split a 128-bit value into its low and high 64-bit halves.  */
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}
2813 
/* Build a 128-bit value from two 64-bit halves.  */
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}
2819 
/* Copy a 128-bit value; a self-move is elided entirely.  */
void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
{
    if (dst != src) {
        tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
        tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
    }
}
2827 
2828 /* QEMU specific operations.  */
2829 
/*
 * Emit the op that leaves generated code and returns to the main loop.
 * The returned value packs the TB pointer with the exit index @idx in
 * its low bits; tb == NULL (with idx == 0) means "no chaining".
 */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    /* NOTE(review): assumes tcg_splitwx_to_rx maps NULL such that
       val encodes "no TB" when tb == NULL — confirm in tcg.h.  */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
2860 
/*
 * Emit a direct-chaining jump slot for exit @idx of the current TB.
 * Must be paired with a later tcg_gen_exit_tb using the same idx.
 */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    /* Record the exit so the matching tcg_gen_exit_tb can check it.  */
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
2875 
2876 void tcg_gen_lookup_and_goto_ptr(void)
2877 {
2878     TCGv_ptr ptr;
2879 
2880     if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
2881         tcg_gen_exit_tb(NULL, 0);
2882         return;
2883     }
2884 
2885     plugin_gen_disable_mem_helpers();
2886     ptr = tcg_temp_ebb_new_ptr();
2887     gen_helper_lookup_tb_ptr(ptr, cpu_env);
2888     tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
2889     tcg_temp_free_ptr(ptr);
2890 }
2891