xref: /openbmc/qemu/tcg/tcg-op.c (revision 70f168f8)
1 /*
2  * Tiny Code Generator for QEMU
3  *
4  * Copyright (c) 2008 Fabrice Bellard
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to deal
8  * in the Software without restriction, including without limitation the rights
9  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10  * copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in
14  * all copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22  * THE SOFTWARE.
23  */
24 
25 #include "qemu/osdep.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg.h"
28 #include "tcg/tcg-temp-internal.h"
29 #include "tcg/tcg-op.h"
30 #include "exec/plugin-gen.h"
31 #include "tcg-internal.h"
32 
33 
34 void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
35 {
36     TCGOp *op = tcg_emit_op(opc, 1);
37     op->args[0] = a1;
38 }
39 
40 void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
41 {
42     TCGOp *op = tcg_emit_op(opc, 2);
43     op->args[0] = a1;
44     op->args[1] = a2;
45 }
46 
47 void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
48 {
49     TCGOp *op = tcg_emit_op(opc, 3);
50     op->args[0] = a1;
51     op->args[1] = a2;
52     op->args[2] = a3;
53 }
54 
55 void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
56 {
57     TCGOp *op = tcg_emit_op(opc, 4);
58     op->args[0] = a1;
59     op->args[1] = a2;
60     op->args[2] = a3;
61     op->args[3] = a4;
62 }
63 
64 void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
65                  TCGArg a4, TCGArg a5)
66 {
67     TCGOp *op = tcg_emit_op(opc, 5);
68     op->args[0] = a1;
69     op->args[1] = a2;
70     op->args[2] = a3;
71     op->args[3] = a4;
72     op->args[4] = a5;
73 }
74 
75 void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
76                  TCGArg a4, TCGArg a5, TCGArg a6)
77 {
78     TCGOp *op = tcg_emit_op(opc, 6);
79     op->args[0] = a1;
80     op->args[1] = a2;
81     op->args[2] = a3;
82     op->args[3] = a4;
83     op->args[4] = a5;
84     op->args[5] = a6;
85 }
86 
87 /* Generic ops.  */
88 
/*
 * Record the most recently emitted op as a use (branch) of label @l,
 * appending it to the label's list of branch sites.
 */
static void add_last_as_label_use(TCGLabel *l)
{
    TCGLabelUse *u = tcg_malloc(sizeof(TCGLabelUse));

    u->op = tcg_last_op();
    QSIMPLEQ_INSERT_TAIL(&l->branches, u, next);
}
96 
/* Emit an unconditional branch to label @l and record the label use. */
void tcg_gen_br(TCGLabel *l)
{
    tcg_gen_op1(INDEX_op_br, label_arg(l));
    add_last_as_label_use(l);
}
102 
/*
 * Emit a memory barrier of the given type.  The barrier is only needed
 * when the translation block may run in parallel with other vCPUs
 * (CF_PARALLEL); otherwise it is elided entirely.
 */
void tcg_gen_mb(TCGBar mb_type)
{
    if (tcg_ctx->gen_tb->cflags & CF_PARALLEL) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}
109 
110 /* 32 bit ops */
111 
/* Load the 32-bit constant @arg into @ret. */
void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}
116 
117 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
118 {
119     /* some cases can be optimized here */
120     if (arg2 == 0) {
121         tcg_gen_mov_i32(ret, arg1);
122     } else {
123         tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
124     }
125 }
126 
/* Reverse subtract: ret = arg1 (constant) - arg2. */
void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}
136 
137 void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
138 {
139     /* some cases can be optimized here */
140     if (arg2 == 0) {
141         tcg_gen_mov_i32(ret, arg1);
142     } else {
143         tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
144     }
145 }
146 
/* AND with the constant @arg2, folding common masks to cheaper ops. */
void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* x & 0 == 0.  */
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        /* x & -1 == x.  */
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        /* Low 16-bit mask: use ext16u when the host has it.  */
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}
174 
175 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
176 {
177     /* Some cases can be optimized here.  */
178     if (arg2 == -1) {
179         tcg_gen_movi_i32(ret, -1);
180     } else if (arg2 == 0) {
181         tcg_gen_mov_i32(ret, arg1);
182     } else {
183         tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
184     }
185 }
186 
/* XOR with the constant @arg2; XOR 0 is a move, XOR -1 is a NOT. */
void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
199 
200 void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
201 {
202     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
203     if (arg2 == 0) {
204         tcg_gen_mov_i32(ret, arg1);
205     } else {
206         tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
207     }
208 }
209 
210 void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
211 {
212     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
213     if (arg2 == 0) {
214         tcg_gen_mov_i32(ret, arg1);
215     } else {
216         tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
217     }
218 }
219 
220 void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
221 {
222     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
223     if (arg2 == 0) {
224         tcg_gen_mov_i32(ret, arg1);
225     } else {
226         tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
227     }
228 }
229 
/*
 * Branch to @l if (arg1 cond arg2).  ALWAYS degenerates to a plain
 * branch; NEVER emits nothing.  Otherwise the emitted brcond is
 * registered as a use of @l.
 */
void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
        add_last_as_label_use(l);
    }
}
239 
240 void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
241 {
242     if (cond == TCG_COND_ALWAYS) {
243         tcg_gen_br(l);
244     } else if (cond != TCG_COND_NEVER) {
245         tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
246     }
247 }
248 
249 void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
250                          TCGv_i32 arg1, TCGv_i32 arg2)
251 {
252     if (cond == TCG_COND_ALWAYS) {
253         tcg_gen_movi_i32(ret, 1);
254     } else if (cond == TCG_COND_NEVER) {
255         tcg_gen_movi_i32(ret, 0);
256     } else {
257         tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
258     }
259 }
260 
/* setcond against the constant @arg2. */
void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}
266 
267 void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
268 {
269     if (arg2 == 0) {
270         tcg_gen_movi_i32(ret, 0);
271     } else if (is_power_of_2(arg2)) {
272         tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
273     } else {
274         tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
275     }
276 }
277 
/* Signed 32-bit division, choosing the best form the host supports. */
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* div2 consumes a double-width dividend: sign-extend arg1
           into the high half via an arithmetic shift.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        /* No host support at all: out-of-line helper.  */
        gen_helper_div_i32(ret, arg1, arg2);
    }
}
291 
/* Signed 32-bit remainder, with fallbacks mirroring tcg_gen_div_i32. */
void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* rem = arg1 - (arg1 / arg2) * arg2.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* div2 produces the remainder in its second output.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
311 
/* Unsigned 32-bit division, choosing the best form the host supports. */
void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* Zero-extend the dividend by supplying a zero high half.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}
325 
/* Unsigned 32-bit remainder, with fallbacks mirroring tcg_gen_divu_i32. */
void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        /* rem = arg1 - (arg1 / arg2) * arg2.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        /* divu2 produces the remainder in its second output.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}
345 
346 void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
347 {
348     if (TCG_TARGET_HAS_andc_i32) {
349         tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
350     } else {
351         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
352         tcg_gen_not_i32(t0, arg2);
353         tcg_gen_and_i32(ret, arg1, t0);
354         tcg_temp_free_i32(t0);
355     }
356 }
357 
358 void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
359 {
360     if (TCG_TARGET_HAS_eqv_i32) {
361         tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
362     } else {
363         tcg_gen_xor_i32(ret, arg1, arg2);
364         tcg_gen_not_i32(ret, ret);
365     }
366 }
367 
368 void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
369 {
370     if (TCG_TARGET_HAS_nand_i32) {
371         tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
372     } else {
373         tcg_gen_and_i32(ret, arg1, arg2);
374         tcg_gen_not_i32(ret, ret);
375     }
376 }
377 
378 void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
379 {
380     if (TCG_TARGET_HAS_nor_i32) {
381         tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
382     } else {
383         tcg_gen_or_i32(ret, arg1, arg2);
384         tcg_gen_not_i32(ret, ret);
385     }
386 }
387 
388 void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
389 {
390     if (TCG_TARGET_HAS_orc_i32) {
391         tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
392     } else {
393         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
394         tcg_gen_not_i32(t0, arg2);
395         tcg_gen_or_i32(ret, arg1, t0);
396         tcg_temp_free_i32(t0);
397     }
398 }
399 
/*
 * Count leading zeros of @arg1, producing @arg2 when @arg1 == 0.
 * Falls back to the 64-bit op or an out-of-line helper.
 */
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        /* Widen to 64 bits.  The zero-extended input has 32 extra
           leading zeros; bias the "zero input" value by 32 so the
           final subtraction of 32 corrects both results.  */
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}
419 
/* clz with a constant result value for the arg1 == 0 case. */
void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}
424 
/*
 * Count trailing zeros of @arg1, producing @arg2 when @arg1 == 0.
 * Falls back to the 64-bit op, then to ctpop/clz based expansions,
 * then to an out-of-line helper.
 */
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        /* Widen: the high zeros do not affect the trailing count.  */
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_ebb_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            /* ctz(x) == ctpop((x - 1) & ~x): the mask isolates the
               trailing zeros as a run of ones.  */
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        /* Both expansions miscompute the arg1 == 0 case; patch it up
           with a movcond selecting arg2.  */
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
462 
/* ctz with a constant value for the arg1 == 0 case. */
void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}
476 
/*
 * Count leading redundant sign bits (bits equal to the sign bit,
 * excluding the sign bit itself).
 */
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        /* XOR with a sign-replicated copy turns sign-matching bits
           into leading zeros; clz - 1 excludes the sign bit.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
490 
/* Population count of @arg1, falling back to the 64-bit op or helper. */
void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        /* Zero-extension does not change the bit count.  */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}
505 
/* Rotate left by a variable amount; fallback is (x << n) | (x >> (32-n)). */
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
523 
/* Rotate left by a constant in [0, 31]; zero rotation is a move. */
void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        /* Expand as (x << n) | (x >> (32 - n)); both shift counts
           are in range here because 0 < n < 32.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
543 
/* Rotate right by a variable amount; fallback is (x >> n) | (x << (32-n)). */
void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
561 
562 void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
563 {
564     tcg_debug_assert(arg2 >= 0 && arg2 < 32);
565     /* some cases can be optimized here */
566     if (arg2 == 0) {
567         tcg_gen_mov_i32(ret, arg1);
568     } else {
569         tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
570     }
571 }
572 
/*
 * Deposit the low @len bits of @arg2 into @arg1 at bit offset @ofs,
 * leaving the remaining bits of @arg1 unchanged.
 */
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* A full-width deposit simply replaces arg1.  */
    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_ebb_new_i32();

    /* Deposits touching bit 0 or bit 31 can use extract2.  */
    if (TCG_TARGET_HAS_extract2_i32) {
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    /* Generic fallback: mask and shift arg2 into position, clear the
       field in arg1, then OR the pieces together.  */
    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        /* The shift itself discards the high bits; no mask needed.  */
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
620 
/*
 * Deposit the low @len bits of @arg at offset @ofs into a zero
 * background, i.e. ret = (arg & ((1 << len) - 1)) << ofs.
 */
void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        /* Field reaches bit 31: the shift discards all other bits.  */
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field at bit 0: a plain mask suffices.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        /* Deposit into an explicit zero.  */
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: mask then shift.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}
677 
/*
 * Extract an unsigned @len-bit field starting at bit @ofs of @arg,
 * i.e. ret = (arg >> ofs) & ((1 << len) - 1).
 */
void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field reaches bit 31: a single logical shift.  */
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Field at bit 0: a single mask.  */
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        /* Shift the field to the top, then logically back down.  */
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
734 
/*
 * Extract a signed @len-bit field starting at bit @ofs of @arg,
 * sign-extending the field into the full 32 bits of @ret.
 */
void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        /* Field reaches bit 31: a single arithmetic shift.  */
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        /* Field at bit 0 of byte/halfword size: plain sign extension.  */
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    /* Fallback: shift the field to the top, arithmetic-shift back down.  */
    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
802 
/*
 * Extract 32-bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        /* Both halves identical: the extract is a rotate.  */
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        /* Fallback: shift the low part down and deposit the high part.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
826 
/* ret = (c1 cond c2) ? v1 : v2, with a bit-mask fallback expansion. */
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        /* Build an all-ones/all-zeros mask from the setcond result,
           then select: ret = (v1 & mask) | (v2 & ~mask).  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
848 
/* Double-word add: rh:rl = ah:al + bh:bl, via 64-bit math if needed. */
void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Concatenate into 64-bit values, add, split back.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
865 
/* Double-word subtract: rh:rl = ah:al - bh:bl, via 64-bit math if needed. */
void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        /* Concatenate into 64-bit values, subtract, split back.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
882 
/* Unsigned widening multiply: rh:rl = arg1 * arg2 (full 64-bit product). */
void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        /* Compute low and high halves separately; a temp keeps the
           low result intact in case rl aliases an input.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 64) {
        /* 64-bit host: zero-extend, multiply, split.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    } else {
        /* 32-bit hosts are required to implement one of the above.  */
        qemu_build_not_reached();
    }
}
906 
/* Signed widening multiply: rh:rl = arg1 * arg2 (full 64-bit product). */
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        /* Low and high halves separately; temp protects rl aliasing.  */
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        /* Start from the unsigned product and correct the high half
           for each negative input.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        TCGv_i32 t3 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* 64-bit host: sign-extend, multiply, split.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
946 
/*
 * Widening multiply of signed @arg1 by unsigned @arg2:
 * rh:rl = (int32_t)arg1 * (uint32_t)arg2.
 */
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Unsigned product, then correct the high half when arg1 < 0.  */
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_temp_ebb_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        /* 64-bit host: mixed-sign extension, multiply, split.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
973 
/* Sign-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        /* Fallback: move the byte to the top, arithmetic-shift back.  */
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

/* Sign-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        /* Fallback: move the halfword to the top, arithmetic-shift back.  */
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

/* Zero-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

/* Zero-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}
1011 
/*
 * Byte-swap the low 16 bits of arg into ret.
 *
 * flags:
 *   TCG_BSWAP_IZ - caller guarantees bits 16..31 of arg are zero,
 *                  allowing the masking of the high input byte to be
 *                  skipped.
 *   TCG_BSWAP_OZ - zero-extend the 16-bit result into ret.
 *   TCG_BSWAP_OS - sign-extend the 16-bit result into ret.
 *   With neither output flag, bits 16..31 of ret hold leftover shifted
 *   input bits and must not be relied upon.
 */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();

        /* t0 = byte 1 moved down to byte 0.  */
        tcg_gen_shri_i32(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            /* Mask off whatever bits 16..31 shifted in.  */
            tcg_gen_ext8u_i32(t0, t0);
        }

        /* t1 = byte 0 moved up to byte 1, folding the requested output
           extension into the shift sequence.  */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);
            tcg_gen_sari_i32(t1, t1, 16);
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);
            tcg_gen_shli_i32(t1, t1, 8);
        } else {
            tcg_gen_shli_i32(t1, arg, 8);
        }

        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1043 
/*
 * Byte-swap all 32 bits of arg into ret.  The fallback swaps bytes
 * within each halfword first, then swaps the two halfwords.
 */
void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
1068 
/* Swap the two 16-bit halves of arg into ret. */
void tcg_gen_hswap_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    /* Swapping 2 16-bit elements is a rotate. */
    tcg_gen_rotli_i32(ret, arg, 16);
}

/* ret = min(a, b), signed comparison. */
void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

/* ret = min(a, b), unsigned comparison. */
void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

/* ret = max(a, b), signed comparison. */
void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

/* ret = max(a, b), unsigned comparison. */
void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}

/* ret = |a|: xor with the sign mask, then subtract the sign mask
   (branch-free two's-complement absolute value).  */
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_ebb_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
1104 
1105 /* 64-bit ops */
1106 
1107 #if TCG_TARGET_REG_BITS == 32
1108 /* These are all inline for TCG_TARGET_REG_BITS == 64.  */
1109 
/* Mark both halves of the 64-bit value as dead (32-bit host). */
void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

/* Copy arg to ret, one 32-bit half at a time. */
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

/* Load a 64-bit immediate by splitting it into two 32-bit halves. */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}
1134 
/* Load an unsigned byte from host memory; high half becomes zero. */
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

/* Load a signed byte; high half is the sign-extension of the low. */
void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

/* Load an unsigned halfword; high half becomes zero. */
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

/* Load a signed halfword; high half is the sign-extension of the low. */
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

/* Load an unsigned 32-bit word; high half becomes zero. */
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

/* Load a signed 32-bit word; high half is the sign-extension of the low. */
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

/* Load a full 64-bit value as two 32-bit loads, in host byte order. */
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#if HOST_BIG_ENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}
1183 
/* Store the low byte of arg1 to host memory. */
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
}

/* Store the low halfword of arg1 to host memory. */
void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
}

/* Store the low 32 bits of arg1 to host memory. */
void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
}

/* Store the full 64-bit value as two 32-bit stores, in host byte order. */
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#if HOST_BIG_ENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}
1209 
/* 64-bit add on a 32-bit host: add-with-carry across the halves. */
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}

/* 64-bit subtract on a 32-bit host: subtract-with-borrow. */
void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
                     TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
}

/* Bitwise AND: the halves are independent. */
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

/* Bitwise OR: the halves are independent. */
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

/* Bitwise XOR: the halves are independent. */
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

/* Variable-count shifts need cross-half data movement; punt to an
   out-of-line helper on a 32-bit host. */
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}
1254 
/*
 * 64-bit multiply on a 32-bit host, schoolbook style:
 * the low 64 bits of the product are mulu2(lo1, lo2) plus the two
 * cross products (lo1*hi2 and hi1*lo2) added into the high word.
 * The hi1*hi2 term only affects bits above 64 and is dropped.
 */
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    /* Accumulate into temps so that ret may alias arg1/arg2.  */
    t0 = tcg_temp_ebb_new_i64();
    t1 = tcg_temp_ebb_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
1275 
#else

/* 64-bit host: a 64-bit immediate is an interned constant temp. */
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}

#endif /* TCG_TARGET_REG_BITS == 32 */
1284 
1285 void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1286 {
1287     /* some cases can be optimized here */
1288     if (arg2 == 0) {
1289         tcg_gen_mov_i64(ret, arg1);
1290     } else if (TCG_TARGET_REG_BITS == 64) {
1291         tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
1292     } else {
1293         tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
1294                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
1295                          tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
1296     }
1297 }
1298 
/* ret = arg1 - arg2, with an immediate FIRST operand (reverse subtract). */
void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* 0 - x is negation.  Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        /* 32-bit host: subtract-with-borrow, constant minuend split
           into low/high words.  */
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}
1312 
1313 void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1314 {
1315     /* some cases can be optimized here */
1316     if (arg2 == 0) {
1317         tcg_gen_mov_i64(ret, arg1);
1318     } else if (TCG_TARGET_REG_BITS == 64) {
1319         tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
1320     } else {
1321         tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
1322                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
1323                          tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
1324     }
1325 }
1326 
/* ret = arg1 & arg2, with an immediate mask. */
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The halves are independent for bitwise AND.  */
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        /* x & 0 == 0.  */
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        /* x & -1 == x.  */
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}
1366 
1367 void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1368 {
1369     if (TCG_TARGET_REG_BITS == 32) {
1370         tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
1371         tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
1372         return;
1373     }
1374     /* Some cases can be optimized here.  */
1375     if (arg2 == -1) {
1376         tcg_gen_movi_i64(ret, -1);
1377     } else if (arg2 == 0) {
1378         tcg_gen_mov_i64(ret, arg1);
1379     } else {
1380         tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
1381     }
1382 }
1383 
/* ret = arg1 ^ arg2, with an immediate second operand. */
void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The halves are independent for bitwise XOR.  */
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        /* x ^ 0 == x.  */
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* x ^ -1 == ~x.  Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1401 
/*
 * Expand a 64-bit shift by immediate c on a 32-bit host.
 * right selects right vs left shift; arith selects arithmetic vs
 * logical for right shifts.  Cases:
 *   c == 0:  plain move;
 *   c >= 32: one half is entirely shifted out, the other is a 32-bit
 *            shift of the surviving half (with sign/zero fill);
 *   c <  32: bits cross the half boundary, stitched together with
 *            extract2 when available, else shift+deposit.
 */
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        /* Low result word takes c bits from the high input word.  */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        /* High result word takes c bits from the low input word.
           Compose the high word first, through a temp if needed, so
           that writing TCGV_LOW(ret) last keeps ret == arg1 safe.  */
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_ebb_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
1451 
/* ret = arg1 << arg2, immediate count in [0, 63]. */
void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        /* Shift by zero is a plain move.  */
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

/* ret = arg1 >> arg2 (logical), immediate count in [0, 63]. */
void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

/* ret = arg1 >> arg2 (arithmetic), immediate count in [0, 63]. */
void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1487 
/* Branch to label l if (arg1 cond arg2); NEVER emits nothing,
   ALWAYS emits an unconditional branch. */
void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        if (TCG_TARGET_REG_BITS == 32) {
            /* 32-bit host: compare both halves with brcond2.  */
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
        /* Record the just-emitted op as a use of the label.  */
        add_last_as_label_use(l);
    }
}
1504 
/* Branch to label l if (arg1 cond arg2), immediate second operand. */
void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        /* 32-bit host: brcond2 with the immediate split into halves.  */
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
        add_last_as_label_use(l);
    }
}
1520 
/* ret = (arg1 cond arg2) ? 1 : 0. */
void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* setcond2 yields a 32-bit 0/1; zero the high half.  */
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}
1539 
/* ret = (arg1 cond arg2) ? 1 : 0, immediate second operand. */
void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        /* 32-bit host: setcond2 with the immediate split; the result
           is a 32-bit 0/1, so zero the high half.  */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}
1557 
/* ret = arg1 * arg2, with an immediate multiplier. */
void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        /* x * 0 == 0.  */
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        /* Strength-reduce a power-of-two multiplier to a shift.  */
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1568 
/* ret = arg1 / arg2, signed. */
void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 takes a double-width dividend; sign-extend arg1 into
           the high part.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}
1582 
/* ret = arg1 % arg2, signed. */
void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* Synthesize via r = a - (a / b) * b.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* div2 produces quotient and remainder; the remainder (second
           output) goes to ret, the quotient into the scratch temp.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}
1602 
/* ret = arg1 / arg2, unsigned. */
void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 takes a double-width dividend; zero the high part.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}
1616 
/* ret = arg1 % arg2, unsigned. */
void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        /* Synthesize via r = a - (a / b) * b.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        /* divu2 produces quotient and remainder; the remainder
           (second output) goes to ret.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}
1636 
/* Sign-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Extend within the low half, then replicate its sign bit.  */
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        /* Fallback: move the byte to the top, arithmetic-shift back.  */
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

/* Sign-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

/* Sign-extend the low 32 bits of arg into ret. */
void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* The low half already is the 32-bit value; fill the high
           half with its sign bit.  */
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}
1675 
/* Zero-extend the low 8 bits of arg into ret. */
void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

/* Zero-extend the low 16 bits of arg into ret. */
void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

/* Zero-extend the low 32 bits of arg into ret. */
void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}
1711 
/*
 * Byte-swap the low 16 bits of arg into ret.  Flags are as for
 * tcg_gen_bswap16_i32: TCG_BSWAP_IZ asserts the input's high bits are
 * zero; TCG_BSWAP_OZ / TCG_BSWAP_OS request zero/sign extension of the
 * 16-bit result; with neither, the upper bits are unspecified.
 */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap within the low half, then extend into the high half.  */
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();

        /* t0 = byte 1 moved down; mask unless input is known clean.  */
        tcg_gen_shri_i64(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i64(t0, t0);
        }

        /* t1 = byte 0 moved up, folding the output extension in.  */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i64(t1, arg, 56);
            tcg_gen_sari_i64(t1, t1, 48);
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i64(t1, arg);
            tcg_gen_shli_i64(t1, t1, 8);
        } else {
            tcg_gen_shli_i64(t1, arg, 8);
        }

        tcg_gen_or_i64(ret, t0, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1750 
/*
 * Byte-swap the low 32 bits of arg into ret, with TCG_BSWAP_OZ /
 * TCG_BSWAP_OS selecting zero or sign extension of the 32-bit result.
 */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap the low half, then extend into the high half.  */
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        } else {
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        }
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                            /* arg = xxxxabcd */
        tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
        tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */

        tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
        if (flags & TCG_BSWAP_OS) {
            tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
        } else {
            tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        }
        tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
1790 
/*
 * Byte-swap all 64 bits of arg into ret.  The generic fallback swaps
 * bytes within halfwords, then halfwords within words, then the two
 * words (three mask-shift-or rounds).
 */
void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        /* Swap each 32-bit half, then exchange the halves.  Temps
           allow ret to alias arg.  */
        TCGv_i32 t0, t1;
        t0 = tcg_temp_ebb_new_i32();
        t1 = tcg_temp_ebb_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
    } else {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */

        tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
        tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
        tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */

        tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
        tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    }
}
1835 
1836 void tcg_gen_hswap_i64(TCGv_i64 ret, TCGv_i64 arg)
1837 {
1838     uint64_t m = 0x0000ffff0000ffffull;
1839     TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1840     TCGv_i64 t1 = tcg_temp_ebb_new_i64();
1841 
1842     /* See include/qemu/bitops.h, hswap64. */
1843     tcg_gen_rotli_i64(t1, arg, 32);
1844     tcg_gen_andi_i64(t0, t1, m);
1845     tcg_gen_shli_i64(t0, t0, 16);
1846     tcg_gen_shri_i64(t1, t1, 16);
1847     tcg_gen_andi_i64(t1, t1, m);
1848     tcg_gen_or_i64(ret, t0, t1);
1849 
1850     tcg_temp_free_i64(t0);
1851     tcg_temp_free_i64(t1);
1852 }
1853 
/* Swap the two 32-bit words of ARG.  */
void tcg_gen_wswap_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    /* Swapping 2 32-bit elements is a rotate. */
    tcg_gen_rotli_i64(ret, arg, 32);
}
1859 
1860 void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1861 {
1862     if (TCG_TARGET_REG_BITS == 32) {
1863         tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1864         tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1865     } else if (TCG_TARGET_HAS_not_i64) {
1866         tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1867     } else {
1868         tcg_gen_xori_i64(ret, arg, -1);
1869     }
1870 }
1871 
1872 void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1873 {
1874     if (TCG_TARGET_REG_BITS == 32) {
1875         tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1876         tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1877     } else if (TCG_TARGET_HAS_andc_i64) {
1878         tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1879     } else {
1880         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1881         tcg_gen_not_i64(t0, arg2);
1882         tcg_gen_and_i64(ret, arg1, t0);
1883         tcg_temp_free_i64(t0);
1884     }
1885 }
1886 
1887 void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1888 {
1889     if (TCG_TARGET_REG_BITS == 32) {
1890         tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1891         tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1892     } else if (TCG_TARGET_HAS_eqv_i64) {
1893         tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1894     } else {
1895         tcg_gen_xor_i64(ret, arg1, arg2);
1896         tcg_gen_not_i64(ret, ret);
1897     }
1898 }
1899 
1900 void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1901 {
1902     if (TCG_TARGET_REG_BITS == 32) {
1903         tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1904         tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1905     } else if (TCG_TARGET_HAS_nand_i64) {
1906         tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
1907     } else {
1908         tcg_gen_and_i64(ret, arg1, arg2);
1909         tcg_gen_not_i64(ret, ret);
1910     }
1911 }
1912 
1913 void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1914 {
1915     if (TCG_TARGET_REG_BITS == 32) {
1916         tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1917         tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1918     } else if (TCG_TARGET_HAS_nor_i64) {
1919         tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
1920     } else {
1921         tcg_gen_or_i64(ret, arg1, arg2);
1922         tcg_gen_not_i64(ret, ret);
1923     }
1924 }
1925 
1926 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1927 {
1928     if (TCG_TARGET_REG_BITS == 32) {
1929         tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1930         tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1931     } else if (TCG_TARGET_HAS_orc_i64) {
1932         tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
1933     } else {
1934         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
1935         tcg_gen_not_i64(t0, arg2);
1936         tcg_gen_or_i64(ret, arg1, t0);
1937         tcg_temp_free_i64(t0);
1938     }
1939 }
1940 
1941 void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1942 {
1943     if (TCG_TARGET_HAS_clz_i64) {
1944         tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
1945     } else {
1946         gen_helper_clz_i64(ret, arg1, arg2);
1947     }
1948 }
1949 
/*
 * Count leading zeros of ARG1, yielding the constant ARG2 when
 * ARG1 is zero.
 */
void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_clz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t = tcg_temp_ebb_new_i32();
        /* t = 32 + clz32(low word): the result when the high word is 0.
           If arg2 < 32, the uint32_t subtraction wraps, but the addition
           of 32 below undoes it modulo 2^32, yielding arg2 again.  */
        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
        tcg_gen_addi_i32(t, t, 32);
        /* clz of the high word, falling back to t when it is zero.  */
        tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_clz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1965 
/*
 * Count trailing zeros of ARG1, yielding ARG2 when ARG1 is zero.
 */
void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_ctz_i64) {
        tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 z, t = tcg_temp_ebb_new_i64();

        if (TCG_TARGET_HAS_ctpop_i64) {
            /* (x - 1) & ~x is a mask of exactly the trailing zeros,
               so its population count is the answer.  */
            tcg_gen_subi_i64(t, arg1, 1);
            tcg_gen_andc_i64(t, t, arg1);
            tcg_gen_ctpop_i64(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
            /* Isolate the lowest set bit; then ctz(x) = 63 - clz(x & -x),
               computed here as clz ^ 63 (no borrow for values 0..63).  */
            tcg_gen_neg_i64(t, arg1);
            tcg_gen_and_i64(t, t, arg1);
            tcg_gen_clzi_i64(t, t, 64);
            tcg_gen_xori_i64(t, t, 63);
        }
        /* Neither expansion produces ARG2 for a zero input;
           select it explicitly.  */
        z = tcg_constant_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i64(t);
        /* NOTE(review): z is a constant temp; this free relies on
           tcg_temp_free tolerating constants -- confirm.  */
        tcg_temp_free_i64(z);
    } else {
        gen_helper_ctz_i64(ret, arg1, arg2);
    }
}
1992 
/*
 * Count trailing zeros of ARG1, yielding the constant ARG2 when
 * ARG1 is zero.
 */
void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32
        && TCG_TARGET_HAS_ctz_i32
        && arg2 <= 0xffffffffu) {
        TCGv_i32 t32 = tcg_temp_ebb_new_i32();
        /* t32 = 32 + ctz32(high word): the result when the low word is 0.
           If arg2 < 32, the uint32_t subtraction wraps, but the addition
           of 32 below undoes it modulo 2^32, yielding arg2 again.  */
        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
        tcg_gen_addi_i32(t32, t32, 32);
        /* ctz of the low word, falling back to t32 when it is zero.  */
        tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        tcg_temp_free_i32(t32);
    } else if (!TCG_TARGET_HAS_ctz_i64
               && TCG_TARGET_HAS_ctpop_i64
               && arg2 == 64) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        /* ctz(x) == popcount((x - 1) & ~x); for x == 0 the mask is
           all-ones and the count is 64, exactly the requested arg2.  */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_subi_i64(t, arg1, 1);
        tcg_gen_andc_i64(t, t, arg1);
        tcg_gen_ctpop_i64(ret, t);
        tcg_temp_free_i64(t);
    } else {
        tcg_gen_ctz_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
2017 
2018 void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
2019 {
2020     if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
2021         TCGv_i64 t = tcg_temp_ebb_new_i64();
2022         tcg_gen_sari_i64(t, arg, 63);
2023         tcg_gen_xor_i64(t, t, arg);
2024         tcg_gen_clzi_i64(t, t, 64);
2025         tcg_gen_subi_i64(ret, t, 1);
2026         tcg_temp_free_i64(t);
2027     } else {
2028         gen_helper_clrsb_i64(ret, arg);
2029     }
2030 }
2031 
2032 void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
2033 {
2034     if (TCG_TARGET_HAS_ctpop_i64) {
2035         tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
2036     } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
2037         tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2038         tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2039         tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
2040         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2041     } else {
2042         gen_helper_ctpop_i64(ret, arg1);
2043     }
2044 }
2045 
2046 void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2047 {
2048     if (TCG_TARGET_HAS_rot_i64) {
2049         tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
2050     } else {
2051         TCGv_i64 t0, t1;
2052         t0 = tcg_temp_ebb_new_i64();
2053         t1 = tcg_temp_ebb_new_i64();
2054         tcg_gen_shl_i64(t0, arg1, arg2);
2055         tcg_gen_subfi_i64(t1, 64, arg2);
2056         tcg_gen_shr_i64(t1, arg1, t1);
2057         tcg_gen_or_i64(ret, t0, t1);
2058         tcg_temp_free_i64(t0);
2059         tcg_temp_free_i64(t1);
2060     }
2061 }
2062 
2063 void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2064 {
2065     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2066     /* some cases can be optimized here */
2067     if (arg2 == 0) {
2068         tcg_gen_mov_i64(ret, arg1);
2069     } else if (TCG_TARGET_HAS_rot_i64) {
2070         tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
2071     } else {
2072         TCGv_i64 t0, t1;
2073         t0 = tcg_temp_ebb_new_i64();
2074         t1 = tcg_temp_ebb_new_i64();
2075         tcg_gen_shli_i64(t0, arg1, arg2);
2076         tcg_gen_shri_i64(t1, arg1, 64 - arg2);
2077         tcg_gen_or_i64(ret, t0, t1);
2078         tcg_temp_free_i64(t0);
2079         tcg_temp_free_i64(t1);
2080     }
2081 }
2082 
2083 void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2084 {
2085     if (TCG_TARGET_HAS_rot_i64) {
2086         tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
2087     } else {
2088         TCGv_i64 t0, t1;
2089         t0 = tcg_temp_ebb_new_i64();
2090         t1 = tcg_temp_ebb_new_i64();
2091         tcg_gen_shr_i64(t0, arg1, arg2);
2092         tcg_gen_subfi_i64(t1, 64, arg2);
2093         tcg_gen_shl_i64(t1, arg1, t1);
2094         tcg_gen_or_i64(ret, t0, t1);
2095         tcg_temp_free_i64(t0);
2096         tcg_temp_free_i64(t1);
2097     }
2098 }
2099 
2100 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2101 {
2102     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2103     /* some cases can be optimized here */
2104     if (arg2 == 0) {
2105         tcg_gen_mov_i64(ret, arg1);
2106     } else {
2107         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2108     }
2109 }
2110 
/*
 * Deposit the low LEN bits of ARG2 into ARG1 at position OFS:
 *   ret = (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs),
 * where mask = (1 << len) - 1.
 */
void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint64_t mask;
    TCGv_i64 t1;

    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* A full-width deposit replaces arg1 entirely.  */
    if (len == 64) {
        tcg_gen_mov_i64(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Prefer a deposit confined to a single 32-bit half.  */
        if (ofs >= 32) {
            tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), ofs - 32, len);
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
                                TCGV_LOW(arg2), ofs, len);
            tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
            return;
        }
    }

    t1 = tcg_temp_ebb_new_i64();

    if (TCG_TARGET_HAS_extract2_i64) {
        /* Deposit at the top: shift arg1 up and take the double-word
           extract of t1:arg2 at LEN.  */
        if (ofs + len == 64) {
            tcg_gen_shli_i64(t1, arg1, len);
            tcg_gen_extract2_i64(ret, t1, arg2, len);
            goto done;
        }
        /* Deposit at the bottom: extract, then rotate the field into
           place at bit 0.  */
        if (ofs == 0) {
            tcg_gen_extract2_i64(ret, arg1, arg2, len);
            tcg_gen_rotli_i64(ret, ret, len);
            goto done;
        }
    }

    /* Generic expansion: mask and shift the field, clear the
       destination bits, then or the pieces together.  */
    mask = (1ull << len) - 1;
    if (ofs + len < 64) {
        tcg_gen_andi_i64(t1, arg2, mask);
        tcg_gen_shli_i64(t1, t1, ofs);
    } else {
        /* The field reaches the top: the shift discards the high
           bits, so no masking is needed.  */
        tcg_gen_shli_i64(t1, arg2, ofs);
    }
    tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i64(ret, ret, t1);
 done:
    tcg_temp_free_i64(t1);
}
2173 
/*
 * Deposit into a zero background:
 *   ret = (arg & ((1 << len) - 1)) << ofs.
 */
void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    if (ofs + len == 64) {
        /* Field reaches the top: a left shift suffices.  */
        tcg_gen_shli_i64(ret, arg, ofs);
    } else if (ofs == 0) {
        /* Field starts at bit 0: a mask suffices.  */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i64
               && TCG_TARGET_deposit_i64_valid(ofs, len)) {
        TCGv_i64 zero = tcg_constant_i64(0);
        tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            /* Prefer a deposit confined to a single 32-bit half.  */
            if (ofs >= 32) {
                tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
                                      ofs - 32, len);
                tcg_gen_movi_i32(TCGV_LOW(ret), 0);
                return;
            }
            if (ofs + len <= 32) {
                tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
                return;
            }
        }
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_ext32u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_ext16u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_ext8u_i64(ret, arg);
                tcg_gen_shli_i64(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        /* Here the shift comes first, then the extension clears the
           bits shifted above ofs + len.  */
        switch (ofs + len) {
        case 32:
            if (TCG_TARGET_HAS_ext32u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext32u_i64(ret, ret);
                return;
            }
            break;
        case 16:
            if (TCG_TARGET_HAS_ext16u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext16u_i64(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i64) {
                tcg_gen_shli_i64(ret, arg, ofs);
                tcg_gen_ext8u_i64(ret, ret);
                return;
            }
            break;
        }
        /* Last resort: explicit mask and shift.  */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        tcg_gen_shli_i64(ret, ret, ofs);
    }
}
2257 
/*
 * Unsigned bitfield extract:
 *   ret = (arg >> ofs) & ((1 << len) - 1).
 */
void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 64) {
        /* Field reaches the top: a right shift suffices.  */
        tcg_gen_shri_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field starts at bit 0: a mask suffices.  */
        tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
        return;
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        if (ofs + len <= 32) {
            tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            return;
        }
        /* The field is split across two words.  One double-word
           shift is better than two double-word shifts.  */
        goto do_shift_and;
    }

    if (TCG_TARGET_HAS_extract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_ext32u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_ext16u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_ext8u_i64(ret, arg);
            tcg_gen_shri_i64(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special cases of 16 and 32,
       so that we get ext8u, ext16u, and ext32u.  */
    switch (len) {
    case 1 ... 8: case 16: case 32:
    do_shift_and:
        /* Shift the field down, then mask it off.  */
        tcg_gen_shri_i64(ret, arg, ofs);
        tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
        break;
    default:
        /* Push the field to the top, then shift back down so the
           vacated bits fill with zeros.  */
        tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
        tcg_gen_shri_i64(ret, ret, 64 - len);
        break;
    }
}
2339 
/*
 * Signed bitfield extract: the LEN-bit field at OFS, sign-extended
 * to 64 bits.  Equivalent to
 *   ret = (int64_t)(arg << (64 - ofs - len)) >> (64 - len).
 */
void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 64);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 64);
    tcg_debug_assert(ofs + len <= 64);

    /* Canonicalize certain special cases, even if sextract is supported.  */
    if (ofs + len == 64) {
        /* Field reaches the top: an arithmetic shift suffices.  */
        tcg_gen_sari_i64(ret, arg, 64 - len);
        return;
    }
    if (ofs == 0) {
        /* Field at bit 0 with a standard width: use the ext ops.  */
        switch (len) {
        case 32:
            tcg_gen_ext32s_i64(ret, arg);
            return;
        case 16:
            tcg_gen_ext16s_i64(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i64(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_REG_BITS == 32) {
        /* Look for a 32-bit extract within one of the two words.  */
        if (ofs >= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
        } else if (ofs + len <= 32) {
            tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
        } else if (ofs == 0) {
            /* Field covers the whole low word plus part of the high.  */
            tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
            tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
            return;
        } else if (len > 32) {
            /* Field straddles the words and is wider than 32 bits.  */
            TCGv_i32 t = tcg_temp_ebb_new_i32();
            /* Extract the bits for the high word normally.  */
            tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
            /* Shift the field down for the low part.  */
            tcg_gen_shri_i64(ret, arg, ofs);
            /* Overwrite the shift into the high part.  */
            tcg_gen_mov_i32(TCGV_HIGH(ret), t);
            tcg_temp_free_i32(t);
            return;
        } else {
            /* Shift the field down for the low part, such that the
               field sits at the MSB.  */
            tcg_gen_shri_i64(ret, arg, ofs + len - 32);
            /* Shift the field down from the MSB, sign extending.  */
            tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
        }
        /* Sign-extend the field from 32 bits.  */
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
        return;
    }

    if (TCG_TARGET_HAS_sextract_i64
        && TCG_TARGET_extract_i64_valid(ofs, len)) {
        tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_ext32s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_ext16s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_ext8s_i64(ret, arg);
            tcg_gen_sari_i64(ret, ret, ofs);
            return;
        }
        break;
    }
    /* Or shift the field down first, then sign-extend it.  */
    switch (len) {
    case 32:
        if (TCG_TARGET_HAS_ext32s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext32s_i64(ret, ret);
            return;
        }
        break;
    case 16:
        if (TCG_TARGET_HAS_ext16s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext16s_i64(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i64) {
            tcg_gen_shri_i64(ret, arg, ofs);
            tcg_gen_ext8s_i64(ret, ret);
            return;
        }
        break;
    }
    /* Last resort: push the field to the top, then shift back down
       arithmetically so the vacated bits fill with the sign.  */
    tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
    tcg_gen_sari_i64(ret, ret, 64 - len);
}
2455 
2456 /*
2457  * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2458  * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2459  */
2460 void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
2461                           unsigned int ofs)
2462 {
2463     tcg_debug_assert(ofs <= 64);
2464     if (ofs == 0) {
2465         tcg_gen_mov_i64(ret, al);
2466     } else if (ofs == 64) {
2467         tcg_gen_mov_i64(ret, ah);
2468     } else if (al == ah) {
2469         tcg_gen_rotri_i64(ret, al, ofs);
2470     } else if (TCG_TARGET_HAS_extract2_i64) {
2471         tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
2472     } else {
2473         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2474         tcg_gen_shri_i64(t0, al, ofs);
2475         tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
2476         tcg_temp_free_i64(t0);
2477     }
2478 }
2479 
/*
 * Conditional move: ret = (c1 cond c2 ? v1 : v2).
 */
void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
                         TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i64(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i64(ret, v2);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
        TCGv_i32 t1 = tcg_temp_ebb_new_i32();
        /* Evaluate the 64-bit comparison into the 0/1 value t0.  */
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
                         TCGV_LOW(c1), TCGV_HIGH(c1),
                         TCGV_LOW(c2), TCGV_HIGH(c2), cond);

        if (TCG_TARGET_HAS_movcond_i32) {
            /* Select each half on t0 != 0.  */
            tcg_gen_movi_i32(t1, 0);
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
                                TCGV_LOW(v1), TCGV_LOW(v2));
            tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
                                TCGV_HIGH(v1), TCGV_HIGH(v2));
        } else {
            /* Turn 0/1 into an all-zeros/all-ones mask, then merge
               each half as (v1 & mask) | (v2 & ~mask).  */
            tcg_gen_neg_i32(t0, t0);

            tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
            tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
            tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);

            tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
            tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
            tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
        }
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_movcond_i64) {
        tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
    } else {
        /* No native movcond: setcond, then merge with a mask as above.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        tcg_gen_setcond_i64(cond, t0, c1, c2);
        tcg_gen_neg_i64(t0, t0);
        tcg_gen_and_i64(t1, v1, t0);
        tcg_gen_andc_i64(ret, v2, t0);
        tcg_gen_or_i64(ret, ret, t1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
2527 
2528 void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2529                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2530 {
2531     if (TCG_TARGET_HAS_add2_i64) {
2532         tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
2533     } else {
2534         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2535         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2536         tcg_gen_add_i64(t0, al, bl);
2537         tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
2538         tcg_gen_add_i64(rh, ah, bh);
2539         tcg_gen_add_i64(rh, rh, t1);
2540         tcg_gen_mov_i64(rl, t0);
2541         tcg_temp_free_i64(t0);
2542         tcg_temp_free_i64(t1);
2543     }
2544 }
2545 
2546 void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2547                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2548 {
2549     if (TCG_TARGET_HAS_sub2_i64) {
2550         tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
2551     } else {
2552         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2553         TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2554         tcg_gen_sub_i64(t0, al, bl);
2555         tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
2556         tcg_gen_sub_i64(rh, ah, bh);
2557         tcg_gen_sub_i64(rh, rh, t1);
2558         tcg_gen_mov_i64(rl, t0);
2559         tcg_temp_free_i64(t0);
2560         tcg_temp_free_i64(t1);
2561     }
2562 }
2563 
2564 void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2565 {
2566     if (TCG_TARGET_HAS_mulu2_i64) {
2567         tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
2568     } else if (TCG_TARGET_HAS_muluh_i64) {
2569         TCGv_i64 t = tcg_temp_ebb_new_i64();
2570         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2571         tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
2572         tcg_gen_mov_i64(rl, t);
2573         tcg_temp_free_i64(t);
2574     } else {
2575         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2576         tcg_gen_mul_i64(t0, arg1, arg2);
2577         gen_helper_muluh_i64(rh, arg1, arg2);
2578         tcg_gen_mov_i64(rl, t0);
2579         tcg_temp_free_i64(t0);
2580     }
2581 }
2582 
/*
 * Signed 64x64 -> 128-bit multiply: rh:rl = arg1 * arg2.
 */
void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_muls2_i64) {
        tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i64) {
        /* Low part into a temp so RL may alias an input.  */
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
        tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t);
        tcg_temp_free_i64(t);
    } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        TCGv_i64 t1 = tcg_temp_ebb_new_i64();
        TCGv_i64 t2 = tcg_temp_ebb_new_i64();
        TCGv_i64 t3 = tcg_temp_ebb_new_i64();
        tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        /* An unsigned multiply treats a negative operand as having an
           extra 2^64; subtract the other operand from the high half
           for each negative input to compensate.  */
        tcg_gen_sari_i64(t2, arg1, 63);
        tcg_gen_sari_i64(t3, arg2, 63);
        tcg_gen_and_i64(t2, t2, arg2);
        tcg_gen_and_i64(t3, t3, arg1);
        tcg_gen_sub_i64(rh, t1, t2);
        tcg_gen_sub_i64(rh, rh, t3);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* Fall back to a helper call for the high part.  */
        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
        tcg_gen_mul_i64(t0, arg1, arg2);
        gen_helper_mulsh_i64(rh, arg1, arg2);
        tcg_gen_mov_i64(rl, t0);
        tcg_temp_free_i64(t0);
    }
}
2619 
2620 void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2621 {
2622     TCGv_i64 t0 = tcg_temp_ebb_new_i64();
2623     TCGv_i64 t1 = tcg_temp_ebb_new_i64();
2624     TCGv_i64 t2 = tcg_temp_ebb_new_i64();
2625     tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2626     /* Adjust for negative input for the signed arg1.  */
2627     tcg_gen_sari_i64(t2, arg1, 63);
2628     tcg_gen_and_i64(t2, t2, arg2);
2629     tcg_gen_sub_i64(rh, t1, t2);
2630     tcg_gen_mov_i64(rl, t0);
2631     tcg_temp_free_i64(t0);
2632     tcg_temp_free_i64(t1);
2633     tcg_temp_free_i64(t2);
2634 }
2635 
/* Signed minimum: ret = a < b (signed) ? a : b.  */
void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
}
2640 
/* Unsigned minimum: ret = a < b (unsigned) ? a : b.  */
void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
}
2645 
/* Signed maximum: ret = a < b (signed) ? b : a.  */
void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
}
2650 
/* Unsigned maximum: ret = a < b (unsigned) ? b : a.  */
void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
}
2655 
2656 void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
2657 {
2658     TCGv_i64 t = tcg_temp_ebb_new_i64();
2659 
2660     tcg_gen_sari_i64(t, a, 63);
2661     tcg_gen_xor_i64(ret, a, t);
2662     tcg_gen_sub_i64(ret, ret, t);
2663     tcg_temp_free_i64(t);
2664 }
2665 
2666 /* Size changing operations.  */
2667 
/* Extract the low 32 bits of a 64-bit value into an i32.  */
void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_LOW(arg));
    } else if (TCG_TARGET_HAS_extrl_i64_i32) {
        tcg_gen_op2(INDEX_op_extrl_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        /* The cast reinterprets the i64 temp as an i32, reading
           its low half directly on a 64-bit host.  */
        tcg_gen_mov_i32(ret, (TCGv_i32)arg);
    }
}
2679 
/*
 * Extract the high 32 bits of a 64-bit value into an i32.
 *
 * On a 32-bit host this is a move of the high half of the register
 * pair.  Otherwise emit the dedicated opcode when available; as a
 * fallback shift right by 32 into a scratch i64 and reuse that temp
 * directly as an i32 (same cast trick as tcg_gen_extrl_i64_i32).
 */
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
    } else if (TCG_TARGET_HAS_extrh_i64_i32) {
        tcg_gen_op2(INDEX_op_extrh_i64_i32,
                    tcgv_i32_arg(ret), tcgv_i64_arg(arg));
    } else {
        TCGv_i64 t = tcg_temp_ebb_new_i64();
        tcg_gen_shri_i64(t, arg, 32);
        tcg_gen_mov_i32(ret, (TCGv_i32)t);
        tcg_temp_free_i64(t);
    }
}
2694 
/*
 * Zero-extend an i32 into an i64.  On a 32-bit host: copy into the
 * low half and clear the high half; otherwise emit the extu opcode.
 */
void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else {
        tcg_gen_op2(INDEX_op_extu_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
2705 
/*
 * Sign-extend an i32 into an i64.  On a 32-bit host: copy into the
 * low half and replicate the sign bit into the high half with an
 * arithmetic shift; otherwise emit the ext opcode.
 */
void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), arg);
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else {
        tcg_gen_op2(INDEX_op_ext_i32_i64,
                    tcgv_i64_arg(ret), tcgv_i32_arg(arg));
    }
}
2716 
2717 void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
2718 {
2719     TCGv_i64 tmp;
2720 
2721     if (TCG_TARGET_REG_BITS == 32) {
2722         tcg_gen_mov_i32(TCGV_LOW(dest), low);
2723         tcg_gen_mov_i32(TCGV_HIGH(dest), high);
2724         return;
2725     }
2726 
2727     tmp = tcg_temp_ebb_new_i64();
2728     /* These extensions are only needed for type correctness.
2729        We may be able to do better given target specific information.  */
2730     tcg_gen_extu_i32_i64(tmp, high);
2731     tcg_gen_extu_i32_i64(dest, low);
2732     /* If deposit is available, use it.  Otherwise use the extra
2733        knowledge that we have of the zero-extensions above.  */
2734     if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
2735         tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
2736     } else {
2737         tcg_gen_shli_i64(tmp, tmp, 32);
2738         tcg_gen_or_i64(dest, dest, tmp);
2739     }
2740     tcg_temp_free_i64(tmp);
2741 }
2742 
/*
 * Split a 64-bit value into its two 32-bit halves: lo = arg[31:0],
 * hi = arg[63:32].  On a 32-bit host both halves are direct register
 * moves; otherwise defer to the extrl/extrh helpers.
 */
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(lo, TCGV_LOW(arg));
        tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
    } else {
        tcg_gen_extrl_i64_i32(lo, arg);
        tcg_gen_extrh_i64_i32(hi, arg);
    }
}
2753 
/*
 * Split a 64-bit value into two i64 results: lo = arg[31:0]
 * zero-extended, hi = arg[63:32] zero-extended.
 * NOTE(review): lo is written before hi is computed from arg, so this
 * assumes lo does not alias arg -- confirm against callers.
 */
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
{
    tcg_gen_ext32u_i64(lo, arg);
    tcg_gen_shri_i64(hi, arg, 32);
}
2759 
/* Split a 128-bit value into its two 64-bit halves.  */
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
    tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
    tcg_gen_mov_i64(hi, TCGV128_HIGH(arg));
}
2765 
/* Build a 128-bit value from two 64-bit halves.  */
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi)
{
    tcg_gen_mov_i64(TCGV128_LOW(ret), lo);
    tcg_gen_mov_i64(TCGV128_HIGH(ret), hi);
}
2771 
2772 void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src)
2773 {
2774     if (dst != src) {
2775         tcg_gen_mov_i64(TCGV128_LOW(dst), TCGV128_LOW(src));
2776         tcg_gen_mov_i64(TCGV128_HIGH(dst), TCGV128_HIGH(src));
2777     }
2778 }
2779 
2780 /* QEMU specific operations.  */
2781 
/*
 * Emit the op that ends execution of the generated code and returns
 * to the main loop, passing back the TB pointer tagged with the exit
 * index in its low bits.  tb == NULL requests a plain exit with no
 * chaining (idx must then be 0).
 */
void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
{
    /*
     * Let the jit code return the read-only version of the
     * TranslationBlock, so that we minimize the pc-relative
     * distance of the address of the exit_tb code to TB.
     * This will improve utilization of pc-relative address loads.
     *
     * TODO: Move this to translator_loop, so that all const
     * TranslationBlock pointers refer to read-only memory.
     * This requires coordination with targets that do not use
     * the translator_loop.
     */
    uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;

    if (tb == NULL) {
        tcg_debug_assert(idx == 0);
    } else if (idx <= TB_EXIT_IDXMAX) {
#ifdef CONFIG_DEBUG_TCG
        /* This is an exit following a goto_tb.  Verify that we have
           seen this numbered exit before, via tcg_gen_goto_tb.  */
        tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
#endif
    } else {
        /* This is an exit via the exitreq label.  */
        tcg_debug_assert(idx == TB_EXIT_REQUESTED);
    }

    tcg_gen_op1i(INDEX_op_exit_tb, val);
}
2812 
/*
 * Emit a direct-chaining goto_tb op for exit slot idx.  Each of the
 * numbered slots may be emitted at most once per TB; under
 * CONFIG_DEBUG_TCG this is enforced via goto_tb_issue_mask, which
 * tcg_gen_exit_tb later cross-checks.
 */
void tcg_gen_goto_tb(unsigned idx)
{
    /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
    tcg_debug_assert(!(tcg_ctx->gen_tb->cflags & CF_NO_GOTO_TB));
    /* We only support two chained exits.  */
    tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
#ifdef CONFIG_DEBUG_TCG
    /* Verify that we haven't seen this numbered exit before.  */
    tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
    tcg_ctx->goto_tb_issue_mask |= 1 << idx;
#endif
    plugin_gen_disable_mem_helpers();
    tcg_gen_op1i(INDEX_op_goto_tb, idx);
}
2827 
/*
 * Emit an indirect jump to the next TB: call the lookup helper to
 * resolve the current cpu state to a code pointer, then emit a
 * goto_ptr op to jump to it.  If indirect jumps are disabled for this
 * TB (CF_NO_GOTO_PTR), fall back to a plain exit to the main loop.
 */
void tcg_gen_lookup_and_goto_ptr(void)
{
    TCGv_ptr ptr;

    if (tcg_ctx->gen_tb->cflags & CF_NO_GOTO_PTR) {
        tcg_gen_exit_tb(NULL, 0);
        return;
    }

    plugin_gen_disable_mem_helpers();
    ptr = tcg_temp_ebb_new_ptr();
    gen_helper_lookup_tb_ptr(ptr, cpu_env);
    tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
    tcg_temp_free_ptr(ptr);
}
2843