/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "exec/exec-all.h"
#include "tcg/tcg.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-mo.h"
#include "trace-tcg.h"
#include "trace/mem.h"
#include "exec/plugin-gen.h"

/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif

void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
}

void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
}

void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}

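/*
 * Explanatory note: a barrier opcode is only emitted when the TB is
 * generated for a parallel context (CF_PARALLEL).  Without other vCPUs
 * touching memory concurrently, ordering is already guaranteed and the
 * barrier can be elided.
 */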
void tcg_gen_mb(TCGBar mb_type)
{
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}

/* 32-bit ops */

void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

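/*
 * Explanatory note on the INDEX_op_div2_i32 fallback below: like x86
 * IDIV, the opcode divides a double-width dividend (arg1 as the low
 * half, with its sign extension in t0 as the high half) by a
 * single-width divisor; the first two operands receive the quotient
 * and the remainder.
 */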
void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

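/*
 * Explanatory note: when only the 64-bit clz is available, the
 * zero-extended 32-bit input gains exactly 32 extra leading zeros.
 * Biasing the "input was zero" default arg2 by +32 before the operation
 * and subtracting 32 from the result cancels that out for both cases.
 */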
void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}

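/*
 * Explanatory note on the ctz fallbacks: ((arg - 1) & ~arg) sets
 * exactly the bits below the least significant set bit, so its
 * population count equals ctz(arg).  Alternatively, (-arg & arg)
 * isolates the lowest set bit itself, and for a single-bit value
 * ctz(x) == 31 - clz(x) == clz(x) ^ 31.  The final movcond substitutes
 * arg2 for the arg == 0 case.
 */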
void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

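/*
 * Explanatory note: count leading redundant sign bits.  Xoring arg with
 * its full sign extension (arg >> 31) turns the leading sign-bit copies
 * into zeros, so clz of that value, minus one for the sign bit itself,
 * is the answer; clz(0) == 32 makes the all-zeros and all-ones inputs
 * yield 31.
 */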
void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}

void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}

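/*
 * Explanatory note: without a rotate opcode, rotl(x, n) decomposes into
 * shifts as (x << n) | (x >> (32 - n)); rotr is the mirror image, and
 * the immediate forms reduce one rotate to the other via 32 - n.
 */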
void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

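/*
 * Explanatory note: the generic deposit fallback builds the result by
 * hand: mask the field out of arg2, shift it into position, clear the
 * corresponding bits in arg1, and or the two together.  The
 * extract2-based paths handle a field touching either end of the word
 * more cheaply.
 */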
void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}

void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}

void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}

void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}

/*
 * Extract 32 bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}

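/*
 * Explanatory note: the movcond fallback is a branchless select.
 * setcond produces 0 or 1, negation turns that into an all-zeros or
 * all-ones mask m, and (v1 & m) | (v2 & ~m) picks between the two
 * values.
 */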
void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

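/*
 * Explanatory note for the 32-bit host path: interpreting an operand as
 * signed rather than unsigned changes its value by 2^32 when negative,
 * so the signed high part follows from the unsigned one as
 *   hi(s1 * s2) = hi(u1 * u2) - (s1 < 0 ? s2 : 0) - (s2 < 0 ? s1 : 0),
 * implemented below with the sign masks t2 and t3.
 */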
void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

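/*
 * Explanatory note: signed-by-unsigned variant of the above.  Only arg1
 * is signed, so a single correction term (arg2 when arg1 is negative)
 * is subtracted from the unsigned high part.
 */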
void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

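/*
 * Explanatory note: the bswap flags describe the bits outside the
 * swapped halfword.  TCG_BSWAP_IZ asserts the input is already
 * zero-extended above bit 15, letting the fallback skip one masking
 * step, while TCG_BSWAP_OZ and TCG_BSWAP_OS request a zero- or
 * sign-extended result.
 */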
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg, int flags)
{
    /* Only one extension flag may be present. */
    tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));

    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap16_i32, ret, arg, flags);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();

        tcg_gen_shri_i32(t0, arg, 8);
        if (!(flags & TCG_BSWAP_IZ)) {
            tcg_gen_ext8u_i32(t0, t0);
        }

        if (flags & TCG_BSWAP_OS) {
            tcg_gen_shli_i32(t1, arg, 24);
            tcg_gen_sari_i32(t1, t1, 16);
        } else if (flags & TCG_BSWAP_OZ) {
            tcg_gen_ext8u_i32(t1, arg);
            tcg_gen_shli_i32(t1, t1, 8);
        } else {
            tcg_gen_shli_i32(t1, arg, 8);
        }

        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op3i_i32(INDEX_op_bswap32_i32, ret, arg, 0);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}

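/*
 * Explanatory note: branchless abs.  t = a >> 31 is 0 or -1, so
 * (a ^ t) - t is either a unchanged or ~a + 1 == -a.
 */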
void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}

/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

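/*
 * Explanatory note: 64-bit multiply on a 32-bit host, schoolbook style.
 * A full 32x32->64 product of the low halves, plus the two cross
 * products added into the high half; the high*high product would only
 * affect bits above 63 and is dropped.
 */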
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}

#else

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}

#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

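/*
 * Explanatory note: constant double-word shifts on a 32-bit host split
 * into three cases.  A count of zero is a plain move; a count of 32 or
 * more moves one half into the other (filling with zeros or sign bits)
 * and shifts it by c - 32; otherwise bits are funneled between the two
 * halves, using extract2 when the backend provides it.
 */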
static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        tcg_gen_mul_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}
1529 
1530 void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1531 {
1532     if (TCG_TARGET_HAS_div_i64) {
1533         tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
1534     } else if (TCG_TARGET_HAS_div2_i64) {
1535         TCGv_i64 t0 = tcg_temp_new_i64();
1536         tcg_gen_sari_i64(t0, arg1, 63);
1537         tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
1538         tcg_temp_free_i64(t0);
1539     } else {
1540         gen_helper_div_i64(ret, arg1, arg2);
1541     }
1542 }
1543 
1544 void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1545 {
1546     if (TCG_TARGET_HAS_rem_i64) {
1547         tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
1548     } else if (TCG_TARGET_HAS_div_i64) {
1549         TCGv_i64 t0 = tcg_temp_new_i64();
1550         tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
1551         tcg_gen_mul_i64(t0, t0, arg2);
1552         tcg_gen_sub_i64(ret, arg1, t0);
1553         tcg_temp_free_i64(t0);
1554     } else if (TCG_TARGET_HAS_div2_i64) {
1555         TCGv_i64 t0 = tcg_temp_new_i64();
1556         tcg_gen_sari_i64(t0, arg1, 63);
1557         tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
1558         tcg_temp_free_i64(t0);
1559     } else {
1560         gen_helper_rem_i64(ret, arg1, arg2);
1561     }
1562 }
1563 
1564 void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1565 {
1566     if (TCG_TARGET_HAS_div_i64) {
1567         tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
1568     } else if (TCG_TARGET_HAS_div2_i64) {
1569         TCGv_i64 t0 = tcg_temp_new_i64();
1570         tcg_gen_movi_i64(t0, 0);
1571         tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
1572         tcg_temp_free_i64(t0);
1573     } else {
1574         gen_helper_divu_i64(ret, arg1, arg2);
1575     }
1576 }
1577 
1578 void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1579 {
1580     if (TCG_TARGET_HAS_rem_i64) {
1581         tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
1582     } else if (TCG_TARGET_HAS_div_i64) {
1583         TCGv_i64 t0 = tcg_temp_new_i64();
1584         tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
1585         tcg_gen_mul_i64(t0, t0, arg2);
1586         tcg_gen_sub_i64(ret, arg1, t0);
1587         tcg_temp_free_i64(t0);
1588     } else if (TCG_TARGET_HAS_div2_i64) {
1589         TCGv_i64 t0 = tcg_temp_new_i64();
1590         tcg_gen_movi_i64(t0, 0);
1591         tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
1592         tcg_temp_free_i64(t0);
1593     } else {
1594         gen_helper_remu_i64(ret, arg1, arg2);
1595     }
1596 }
1597 
1598 void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
1599 {
1600     if (TCG_TARGET_REG_BITS == 32) {
1601         tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1602         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1603     } else if (TCG_TARGET_HAS_ext8s_i64) {
1604         tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
1605     } else {
1606         tcg_gen_shli_i64(ret, arg, 56);
1607         tcg_gen_sari_i64(ret, ret, 56);
1608     }
1609 }
1610 
1611 void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
1612 {
1613     if (TCG_TARGET_REG_BITS == 32) {
1614         tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1615         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1616     } else if (TCG_TARGET_HAS_ext16s_i64) {
1617         tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
1618     } else {
1619         tcg_gen_shli_i64(ret, arg, 48);
1620         tcg_gen_sari_i64(ret, ret, 48);
1621     }
1622 }
1623 
1624 void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
1625 {
1626     if (TCG_TARGET_REG_BITS == 32) {
1627         tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1628         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1629     } else if (TCG_TARGET_HAS_ext32s_i64) {
1630         tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
1631     } else {
1632         tcg_gen_shli_i64(ret, arg, 32);
1633         tcg_gen_sari_i64(ret, ret, 32);
1634     }
1635 }
1636 
1637 void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
1638 {
1639     if (TCG_TARGET_REG_BITS == 32) {
1640         tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1641         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1642     } else if (TCG_TARGET_HAS_ext8u_i64) {
1643         tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
1644     } else {
1645         tcg_gen_andi_i64(ret, arg, 0xffu);
1646     }
1647 }
1648 
1649 void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
1650 {
1651     if (TCG_TARGET_REG_BITS == 32) {
1652         tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1653         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1654     } else if (TCG_TARGET_HAS_ext16u_i64) {
1655         tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
1656     } else {
1657         tcg_gen_andi_i64(ret, arg, 0xffffu);
1658     }
1659 }
1660 
1661 void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
1662 {
1663     if (TCG_TARGET_REG_BITS == 32) {
1664         tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1665         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1666     } else if (TCG_TARGET_HAS_ext32u_i64) {
1667         tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
1668     } else {
1669         tcg_gen_andi_i64(ret, arg, 0xffffffffu);
1670     }
1671 }
1672 
1673 void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
1674 {
1675     /* Only one extension flag may be present. */
1676     tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));
1677 
1678     if (TCG_TARGET_REG_BITS == 32) {
1679         tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg), flags);
1680         if (flags & TCG_BSWAP_OS) {
1681             tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1682         } else {
1683             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1684         }
1685     } else if (TCG_TARGET_HAS_bswap16_i64) {
1686         tcg_gen_op3i_i64(INDEX_op_bswap16_i64, ret, arg, flags);
1687     } else {
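        /* Conceptually: swap the two low bytes, then extend bits 16..63
           according to the OS/OZ flags (undefined if neither is set).  */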
1688         TCGv_i64 t0 = tcg_temp_new_i64();
1689         TCGv_i64 t1 = tcg_temp_new_i64();
1690 
1691         tcg_gen_shri_i64(t0, arg, 8);
1692         if (!(flags & TCG_BSWAP_IZ)) {
1693             tcg_gen_ext8u_i64(t0, t0);
1694         }
1695 
1696         if (flags & TCG_BSWAP_OS) {
1697             tcg_gen_shli_i64(t1, arg, 56);
1698             tcg_gen_sari_i64(t1, t1, 48);
1699         } else if (flags & TCG_BSWAP_OZ) {
1700             tcg_gen_ext8u_i64(t1, arg);
1701             tcg_gen_shli_i64(t1, t1, 8);
1702         } else {
1703             tcg_gen_shli_i64(t1, arg, 8);
1704         }
1705 
1706         tcg_gen_or_i64(ret, t0, t1);
1707         tcg_temp_free_i64(t0);
1708         tcg_temp_free_i64(t1);
1709     }
1710 }
1711 
1712 void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg, int flags)
1713 {
1714     /* Only one extension flag may be present. */
1715     tcg_debug_assert(!(flags & TCG_BSWAP_OS) || !(flags & TCG_BSWAP_OZ));
1716 
1717     if (TCG_TARGET_REG_BITS == 32) {
1718         tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1719         if (flags & TCG_BSWAP_OS) {
1720             tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
1721         } else {
1722             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1723         }
1724     } else if (TCG_TARGET_HAS_bswap32_i64) {
1725         tcg_gen_op3i_i64(INDEX_op_bswap32_i64, ret, arg, flags);
1726     } else {
1727         TCGv_i64 t0 = tcg_temp_new_i64();
1728         TCGv_i64 t1 = tcg_temp_new_i64();
1729         TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);
1730 
1731                                             /* arg = xxxxabcd */
1732         tcg_gen_shri_i64(t0, arg, 8);       /*  t0 = .xxxxabc */
1733         tcg_gen_and_i64(t1, arg, t2);       /*  t1 = .....b.d */
1734         tcg_gen_and_i64(t0, t0, t2);        /*  t0 = .....a.c */
1735         tcg_gen_shli_i64(t1, t1, 8);        /*  t1 = ....b.d. */
1736         tcg_gen_or_i64(ret, t0, t1);        /* ret = ....badc */
1737 
1738         tcg_gen_shli_i64(t1, ret, 48);      /*  t1 = dc...... */
1739         tcg_gen_shri_i64(t0, ret, 16);      /*  t0 = ......ba */
1740         if (flags & TCG_BSWAP_OS) {
1741             tcg_gen_sari_i64(t1, t1, 32);   /*  t1 = ssssdc.. */
1742         } else {
1743             tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
1744         }
1745         tcg_gen_or_i64(ret, t0, t1);        /* ret = ssssdcba */
1746 
1747         tcg_temp_free_i64(t0);
1748         tcg_temp_free_i64(t1);
1749     }
1750 }
1751 
1752 void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
1753 {
1754     if (TCG_TARGET_REG_BITS == 32) {
1755         TCGv_i32 t0, t1;
1756         t0 = tcg_temp_new_i32();
1757         t1 = tcg_temp_new_i32();
1758 
1759         tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
1760         tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
1761         tcg_gen_mov_i32(TCGV_LOW(ret), t1);
1762         tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
1763         tcg_temp_free_i32(t0);
1764         tcg_temp_free_i32(t1);
1765     } else if (TCG_TARGET_HAS_bswap64_i64) {
1766         tcg_gen_op3i_i64(INDEX_op_bswap64_i64, ret, arg, 0);
1767     } else {
1768         TCGv_i64 t0 = tcg_temp_new_i64();
1769         TCGv_i64 t1 = tcg_temp_new_i64();
1770         TCGv_i64 t2 = tcg_temp_new_i64();
1771 
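        /* Three rounds of masked swaps: bytes within half-words, then
           half-words within words, then the two words.  */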
1772                                         /* arg = abcdefgh */
1773         tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
1774         tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
1775         tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
1776         tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
1777         tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
1778         tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */
1779 
1780         tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
1781         tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
1782         tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
1783         tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
1784         tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
1785         tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */
1786 
1787         tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
1788         tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
1789         tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */
1790 
1791         tcg_temp_free_i64(t0);
1792         tcg_temp_free_i64(t1);
1793         tcg_temp_free_i64(t2);
1794     }
1795 }
1796 
1797 void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1798 {
1799     if (TCG_TARGET_REG_BITS == 32) {
1800         tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1801         tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1802     } else if (TCG_TARGET_HAS_not_i64) {
1803         tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1804     } else {
1805         tcg_gen_xori_i64(ret, arg, -1);
1806     }
1807 }
1808 
1809 void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1810 {
1811     if (TCG_TARGET_REG_BITS == 32) {
1812         tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1813         tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1814     } else if (TCG_TARGET_HAS_andc_i64) {
1815         tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1816     } else {
1817         TCGv_i64 t0 = tcg_temp_new_i64();
1818         tcg_gen_not_i64(t0, arg2);
1819         tcg_gen_and_i64(ret, arg1, t0);
1820         tcg_temp_free_i64(t0);
1821     }
1822 }
1823 
1824 void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1825 {
1826     if (TCG_TARGET_REG_BITS == 32) {
1827         tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1828         tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1829     } else if (TCG_TARGET_HAS_eqv_i64) {
1830         tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1831     } else {
1832         tcg_gen_xor_i64(ret, arg1, arg2);
1833         tcg_gen_not_i64(ret, ret);
1834     }
1835 }
1836 
1837 void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1838 {
1839     if (TCG_TARGET_REG_BITS == 32) {
1840         tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1841         tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1842     } else if (TCG_TARGET_HAS_nand_i64) {
1843         tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
1844     } else {
1845         tcg_gen_and_i64(ret, arg1, arg2);
1846         tcg_gen_not_i64(ret, ret);
1847     }
1848 }
1849 
1850 void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1851 {
1852     if (TCG_TARGET_REG_BITS == 32) {
1853         tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1854         tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1855     } else if (TCG_TARGET_HAS_nor_i64) {
1856         tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
1857     } else {
1858         tcg_gen_or_i64(ret, arg1, arg2);
1859         tcg_gen_not_i64(ret, ret);
1860     }
1861 }
1862 
1863 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1864 {
1865     if (TCG_TARGET_REG_BITS == 32) {
1866         tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1867         tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1868     } else if (TCG_TARGET_HAS_orc_i64) {
1869         tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
1870     } else {
1871         TCGv_i64 t0 = tcg_temp_new_i64();
1872         tcg_gen_not_i64(t0, arg2);
1873         tcg_gen_or_i64(ret, arg1, t0);
1874         tcg_temp_free_i64(t0);
1875     }
1876 }
1877 
1878 void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1879 {
1880     if (TCG_TARGET_HAS_clz_i64) {
1881         tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
1882     } else {
1883         gen_helper_clz_i64(ret, arg1, arg2);
1884     }
1885 }
1886 
1887 void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1888 {
1889     if (TCG_TARGET_REG_BITS == 32
1890         && TCG_TARGET_HAS_clz_i32
1891         && arg2 <= 0xffffffffu) {
1892         TCGv_i32 t = tcg_temp_new_i32();
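        /* If the high word is nonzero, its clz is the answer; otherwise
           use 32 + clz(low), which itself defaults to arg2 when the low
           word is also zero.  */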
1893         tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
1894         tcg_gen_addi_i32(t, t, 32);
1895         tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
1896         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1897         tcg_temp_free_i32(t);
1898     } else {
1899         TCGv_i64 t0 = tcg_const_i64(arg2);
1900         tcg_gen_clz_i64(ret, arg1, t0);
1901         tcg_temp_free_i64(t0);
1902     }
1903 }
1904 
1905 void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1906 {
1907     if (TCG_TARGET_HAS_ctz_i64) {
1908         tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
1909     } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
1910         TCGv_i64 z, t = tcg_temp_new_i64();
1911 
1912         if (TCG_TARGET_HAS_ctpop_i64) {
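            /* ctz(x) = ctpop((x - 1) & ~x): the expression turns the
               trailing zeros of x into a mask of 1s (all-ones for x == 0,
               correctly giving 64).  */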
1913             tcg_gen_subi_i64(t, arg1, 1);
1914             tcg_gen_andc_i64(t, t, arg1);
1915             tcg_gen_ctpop_i64(t, t);
1916         } else {
1917             /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
1918             tcg_gen_neg_i64(t, arg1);
1919             tcg_gen_and_i64(t, t, arg1);
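            /* t = arg1 & -arg1 isolates the lowest set bit; for nonzero
               input, ctz = 63 - clz(t) = clz(t) ^ 63.  */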
1920             tcg_gen_clzi_i64(t, t, 64);
1921             tcg_gen_xori_i64(t, t, 63);
1922         }
1923         z = tcg_constant_i64(0);
1924         tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
1925         tcg_temp_free_i64(t);
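        /* tcg_temp_free on a constant temp is expected to be ignored;
           the call is kept for symmetry with the tcg_const_* form.  */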
1926         tcg_temp_free_i64(z);
1927     } else {
1928         gen_helper_ctz_i64(ret, arg1, arg2);
1929     }
1930 }
1931 
1932 void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1933 {
1934     if (TCG_TARGET_REG_BITS == 32
1935         && TCG_TARGET_HAS_ctz_i32
1936         && arg2 <= 0xffffffffu) {
1937         TCGv_i32 t32 = tcg_temp_new_i32();
1938         tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
1939         tcg_gen_addi_i32(t32, t32, 32);
1940         tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
1941         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1942         tcg_temp_free_i32(t32);
1943     } else if (!TCG_TARGET_HAS_ctz_i64
1944                && TCG_TARGET_HAS_ctpop_i64
1945                && arg2 == 64) {
1946         /* This equivalence has the advantage of not requiring a fixup.  */
1947         TCGv_i64 t = tcg_temp_new_i64();
1948         tcg_gen_subi_i64(t, arg1, 1);
1949         tcg_gen_andc_i64(t, t, arg1);
1950         tcg_gen_ctpop_i64(ret, t);
1951         tcg_temp_free_i64(t);
1952     } else {
1953         TCGv_i64 t0 = tcg_const_i64(arg2);
1954         tcg_gen_ctz_i64(ret, arg1, t0);
1955         tcg_temp_free_i64(t0);
1956     }
1957 }
1958 
1959 void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
1960 {
1961     if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
1962         TCGv_i64 t = tcg_temp_new_i64();
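        /* arg ^ (arg >> 63) clears the redundant sign bits; with
           clz(0) == 64, clz of that minus 1 is exactly clrsb.  */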
1963         tcg_gen_sari_i64(t, arg, 63);
1964         tcg_gen_xor_i64(t, t, arg);
1965         tcg_gen_clzi_i64(t, t, 64);
1966         tcg_gen_subi_i64(ret, t, 1);
1967         tcg_temp_free_i64(t);
1968     } else {
1969         gen_helper_clrsb_i64(ret, arg);
1970     }
1971 }
1972 
1973 void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
1974 {
1975     if (TCG_TARGET_HAS_ctpop_i64) {
1976         tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
1977     } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
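        /* A 64-bit population count is the sum of the two 32-bit counts.  */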
1978         tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
1979         tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
1980         tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
1981         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1982     } else {
1983         gen_helper_ctpop_i64(ret, arg1);
1984     }
1985 }
1986 
1987 void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1988 {
1989     if (TCG_TARGET_HAS_rot_i64) {
1990         tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
1991     } else {
1992         TCGv_i64 t0, t1;
1993         t0 = tcg_temp_new_i64();
1994         t1 = tcg_temp_new_i64();
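        /* rotl(x, n) = (x << n) | (x >> (64 - n)).  */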
1995         tcg_gen_shl_i64(t0, arg1, arg2);
1996         tcg_gen_subfi_i64(t1, 64, arg2);
1997         tcg_gen_shr_i64(t1, arg1, t1);
1998         tcg_gen_or_i64(ret, t0, t1);
1999         tcg_temp_free_i64(t0);
2000         tcg_temp_free_i64(t1);
2001     }
2002 }
2003 
2004 void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2005 {
2006     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2007     /* some cases can be optimized here */
2008     if (arg2 == 0) {
2009         tcg_gen_mov_i64(ret, arg1);
2010     } else if (TCG_TARGET_HAS_rot_i64) {
2011         tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
2012     } else {
2013         TCGv_i64 t0, t1;
2014         t0 = tcg_temp_new_i64();
2015         t1 = tcg_temp_new_i64();
2016         tcg_gen_shli_i64(t0, arg1, arg2);
2017         tcg_gen_shri_i64(t1, arg1, 64 - arg2);
2018         tcg_gen_or_i64(ret, t0, t1);
2019         tcg_temp_free_i64(t0);
2020         tcg_temp_free_i64(t1);
2021     }
2022 }
2023 
2024 void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
2025 {
2026     if (TCG_TARGET_HAS_rot_i64) {
2027         tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
2028     } else {
2029         TCGv_i64 t0, t1;
2030         t0 = tcg_temp_new_i64();
2031         t1 = tcg_temp_new_i64();
2032         tcg_gen_shr_i64(t0, arg1, arg2);
2033         tcg_gen_subfi_i64(t1, 64, arg2);
2034         tcg_gen_shl_i64(t1, arg1, t1);
2035         tcg_gen_or_i64(ret, t0, t1);
2036         tcg_temp_free_i64(t0);
2037         tcg_temp_free_i64(t1);
2038     }
2039 }
2040 
2041 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
2042 {
2043     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
2044     /* some cases can be optimized here */
2045     if (arg2 == 0) {
2046         tcg_gen_mov_i64(ret, arg1);
2047     } else {
2048         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2049     }
2050 }
2051 
2052 void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
2053                          unsigned int ofs, unsigned int len)
2054 {
2055     uint64_t mask;
2056     TCGv_i64 t1;
2057 
2058     tcg_debug_assert(ofs < 64);
2059     tcg_debug_assert(len > 0);
2060     tcg_debug_assert(len <= 64);
2061     tcg_debug_assert(ofs + len <= 64);
2062 
2063     if (len == 64) {
2064         tcg_gen_mov_i64(ret, arg2);
2065         return;
2066     }
2067     if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2068         tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
2069         return;
2070     }
2071 
2072     if (TCG_TARGET_REG_BITS == 32) {
2073         if (ofs >= 32) {
2074             tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
2075                                 TCGV_LOW(arg2), ofs - 32, len);
2076             tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2077             return;
2078         }
2079         if (ofs + len <= 32) {
2080             tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
2081                                 TCGV_LOW(arg2), ofs, len);
2082             tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2083             return;
2084         }
2085     }
2086 
2087     t1 = tcg_temp_new_i64();
2088 
2089     if (TCG_TARGET_HAS_extract2_i64) {
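        /* extract2(lo, hi, c) yields the low 64 bits of (hi:lo) >> c;
           with suitable shifts it can deposit at either end of the word.  */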
2090         if (ofs + len == 64) {
2091             tcg_gen_shli_i64(t1, arg1, len);
2092             tcg_gen_extract2_i64(ret, t1, arg2, len);
2093             goto done;
2094         }
2095         if (ofs == 0) {
2096             tcg_gen_extract2_i64(ret, arg1, arg2, len);
2097             tcg_gen_rotli_i64(ret, ret, len);
2098             goto done;
2099         }
2100     }
2101 
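    /* Generic fallback: mask arg2 into the field, shift it into place,
       and merge it into arg1 with the field cleared.  */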
2102     mask = (1ull << len) - 1;
2103     if (ofs + len < 64) {
2104         tcg_gen_andi_i64(t1, arg2, mask);
2105         tcg_gen_shli_i64(t1, t1, ofs);
2106     } else {
2107         tcg_gen_shli_i64(t1, arg2, ofs);
2108     }
2109     tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
2110     tcg_gen_or_i64(ret, ret, t1);
2111  done:
2112     tcg_temp_free_i64(t1);
2113 }
2114 
2115 void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
2116                            unsigned int ofs, unsigned int len)
2117 {
2118     tcg_debug_assert(ofs < 64);
2119     tcg_debug_assert(len > 0);
2120     tcg_debug_assert(len <= 64);
2121     tcg_debug_assert(ofs + len <= 64);
2122 
2123     if (ofs + len == 64) {
2124         tcg_gen_shli_i64(ret, arg, ofs);
2125     } else if (ofs == 0) {
2126         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2127     } else if (TCG_TARGET_HAS_deposit_i64
2128                && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2129         TCGv_i64 zero = tcg_constant_i64(0);
2130         tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
2131     } else {
2132         if (TCG_TARGET_REG_BITS == 32) {
2133             if (ofs >= 32) {
2134                 tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
2135                                       ofs - 32, len);
2136                 tcg_gen_movi_i32(TCGV_LOW(ret), 0);
2137                 return;
2138             }
2139             if (ofs + len <= 32) {
2140                 tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2141                 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2142                 return;
2143             }
2144         }
2145         /* To help two-operand hosts we prefer to zero-extend first,
2146            which allows ARG to stay live.  */
2147         switch (len) {
2148         case 32:
2149             if (TCG_TARGET_HAS_ext32u_i64) {
2150                 tcg_gen_ext32u_i64(ret, arg);
2151                 tcg_gen_shli_i64(ret, ret, ofs);
2152                 return;
2153             }
2154             break;
2155         case 16:
2156             if (TCG_TARGET_HAS_ext16u_i64) {
2157                 tcg_gen_ext16u_i64(ret, arg);
2158                 tcg_gen_shli_i64(ret, ret, ofs);
2159                 return;
2160             }
2161             break;
2162         case 8:
2163             if (TCG_TARGET_HAS_ext8u_i64) {
2164                 tcg_gen_ext8u_i64(ret, arg);
2165                 tcg_gen_shli_i64(ret, ret, ofs);
2166                 return;
2167             }
2168             break;
2169         }
2170         /* Otherwise prefer zero-extension over AND for code size.  */
2171         switch (ofs + len) {
2172         case 32:
2173             if (TCG_TARGET_HAS_ext32u_i64) {
2174                 tcg_gen_shli_i64(ret, arg, ofs);
2175                 tcg_gen_ext32u_i64(ret, ret);
2176                 return;
2177             }
2178             break;
2179         case 16:
2180             if (TCG_TARGET_HAS_ext16u_i64) {
2181                 tcg_gen_shli_i64(ret, arg, ofs);
2182                 tcg_gen_ext16u_i64(ret, ret);
2183                 return;
2184             }
2185             break;
2186         case 8:
2187             if (TCG_TARGET_HAS_ext8u_i64) {
2188                 tcg_gen_shli_i64(ret, arg, ofs);
2189                 tcg_gen_ext8u_i64(ret, ret);
2190                 return;
2191             }
2192             break;
2193         }
2194         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2195         tcg_gen_shli_i64(ret, ret, ofs);
2196     }
2197 }
2198 
2199 void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
2200                          unsigned int ofs, unsigned int len)
2201 {
2202     tcg_debug_assert(ofs < 64);
2203     tcg_debug_assert(len > 0);
2204     tcg_debug_assert(len <= 64);
2205     tcg_debug_assert(ofs + len <= 64);
2206 
2207     /* Canonicalize certain special cases, even if extract is supported.  */
2208     if (ofs + len == 64) {
2209         tcg_gen_shri_i64(ret, arg, 64 - len);
2210         return;
2211     }
2212     if (ofs == 0) {
2213         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2214         return;
2215     }
2216 
2217     if (TCG_TARGET_REG_BITS == 32) {
2218         /* Look for a 32-bit extract within one of the two words.  */
2219         if (ofs >= 32) {
2220             tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2221             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2222             return;
2223         }
2224         if (ofs + len <= 32) {
2225             tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2226             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2227             return;
2228         }
2229         /* The field is split across two words.  One double-word
2230            shift is better than two double-word shifts.  */
2231         goto do_shift_and;
2232     }
2233 
2234     if (TCG_TARGET_HAS_extract_i64
2235         && TCG_TARGET_extract_i64_valid(ofs, len)) {
2236         tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
2237         return;
2238     }
2239 
2240     /* Assume that zero-extension, if available, is cheaper than a shift.  */
2241     switch (ofs + len) {
2242     case 32:
2243         if (TCG_TARGET_HAS_ext32u_i64) {
2244             tcg_gen_ext32u_i64(ret, arg);
2245             tcg_gen_shri_i64(ret, ret, ofs);
2246             return;
2247         }
2248         break;
2249     case 16:
2250         if (TCG_TARGET_HAS_ext16u_i64) {
2251             tcg_gen_ext16u_i64(ret, arg);
2252             tcg_gen_shri_i64(ret, ret, ofs);
2253             return;
2254         }
2255         break;
2256     case 8:
2257         if (TCG_TARGET_HAS_ext8u_i64) {
2258             tcg_gen_ext8u_i64(ret, arg);
2259             tcg_gen_shri_i64(ret, ret, ofs);
2260             return;
2261         }
2262         break;
2263     }
2264 
2265     /* ??? Ideally we'd know what values are available for immediate AND.
2266        Assume that 8 bits are available, plus the special cases of 16 and 32,
2267        so that we get ext8u, ext16u, and ext32u.  */
2268     switch (len) {
2269     case 1 ... 8: case 16: case 32:
2270     do_shift_and:
2271         tcg_gen_shri_i64(ret, arg, ofs);
2272         tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
2273         break;
2274     default:
2275         tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2276         tcg_gen_shri_i64(ret, ret, 64 - len);
2277         break;
2278     }
2279 }
2280 
2281 void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
2282                           unsigned int ofs, unsigned int len)
2283 {
2284     tcg_debug_assert(ofs < 64);
2285     tcg_debug_assert(len > 0);
2286     tcg_debug_assert(len <= 64);
2287     tcg_debug_assert(ofs + len <= 64);
2288 
2289     /* Canonicalize certain special cases, even if sextract is supported.  */
2290     if (ofs + len == 64) {
2291         tcg_gen_sari_i64(ret, arg, 64 - len);
2292         return;
2293     }
2294     if (ofs == 0) {
2295         switch (len) {
2296         case 32:
2297             tcg_gen_ext32s_i64(ret, arg);
2298             return;
2299         case 16:
2300             tcg_gen_ext16s_i64(ret, arg);
2301             return;
2302         case 8:
2303             tcg_gen_ext8s_i64(ret, arg);
2304             return;
2305         }
2306     }
2307 
2308     if (TCG_TARGET_REG_BITS == 32) {
2309         /* Look for a 32-bit extract within one of the two words.  */
2310         if (ofs >= 32) {
2311             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2312         } else if (ofs + len <= 32) {
2313             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2314         } else if (ofs == 0) {
2315             tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2316             tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
2317             return;
2318         } else if (len > 32) {
2319             TCGv_i32 t = tcg_temp_new_i32();
2320             /* Extract the bits for the high word normally.  */
2321             tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs + 32, len - 32);
2322             /* Shift the field down for the low part.  */
2323             tcg_gen_shri_i64(ret, arg, ofs);
2324             /* Overwrite the shift into the high part.  */
2325             tcg_gen_mov_i32(TCGV_HIGH(ret), t);
2326             tcg_temp_free_i32(t);
2327             return;
2328         } else {
2329             /* Shift the field down for the low part, so that the
2330                field's top bit lands at the MSB of the low word.  */
2331             tcg_gen_shri_i64(ret, arg, ofs + len - 32);
2332             /* Shift the field down from the MSB, sign extending.  */
2333             tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
2334         }
2335         /* Sign-extend the field from 32 bits.  */
2336         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2337         return;
2338     }
2339 
2340     if (TCG_TARGET_HAS_sextract_i64
2341         && TCG_TARGET_extract_i64_valid(ofs, len)) {
2342         tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
2343         return;
2344     }
2345 
2346     /* Assume that sign-extension, if available, is cheaper than a shift.  */
2347     switch (ofs + len) {
2348     case 32:
2349         if (TCG_TARGET_HAS_ext32s_i64) {
2350             tcg_gen_ext32s_i64(ret, arg);
2351             tcg_gen_sari_i64(ret, ret, ofs);
2352             return;
2353         }
2354         break;
2355     case 16:
2356         if (TCG_TARGET_HAS_ext16s_i64) {
2357             tcg_gen_ext16s_i64(ret, arg);
2358             tcg_gen_sari_i64(ret, ret, ofs);
2359             return;
2360         }
2361         break;
2362     case 8:
2363         if (TCG_TARGET_HAS_ext8s_i64) {
2364             tcg_gen_ext8s_i64(ret, arg);
2365             tcg_gen_sari_i64(ret, ret, ofs);
2366             return;
2367         }
2368         break;
2369     }
2370     switch (len) {
2371     case 32:
2372         if (TCG_TARGET_HAS_ext32s_i64) {
2373             tcg_gen_shri_i64(ret, arg, ofs);
2374             tcg_gen_ext32s_i64(ret, ret);
2375             return;
2376         }
2377         break;
2378     case 16:
2379         if (TCG_TARGET_HAS_ext16s_i64) {
2380             tcg_gen_shri_i64(ret, arg, ofs);
2381             tcg_gen_ext16s_i64(ret, ret);
2382             return;
2383         }
2384         break;
2385     case 8:
2386         if (TCG_TARGET_HAS_ext8s_i64) {
2387             tcg_gen_shri_i64(ret, arg, ofs);
2388             tcg_gen_ext8s_i64(ret, ret);
2389             return;
2390         }
2391         break;
2392     }
2393     tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2394     tcg_gen_sari_i64(ret, ret, 64 - len);
2395 }
2396 
2397 /*
2398  * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2399  * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2400  */
2401 void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
2402                           unsigned int ofs)
2403 {
2404     tcg_debug_assert(ofs <= 64);
2405     if (ofs == 0) {
2406         tcg_gen_mov_i64(ret, al);
2407     } else if (ofs == 64) {
2408         tcg_gen_mov_i64(ret, ah);
2409     } else if (al == ah) {
2410         tcg_gen_rotri_i64(ret, al, ofs);
2411     } else if (TCG_TARGET_HAS_extract2_i64) {
2412         tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
2413     } else {
2414         TCGv_i64 t0 = tcg_temp_new_i64();
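        /* ret = (al >> ofs) | (ah << (64 - ofs)), realized as a shift
           plus a deposit of ah's low bits into the top of the result.  */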
2415         tcg_gen_shri_i64(t0, al, ofs);
2416         tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
2417         tcg_temp_free_i64(t0);
2418     }
2419 }
2420 
2421 void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
2422                          TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
2423 {
2424     if (cond == TCG_COND_ALWAYS) {
2425         tcg_gen_mov_i64(ret, v1);
2426     } else if (cond == TCG_COND_NEVER) {
2427         tcg_gen_mov_i64(ret, v2);
2428     } else if (TCG_TARGET_REG_BITS == 32) {
2429         TCGv_i32 t0 = tcg_temp_new_i32();
2430         TCGv_i32 t1 = tcg_temp_new_i32();
2431         tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
2432                          TCGV_LOW(c1), TCGV_HIGH(c1),
2433                          TCGV_LOW(c2), TCGV_HIGH(c2), cond);
2434 
2435         if (TCG_TARGET_HAS_movcond_i32) {
2436             tcg_gen_movi_i32(t1, 0);
2437             tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
2438                                 TCGV_LOW(v1), TCGV_LOW(v2));
2439             tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
2440                                 TCGV_HIGH(v1), TCGV_HIGH(v2));
2441         } else {
2442             tcg_gen_neg_i32(t0, t0);
2443 
2444             tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
2445             tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
2446             tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);
2447 
2448             tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
2449             tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
2450             tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
2451         }
2452         tcg_temp_free_i32(t0);
2453         tcg_temp_free_i32(t1);
2454     } else if (TCG_TARGET_HAS_movcond_i64) {
2455         tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
2456     } else {
2457         TCGv_i64 t0 = tcg_temp_new_i64();
2458         TCGv_i64 t1 = tcg_temp_new_i64();
2459         tcg_gen_setcond_i64(cond, t0, c1, c2);
2460         tcg_gen_neg_i64(t0, t0);
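        /* t0 is now 0 or -1; select ret = (v1 & t0) | (v2 & ~t0).  */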
2461         tcg_gen_and_i64(t1, v1, t0);
2462         tcg_gen_andc_i64(ret, v2, t0);
2463         tcg_gen_or_i64(ret, ret, t1);
2464         tcg_temp_free_i64(t0);
2465         tcg_temp_free_i64(t1);
2466     }
2467 }
2468 
2469 void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2470                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2471 {
2472     if (TCG_TARGET_HAS_add2_i64) {
2473         tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
2474     } else {
2475         TCGv_i64 t0 = tcg_temp_new_i64();
2476         TCGv_i64 t1 = tcg_temp_new_i64();
2477         tcg_gen_add_i64(t0, al, bl);
2478         tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
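        /* The low-half addition carried iff the sum is unsigned-less-than
           an addend; t1 is the carry into the high half.  */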
2479         tcg_gen_add_i64(rh, ah, bh);
2480         tcg_gen_add_i64(rh, rh, t1);
2481         tcg_gen_mov_i64(rl, t0);
2482         tcg_temp_free_i64(t0);
2483         tcg_temp_free_i64(t1);
2484     }
2485 }
2486 
2487 void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2488                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2489 {
2490     if (TCG_TARGET_HAS_sub2_i64) {
2491         tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
2492     } else {
2493         TCGv_i64 t0 = tcg_temp_new_i64();
2494         TCGv_i64 t1 = tcg_temp_new_i64();
2495         tcg_gen_sub_i64(t0, al, bl);
2496         tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
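        /* al < bl unsigned iff the low-half subtraction borrows.  */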
2497         tcg_gen_sub_i64(rh, ah, bh);
2498         tcg_gen_sub_i64(rh, rh, t1);
2499         tcg_gen_mov_i64(rl, t0);
2500         tcg_temp_free_i64(t0);
2501         tcg_temp_free_i64(t1);
2502     }
2503 }
2504 
2505 void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2506 {
2507     if (TCG_TARGET_HAS_mulu2_i64) {
2508         tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
2509     } else if (TCG_TARGET_HAS_muluh_i64) {
2510         TCGv_i64 t = tcg_temp_new_i64();
2511         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2512         tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
2513         tcg_gen_mov_i64(rl, t);
2514         tcg_temp_free_i64(t);
2515     } else {
2516         TCGv_i64 t0 = tcg_temp_new_i64();
2517         tcg_gen_mul_i64(t0, arg1, arg2);
2518         gen_helper_muluh_i64(rh, arg1, arg2);
2519         tcg_gen_mov_i64(rl, t0);
2520         tcg_temp_free_i64(t0);
2521     }
2522 }
2523 
2524 void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2525 {
2526     if (TCG_TARGET_HAS_muls2_i64) {
2527         tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
2528     } else if (TCG_TARGET_HAS_mulsh_i64) {
2529         TCGv_i64 t = tcg_temp_new_i64();
2530         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2531         tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
2532         tcg_gen_mov_i64(rl, t);
2533         tcg_temp_free_i64(t);
2534     } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
2535         TCGv_i64 t0 = tcg_temp_new_i64();
2536         TCGv_i64 t1 = tcg_temp_new_i64();
2537         TCGv_i64 t2 = tcg_temp_new_i64();
2538         TCGv_i64 t3 = tcg_temp_new_i64();
2539         tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2540         /* Adjust for negative inputs.  */
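        /* A negative argN reads as argN + 2^64 unsigned, contributing an
           extra copy of the other operand to the high half of the
           unsigned product; subtract those copies back out.  */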
2541         tcg_gen_sari_i64(t2, arg1, 63);
2542         tcg_gen_sari_i64(t3, arg2, 63);
2543         tcg_gen_and_i64(t2, t2, arg2);
2544         tcg_gen_and_i64(t3, t3, arg1);
2545         tcg_gen_sub_i64(rh, t1, t2);
2546         tcg_gen_sub_i64(rh, rh, t3);
2547         tcg_gen_mov_i64(rl, t0);
2548         tcg_temp_free_i64(t0);
2549         tcg_temp_free_i64(t1);
2550         tcg_temp_free_i64(t2);
2551         tcg_temp_free_i64(t3);
2552     } else {
2553         TCGv_i64 t0 = tcg_temp_new_i64();
2554         tcg_gen_mul_i64(t0, arg1, arg2);
2555         gen_helper_mulsh_i64(rh, arg1, arg2);
2556         tcg_gen_mov_i64(rl, t0);
2557         tcg_temp_free_i64(t0);
2558     }
2559 }
2560 
2561 void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2562 {
2563     TCGv_i64 t0 = tcg_temp_new_i64();
2564     TCGv_i64 t1 = tcg_temp_new_i64();
2565     TCGv_i64 t2 = tcg_temp_new_i64();
2566     tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2567     /* Adjust for negative input for the signed arg1.  */
2568     tcg_gen_sari_i64(t2, arg1, 63);
2569     tcg_gen_and_i64(t2, t2, arg2);
2570     tcg_gen_sub_i64(rh, t1, t2);
2571     tcg_gen_mov_i64(rl, t0);
2572     tcg_temp_free_i64(t0);
2573     tcg_temp_free_i64(t1);
2574     tcg_temp_free_i64(t2);
2575 }
2576 
2577 void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2578 {
2579     tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
2580 }
2581 
2582 void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2583 {
2584     tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
2585 }
2586 
2587 void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2588 {
2589     tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
2590 }
2591 
2592 void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2593 {
2594     tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
2595 }
2596 
2597 void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
2598 {
2599     TCGv_i64 t = tcg_temp_new_i64();
2600 
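    /* t is 0 or -1 per the sign of a; (a ^ t) - t gives a or -a
       without a branch.  */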
2601     tcg_gen_sari_i64(t, a, 63);
2602     tcg_gen_xor_i64(ret, a, t);
2603     tcg_gen_sub_i64(ret, ret, t);
2604     tcg_temp_free_i64(t);
2605 }
2606 
2607 /* Size changing operations.  */
2608 
2609 void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2610 {
2611     if (TCG_TARGET_REG_BITS == 32) {
2612         tcg_gen_mov_i32(ret, TCGV_LOW(arg));
2613     } else if (TCG_TARGET_HAS_extrl_i64_i32) {
2614         tcg_gen_op2(INDEX_op_extrl_i64_i32,
2615                     tcgv_i32_arg(ret), tcgv_i64_arg(arg));
2616     } else {
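        /* On a 64-bit host without extrl, an i32 view of the same
           temporary effectively aliases the low half of the register.  */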
2617         tcg_gen_mov_i32(ret, (TCGv_i32)arg);
2618     }
2619 }
2620 
2621 void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2622 {
2623     if (TCG_TARGET_REG_BITS == 32) {
2624         tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
2625     } else if (TCG_TARGET_HAS_extrh_i64_i32) {
2626         tcg_gen_op2(INDEX_op_extrh_i64_i32,
2627                     tcgv_i32_arg(ret), tcgv_i64_arg(arg));
2628     } else {
2629         TCGv_i64 t = tcg_temp_new_i64();
2630         tcg_gen_shri_i64(t, arg, 32);
2631         tcg_gen_mov_i32(ret, (TCGv_i32)t);
2632         tcg_temp_free_i64(t);
2633     }
2634 }
2635 
2636 void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2637 {
2638     if (TCG_TARGET_REG_BITS == 32) {
2639         tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2640         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2641     } else {
2642         tcg_gen_op2(INDEX_op_extu_i32_i64,
2643                     tcgv_i64_arg(ret), tcgv_i32_arg(arg));
2644     }
2645 }
2646 
2647 void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2648 {
2649     if (TCG_TARGET_REG_BITS == 32) {
2650         tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2651         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2652     } else {
2653         tcg_gen_op2(INDEX_op_ext_i32_i64,
2654                     tcgv_i64_arg(ret), tcgv_i32_arg(arg));
2655     }
2656 }
2657 
2658 void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
2659 {
2660     TCGv_i64 tmp;
2661 
2662     if (TCG_TARGET_REG_BITS == 32) {
2663         tcg_gen_mov_i32(TCGV_LOW(dest), low);
2664         tcg_gen_mov_i32(TCGV_HIGH(dest), high);
2665         return;
2666     }
2667 
2668     tmp = tcg_temp_new_i64();
2669     /* These extensions are only needed for type correctness.
2670        We may be able to do better given target specific information.  */
2671     tcg_gen_extu_i32_i64(tmp, high);
2672     tcg_gen_extu_i32_i64(dest, low);
2673     /* If deposit is available, use it.  Otherwise use the extra
2674        knowledge that we have of the zero-extensions above.  */
2675     if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
2676         tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
2677     } else {
2678         tcg_gen_shli_i64(tmp, tmp, 32);
2679         tcg_gen_or_i64(dest, dest, tmp);
2680     }
2681     tcg_temp_free_i64(tmp);
2682 }
2683 
2684 void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
2685 {
2686     if (TCG_TARGET_REG_BITS == 32) {
2687         tcg_gen_mov_i32(lo, TCGV_LOW(arg));
2688         tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
2689     } else {
2690         tcg_gen_extrl_i64_i32(lo, arg);
2691         tcg_gen_extrh_i64_i32(hi, arg);
2692     }
2693 }
2694 
2695 void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
2696 {
2697     tcg_gen_ext32u_i64(lo, arg);
2698     tcg_gen_shri_i64(hi, arg, 32);
2699 }
2700 
2701 /* QEMU specific operations.  */
2702 
2703 void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
2704 {
2705     /*
2706      * Let the jit code return the read-only version of the
2707      * TranslationBlock, so that the pc-relative distance from the
2708      * exit_tb code to the TB is minimized.  This improves the
2709      * utilization of pc-relative address loads.
2710      *
2711      * TODO: Move this to translator_loop, so that all const
2712      * TranslationBlock pointers refer to read-only memory.
2713      * This requires coordination with targets that do not use
2714      * the translator_loop.
2715      */
2716     uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;
2717 
2718     if (tb == NULL) {
2719         tcg_debug_assert(idx == 0);
2720     } else if (idx <= TB_EXIT_IDXMAX) {
2721 #ifdef CONFIG_DEBUG_TCG
2722         /* This is an exit following a goto_tb.  Verify that we have
2723            seen this numbered exit before, via tcg_gen_goto_tb.  */
2724         tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
2725 #endif
2726     } else {
2727         /* This is an exit via the exitreq label.  */
2728         tcg_debug_assert(idx == TB_EXIT_REQUESTED);
2729     }
2730 
2731     plugin_gen_disable_mem_helpers();
2732     tcg_gen_op1i(INDEX_op_exit_tb, val);
2733 }
2734 
2735 void tcg_gen_goto_tb(unsigned idx)
2736 {
2737     /* We tested CF_NO_GOTO_TB in translator_use_goto_tb. */
2738     tcg_debug_assert(!(tcg_ctx->tb_cflags & CF_NO_GOTO_TB));
2739     /* We only support two chained exits.  */
2740     tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
2741 #ifdef CONFIG_DEBUG_TCG
2742     /* Verify that we haven't seen this numbered exit before.  */
2743     tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
2744     tcg_ctx->goto_tb_issue_mask |= 1 << idx;
2745 #endif
2746     plugin_gen_disable_mem_helpers();
2747     tcg_gen_op1i(INDEX_op_goto_tb, idx);
2748 }
2749 
2750 void tcg_gen_lookup_and_goto_ptr(void)
2751 {
2752     TCGv_ptr ptr;
2753 
2754     if (tcg_ctx->tb_cflags & CF_NO_GOTO_PTR) {
2755         tcg_gen_exit_tb(NULL, 0);
2756         return;
2757     }
2758 
2759     plugin_gen_disable_mem_helpers();
2760     ptr = tcg_temp_new_ptr();
2761     gen_helper_lookup_tb_ptr(ptr, cpu_env);
2762     tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
2763     tcg_temp_free_ptr(ptr);
2764 }
2765 
2766 static inline MemOp tcg_canonicalize_memop(MemOp op, bool is64, bool st)
2767 {
2768     /* Trigger the asserts within get_alignment_bits as early as possible.  */
2769     (void)get_alignment_bits(op);
2770 
2771     switch (op & MO_SIZE) {
2772     case MO_8:
2773         op &= ~MO_BSWAP;
2774         break;
2775     case MO_16:
2776         break;
2777     case MO_32:
2778         if (!is64) {
2779             op &= ~MO_SIGN;
2780         }
2781         break;
2782     case MO_64:
2783         if (!is64) {
2784             tcg_abort();
2785         }
2786         break;
2787     }
2788     if (st) {
2789         op &= ~MO_SIGN;
2790     }
2791     return op;
2792 }
2793 
2794 static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
2795                          MemOp memop, TCGArg idx)
2796 {
2797     TCGMemOpIdx oi = make_memop_idx(memop, idx);
2798 #if TARGET_LONG_BITS == 32
2799     tcg_gen_op3i_i32(opc, val, addr, oi);
2800 #else
2801     if (TCG_TARGET_REG_BITS == 32) {
2802         tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
2803     } else {
2804         tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_i64_arg(addr), oi);
2805     }
2806 #endif
2807 }
2808 
2809 static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
2810                          MemOp memop, TCGArg idx)
2811 {
2812     TCGMemOpIdx oi = make_memop_idx(memop, idx);
2813 #if TARGET_LONG_BITS == 32
2814     if (TCG_TARGET_REG_BITS == 32) {
2815         tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
2816     } else {
2817         tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_i32_arg(addr), oi);
2818     }
2819 #else
2820     if (TCG_TARGET_REG_BITS == 32) {
2821         tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
2822                          TCGV_LOW(addr), TCGV_HIGH(addr), oi);
2823     } else {
2824         tcg_gen_op3i_i64(opc, val, addr, oi);
2825     }
2826 #endif
2827 }
2828 
2829 static void tcg_gen_req_mo(TCGBar type)
2830 {
2831 #ifdef TCG_GUEST_DEFAULT_MO
2832     type &= TCG_GUEST_DEFAULT_MO;
2833 #endif
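    /* Barriers within TCG_TARGET_DEFAULT_MO come for free on the host;
       emit an explicit barrier only for what remains.  */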
2834     type &= ~TCG_TARGET_DEFAULT_MO;
2835     if (type) {
2836         tcg_gen_mb(type | TCG_BAR_SC);
2837     }
2838 }
2839 
2840 static inline TCGv plugin_prep_mem_callbacks(TCGv vaddr)
2841 {
2842 #ifdef CONFIG_PLUGIN
2843     if (tcg_ctx->plugin_insn != NULL) {
2844         /* Save a copy of the vaddr for use after a load.  */
2845         TCGv temp = tcg_temp_new();
2846         tcg_gen_mov_tl(temp, vaddr);
2847         return temp;
2848     }
2849 #endif
2850     return vaddr;
2851 }
2852 
2853 static inline void plugin_gen_mem_callbacks(TCGv vaddr, uint16_t info)
2854 {
2855 #ifdef CONFIG_PLUGIN
2856     if (tcg_ctx->plugin_insn != NULL) {
2857         plugin_gen_empty_mem_callback(vaddr, info);
2858         tcg_temp_free(vaddr);
2859     }
2860 #endif
2861 }
2862 
2863 void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, MemOp memop)
2864 {
2865     MemOp orig_memop;
2866     uint16_t info = trace_mem_get_info(memop, idx, 0);
2867 
2868     tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
2869     memop = tcg_canonicalize_memop(memop, 0, 0);
2870     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
2871 
2872     orig_memop = memop;
2873     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
2874         memop &= ~MO_BSWAP;
2875         /* The bswap primitive benefits from zero-extended input.  */
2876         if ((memop & MO_SSIZE) == MO_SW) {
2877             memop &= ~MO_SIGN;
2878         }
2879     }
2880 
2881     addr = plugin_prep_mem_callbacks(addr);
2882     gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
2883     plugin_gen_mem_callbacks(addr, info);
2884 
2885     if ((orig_memop ^ memop) & MO_BSWAP) {
2886         switch (orig_memop & MO_SIZE) {
2887         case MO_16:
2888             tcg_gen_bswap16_i32(val, val, (orig_memop & MO_SIGN
2889                                            ? TCG_BSWAP_IZ | TCG_BSWAP_OS
2890                                            : TCG_BSWAP_IZ | TCG_BSWAP_OZ));
2891             break;
2892         case MO_32:
2893             tcg_gen_bswap32_i32(val, val);
2894             break;
2895         default:
2896             g_assert_not_reached();
2897         }
2898     }
2899 }
2900 
2901 void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, MemOp memop)
2902 {
2903     TCGv_i32 swap = NULL;
2904     uint16_t info = trace_mem_get_info(memop, idx, 1);
2905 
2906     tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
2907     memop = tcg_canonicalize_memop(memop, 0, 1);
2908     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
2909 
2910     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
2911         swap = tcg_temp_new_i32();
2912         switch (memop & MO_SIZE) {
2913         case MO_16:
2914             tcg_gen_bswap16_i32(swap, val, 0);
2915             break;
2916         case MO_32:
2917             tcg_gen_bswap32_i32(swap, val);
2918             break;
2919         default:
2920             g_assert_not_reached();
2921         }
2922         val = swap;
2923         memop &= ~MO_BSWAP;
2924     }
2925 
2926     addr = plugin_prep_mem_callbacks(addr);
2927     if (TCG_TARGET_HAS_qemu_st8_i32 && (memop & MO_SIZE) == MO_8) {
2928         gen_ldst_i32(INDEX_op_qemu_st8_i32, val, addr, memop, idx);
2929     } else {
2930         gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
2931     }
2932     plugin_gen_mem_callbacks(addr, info);
2933 
2934     if (swap) {
2935         tcg_temp_free_i32(swap);
2936     }
2937 }
2938 
2939 void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, MemOp memop)
2940 {
2941     MemOp orig_memop;
2942     uint16_t info;
2943 
2944     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
2945         tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
2946         if (memop & MO_SIGN) {
2947             tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
2948         } else {
2949             tcg_gen_movi_i32(TCGV_HIGH(val), 0);
2950         }
2951         return;
2952     }
2953 
2954     tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
2955     memop = tcg_canonicalize_memop(memop, 1, 0);
2956     info = trace_mem_get_info(memop, idx, 0);
2957     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
2958 
2959     orig_memop = memop;
2960     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
2961         memop &= ~MO_BSWAP;
2962         /* The bswap primitive benefits from zero-extended input.  */
2963         if ((memop & MO_SIGN) && (memop & MO_SIZE) < MO_64) {
2964             memop &= ~MO_SIGN;
2965         }
2966     }
2967 
2968     addr = plugin_prep_mem_callbacks(addr);
2969     gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
2970     plugin_gen_mem_callbacks(addr, info);
2971 
2972     if ((orig_memop ^ memop) & MO_BSWAP) {
2973         int flags = (orig_memop & MO_SIGN
2974                      ? TCG_BSWAP_IZ | TCG_BSWAP_OS
2975                      : TCG_BSWAP_IZ | TCG_BSWAP_OZ);
2976         switch (orig_memop & MO_SIZE) {
2977         case MO_16:
2978             tcg_gen_bswap16_i64(val, val, flags);
2979             break;
2980         case MO_32:
2981             tcg_gen_bswap32_i64(val, val, flags);
2982             break;
2983         case MO_64:
2984             tcg_gen_bswap64_i64(val, val);
2985             break;
2986         default:
2987             g_assert_not_reached();
2988         }
2989     }
2990 }
2991 
2992 void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, MemOp memop)
2993 {
2994     TCGv_i64 swap = NULL;
2995     uint16_t info;
2996 
2997     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
2998         tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
2999         return;
3000     }
3001 
3002     tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
3003     memop = tcg_canonicalize_memop(memop, 1, 1);
3004     info = trace_mem_get_info(memop, idx, 1);
3005     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
3006 
3007     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
3008         swap = tcg_temp_new_i64();
3009         switch (memop & MO_SIZE) {
3010         case MO_16:
3011             tcg_gen_bswap16_i64(swap, val, 0);
3012             break;
3013         case MO_32:
3014             tcg_gen_bswap32_i64(swap, val, 0);
3015             break;
3016         case MO_64:
3017             tcg_gen_bswap64_i64(swap, val);
3018             break;
3019         default:
3020             g_assert_not_reached();
3021         }
3022         val = swap;
3023         memop &= ~MO_BSWAP;
3024     }
3025 
3026     addr = plugin_prep_mem_callbacks(addr);
3027     gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
3028     plugin_gen_mem_callbacks(addr, info);
3029 
3030     if (swap) {
3031         tcg_temp_free_i64(swap);
3032     }
3033 }
3034 
3035 static void tcg_gen_ext_i32(TCGv_i32 ret, TCGv_i32 val, MemOp opc)
3036 {
3037     switch (opc & MO_SSIZE) {
3038     case MO_SB:
3039         tcg_gen_ext8s_i32(ret, val);
3040         break;
3041     case MO_UB:
3042         tcg_gen_ext8u_i32(ret, val);
3043         break;
3044     case MO_SW:
3045         tcg_gen_ext16s_i32(ret, val);
3046         break;
3047     case MO_UW:
3048         tcg_gen_ext16u_i32(ret, val);
3049         break;
3050     default:
3051         tcg_gen_mov_i32(ret, val);
3052         break;
3053     }
3054 }
3055 
3056 static void tcg_gen_ext_i64(TCGv_i64 ret, TCGv_i64 val, MemOp opc)
3057 {
3058     switch (opc & MO_SSIZE) {
3059     case MO_SB:
3060         tcg_gen_ext8s_i64(ret, val);
3061         break;
3062     case MO_UB:
3063         tcg_gen_ext8u_i64(ret, val);
3064         break;
3065     case MO_SW:
3066         tcg_gen_ext16s_i64(ret, val);
3067         break;
3068     case MO_UW:
3069         tcg_gen_ext16u_i64(ret, val);
3070         break;
3071     case MO_SL:
3072         tcg_gen_ext32s_i64(ret, val);
3073         break;
3074     case MO_UL:
3075         tcg_gen_ext32u_i64(ret, val);
3076         break;
3077     default:
3078         tcg_gen_mov_i64(ret, val);
3079         break;
3080     }
3081 }
3082 
3083 typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
3084                                   TCGv_i32, TCGv_i32, TCGv_i32);
3085 typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
3086                                   TCGv_i64, TCGv_i64, TCGv_i32);
3087 typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
3088                                   TCGv_i32, TCGv_i32);
3089 typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
3090                                   TCGv_i64, TCGv_i32);
3091 
3092 #ifdef CONFIG_ATOMIC64
3093 # define WITH_ATOMIC64(X) X,
3094 #else
3095 # define WITH_ATOMIC64(X)
3096 #endif
3097 
3098 static void * const table_cmpxchg[16] = {
3099     [MO_8] = gen_helper_atomic_cmpxchgb,
3100     [MO_16 | MO_LE] = gen_helper_atomic_cmpxchgw_le,
3101     [MO_16 | MO_BE] = gen_helper_atomic_cmpxchgw_be,
3102     [MO_32 | MO_LE] = gen_helper_atomic_cmpxchgl_le,
3103     [MO_32 | MO_BE] = gen_helper_atomic_cmpxchgl_be,
3104     WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_cmpxchgq_le)
3105     WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_cmpxchgq_be)
3106 };
3107 
3108 void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
3109                                 TCGv_i32 newv, TCGArg idx, MemOp memop)
3110 {
3111     memop = tcg_canonicalize_memop(memop, 0, 0);
3112 
3113     if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
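        /* Not parallel: no other vCPU can observe the intermediate
           state, so a plain load / compare-select / store suffices.  */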
3114         TCGv_i32 t1 = tcg_temp_new_i32();
3115         TCGv_i32 t2 = tcg_temp_new_i32();
3116 
3117         tcg_gen_ext_i32(t2, cmpv, memop & MO_SIZE);
3118 
3119         tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
3120         tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, t2, newv, t1);
3121         tcg_gen_qemu_st_i32(t2, addr, idx, memop);
3122         tcg_temp_free_i32(t2);
3123 
3124         if (memop & MO_SIGN) {
3125             tcg_gen_ext_i32(retv, t1, memop);
3126         } else {
3127             tcg_gen_mov_i32(retv, t1);
3128         }
3129         tcg_temp_free_i32(t1);
3130     } else {
3131         gen_atomic_cx_i32 gen;
3132         TCGMemOpIdx oi;
3133 
3134         gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
3135         tcg_debug_assert(gen != NULL);
3136 
3137         oi = make_memop_idx(memop & ~MO_SIGN, idx);
3138         gen(retv, cpu_env, addr, cmpv, newv, tcg_constant_i32(oi));
3139 
3140         if (memop & MO_SIGN) {
3141             tcg_gen_ext_i32(retv, retv, memop);
3142         }
3143     }
3144 }

void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
                                TCGv_i64 newv, TCGArg idx, MemOp memop)
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
        /* Serial context: a plain load/compare/store sequence suffices. */
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();

        tcg_gen_ext_i64(t2, cmpv, memop & MO_SIZE);

        tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
        tcg_gen_movcond_i64(TCG_COND_EQ, t2, t1, t2, newv, t1);
        tcg_gen_qemu_st_i64(t2, addr, idx, memop);
        tcg_temp_free_i64(t2);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, t1, memop);
        } else {
            tcg_gen_mov_i64(retv, t1);
        }
        tcg_temp_free_i64(t1);
    } else if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_cx_i64 gen;
        TCGMemOpIdx oi;

        gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

        oi = make_memop_idx(memop, idx);
        gen(retv, cpu_env, addr, cmpv, newv, tcg_constant_i32(oi));
#else
        /* The host cannot perform 64-bit atomics: bail out to the
           exclusive slow path.  */
        gen_helper_exit_atomic(cpu_env);
        /* Produce a result, so that we have a well-formed opcode stream
           with respect to uses of the result in the (dead) code following.  */
        tcg_gen_movi_i64(retv, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        /* Narrower than 64 bits: do the operation via the i32 expander
           and widen the result afterwards.  */
        TCGv_i32 c32 = tcg_temp_new_i32();
        TCGv_i32 n32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(c32, cmpv);
        tcg_gen_extrl_i64_i32(n32, newv);
        tcg_gen_atomic_cmpxchg_i32(r32, addr, c32, n32, idx, memop & ~MO_SIGN);
        tcg_temp_free_i32(c32);
        tcg_temp_free_i32(n32);

        tcg_gen_extu_i32_i64(retv, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(retv, retv, memop);
        }
    }
}
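
/*
 * Note on the CONFIG_ATOMIC64 fallback above: gen_helper_exit_atomic()
 * raises EXCP_ATOMIC, which makes the main loop re-execute the current
 * instruction inside an exclusive region (cpu_exec_step_atomic() in
 * accel/tcg/cpu-exec.c), where plain memory accesses are safe.  The
 * tcg_gen_movi_i64() after it is therefore dead code, emitted only to
 * keep the opcode stream well formed.
 */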

/* Expand a read-modify-write to memory as separate load, op and store,
   for use when the TB does not run in a parallel context.  NEW_VAL
   selects whether RET receives the value after the operation (true) or
   the original memory contents (false).  */
static void do_nonatomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                                TCGArg idx, MemOp memop, bool new_val,
                                void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();

    memop = tcg_canonicalize_memop(memop, 0, 0);

    tcg_gen_qemu_ld_i32(t1, addr, idx, memop);
    tcg_gen_ext_i32(t2, val, memop);
    gen(t2, t1, t2);
    tcg_gen_qemu_st_i32(t2, addr, idx, memop);

    tcg_gen_ext_i32(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
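
/*
 * For example, with GEN = tcg_gen_add_i32 the expansion above is,
 * schematically:
 *
 *     t1 = load(addr)               old value
 *     t2 = ext(val)                 operand, truncated to access size
 *     t2 = t1 + t2                  new value
 *     store(addr, t2)
 *     ret = ext(new_val ? t2 : t1)
 */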

/* Emit a call to the out-of-line atomic helper selected from TABLE
   (one of the table_* arrays generated below) by access size and
   byte order.  */
static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                             TCGArg idx, MemOp memop, void * const table[])
{
    gen_atomic_op_i32 gen;
    TCGMemOpIdx oi;

    memop = tcg_canonicalize_memop(memop, 0, 0);

    gen = table[memop & (MO_SIZE | MO_BSWAP)];
    tcg_debug_assert(gen != NULL);

    /* The helpers return zero-extended values; sign extension is done
       inline afterwards, so strip MO_SIGN from the oi.  */
    oi = make_memop_idx(memop & ~MO_SIGN, idx);
    gen(ret, cpu_env, addr, val, tcg_constant_i32(oi));

    if (memop & MO_SIGN) {
        tcg_gen_ext_i32(ret, ret, memop);
    }
}
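
/*
 * A TCGMemOpIdx packs the MemOp together with the mmu index into a
 * single immediate (see make_memop_idx() in tcg/tcg.h), so the helper
 * receives everything it needs to redo the access as one constant.
 */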

static void do_nonatomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                                TCGArg idx, MemOp memop, bool new_val,
                                void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();

    memop = tcg_canonicalize_memop(memop, 1, 0);

    tcg_gen_qemu_ld_i64(t1, addr, idx, memop);
    tcg_gen_ext_i64(t2, val, memop);
    gen(t2, t1, t2);
    tcg_gen_qemu_st_i64(t2, addr, idx, memop);

    tcg_gen_ext_i64(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                             TCGArg idx, MemOp memop, void * const table[])
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_op_i64 gen;
        TCGMemOpIdx oi;

        gen = table[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

        oi = make_memop_idx(memop & ~MO_SIGN, idx);
        gen(ret, cpu_env, addr, val, tcg_constant_i32(oi));
#else
        /* No 64-bit host atomics: bail out to the exclusive slow path,
           as in tcg_gen_atomic_cmpxchg_i64() above.  */
        gen_helper_exit_atomic(cpu_env);
        /* Produce a result, so that we have a well-formed opcode stream
           with respect to uses of the result in the (dead) code following.  */
        tcg_gen_movi_i64(ret, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        /* Narrower than 64 bits: reuse the i32 helpers and widen.  */
        TCGv_i32 v32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(v32, val);
        do_atomic_op_i32(r32, addr, v32, idx, memop & ~MO_SIGN, table);
        tcg_temp_free_i32(v32);

        tcg_gen_extu_i32_i64(ret, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(ret, ret, memop);
        }
    }
}

#define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
static void * const table_##NAME[16] = {                                \
    [MO_8] = gen_helper_atomic_##NAME##b,                               \
    [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
    [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
    [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
    [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
};                                                                      \
void tcg_gen_atomic_##NAME##_i32                                        \
    (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, MemOp memop)    \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i32);                        \
    }                                                                   \
}                                                                       \
void tcg_gen_atomic_##NAME##_i64                                        \
    (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, MemOp memop)    \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i64);                        \
    }                                                                   \
}
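
/*
 * For instance, GEN_ATOMIC_HELPER(fetch_add, add, 0) expands to a
 * table_fetch_add[] helper table plus
 *
 *     void tcg_gen_atomic_fetch_add_i32(TCGv_i32 ret, TCGv addr,
 *                                       TCGv_i32 val, TCGArg idx,
 *                                       MemOp memop);
 *
 * and the matching _i64 variant, each dispatching on CF_PARALLEL as
 * shown above.  The "fetch_" prefix (NEW == 0) returns the old memory
 * value; the "_fetch" suffix (NEW == 1) returns the updated value.
 */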

GEN_ATOMIC_HELPER(fetch_add, add, 0)
GEN_ATOMIC_HELPER(fetch_and, and, 0)
GEN_ATOMIC_HELPER(fetch_or, or, 0)
GEN_ATOMIC_HELPER(fetch_xor, xor, 0)
GEN_ATOMIC_HELPER(fetch_smin, smin, 0)
GEN_ATOMIC_HELPER(fetch_umin, umin, 0)
GEN_ATOMIC_HELPER(fetch_smax, smax, 0)
GEN_ATOMIC_HELPER(fetch_umax, umax, 0)

GEN_ATOMIC_HELPER(add_fetch, add, 1)
GEN_ATOMIC_HELPER(and_fetch, and, 1)
GEN_ATOMIC_HELPER(or_fetch, or, 1)
GEN_ATOMIC_HELPER(xor_fetch, xor, 1)
GEN_ATOMIC_HELPER(smin_fetch, smin, 1)
GEN_ATOMIC_HELPER(umin_fetch, umin, 1)
GEN_ATOMIC_HELPER(smax_fetch, smax, 1)
GEN_ATOMIC_HELPER(umax_fetch, umax, 1)
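
/*
 * Illustrative sketch (not part of upstream): using one of the
 * generated expanders from a hypothetical front end.  The function
 * name and mmu_idx parameter are assumptions for this example only.
 */
static inline void gen_example_fetch_add32(TCGv_i32 oldv, TCGv addr,
                                           TCGv_i32 inc, TCGArg mmu_idx)
{
    /* oldv = *addr; *addr += inc -- atomically when CF_PARALLEL.  */
    tcg_gen_atomic_fetch_add_i32(oldv, addr, inc, mmu_idx, MO_LEUL);
}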

/* Three-operand wrappers around mov so that xchg fits the common
   GEN_ATOMIC_HELPER pattern: the loaded value A is ignored and the
   new value B is stored unconditionally.  */
static void tcg_gen_mov2_i32(TCGv_i32 r, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mov_i32(r, b);
}

static void tcg_gen_mov2_i64(TCGv_i64 r, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mov_i64(r, b);
}

GEN_ATOMIC_HELPER(xchg, mov2, 0)
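
/*
 * With NEW == 0 the expansion returns the old memory contents, so
 * tcg_gen_atomic_xchg_i32/_i64 behave as a classic atomic exchange:
 * VAL replaces the memory word and its previous value comes back in
 * RET.  E.g. a hypothetical front end might emit
 *
 *     tcg_gen_atomic_xchg_i32(oldv, addr, newv, mmu_idx, MO_LEUL);
 */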

#undef GEN_ATOMIC_HELPER