/* xref: /openbmc/qemu/tcg/tcg-op.c (revision f6476697) */
/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "qemu/osdep.h"
#include "exec/exec-all.h"
#include "tcg/tcg.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-mo.h"
#include "trace-tcg.h"
#include "trace/mem.h"
#include "exec/plugin-gen.h"

/* Reduce the number of ifdefs below.  This assumes that all uses of
   TCGV_HIGH and TCGV_LOW are properly protected by a conditional that
   the compiler can eliminate.  */
#if TCG_TARGET_REG_BITS == 64
extern TCGv_i32 TCGV_LOW_link_error(TCGv_i64);
extern TCGv_i32 TCGV_HIGH_link_error(TCGv_i64);
#define TCGV_LOW  TCGV_LOW_link_error
#define TCGV_HIGH TCGV_HIGH_link_error
#endif

void tcg_gen_op1(TCGOpcode opc, TCGArg a1)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
}

void tcg_gen_op2(TCGOpcode opc, TCGArg a1, TCGArg a2)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
}

void tcg_gen_op3(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
}

void tcg_gen_op4(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3, TCGArg a4)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
}

void tcg_gen_op5(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
}

void tcg_gen_op6(TCGOpcode opc, TCGArg a1, TCGArg a2, TCGArg a3,
                 TCGArg a4, TCGArg a5, TCGArg a6)
{
    TCGOp *op = tcg_emit_op(opc);
    op->args[0] = a1;
    op->args[1] = a2;
    op->args[2] = a3;
    op->args[3] = a4;
    op->args[4] = a5;
    op->args[5] = a6;
}

void tcg_gen_mb(TCGBar mb_type)
{
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {
        tcg_gen_op1(INDEX_op_mb, mb_type);
    }
}

/* 32-bit ops */

void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
{
    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
}

void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i32) {
        /* Don't recurse with tcg_gen_neg_i32.  */
        tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
    } else {
        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
    }
}

void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i32(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i32(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i32.  */
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i32(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i32) {
        /* Don't recurse with tcg_gen_not_i32.  */
        tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
    } else {
        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_brcond_i32(TCGCond cond, TCGv_i32 arg1, TCGv_i32 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        tcg_gen_op4ii_i32(INDEX_op_brcond_i32, arg1, arg2, cond, label_arg(l));
    }
}

void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
    }
}

void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
                         TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i32(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i32(ret, 0);
    } else {
        tcg_gen_op4i_i32(INDEX_op_setcond_i32, ret, arg1, arg2, cond);
    }
}

void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                          TCGv_i32 arg1, int32_t arg2)
{
    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i32(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
    } else {
        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_div_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_div_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_div_i32(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_rem_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_div_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_sari_i32(t0, arg1, 31);
        tcg_gen_op5_i32(INDEX_op_div2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_rem_i32(ret, arg1, arg2);
    }
}
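
/*
 * Illustrative host-side sketch, not part of tcg-op.c (the name
 * model_rem_i32 is hypothetical): the div-based fallback above relies on
 * the C-style identity a % b == a - (a / b) * b, which the generated ops
 * compute with one division, one multiplication and one subtraction.
 */
static inline int32_t model_rem_i32(int32_t a, int32_t b)
{
    return a - (a / b) * b;   /* assumes b != 0 and no INT32_MIN / -1 case */
}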

void tcg_gen_divu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_div_i32) {
        tcg_gen_op3_i32(INDEX_op_divu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_divu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rem_i32) {
        tcg_gen_op3_i32(INDEX_op_remu_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_divu_i32, t0, arg1, arg2);
        tcg_gen_mul_i32(t0, t0, arg2);
        tcg_gen_sub_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    } else if (TCG_TARGET_HAS_div2_i32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_movi_i32(t0, 0);
        tcg_gen_op5_i32(INDEX_op_divu2_i32, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i32(t0);
    } else {
        gen_helper_remu_i32(ret, arg1, arg2);
    }
}

void tcg_gen_andc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_andc_i32) {
        tcg_gen_op3_i32(INDEX_op_andc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_and_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_eqv_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_eqv_i32) {
        tcg_gen_op3_i32(INDEX_op_eqv_i32, ret, arg1, arg2);
    } else {
        tcg_gen_xor_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nand_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nand_i32) {
        tcg_gen_op3_i32(INDEX_op_nand_i32, ret, arg1, arg2);
    } else {
        tcg_gen_and_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_nor_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_nor_i32) {
        tcg_gen_op3_i32(INDEX_op_nor_i32, ret, arg1, arg2);
    } else {
        tcg_gen_or_i32(ret, arg1, arg2);
        tcg_gen_not_i32(ret, ret);
    }
}

void tcg_gen_orc_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_orc_i32) {
        tcg_gen_op3_i32(INDEX_op_orc_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_not_i32(t0, arg2);
        tcg_gen_or_i32(ret, arg1, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_clz_i32) {
        tcg_gen_op3_i32(INDEX_op_clz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_clz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_addi_i64(t2, t2, 32);
        tcg_gen_clz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
        tcg_gen_subi_i32(ret, ret, 32);
    } else {
        gen_helper_clz_i32(ret, arg1, arg2);
    }
}

void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
}

void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_ctz_i32) {
        tcg_gen_op3_i32(INDEX_op_ctz_i32, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_ctz_i64) {
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t1, arg1);
        tcg_gen_extu_i32_i64(t2, arg2);
        tcg_gen_ctz_i64(t1, t1, t2);
        tcg_gen_extrl_i64_i32(ret, t1);
        tcg_temp_free_i64(t1);
        tcg_temp_free_i64(t2);
    } else if (TCG_TARGET_HAS_ctpop_i32
               || TCG_TARGET_HAS_ctpop_i64
               || TCG_TARGET_HAS_clz_i32
               || TCG_TARGET_HAS_clz_i64) {
        TCGv_i32 z, t = tcg_temp_new_i32();

        if (TCG_TARGET_HAS_ctpop_i32 || TCG_TARGET_HAS_ctpop_i64) {
            tcg_gen_subi_i32(t, arg1, 1);
            tcg_gen_andc_i32(t, t, arg1);
            tcg_gen_ctpop_i32(t, t);
        } else {
            /* Since all non-x86 hosts have clz(0) == 32, don't fight it.  */
            tcg_gen_neg_i32(t, arg1);
            tcg_gen_and_i32(t, t, arg1);
            tcg_gen_clzi_i32(t, t, 32);
            tcg_gen_xori_i32(t, t, 31);
        }
        z = tcg_constant_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_ctz_i32(ret, arg1, arg2);
    }
}
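
/*
 * Illustrative sketch of the ctpop-based fallback above (model_ctz32 is a
 * hypothetical name, not a QEMU API).  For x != 0, (x - 1) & ~x sets
 * exactly the bits below the least significant set bit of x, so its
 * population count equals ctz(x).  For x == 0 the expression is all-ones
 * and the count is 32, which is why tcg_gen_ctzi_i32 below can skip the
 * movcond fixup when the caller asks for ctz(0) == 32.
 */
static inline int model_ctz32(uint32_t x)
{
    return __builtin_popcount((x - 1) & ~x);   /* GCC/Clang builtin */
}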

void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
{
    if (!TCG_TARGET_HAS_ctz_i32 && TCG_TARGET_HAS_ctpop_i32 && arg2 == 32) {
        /* This equivalence has the advantage of not requiring a fixup.  */
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_subi_i32(t, arg1, 1);
        tcg_gen_andc_i32(t, t, arg1);
        tcg_gen_ctpop_i32(ret, t);
        tcg_temp_free_i32(t);
    } else {
        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
    }
}

void tcg_gen_clrsb_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_clz_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_sari_i32(t, arg, 31);
        tcg_gen_xor_i32(t, t, arg);
        tcg_gen_clzi_i32(t, t, 32);
        tcg_gen_subi_i32(ret, t, 1);
        tcg_temp_free_i32(t);
    } else {
        gen_helper_clrsb_i32(ret, arg);
    }
}
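
/*
 * Illustrative sketch of the clz-based expansion above (model_clrsb32 is a
 * hypothetical name).  XOR-ing the value with its sign mask turns redundant
 * sign bits into leading zeros; counting those and subtracting one for the
 * sign bit itself yields clrsb.  Assumes an arithmetic right shift.
 */
static inline int model_clrsb32(int32_t x)
{
    uint32_t y = (uint32_t)x ^ (uint32_t)(x >> 31);
    /* clz(0) == 32 by TCG convention, hence clrsb(0) == 31 */
    return y ? __builtin_clz(y) - 1 : 31;
}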

void tcg_gen_ctpop_i32(TCGv_i32 ret, TCGv_i32 arg1)
{
    if (TCG_TARGET_HAS_ctpop_i32) {
        tcg_gen_op2_i32(INDEX_op_ctpop_i32, ret, arg1);
    } else if (TCG_TARGET_HAS_ctpop_i64) {
        TCGv_i64 t = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t, arg1);
        tcg_gen_ctpop_i64(t, t);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);
    } else {
        gen_helper_ctpop_i32(ret, arg1);
    }
}

void tcg_gen_rotl_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotl_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shl_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shr_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
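
/*
 * Illustrative sketch of the shift-or fallback above (model_rotl32 is a
 * hypothetical name): a left rotate is a left shift OR-ed with the bits
 * that fell off the top, recovered by a right shift of 32 - n.
 */
static inline uint32_t model_rotl32(uint32_t x, unsigned n)
{
    return (x << n) | (x >> (32 - n));   /* assumes 0 < n < 32 */
}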

void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
    } else {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shli_i32(t0, arg1, arg2);
        tcg_gen_shri_i32(t1, arg1, 32 - arg2);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotr_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_rot_i32) {
        tcg_gen_op3_i32(INDEX_op_rotr_i32, ret, arg1, arg2);
    } else {
        TCGv_i32 t0, t1;

        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();
        tcg_gen_shr_i32(t0, arg1, arg2);
        tcg_gen_subfi_i32(t1, 32, arg2);
        tcg_gen_shl_i32(t1, arg1, t1);
        tcg_gen_or_i32(ret, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

void tcg_gen_rotri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 32);
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i32(ret, arg1);
    } else {
        tcg_gen_rotli_i32(ret, arg1, 32 - arg2);
    }
}

void tcg_gen_deposit_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2,
                         unsigned int ofs, unsigned int len)
{
    uint32_t mask;
    TCGv_i32 t1;

    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (len == 32) {
        tcg_gen_mov_i32(ret, arg2);
        return;
    }
    if (TCG_TARGET_HAS_deposit_i32 && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, arg1, arg2, ofs, len);
        return;
    }

    t1 = tcg_temp_new_i32();

    if (TCG_TARGET_HAS_extract2_i32) {
        if (ofs + len == 32) {
            tcg_gen_shli_i32(t1, arg1, len);
            tcg_gen_extract2_i32(ret, t1, arg2, len);
            goto done;
        }
        if (ofs == 0) {
            tcg_gen_extract2_i32(ret, arg1, arg2, len);
            tcg_gen_rotli_i32(ret, ret, len);
            goto done;
        }
    }

    mask = (1u << len) - 1;
    if (ofs + len < 32) {
        tcg_gen_andi_i32(t1, arg2, mask);
        tcg_gen_shli_i32(t1, t1, ofs);
    } else {
        tcg_gen_shli_i32(t1, arg2, ofs);
    }
    tcg_gen_andi_i32(ret, arg1, ~(mask << ofs));
    tcg_gen_or_i32(ret, ret, t1);
 done:
    tcg_temp_free_i32(t1);
}
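
/*
 * Illustrative sketch of the generic deposit expansion above
 * (model_deposit32 is a hypothetical name): clear the len-bit field at
 * ofs in the destination, then OR in the masked, shifted source.
 */
static inline uint32_t model_deposit32(uint32_t dst, uint32_t src,
                                       unsigned ofs, unsigned len)
{
    uint32_t mask = ((1u << len) - 1) << ofs;   /* assumes len < 32 */
    return (dst & ~mask) | ((src << ofs) & mask);
}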

void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
                           unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    if (ofs + len == 32) {
        tcg_gen_shli_i32(ret, arg, ofs);
    } else if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
    } else if (TCG_TARGET_HAS_deposit_i32
               && TCG_TARGET_deposit_i32_valid(ofs, len)) {
        TCGv_i32 zero = tcg_constant_i32(0);
        tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
    } else {
        /* To help two-operand hosts we prefer to zero-extend first,
           which allows ARG to stay live.  */
        switch (len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_ext16u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_ext8u_i32(ret, arg);
                tcg_gen_shli_i32(ret, ret, ofs);
                return;
            }
            break;
        }
        /* Otherwise prefer zero-extension over AND for code size.  */
        switch (ofs + len) {
        case 16:
            if (TCG_TARGET_HAS_ext16u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext16u_i32(ret, ret);
                return;
            }
            break;
        case 8:
            if (TCG_TARGET_HAS_ext8u_i32) {
                tcg_gen_shli_i32(ret, arg, ofs);
                tcg_gen_ext8u_i32(ret, ret);
                return;
            }
            break;
        }
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        tcg_gen_shli_i32(ret, ret, ofs);
    }
}

void tcg_gen_extract_i32(TCGv_i32 ret, TCGv_i32 arg,
                         unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_shri_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
        return;
    }

    if (TCG_TARGET_HAS_extract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_extract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that zero-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16u_i32) {
            tcg_gen_ext16u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8u_i32) {
            tcg_gen_ext8u_i32(ret, arg);
            tcg_gen_shri_i32(ret, ret, ofs);
            return;
        }
        break;
    }

    /* ??? Ideally we'd know what values are available for immediate AND.
       Assume that 8 bits are available, plus the special case of 16,
       so that we get ext8u, ext16u.  */
    switch (len) {
    case 1 ... 8: case 16:
        tcg_gen_shri_i32(ret, arg, ofs);
        tcg_gen_andi_i32(ret, ret, (1u << len) - 1);
        break;
    default:
        tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
        tcg_gen_shri_i32(ret, ret, 32 - len);
        break;
    }
}
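
/*
 * Illustrative sketches of the two expansions used above (both model names
 * are hypothetical).  Small fields use shift-then-mask; wide fields use a
 * shift pair, which needs no immediate mask at all.
 */
static inline uint32_t model_extract32(uint32_t x, unsigned ofs, unsigned len)
{
    return (x >> ofs) & ((1u << len) - 1);          /* assumes len < 32 */
}

static inline uint32_t model_extract32_shifts(uint32_t x,
                                              unsigned ofs, unsigned len)
{
    return (x << (32 - len - ofs)) >> (32 - len);   /* assumes ofs + len <= 32 */
}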

void tcg_gen_sextract_i32(TCGv_i32 ret, TCGv_i32 arg,
                          unsigned int ofs, unsigned int len)
{
    tcg_debug_assert(ofs < 32);
    tcg_debug_assert(len > 0);
    tcg_debug_assert(len <= 32);
    tcg_debug_assert(ofs + len <= 32);

    /* Canonicalize certain special cases, even if extract is supported.  */
    if (ofs + len == 32) {
        tcg_gen_sari_i32(ret, arg, 32 - len);
        return;
    }
    if (ofs == 0) {
        switch (len) {
        case 16:
            tcg_gen_ext16s_i32(ret, arg);
            return;
        case 8:
            tcg_gen_ext8s_i32(ret, arg);
            return;
        }
    }

    if (TCG_TARGET_HAS_sextract_i32
        && TCG_TARGET_extract_i32_valid(ofs, len)) {
        tcg_gen_op4ii_i32(INDEX_op_sextract_i32, ret, arg, ofs, len);
        return;
    }

    /* Assume that sign-extension, if available, is cheaper than a shift.  */
    switch (ofs + len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_ext16s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_ext8s_i32(ret, arg);
            tcg_gen_sari_i32(ret, ret, ofs);
            return;
        }
        break;
    }
    switch (len) {
    case 16:
        if (TCG_TARGET_HAS_ext16s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext16s_i32(ret, ret);
            return;
        }
        break;
    case 8:
        if (TCG_TARGET_HAS_ext8s_i32) {
            tcg_gen_shri_i32(ret, arg, ofs);
            tcg_gen_ext8s_i32(ret, ret);
            return;
        }
        break;
    }

    tcg_gen_shli_i32(ret, arg, 32 - len - ofs);
    tcg_gen_sari_i32(ret, ret, 32 - len);
}
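
/*
 * Illustrative sketch of the final shift pair above (model_sextract32 is a
 * hypothetical name): the field is pushed up against the sign bit and then
 * brought back down with an arithmetic shift, replicating its top bit.
 */
static inline int32_t model_sextract32(uint32_t x, unsigned ofs, unsigned len)
{
    /* assumes len > 0, ofs + len <= 32, and an arithmetic right shift */
    return (int32_t)(x << (32 - len - ofs)) >> (32 - len);
}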

/*
 * Extract 32 bits from a 64-bit input, ah:al, starting from ofs.
 * Unlike tcg_gen_extract_i32 above, len is fixed at 32.
 */
void tcg_gen_extract2_i32(TCGv_i32 ret, TCGv_i32 al, TCGv_i32 ah,
                          unsigned int ofs)
{
    tcg_debug_assert(ofs <= 32);
    if (ofs == 0) {
        tcg_gen_mov_i32(ret, al);
    } else if (ofs == 32) {
        tcg_gen_mov_i32(ret, ah);
    } else if (al == ah) {
        tcg_gen_rotri_i32(ret, al, ofs);
    } else if (TCG_TARGET_HAS_extract2_i32) {
        tcg_gen_op4i_i32(INDEX_op_extract2_i32, ret, al, ah, ofs);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        tcg_gen_shri_i32(t0, al, ofs);
        tcg_gen_deposit_i32(ret, t0, ah, 32 - ofs, ofs);
        tcg_temp_free_i32(t0);
    }
}
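
/*
 * Illustrative sketch of extract2 (model_extract2_i32 is a hypothetical
 * name): the result is the low 32 bits of the 64-bit value ah:al shifted
 * right by ofs, i.e. the bottom of ah joined onto the top of al.
 */
static inline uint32_t model_extract2_i32(uint32_t al, uint32_t ah,
                                          unsigned ofs)
{
    return (al >> ofs) | (ah << (32 - ofs));   /* assumes 0 < ofs < 32 */
}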

void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
                         TCGv_i32 c2, TCGv_i32 v1, TCGv_i32 v2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_mov_i32(ret, v1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_mov_i32(ret, v2);
    } else if (TCG_TARGET_HAS_movcond_i32) {
        tcg_gen_op6i_i32(INDEX_op_movcond_i32, ret, c1, c2, v1, v2, cond);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_setcond_i32(cond, t0, c1, c2);
        tcg_gen_neg_i32(t0, t0);
        tcg_gen_and_i32(t1, v1, t0);
        tcg_gen_andc_i32(ret, v2, t0);
        tcg_gen_or_i32(ret, ret, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
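
/*
 * Illustrative sketch of the branchless fallback above (model_movcond_i32
 * is a hypothetical name): negating the 0/1 setcond result yields an
 * all-zeros or all-ones mask, which selects between v1 and v2 with pure
 * bit operations.
 */
static inline uint32_t model_movcond_i32(bool cond, uint32_t v1, uint32_t v2)
{
    uint32_t mask = -(uint32_t)cond;   /* 0 or 0xffffffff */
    return (v1 & mask) | (v2 & ~mask);
}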

void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_add2_i32) {
        tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_sub2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                      TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
{
    if (TCG_TARGET_HAS_sub2_i32) {
        tcg_gen_op6_i32(INDEX_op_sub2_i32, rl, rh, al, ah, bl, bh);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_concat_i32_i64(t0, al, ah);
        tcg_gen_concat_i32_i64(t1, bl, bh);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_mulu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_mulu2_i32) {
        tcg_gen_op4_i32(INDEX_op_mulu2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_muluh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_muluh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_muls2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_HAS_muls2_i32) {
        tcg_gen_op4_i32(INDEX_op_muls2_i32, rl, rh, arg1, arg2);
    } else if (TCG_TARGET_HAS_mulsh_i32) {
        TCGv_i32 t = tcg_temp_new_i32();
        tcg_gen_op3_i32(INDEX_op_mul_i32, t, arg1, arg2);
        tcg_gen_op3_i32(INDEX_op_mulsh_i32, rh, arg1, arg2);
        tcg_gen_mov_i32(rl, t);
        tcg_temp_free_i32(t);
    } else if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative inputs.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_sari_i32(t3, arg2, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_and_i32(t3, t3, arg1);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_sub_i32(rh, rh, t3);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_ext_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
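
/*
 * Illustrative sketch of the 32-bit-host correction above (model_muls2_i32
 * is a hypothetical name).  A signed product equals the unsigned product
 * of the same bit patterns minus (b << 32) when a is negative and minus
 * (a << 32) when b is negative, so only the high half needs adjusting.
 */
static inline void model_muls2_i32(int32_t a, int32_t b,
                                   uint32_t *rl, uint32_t *rh)
{
    uint64_t u = (uint64_t)(uint32_t)a * (uint32_t)b;
    uint32_t hi = (uint32_t)(u >> 32);

    hi -= a < 0 ? (uint32_t)b : 0;   /* subtract b from the high half */
    hi -= b < 0 ? (uint32_t)a : 0;   /* subtract a from the high half */
    *rl = (uint32_t)u;
    *rh = hi;
}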

void tcg_gen_mulsu2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 arg1, TCGv_i32 arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_temp_new_i32();
        tcg_gen_mulu2_i32(t0, t1, arg1, arg2);
        /* Adjust for negative input for the signed arg1.  */
        tcg_gen_sari_i32(t2, arg1, 31);
        tcg_gen_and_i32(t2, t2, arg2);
        tcg_gen_sub_i32(rh, t1, t2);
        tcg_gen_mov_i32(rl, t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i32(t2);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        tcg_gen_ext_i32_i64(t0, arg1);
        tcg_gen_extu_i32_i64(t1, arg2);
        tcg_gen_mul_i64(t0, t0, t1);
        tcg_gen_extr_i64_i32(rl, rh, t0);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_ext8s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 24);
        tcg_gen_sari_i32(ret, ret, 24);
    }
}

void tcg_gen_ext16s_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16s_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16s_i32, ret, arg);
    } else {
        tcg_gen_shli_i32(ret, arg, 16);
        tcg_gen_sari_i32(ret, ret, 16);
    }
}

void tcg_gen_ext8u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext8u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_ext16u_i32) {
        tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg);
    } else {
        tcg_gen_andi_i32(ret, arg, 0xffffu);
    }
}

/* Note: we assume the two high bytes are set to zero */
void tcg_gen_bswap16_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap16_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap16_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();

        tcg_gen_ext8u_i32(t0, arg);
        tcg_gen_shli_i32(t0, t0, 8);
        tcg_gen_shri_i32(ret, arg, 8);
        tcg_gen_or_i32(ret, ret, t0);
        tcg_temp_free_i32(t0);
    }
}

void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
{
    if (TCG_TARGET_HAS_bswap32_i32) {
        tcg_gen_op2_i32(INDEX_op_bswap32_i32, ret, arg);
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);

                                        /* arg = abcd */
        tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
        tcg_gen_and_i32(t1, arg, t2);   /*  t1 = .b.d */
        tcg_gen_and_i32(t0, t0, t2);    /*  t0 = .a.c */
        tcg_gen_shli_i32(t1, t1, 8);    /*  t1 = b.d. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = badc */

        tcg_gen_shri_i32(t0, ret, 16);  /*  t0 = ..ba */
        tcg_gen_shli_i32(t1, ret, 16);  /*  t1 = dc.. */
        tcg_gen_or_i32(ret, t0, t1);    /* ret = dcba */

        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
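
/*
 * Illustrative sketch of the mask-and-shift byte swap above
 * (model_bswap32 is a hypothetical name): first swap bytes within each
 * 16-bit half, then swap the two halves.
 */
static inline uint32_t model_bswap32(uint32_t x)
{
    x = ((x >> 8) & 0x00ff00ffu) | ((x & 0x00ff00ffu) << 8);   /* abcd -> badc */
    return (x >> 16) | (x << 16);                              /* badc -> dcba */
}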

void tcg_gen_smin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, a, b);
}

void tcg_gen_umin_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, a, b);
}

void tcg_gen_smax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LT, ret, a, b, b, a);
}

void tcg_gen_umax_i32(TCGv_i32 ret, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_movcond_i32(TCG_COND_LTU, ret, a, b, b, a);
}

void tcg_gen_abs_i32(TCGv_i32 ret, TCGv_i32 a)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_sari_i32(t, a, 31);
    tcg_gen_xor_i32(ret, a, t);
    tcg_gen_sub_i32(ret, ret, t);
    tcg_temp_free_i32(t);
}
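
/*
 * Illustrative sketch of the sign-mask trick above (model_abs32 is a
 * hypothetical name): with s = a >> 31 (all-ones for negative a, zero
 * otherwise), (a ^ s) - s conditionally complements and increments,
 * i.e. negates, exactly when a is negative.
 */
static inline int32_t model_abs32(int32_t a)
{
    int32_t s = a >> 31;   /* assumes an arithmetic right shift */
    return (a ^ s) - s;
}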

/* 64-bit ops */

#if TCG_TARGET_REG_BITS == 32
/* These are all inline for TCG_TARGET_REG_BITS == 64.  */

void tcg_gen_discard_i64(TCGv_i64 arg)
{
    tcg_gen_discard_i32(TCGV_LOW(arg));
    tcg_gen_discard_i32(TCGV_HIGH(arg));
}

void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    TCGTemp *ts = tcgv_i64_temp(arg);

    /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
    if (ts->kind == TEMP_CONST) {
        tcg_gen_movi_i64(ret, ts->val);
    } else {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
    }
}

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_movi_i32(TCGV_LOW(ret), arg);
    tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
}

void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
}

void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
}

void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
    /* Since arg2 and ret have different types,
       they cannot be the same temporary */
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
#else
    tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
    tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
#endif
}

void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
#ifdef HOST_WORDS_BIGENDIAN
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
#else
    tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
    tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
#endif
}

void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
    tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
}

void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shl_i64(ret, arg1, arg2);
}

void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_shr_i64(ret, arg1, arg2);
}

void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    gen_helper_sar_i64(ret, arg1, arg2);
}

void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    TCGv_i64 t0;
    TCGv_i32 t1;

    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i32();

    tcg_gen_mulu2_i32(TCGV_LOW(t0), TCGV_HIGH(t0),
                      TCGV_LOW(arg1), TCGV_LOW(arg2));

    tcg_gen_mul_i32(t1, TCGV_LOW(arg1), TCGV_HIGH(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);
    tcg_gen_mul_i32(t1, TCGV_HIGH(arg1), TCGV_LOW(arg2));
    tcg_gen_add_i32(TCGV_HIGH(t0), TCGV_HIGH(t0), t1);

    tcg_gen_mov_i64(ret, t0);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i32(t1);
}
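
/*
 * Illustrative sketch of the 32-bit-host multiply above (model_mul64 is a
 * hypothetical name): only the low x low product needs full 64-bit
 * precision; the two cross products affect just the high word, and
 * high x high would only contribute above bit 63, so it is dropped.
 */
static inline uint64_t model_mul64(uint32_t al, uint32_t ah,
                                   uint32_t bl, uint32_t bh)
{
    uint64_t t = (uint64_t)al * bl;
    uint32_t hi = (uint32_t)(t >> 32) + al * bh + ah * bl;

    return ((uint64_t)hi << 32) | (uint32_t)t;
}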

#else

void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
}

#endif /* TCG_TARGET_REG_BITS == 32 */

void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
{
    if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
        /* Don't recurse with tcg_gen_neg_i64.  */
        tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
    }
}

void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    /* some cases can be optimized here */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
    } else {
        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
    }
}

void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }

    /* Some cases can be optimized here.  */
    switch (arg2) {
    case 0:
        tcg_gen_movi_i64(ret, 0);
        return;
    case -1:
        tcg_gen_mov_i64(ret, arg1);
        return;
    case 0xff:
        /* Don't recurse with tcg_gen_ext8u_i64.  */
        if (TCG_TARGET_HAS_ext8u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffff:
        if (TCG_TARGET_HAS_ext16u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
            return;
        }
        break;
    case 0xffffffffu:
        if (TCG_TARGET_HAS_ext32u_i64) {
            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
            return;
        }
        break;
    }

    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
}

void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_ori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == -1) {
        tcg_gen_movi_i64(ret, -1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_xori_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
        tcg_gen_xori_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
        return;
    }
    /* Some cases can be optimized here.  */
    if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else if (arg2 == -1 && TCG_TARGET_HAS_not_i64) {
        /* Don't recurse with tcg_gen_not_i64.  */
        tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
    } else {
        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

static inline void tcg_gen_shifti_i64(TCGv_i64 ret, TCGv_i64 arg1,
                                      unsigned c, bool right, bool arith)
{
    tcg_debug_assert(c < 64);
    if (c == 0) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
        tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
    } else if (c >= 32) {
        c -= 32;
        if (right) {
            if (arith) {
                tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), 31);
            } else {
                tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), c);
                tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
            }
        } else {
            tcg_gen_shli_i32(TCGV_HIGH(ret), TCGV_LOW(arg1), c);
            tcg_gen_movi_i32(TCGV_LOW(ret), 0);
        }
    } else if (right) {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_LOW(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
            tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(ret),
                                TCGV_HIGH(arg1), 32 - c, c);
        }
        if (arith) {
            tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        } else {
            tcg_gen_shri_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), c);
        }
    } else {
        if (TCG_TARGET_HAS_extract2_i32) {
            tcg_gen_extract2_i32(TCGV_HIGH(ret),
                                 TCGV_LOW(arg1), TCGV_HIGH(arg1), 32 - c);
        } else {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_shri_i32(t0, TCGV_LOW(arg1), 32 - c);
            tcg_gen_deposit_i32(TCGV_HIGH(ret), t0,
                                TCGV_HIGH(arg1), c, 32 - c);
            tcg_temp_free_i32(t0);
        }
        tcg_gen_shli_i32(TCGV_LOW(ret), TCGV_LOW(arg1), c);
    }
}
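
/*
 * Illustrative sketch of one case of the expansion above (model_shl64 is
 * a hypothetical name): a 64-bit left shift on a 32-bit host either moves
 * the low word into the high word (c >= 32) or shifts both words and
 * carries the spilled top bits of the low word into the high word.
 */
static inline void model_shl64(uint32_t *lo, uint32_t *hi, unsigned c)
{
    if (c >= 32) {
        *hi = *lo << (c - 32);      /* assumes c < 64 */
        *lo = 0;
    } else if (c != 0) {
        *hi = (*hi << c) | (*lo >> (32 - c));
        *lo <<= c;
    }
}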

void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 0, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 0);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    tcg_debug_assert(arg2 >= 0 && arg2 < 64);
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_shifti_i64(ret, arg1, arg2, 1, 1);
    } else if (arg2 == 0) {
        tcg_gen_mov_i64(ret, arg1);
    } else {
        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
    }
}

void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6ii_i32(INDEX_op_brcond2_i32, TCGV_LOW(arg1),
                              TCGV_HIGH(arg1), TCGV_LOW(arg2),
                              TCGV_HIGH(arg2), cond, label_arg(l));
        } else {
            tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2, cond,
                              label_arg(l));
        }
    }
}

void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_br(l);
    } else if (cond != TCG_COND_NEVER) {
        l->refs++;
        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
                          tcg_constant_i32(arg2),
                          tcg_constant_i32(arg2 >> 32),
                          cond, label_arg(l));
    }
}

void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
                         TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        if (TCG_TARGET_REG_BITS == 32) {
            tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                             TCGV_LOW(arg1), TCGV_HIGH(arg1),
                             TCGV_LOW(arg2), TCGV_HIGH(arg2), cond);
            tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
        } else {
            tcg_gen_op4i_i64(INDEX_op_setcond_i64, ret, arg1, arg2, cond);
        }
    }
}

void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                          TCGv_i64 arg1, int64_t arg2)
{
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
    } else if (cond == TCG_COND_ALWAYS) {
        tcg_gen_movi_i64(ret, 1);
    } else if (cond == TCG_COND_NEVER) {
        tcg_gen_movi_i64(ret, 0);
    } else {
        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
                         tcg_constant_i32(arg2),
                         tcg_constant_i32(arg2 >> 32), cond);
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    }
}

void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
    if (arg2 == 0) {
        tcg_gen_movi_i64(ret, 0);
    } else if (is_power_of_2(arg2)) {
        tcg_gen_shli_i64(ret, arg1, ctz64(arg2));
    } else {
        TCGv_i64 t0 = tcg_const_i64(arg2);
        tcg_gen_mul_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    }
}

void tcg_gen_div_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_div_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_div_i64(ret, arg1, arg2);
    }
}

void tcg_gen_rem_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_rem_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_div_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_sari_i64(t0, arg1, 63);
        tcg_gen_op5_i64(INDEX_op_div2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_rem_i64(ret, arg1, arg2);
    }
}

void tcg_gen_divu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_div_i64) {
        tcg_gen_op3_i64(INDEX_op_divu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, ret, t0, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_divu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_remu_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
    if (TCG_TARGET_HAS_rem_i64) {
        tcg_gen_op3_i64(INDEX_op_remu_i64, ret, arg1, arg2);
    } else if (TCG_TARGET_HAS_div_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_op3_i64(INDEX_op_divu_i64, t0, arg1, arg2);
        tcg_gen_mul_i64(t0, t0, arg2);
        tcg_gen_sub_i64(ret, arg1, t0);
        tcg_temp_free_i64(t0);
    } else if (TCG_TARGET_HAS_div2_i64) {
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_movi_i64(t0, 0);
        tcg_gen_op5_i64(INDEX_op_divu2_i64, t0, ret, arg1, t0, arg2);
        tcg_temp_free_i64(t0);
    } else {
        gen_helper_remu_i64(ret, arg1, arg2);
    }
}

void tcg_gen_ext8s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext8s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 56);
        tcg_gen_sari_i64(ret, ret, 56);
    }
}

void tcg_gen_ext16s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16s_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext16s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 48);
        tcg_gen_sari_i64(ret, ret, 48);
    }
}

void tcg_gen_ext32s_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
    } else if (TCG_TARGET_HAS_ext32s_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32s_i64, ret, arg);
    } else {
        tcg_gen_shli_i64(ret, arg, 32);
        tcg_gen_sari_i64(ret, ret, 32);
    }
}

void tcg_gen_ext8u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext8u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext8u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffu);
    }
}

void tcg_gen_ext16u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_ext16u_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext16u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffu);
    }
}

void tcg_gen_ext32u_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_ext32u_i64) {
        tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg);
    } else {
        tcg_gen_andi_i64(ret, arg, 0xffffffffu);
    }
}

/* Note: we assume the six high bytes are set to zero */
void tcg_gen_bswap16_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap16_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap16_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap16_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();

        tcg_gen_ext8u_i64(t0, arg);
        tcg_gen_shli_i64(t0, t0, 8);
        tcg_gen_shri_i64(ret, arg, 8);
        tcg_gen_or_i64(ret, ret, t0);
        tcg_temp_free_i64(t0);
    }
}

/* Note: we assume the four high bytes are set to zero */
void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        tcg_gen_bswap32_i32(TCGV_LOW(ret), TCGV_LOW(arg));
        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
    } else if (TCG_TARGET_HAS_bswap32_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap32_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);

                                        /* arg = ....abcd */
        tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .....abc */
        tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .....b.d */
        tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .....a.c */
        tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = ....b.d. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = ....badc */

        tcg_gen_shli_i64(t1, ret, 48);  /*  t1 = dc...... */
        tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ......ba */
        tcg_gen_shri_i64(t1, t1, 32);   /*  t1 = ....dc.. */
        tcg_gen_or_i64(ret, t0, t1);    /* ret = ....dcba */

        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}

void tcg_gen_bswap64_i64(TCGv_i64 ret, TCGv_i64 arg)
{
    if (TCG_TARGET_REG_BITS == 32) {
        TCGv_i32 t0, t1;
        t0 = tcg_temp_new_i32();
        t1 = tcg_temp_new_i32();

        tcg_gen_bswap32_i32(t0, TCGV_LOW(arg));
        tcg_gen_bswap32_i32(t1, TCGV_HIGH(arg));
        tcg_gen_mov_i32(TCGV_LOW(ret), t1);
        tcg_gen_mov_i32(TCGV_HIGH(ret), t0);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else if (TCG_TARGET_HAS_bswap64_i64) {
        tcg_gen_op2_i64(INDEX_op_bswap64_i64, ret, arg);
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();
        TCGv_i64 t2 = tcg_temp_new_i64();

                                        /* arg = abcdefgh */
        tcg_gen_movi_i64(t2, 0x00ff00ff00ff00ffull);
1728         tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .abcdefg */
1729         tcg_gen_and_i64(t1, arg, t2);   /*  t1 = .b.d.f.h */
1730         tcg_gen_and_i64(t0, t0, t2);    /*  t0 = .a.c.e.g */
1731         tcg_gen_shli_i64(t1, t1, 8);    /*  t1 = b.d.f.h. */
1732         tcg_gen_or_i64(ret, t0, t1);    /* ret = badcfehg */
1733 
1734         tcg_gen_movi_i64(t2, 0x0000ffff0000ffffull);
1735         tcg_gen_shri_i64(t0, ret, 16);  /*  t0 = ..badcfe */
1736         tcg_gen_and_i64(t1, ret, t2);   /*  t1 = ..dc..hg */
1737         tcg_gen_and_i64(t0, t0, t2);    /*  t0 = ..ba..fe */
1738         tcg_gen_shli_i64(t1, t1, 16);   /*  t1 = dc..hg.. */
1739         tcg_gen_or_i64(ret, t0, t1);    /* ret = dcbahgfe */
1740 
1741         tcg_gen_shri_i64(t0, ret, 32);  /*  t0 = ....dcba */
1742         tcg_gen_shli_i64(t1, ret, 32);  /*  t1 = hgfe.... */
1743         tcg_gen_or_i64(ret, t0, t1);    /* ret = hgfedcba */
1744 
1745         tcg_temp_free_i64(t0);
1746         tcg_temp_free_i64(t1);
1747         tcg_temp_free_i64(t2);
1748     }
1749 }
1750 
1751 void tcg_gen_not_i64(TCGv_i64 ret, TCGv_i64 arg)
1752 {
1753     if (TCG_TARGET_REG_BITS == 32) {
1754         tcg_gen_not_i32(TCGV_LOW(ret), TCGV_LOW(arg));
1755         tcg_gen_not_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
1756     } else if (TCG_TARGET_HAS_not_i64) {
1757         tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg);
1758     } else {
1759         tcg_gen_xori_i64(ret, arg, -1);
1760     }
1761 }
1762 
1763 void tcg_gen_andc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1764 {
1765     if (TCG_TARGET_REG_BITS == 32) {
1766         tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1767         tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1768     } else if (TCG_TARGET_HAS_andc_i64) {
1769         tcg_gen_op3_i64(INDEX_op_andc_i64, ret, arg1, arg2);
1770     } else {
1771         TCGv_i64 t0 = tcg_temp_new_i64();
1772         tcg_gen_not_i64(t0, arg2);
1773         tcg_gen_and_i64(ret, arg1, t0);
1774         tcg_temp_free_i64(t0);
1775     }
1776 }
1777 
1778 void tcg_gen_eqv_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1779 {
1780     if (TCG_TARGET_REG_BITS == 32) {
1781         tcg_gen_eqv_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1782         tcg_gen_eqv_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1783     } else if (TCG_TARGET_HAS_eqv_i64) {
1784         tcg_gen_op3_i64(INDEX_op_eqv_i64, ret, arg1, arg2);
1785     } else {
1786         tcg_gen_xor_i64(ret, arg1, arg2);
1787         tcg_gen_not_i64(ret, ret);
1788     }
1789 }
1790 
1791 void tcg_gen_nand_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1792 {
1793     if (TCG_TARGET_REG_BITS == 32) {
1794         tcg_gen_nand_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1795         tcg_gen_nand_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1796     } else if (TCG_TARGET_HAS_nand_i64) {
1797         tcg_gen_op3_i64(INDEX_op_nand_i64, ret, arg1, arg2);
1798     } else {
1799         tcg_gen_and_i64(ret, arg1, arg2);
1800         tcg_gen_not_i64(ret, ret);
1801     }
1802 }
1803 
1804 void tcg_gen_nor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1805 {
1806     if (TCG_TARGET_REG_BITS == 32) {
1807         tcg_gen_nor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1808         tcg_gen_nor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1809     } else if (TCG_TARGET_HAS_nor_i64) {
1810         tcg_gen_op3_i64(INDEX_op_nor_i64, ret, arg1, arg2);
1811     } else {
1812         tcg_gen_or_i64(ret, arg1, arg2);
1813         tcg_gen_not_i64(ret, ret);
1814     }
1815 }
1816 
1817 void tcg_gen_orc_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1818 {
1819     if (TCG_TARGET_REG_BITS == 32) {
1820         tcg_gen_orc_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
1821         tcg_gen_orc_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
1822     } else if (TCG_TARGET_HAS_orc_i64) {
1823         tcg_gen_op3_i64(INDEX_op_orc_i64, ret, arg1, arg2);
1824     } else {
1825         TCGv_i64 t0 = tcg_temp_new_i64();
1826         tcg_gen_not_i64(t0, arg2);
1827         tcg_gen_or_i64(ret, arg1, t0);
1828         tcg_temp_free_i64(t0);
1829     }
1830 }
1831 
1832 void tcg_gen_clz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1833 {
1834     if (TCG_TARGET_HAS_clz_i64) {
1835         tcg_gen_op3_i64(INDEX_op_clz_i64, ret, arg1, arg2);
1836     } else {
1837         gen_helper_clz_i64(ret, arg1, arg2);
1838     }
1839 }
1840 
1841 void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1842 {
1843     if (TCG_TARGET_REG_BITS == 32
1844         && TCG_TARGET_HAS_clz_i32
1845         && arg2 <= 0xffffffffu) {
1846         TCGv_i32 t = tcg_temp_new_i32();
1847         tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
1848         tcg_gen_addi_i32(t, t, 32);
1849         tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
1850         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1851         tcg_temp_free_i32(t);
1852     } else {
1853         TCGv_i64 t0 = tcg_const_i64(arg2);
1854         tcg_gen_clz_i64(ret, arg1, t0);
1855         tcg_temp_free_i64(t0);
1856     }
1857 }
1858 
1859 void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1860 {
1861     if (TCG_TARGET_HAS_ctz_i64) {
1862         tcg_gen_op3_i64(INDEX_op_ctz_i64, ret, arg1, arg2);
1863     } else if (TCG_TARGET_HAS_ctpop_i64 || TCG_TARGET_HAS_clz_i64) {
1864         TCGv_i64 z, t = tcg_temp_new_i64();
1865 
1866         if (TCG_TARGET_HAS_ctpop_i64) {
1867             tcg_gen_subi_i64(t, arg1, 1);
1868             tcg_gen_andc_i64(t, t, arg1);
1869             tcg_gen_ctpop_i64(t, t);
1870         } else {
1871             /* Since all non-x86 hosts have clz(0) == 64, don't fight it.  */
1872             tcg_gen_neg_i64(t, arg1);
1873             tcg_gen_and_i64(t, t, arg1);
1874             tcg_gen_clzi_i64(t, t, 64);
1875             tcg_gen_xori_i64(t, t, 63);
1876         }
1877         z = tcg_constant_i64(0);
1878         tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
1879         tcg_temp_free_i64(t);
1881     } else {
1882         gen_helper_ctz_i64(ret, arg1, arg2);
1883     }
1884 }
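
/*
 * Illustrative sketch (not part of the generator): the identities the
 * two fallbacks above rely on, written for a host uint64_t.  Helper
 * names and the GCC builtins are illustrative assumptions.
 */
static inline uint64_t example_ctz64_via_ctpop(uint64_t x)
{
    /* (x - 1) & ~x sets exactly the bits below the lowest set bit,
       so its population count equals ctz(x).  */
    return __builtin_popcountll((x - 1) & ~x);
}

static inline uint64_t example_ctz64_via_clz(uint64_t x)
{
    /* x & -x isolates the lowest set bit; a bit at position n has
       clz == 63 - n.  Undefined for x == 0, which is why the movcond
       above substitutes arg2 for the zero case.  */
    return 63 - __builtin_clzll(x & -x);
}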
1885 
1886 void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
1887 {
1888     if (TCG_TARGET_REG_BITS == 32
1889         && TCG_TARGET_HAS_ctz_i32
1890         && arg2 <= 0xffffffffu) {
1891         TCGv_i32 t32 = tcg_temp_new_i32();
1892         tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
1893         tcg_gen_addi_i32(t32, t32, 32);
1894         tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
1895         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1896         tcg_temp_free_i32(t32);
1897     } else if (!TCG_TARGET_HAS_ctz_i64
1898                && TCG_TARGET_HAS_ctpop_i64
1899                && arg2 == 64) {
1900         /* This equivalence has the advantage of not requiring a fixup.  */
1901         TCGv_i64 t = tcg_temp_new_i64();
1902         tcg_gen_subi_i64(t, arg1, 1);
1903         tcg_gen_andc_i64(t, t, arg1);
1904         tcg_gen_ctpop_i64(ret, t);
1905         tcg_temp_free_i64(t);
1906     } else {
1907         TCGv_i64 t0 = tcg_const_i64(arg2);
1908         tcg_gen_ctz_i64(ret, arg1, t0);
1909         tcg_temp_free_i64(t0);
1910     }
1911 }
1912 
1913 void tcg_gen_clrsb_i64(TCGv_i64 ret, TCGv_i64 arg)
1914 {
1915     if (TCG_TARGET_HAS_clz_i64 || TCG_TARGET_HAS_clz_i32) {
1916         TCGv_i64 t = tcg_temp_new_i64();
1917         tcg_gen_sari_i64(t, arg, 63);
1918         tcg_gen_xor_i64(t, t, arg);
1919         tcg_gen_clzi_i64(t, t, 64);
1920         tcg_gen_subi_i64(ret, t, 1);
1921         tcg_temp_free_i64(t);
1922     } else {
1923         gen_helper_clrsb_i64(ret, arg);
1924     }
1925 }
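
/*
 * Illustrative sketch (not part of the generator): clrsb counts the
 * redundant sign bits below the MSB.  XOR with the broadcast sign
 * turns those copies into leading zeros, so
 * clrsb(x) == clz(x ^ (x >> 63)) - 1, with the clzi(..., 64) default
 * above making 0 and -1 come out as 63.  Helper name and builtin are
 * illustrative.
 */
static inline int example_clrsb64(int64_t x)
{
    uint64_t t = (uint64_t)x ^ (uint64_t)(x >> 63);
    return t ? __builtin_clzll(t) - 1 : 63;
}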
1926 
1927 void tcg_gen_ctpop_i64(TCGv_i64 ret, TCGv_i64 arg1)
1928 {
1929     if (TCG_TARGET_HAS_ctpop_i64) {
1930         tcg_gen_op2_i64(INDEX_op_ctpop_i64, ret, arg1);
1931     } else if (TCG_TARGET_REG_BITS == 32 && TCG_TARGET_HAS_ctpop_i32) {
1932         tcg_gen_ctpop_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
1933         tcg_gen_ctpop_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
1934         tcg_gen_add_i32(TCGV_LOW(ret), TCGV_LOW(ret), TCGV_HIGH(ret));
1935         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
1936     } else {
1937         gen_helper_ctpop_i64(ret, arg1);
1938     }
1939 }
1940 
1941 void tcg_gen_rotl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1942 {
1943     if (TCG_TARGET_HAS_rot_i64) {
1944         tcg_gen_op3_i64(INDEX_op_rotl_i64, ret, arg1, arg2);
1945     } else {
1946         TCGv_i64 t0, t1;
1947         t0 = tcg_temp_new_i64();
1948         t1 = tcg_temp_new_i64();
1949         tcg_gen_shl_i64(t0, arg1, arg2);
1950         tcg_gen_subfi_i64(t1, 64, arg2);
1951         tcg_gen_shr_i64(t1, arg1, t1);
1952         tcg_gen_or_i64(ret, t0, t1);
1953         tcg_temp_free_i64(t0);
1954         tcg_temp_free_i64(t1);
1955     }
1956 }
1957 
1958 void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1959 {
1960     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
1961     /* some cases can be optimized here */
1962     if (arg2 == 0) {
1963         tcg_gen_mov_i64(ret, arg1);
1964     } else if (TCG_TARGET_HAS_rot_i64) {
1965         tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
1966     } else {
1967         TCGv_i64 t0, t1;
1968         t0 = tcg_temp_new_i64();
1969         t1 = tcg_temp_new_i64();
1970         tcg_gen_shli_i64(t0, arg1, arg2);
1971         tcg_gen_shri_i64(t1, arg1, 64 - arg2);
1972         tcg_gen_or_i64(ret, t0, t1);
1973         tcg_temp_free_i64(t0);
1974         tcg_temp_free_i64(t1);
1975     }
1976 }
1977 
1978 void tcg_gen_rotr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
1979 {
1980     if (TCG_TARGET_HAS_rot_i64) {
1981         tcg_gen_op3_i64(INDEX_op_rotr_i64, ret, arg1, arg2);
1982     } else {
1983         TCGv_i64 t0, t1;
1984         t0 = tcg_temp_new_i64();
1985         t1 = tcg_temp_new_i64();
1986         tcg_gen_shr_i64(t0, arg1, arg2);
1987         tcg_gen_subfi_i64(t1, 64, arg2);
1988         tcg_gen_shl_i64(t1, arg1, t1);
1989         tcg_gen_or_i64(ret, t0, t1);
1990         tcg_temp_free_i64(t0);
1991         tcg_temp_free_i64(t1);
1992     }
1993 }
1994 
1995 void tcg_gen_rotri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
1996 {
1997     tcg_debug_assert(arg2 >= 0 && arg2 < 64);
1998     /* some cases can be optimized here */
1999     if (arg2 == 0) {
2000         tcg_gen_mov_i64(ret, arg1);
2001     } else {
2002         tcg_gen_rotli_i64(ret, arg1, 64 - arg2);
2003     }
2004 }
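
/*
 * Illustrative sketch (not part of the generator): the shift/or
 * expansion used when the host lacks rotates, plus the identity
 * rotr(x, n) == rotl(x, 64 - n) that tcg_gen_rotri_i64 exploits.
 * Assumes 0 < n < 64; helper name is hypothetical.
 */
static inline uint64_t example_rotl64(uint64_t x, unsigned n)
{
    return (x << n) | (x >> (64 - n));
}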
2005 
2006 void tcg_gen_deposit_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2,
2007                          unsigned int ofs, unsigned int len)
2008 {
2009     uint64_t mask;
2010     TCGv_i64 t1;
2011 
2012     tcg_debug_assert(ofs < 64);
2013     tcg_debug_assert(len > 0);
2014     tcg_debug_assert(len <= 64);
2015     tcg_debug_assert(ofs + len <= 64);
2016 
2017     if (len == 64) {
2018         tcg_gen_mov_i64(ret, arg2);
2019         return;
2020     }
2021     if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2022         tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, arg1, arg2, ofs, len);
2023         return;
2024     }
2025 
2026     if (TCG_TARGET_REG_BITS == 32) {
2027         if (ofs >= 32) {
2028             tcg_gen_deposit_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1),
2029                                 TCGV_LOW(arg2), ofs - 32, len);
2030             tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg1));
2031             return;
2032         }
2033         if (ofs + len <= 32) {
2034             tcg_gen_deposit_i32(TCGV_LOW(ret), TCGV_LOW(arg1),
2035                                 TCGV_LOW(arg2), ofs, len);
2036             tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1));
2037             return;
2038         }
2039     }
2040 
2041     t1 = tcg_temp_new_i64();
2042 
2043     if (TCG_TARGET_HAS_extract2_i64) {
2044         if (ofs + len == 64) {
2045             tcg_gen_shli_i64(t1, arg1, len);
2046             tcg_gen_extract2_i64(ret, t1, arg2, len);
2047             goto done;
2048         }
2049         if (ofs == 0) {
2050             tcg_gen_extract2_i64(ret, arg1, arg2, len);
2051             tcg_gen_rotli_i64(ret, ret, len);
2052             goto done;
2053         }
2054     }
2055 
2056     mask = (1ull << len) - 1;
2057     if (ofs + len < 64) {
2058         tcg_gen_andi_i64(t1, arg2, mask);
2059         tcg_gen_shli_i64(t1, t1, ofs);
2060     } else {
2061         tcg_gen_shli_i64(t1, arg2, ofs);
2062     }
2063     tcg_gen_andi_i64(ret, arg1, ~(mask << ofs));
2064     tcg_gen_or_i64(ret, ret, t1);
2065  done:
2066     tcg_temp_free_i64(t1);
2067 }
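
/*
 * Illustrative sketch (not part of the generator): the generic
 * mask-based fallback at the end of tcg_gen_deposit_i64, for the
 * len < 64 case the asserts guarantee.  Helper name is hypothetical.
 */
static inline uint64_t example_deposit64(uint64_t arg1, uint64_t arg2,
                                         unsigned ofs, unsigned len)
{
    uint64_t mask = (1ull << len) - 1;
    /* Clear the field in arg1, then insert the low len bits of arg2.  */
    return (arg1 & ~(mask << ofs)) | ((arg2 & mask) << ofs);
}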
2068 
2069 void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
2070                            unsigned int ofs, unsigned int len)
2071 {
2072     tcg_debug_assert(ofs < 64);
2073     tcg_debug_assert(len > 0);
2074     tcg_debug_assert(len <= 64);
2075     tcg_debug_assert(ofs + len <= 64);
2076 
2077     if (ofs + len == 64) {
2078         tcg_gen_shli_i64(ret, arg, ofs);
2079     } else if (ofs == 0) {
2080         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2081     } else if (TCG_TARGET_HAS_deposit_i64
2082                && TCG_TARGET_deposit_i64_valid(ofs, len)) {
2083         TCGv_i64 zero = tcg_constant_i64(0);
2084         tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
2085     } else {
2086         if (TCG_TARGET_REG_BITS == 32) {
2087             if (ofs >= 32) {
2088                 tcg_gen_deposit_z_i32(TCGV_HIGH(ret), TCGV_LOW(arg),
2089                                       ofs - 32, len);
2090                 tcg_gen_movi_i32(TCGV_LOW(ret), 0);
2091                 return;
2092             }
2093             if (ofs + len <= 32) {
2094                 tcg_gen_deposit_z_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2095                 tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2096                 return;
2097             }
2098         }
2099         /* To help two-operand hosts we prefer to zero-extend first,
2100            which allows ARG to stay live.  */
2101         switch (len) {
2102         case 32:
2103             if (TCG_TARGET_HAS_ext32u_i64) {
2104                 tcg_gen_ext32u_i64(ret, arg);
2105                 tcg_gen_shli_i64(ret, ret, ofs);
2106                 return;
2107             }
2108             break;
2109         case 16:
2110             if (TCG_TARGET_HAS_ext16u_i64) {
2111                 tcg_gen_ext16u_i64(ret, arg);
2112                 tcg_gen_shli_i64(ret, ret, ofs);
2113                 return;
2114             }
2115             break;
2116         case 8:
2117             if (TCG_TARGET_HAS_ext8u_i64) {
2118                 tcg_gen_ext8u_i64(ret, arg);
2119                 tcg_gen_shli_i64(ret, ret, ofs);
2120                 return;
2121             }
2122             break;
2123         }
2124         /* Otherwise prefer zero-extension over AND for code size.  */
2125         switch (ofs + len) {
2126         case 32:
2127             if (TCG_TARGET_HAS_ext32u_i64) {
2128                 tcg_gen_shli_i64(ret, arg, ofs);
2129                 tcg_gen_ext32u_i64(ret, ret);
2130                 return;
2131             }
2132             break;
2133         case 16:
2134             if (TCG_TARGET_HAS_ext16u_i64) {
2135                 tcg_gen_shli_i64(ret, arg, ofs);
2136                 tcg_gen_ext16u_i64(ret, ret);
2137                 return;
2138             }
2139             break;
2140         case 8:
2141             if (TCG_TARGET_HAS_ext8u_i64) {
2142                 tcg_gen_shli_i64(ret, arg, ofs);
2143                 tcg_gen_ext8u_i64(ret, ret);
2144                 return;
2145             }
2146             break;
2147         }
2148         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2149         tcg_gen_shli_i64(ret, ret, ofs);
2150     }
2151 }
2152 
2153 void tcg_gen_extract_i64(TCGv_i64 ret, TCGv_i64 arg,
2154                          unsigned int ofs, unsigned int len)
2155 {
2156     tcg_debug_assert(ofs < 64);
2157     tcg_debug_assert(len > 0);
2158     tcg_debug_assert(len <= 64);
2159     tcg_debug_assert(ofs + len <= 64);
2160 
2161     /* Canonicalize certain special cases, even if extract is supported.  */
2162     if (ofs + len == 64) {
2163         tcg_gen_shri_i64(ret, arg, 64 - len);
2164         return;
2165     }
2166     if (ofs == 0) {
2167         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
2168         return;
2169     }
2170 
2171     if (TCG_TARGET_REG_BITS == 32) {
2172         /* Look for a 32-bit extract within one of the two words.  */
2173         if (ofs >= 32) {
2174             tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2175             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2176             return;
2177         }
2178         if (ofs + len <= 32) {
2179             tcg_gen_extract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2180             tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2181             return;
2182         }
2183         /* The field is split across two words.  One double-word
2184            shift is better than two double-word shifts.  */
2185         goto do_shift_and;
2186     }
2187 
2188     if (TCG_TARGET_HAS_extract_i64
2189         && TCG_TARGET_extract_i64_valid(ofs, len)) {
2190         tcg_gen_op4ii_i64(INDEX_op_extract_i64, ret, arg, ofs, len);
2191         return;
2192     }
2193 
2194     /* Assume that zero-extension, if available, is cheaper than a shift.  */
2195     switch (ofs + len) {
2196     case 32:
2197         if (TCG_TARGET_HAS_ext32u_i64) {
2198             tcg_gen_ext32u_i64(ret, arg);
2199             tcg_gen_shri_i64(ret, ret, ofs);
2200             return;
2201         }
2202         break;
2203     case 16:
2204         if (TCG_TARGET_HAS_ext16u_i64) {
2205             tcg_gen_ext16u_i64(ret, arg);
2206             tcg_gen_shri_i64(ret, ret, ofs);
2207             return;
2208         }
2209         break;
2210     case 8:
2211         if (TCG_TARGET_HAS_ext8u_i64) {
2212             tcg_gen_ext8u_i64(ret, arg);
2213             tcg_gen_shri_i64(ret, ret, ofs);
2214             return;
2215         }
2216         break;
2217     }
2218 
2219     /* ??? Ideally we'd know what values are available for immediate AND.
2220        Assume that 8 bits are available, plus the special cases of 16 and 32,
2221        so that we get ext8u, ext16u, and ext32u.  */
2222     switch (len) {
2223     case 1 ... 8: case 16: case 32:
2224     do_shift_and:
2225         tcg_gen_shri_i64(ret, arg, ofs);
2226         tcg_gen_andi_i64(ret, ret, (1ull << len) - 1);
2227         break;
2228     default:
2229         tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2230         tcg_gen_shri_i64(ret, ret, 64 - len);
2231         break;
2232     }
2233 }
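
/*
 * Illustrative sketch (not part of the generator): the two unsigned
 * extract fallbacks above.  Helper names are hypothetical; both
 * assume 0 < len and ofs + len <= 64, and the first also len < 64.
 */
static inline uint64_t example_extract64_mask(uint64_t arg, unsigned ofs,
                                              unsigned len)
{
    return (arg >> ofs) & ((1ull << len) - 1);
}

static inline uint64_t example_extract64_shifts(uint64_t arg, unsigned ofs,
                                                unsigned len)
{
    /* Left-justify the field at the MSB, then shift it back down.  */
    return (arg << (64 - len - ofs)) >> (64 - len);
}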
2234 
2235 void tcg_gen_sextract_i64(TCGv_i64 ret, TCGv_i64 arg,
2236                           unsigned int ofs, unsigned int len)
2237 {
2238     tcg_debug_assert(ofs < 64);
2239     tcg_debug_assert(len > 0);
2240     tcg_debug_assert(len <= 64);
2241     tcg_debug_assert(ofs + len <= 64);
2242 
2243     /* Canonicalize certain special cases, even if sextract is supported.  */
2244     if (ofs + len == 64) {
2245         tcg_gen_sari_i64(ret, arg, 64 - len);
2246         return;
2247     }
2248     if (ofs == 0) {
2249         switch (len) {
2250         case 32:
2251             tcg_gen_ext32s_i64(ret, arg);
2252             return;
2253         case 16:
2254             tcg_gen_ext16s_i64(ret, arg);
2255             return;
2256         case 8:
2257             tcg_gen_ext8s_i64(ret, arg);
2258             return;
2259         }
2260     }
2261 
2262     if (TCG_TARGET_REG_BITS == 32) {
2263         /* Look for a 32-bit extract within one of the two words.  */
2264         if (ofs >= 32) {
2265             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_HIGH(arg), ofs - 32, len);
2266         } else if (ofs + len <= 32) {
2267             tcg_gen_sextract_i32(TCGV_LOW(ret), TCGV_LOW(arg), ofs, len);
2268         } else if (ofs == 0) {
2269             tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
2270             tcg_gen_sextract_i32(TCGV_HIGH(ret), TCGV_HIGH(arg), 0, len - 32);
2271             return;
2272         } else if (len > 32) {
2273             TCGv_i32 t = tcg_temp_new_i32();
2274             /* Extract the bits for the high word normally.  */
2275             tcg_gen_sextract_i32(t, TCGV_HIGH(arg), ofs, len - 32);
2276             /* Shift the field down for the low part.  */
2277             tcg_gen_shri_i64(ret, arg, ofs);
2278             /* Overwrite the shift into the high part.  */
2279             tcg_gen_mov_i32(TCGV_HIGH(ret), t);
2280             tcg_temp_free_i32(t);
2281             return;
2282         } else {
2283             /* Shift the field down for the low part, such that the
2284                field sits at the MSB.  */
2285             tcg_gen_shri_i64(ret, arg, ofs + len - 32);
2286             /* Shift the field down from the MSB, sign extending.  */
2287             tcg_gen_sari_i32(TCGV_LOW(ret), TCGV_LOW(ret), 32 - len);
2288         }
2289         /* Sign-extend the field from 32 bits.  */
2290         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2291         return;
2292     }
2293 
2294     if (TCG_TARGET_HAS_sextract_i64
2295         && TCG_TARGET_extract_i64_valid(ofs, len)) {
2296         tcg_gen_op4ii_i64(INDEX_op_sextract_i64, ret, arg, ofs, len);
2297         return;
2298     }
2299 
2300     /* Assume that sign-extension, if available, is cheaper than a shift.  */
2301     switch (ofs + len) {
2302     case 32:
2303         if (TCG_TARGET_HAS_ext32s_i64) {
2304             tcg_gen_ext32s_i64(ret, arg);
2305             tcg_gen_sari_i64(ret, ret, ofs);
2306             return;
2307         }
2308         break;
2309     case 16:
2310         if (TCG_TARGET_HAS_ext16s_i64) {
2311             tcg_gen_ext16s_i64(ret, arg);
2312             tcg_gen_sari_i64(ret, ret, ofs);
2313             return;
2314         }
2315         break;
2316     case 8:
2317         if (TCG_TARGET_HAS_ext8s_i64) {
2318             tcg_gen_ext8s_i64(ret, arg);
2319             tcg_gen_sari_i64(ret, ret, ofs);
2320             return;
2321         }
2322         break;
2323     }
2324     switch (len) {
2325     case 32:
2326         if (TCG_TARGET_HAS_ext32s_i64) {
2327             tcg_gen_shri_i64(ret, arg, ofs);
2328             tcg_gen_ext32s_i64(ret, ret);
2329             return;
2330         }
2331         break;
2332     case 16:
2333         if (TCG_TARGET_HAS_ext16s_i64) {
2334             tcg_gen_shri_i64(ret, arg, ofs);
2335             tcg_gen_ext16s_i64(ret, ret);
2336             return;
2337         }
2338         break;
2339     case 8:
2340         if (TCG_TARGET_HAS_ext8s_i64) {
2341             tcg_gen_shri_i64(ret, arg, ofs);
2342             tcg_gen_ext8s_i64(ret, ret);
2343             return;
2344         }
2345         break;
2346     }
2347     tcg_gen_shli_i64(ret, arg, 64 - len - ofs);
2348     tcg_gen_sari_i64(ret, ret, 64 - len);
2349 }
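
/*
 * Illustrative sketch (not part of the generator): the final
 * shift-pair fallback above, assuming the usual arithmetic right
 * shift of a signed 64-bit type.  Helper name is hypothetical.
 */
static inline int64_t example_sextract64(uint64_t arg, unsigned ofs,
                                         unsigned len)
{
    /* Left-justify the field, then sign-extend it back down.  */
    return (int64_t)(arg << (64 - len - ofs)) >> (64 - len);
}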
2350 
2351 /*
2352  * Extract 64 bits from a 128-bit input, ah:al, starting from ofs.
2353  * Unlike tcg_gen_extract_i64 above, len is fixed at 64.
2354  */
2355 void tcg_gen_extract2_i64(TCGv_i64 ret, TCGv_i64 al, TCGv_i64 ah,
2356                           unsigned int ofs)
2357 {
2358     tcg_debug_assert(ofs <= 64);
2359     if (ofs == 0) {
2360         tcg_gen_mov_i64(ret, al);
2361     } else if (ofs == 64) {
2362         tcg_gen_mov_i64(ret, ah);
2363     } else if (al == ah) {
2364         tcg_gen_rotri_i64(ret, al, ofs);
2365     } else if (TCG_TARGET_HAS_extract2_i64) {
2366         tcg_gen_op4i_i64(INDEX_op_extract2_i64, ret, al, ah, ofs);
2367     } else {
2368         TCGv_i64 t0 = tcg_temp_new_i64();
2369         tcg_gen_shri_i64(t0, al, ofs);
2370         tcg_gen_deposit_i64(ret, t0, ah, 64 - ofs, ofs);
2371         tcg_temp_free_i64(t0);
2372     }
2373 }
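
/*
 * Illustrative sketch (not part of the generator): the shri/deposit
 * fallback above computes the double-word funnel shift below.
 * Assumes 0 < ofs < 64; helper name is hypothetical.
 */
static inline uint64_t example_extract2_64(uint64_t al, uint64_t ah,
                                           unsigned ofs)
{
    return (al >> ofs) | (ah << (64 - ofs));
}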
2374 
2375 void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
2376                          TCGv_i64 c2, TCGv_i64 v1, TCGv_i64 v2)
2377 {
2378     if (cond == TCG_COND_ALWAYS) {
2379         tcg_gen_mov_i64(ret, v1);
2380     } else if (cond == TCG_COND_NEVER) {
2381         tcg_gen_mov_i64(ret, v2);
2382     } else if (TCG_TARGET_REG_BITS == 32) {
2383         TCGv_i32 t0 = tcg_temp_new_i32();
2384         TCGv_i32 t1 = tcg_temp_new_i32();
2385         tcg_gen_op6i_i32(INDEX_op_setcond2_i32, t0,
2386                          TCGV_LOW(c1), TCGV_HIGH(c1),
2387                          TCGV_LOW(c2), TCGV_HIGH(c2), cond);
2388 
2389         if (TCG_TARGET_HAS_movcond_i32) {
2390             tcg_gen_movi_i32(t1, 0);
2391             tcg_gen_movcond_i32(TCG_COND_NE, TCGV_LOW(ret), t0, t1,
2392                                 TCGV_LOW(v1), TCGV_LOW(v2));
2393             tcg_gen_movcond_i32(TCG_COND_NE, TCGV_HIGH(ret), t0, t1,
2394                                 TCGV_HIGH(v1), TCGV_HIGH(v2));
2395         } else {
2396             tcg_gen_neg_i32(t0, t0);
2397 
2398             tcg_gen_and_i32(t1, TCGV_LOW(v1), t0);
2399             tcg_gen_andc_i32(TCGV_LOW(ret), TCGV_LOW(v2), t0);
2400             tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(ret), t1);
2401 
2402             tcg_gen_and_i32(t1, TCGV_HIGH(v1), t0);
2403             tcg_gen_andc_i32(TCGV_HIGH(ret), TCGV_HIGH(v2), t0);
2404             tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(ret), t1);
2405         }
2406         tcg_temp_free_i32(t0);
2407         tcg_temp_free_i32(t1);
2408     } else if (TCG_TARGET_HAS_movcond_i64) {
2409         tcg_gen_op6i_i64(INDEX_op_movcond_i64, ret, c1, c2, v1, v2, cond);
2410     } else {
2411         TCGv_i64 t0 = tcg_temp_new_i64();
2412         TCGv_i64 t1 = tcg_temp_new_i64();
2413         tcg_gen_setcond_i64(cond, t0, c1, c2);
2414         tcg_gen_neg_i64(t0, t0);
2415         tcg_gen_and_i64(t1, v1, t0);
2416         tcg_gen_andc_i64(ret, v2, t0);
2417         tcg_gen_or_i64(ret, ret, t1);
2418         tcg_temp_free_i64(t0);
2419         tcg_temp_free_i64(t1);
2420     }
2421 }
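
/*
 * Illustrative sketch (not part of the generator): the branchless
 * select used when the host lacks movcond.  Negating the 0/1 setcond
 * result broadcasts it into an all-zeros or all-ones mask.  Helper
 * name is hypothetical.
 */
static inline uint64_t example_select64(bool cond, uint64_t v1, uint64_t v2)
{
    uint64_t mask = -(uint64_t)cond;            /* 0 or ~0 */
    return (v1 & mask) | (v2 & ~mask);
}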
2422 
2423 void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2424                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2425 {
2426     if (TCG_TARGET_HAS_add2_i64) {
2427         tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
2428     } else {
2429         TCGv_i64 t0 = tcg_temp_new_i64();
2430         TCGv_i64 t1 = tcg_temp_new_i64();
2431         tcg_gen_add_i64(t0, al, bl);
2432         tcg_gen_setcond_i64(TCG_COND_LTU, t1, t0, al);
2433         tcg_gen_add_i64(rh, ah, bh);
2434         tcg_gen_add_i64(rh, rh, t1);
2435         tcg_gen_mov_i64(rl, t0);
2436         tcg_temp_free_i64(t0);
2437         tcg_temp_free_i64(t1);
2438     }
2439 }
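
/*
 * Illustrative sketch (not part of the generator): the fallback above
 * detects carry with an unsigned compare, since an unsigned sum wraps
 * below either addend exactly when a carry occurs.  Helper name is
 * hypothetical.
 */
static inline void example_add128(uint64_t *rl, uint64_t *rh,
                                  uint64_t al, uint64_t ah,
                                  uint64_t bl, uint64_t bh)
{
    uint64_t lo = al + bl;
    *rh = ah + bh + (lo < al);  /* carry out of the low word */
    *rl = lo;
}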
2440 
2441 void tcg_gen_sub2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
2442                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
2443 {
2444     if (TCG_TARGET_HAS_sub2_i64) {
2445         tcg_gen_op6_i64(INDEX_op_sub2_i64, rl, rh, al, ah, bl, bh);
2446     } else {
2447         TCGv_i64 t0 = tcg_temp_new_i64();
2448         TCGv_i64 t1 = tcg_temp_new_i64();
2449         tcg_gen_sub_i64(t0, al, bl);
2450         tcg_gen_setcond_i64(TCG_COND_LTU, t1, al, bl);
2451         tcg_gen_sub_i64(rh, ah, bh);
2452         tcg_gen_sub_i64(rh, rh, t1);
2453         tcg_gen_mov_i64(rl, t0);
2454         tcg_temp_free_i64(t0);
2455         tcg_temp_free_i64(t1);
2456     }
2457 }
2458 
2459 void tcg_gen_mulu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2460 {
2461     if (TCG_TARGET_HAS_mulu2_i64) {
2462         tcg_gen_op4_i64(INDEX_op_mulu2_i64, rl, rh, arg1, arg2);
2463     } else if (TCG_TARGET_HAS_muluh_i64) {
2464         TCGv_i64 t = tcg_temp_new_i64();
2465         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2466         tcg_gen_op3_i64(INDEX_op_muluh_i64, rh, arg1, arg2);
2467         tcg_gen_mov_i64(rl, t);
2468         tcg_temp_free_i64(t);
2469     } else {
2470         TCGv_i64 t0 = tcg_temp_new_i64();
2471         tcg_gen_mul_i64(t0, arg1, arg2);
2472         gen_helper_muluh_i64(rh, arg1, arg2);
2473         tcg_gen_mov_i64(rl, t0);
2474         tcg_temp_free_i64(t0);
2475     }
2476 }
2477 
2478 void tcg_gen_muls2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2479 {
2480     if (TCG_TARGET_HAS_muls2_i64) {
2481         tcg_gen_op4_i64(INDEX_op_muls2_i64, rl, rh, arg1, arg2);
2482     } else if (TCG_TARGET_HAS_mulsh_i64) {
2483         TCGv_i64 t = tcg_temp_new_i64();
2484         tcg_gen_op3_i64(INDEX_op_mul_i64, t, arg1, arg2);
2485         tcg_gen_op3_i64(INDEX_op_mulsh_i64, rh, arg1, arg2);
2486         tcg_gen_mov_i64(rl, t);
2487         tcg_temp_free_i64(t);
2488     } else if (TCG_TARGET_HAS_mulu2_i64 || TCG_TARGET_HAS_muluh_i64) {
2489         TCGv_i64 t0 = tcg_temp_new_i64();
2490         TCGv_i64 t1 = tcg_temp_new_i64();
2491         TCGv_i64 t2 = tcg_temp_new_i64();
2492         TCGv_i64 t3 = tcg_temp_new_i64();
2493         tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2494         /* Adjust for negative inputs.  */
2495         tcg_gen_sari_i64(t2, arg1, 63);
2496         tcg_gen_sari_i64(t3, arg2, 63);
2497         tcg_gen_and_i64(t2, t2, arg2);
2498         tcg_gen_and_i64(t3, t3, arg1);
2499         tcg_gen_sub_i64(rh, t1, t2);
2500         tcg_gen_sub_i64(rh, rh, t3);
2501         tcg_gen_mov_i64(rl, t0);
2502         tcg_temp_free_i64(t0);
2503         tcg_temp_free_i64(t1);
2504         tcg_temp_free_i64(t2);
2505         tcg_temp_free_i64(t3);
2506     } else {
2507         TCGv_i64 t0 = tcg_temp_new_i64();
2508         tcg_gen_mul_i64(t0, arg1, arg2);
2509         gen_helper_mulsh_i64(rh, arg1, arg2);
2510         tcg_gen_mov_i64(rl, t0);
2511         tcg_temp_free_i64(t0);
2512     }
2513 }
2514 
2515 void tcg_gen_mulsu2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 arg1, TCGv_i64 arg2)
2516 {
2517     TCGv_i64 t0 = tcg_temp_new_i64();
2518     TCGv_i64 t1 = tcg_temp_new_i64();
2519     TCGv_i64 t2 = tcg_temp_new_i64();
2520     tcg_gen_mulu2_i64(t0, t1, arg1, arg2);
2521     /* Adjust for negative input for the signed arg1.  */
2522     tcg_gen_sari_i64(t2, arg1, 63);
2523     tcg_gen_and_i64(t2, t2, arg2);
2524     tcg_gen_sub_i64(rh, t1, t2);
2525     tcg_gen_mov_i64(rl, t0);
2526     tcg_temp_free_i64(t0);
2527     tcg_temp_free_i64(t1);
2528     tcg_temp_free_i64(t2);
2529 }
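
/*
 * Illustrative sketch (not part of the generator): reading the signed
 * arg1 as unsigned adds 2^64 * arg2 to the product when arg1 < 0, so
 * the signed-by-unsigned high word is the unsigned high word minus
 * (arg1 < 0 ? arg2 : 0) -- the sari/and/sub sequence above.  Assumes
 * a host __int128; helper name is hypothetical.
 */
static inline uint64_t example_mulsuh64(int64_t arg1, uint64_t arg2)
{
    unsigned __int128 p = (unsigned __int128)(uint64_t)arg1 * arg2;
    return (uint64_t)(p >> 64) - (arg1 < 0 ? arg2 : 0);
}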
2530 
2531 void tcg_gen_smin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2532 {
2533     tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, a, b);
2534 }
2535 
2536 void tcg_gen_umin_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2537 {
2538     tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, a, b);
2539 }
2540 
2541 void tcg_gen_smax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2542 {
2543     tcg_gen_movcond_i64(TCG_COND_LT, ret, a, b, b, a);
2544 }
2545 
2546 void tcg_gen_umax_i64(TCGv_i64 ret, TCGv_i64 a, TCGv_i64 b)
2547 {
2548     tcg_gen_movcond_i64(TCG_COND_LTU, ret, a, b, b, a);
2549 }
2550 
2551 void tcg_gen_abs_i64(TCGv_i64 ret, TCGv_i64 a)
2552 {
2553     TCGv_i64 t = tcg_temp_new_i64();
2554 
2555     tcg_gen_sari_i64(t, a, 63);
2556     tcg_gen_xor_i64(ret, a, t);
2557     tcg_gen_sub_i64(ret, ret, t);
2558     tcg_temp_free_i64(t);
2559 }
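
/*
 * Illustrative sketch (not part of the generator): in the branchless
 * abs above, XOR with the broadcast sign conditionally complements,
 * and subtracting the sign (-1 or 0) conditionally adds one -- i.e.
 * two's-complement negation applied only to negative inputs.  Helper
 * name is hypothetical.
 */
static inline uint64_t example_abs64(int64_t a)
{
    int64_t sign = a >> 63;                     /* 0 or -1 */
    return ((uint64_t)a ^ sign) - sign;
}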
2560 
2561 /* Size changing operations.  */
2562 
2563 void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2564 {
2565     if (TCG_TARGET_REG_BITS == 32) {
2566         tcg_gen_mov_i32(ret, TCGV_LOW(arg));
2567     } else if (TCG_TARGET_HAS_extrl_i64_i32) {
2568         tcg_gen_op2(INDEX_op_extrl_i64_i32,
2569                     tcgv_i32_arg(ret), tcgv_i64_arg(arg));
2570     } else {
2571         tcg_gen_mov_i32(ret, (TCGv_i32)arg);
2572     }
2573 }
2574 
2575 void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg)
2576 {
2577     if (TCG_TARGET_REG_BITS == 32) {
2578         tcg_gen_mov_i32(ret, TCGV_HIGH(arg));
2579     } else if (TCG_TARGET_HAS_extrh_i64_i32) {
2580         tcg_gen_op2(INDEX_op_extrh_i64_i32,
2581                     tcgv_i32_arg(ret), tcgv_i64_arg(arg));
2582     } else {
2583         TCGv_i64 t = tcg_temp_new_i64();
2584         tcg_gen_shri_i64(t, arg, 32);
2585         tcg_gen_mov_i32(ret, (TCGv_i32)t);
2586         tcg_temp_free_i64(t);
2587     }
2588 }
2589 
2590 void tcg_gen_extu_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2591 {
2592     if (TCG_TARGET_REG_BITS == 32) {
2593         tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2594         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
2595     } else {
2596         tcg_gen_op2(INDEX_op_extu_i32_i64,
2597                     tcgv_i64_arg(ret), tcgv_i32_arg(arg));
2598     }
2599 }
2600 
2601 void tcg_gen_ext_i32_i64(TCGv_i64 ret, TCGv_i32 arg)
2602 {
2603     if (TCG_TARGET_REG_BITS == 32) {
2604         tcg_gen_mov_i32(TCGV_LOW(ret), arg);
2605         tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
2606     } else {
2607         tcg_gen_op2(INDEX_op_ext_i32_i64,
2608                     tcgv_i64_arg(ret), tcgv_i32_arg(arg));
2609     }
2610 }
2611 
2612 void tcg_gen_concat_i32_i64(TCGv_i64 dest, TCGv_i32 low, TCGv_i32 high)
2613 {
2614     TCGv_i64 tmp;
2615 
2616     if (TCG_TARGET_REG_BITS == 32) {
2617         tcg_gen_mov_i32(TCGV_LOW(dest), low);
2618         tcg_gen_mov_i32(TCGV_HIGH(dest), high);
2619         return;
2620     }
2621 
2622     tmp = tcg_temp_new_i64();
2623     /* These extensions are only needed for type correctness.
2624        We may be able to do better given target specific information.  */
2625     tcg_gen_extu_i32_i64(tmp, high);
2626     tcg_gen_extu_i32_i64(dest, low);
2627     /* If deposit is available, use it.  Otherwise use the extra
2628        knowledge that we have of the zero-extensions above.  */
2629     if (TCG_TARGET_HAS_deposit_i64 && TCG_TARGET_deposit_i64_valid(32, 32)) {
2630         tcg_gen_deposit_i64(dest, dest, tmp, 32, 32);
2631     } else {
2632         tcg_gen_shli_i64(tmp, tmp, 32);
2633         tcg_gen_or_i64(dest, dest, tmp);
2634     }
2635     tcg_temp_free_i64(tmp);
2636 }
2637 
2638 void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg)
2639 {
2640     if (TCG_TARGET_REG_BITS == 32) {
2641         tcg_gen_mov_i32(lo, TCGV_LOW(arg));
2642         tcg_gen_mov_i32(hi, TCGV_HIGH(arg));
2643     } else {
2644         tcg_gen_extrl_i64_i32(lo, arg);
2645         tcg_gen_extrh_i64_i32(hi, arg);
2646     }
2647 }
2648 
2649 void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
2650 {
2651     tcg_gen_ext32u_i64(lo, arg);
2652     tcg_gen_shri_i64(hi, arg, 32);
2653 }
2654 
2655 /* QEMU specific operations.  */
2656 
2657 void tcg_gen_exit_tb(const TranslationBlock *tb, unsigned idx)
2658 {
2659     /*
2660      * Let the jit code return the read-only version of the
2661      * TranslationBlock, so that we minimize the pc-relative
2662      * distance of the address of the exit_tb code to TB.
2663      * This will improve utilization of pc-relative address loads.
2664      *
2665      * TODO: Move this to translator_loop, so that all const
2666      * TranslationBlock pointers refer to read-only memory.
2667      * This requires coordination with targets that do not use
2668      * the translator_loop.
2669      */
2670     uintptr_t val = (uintptr_t)tcg_splitwx_to_rx((void *)tb) + idx;
2671 
2672     if (tb == NULL) {
2673         tcg_debug_assert(idx == 0);
2674     } else if (idx <= TB_EXIT_IDXMAX) {
2675 #ifdef CONFIG_DEBUG_TCG
2676         /* This is an exit following a goto_tb.  Verify that we have
2677            seen this numbered exit before, via tcg_gen_goto_tb.  */
2678         tcg_debug_assert(tcg_ctx->goto_tb_issue_mask & (1 << idx));
2679 #endif
2680         /* When not chaining, exit without indicating a link.  */
2681         if (qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
2682             val = 0;
2683         }
2684     } else {
2685         /* This is an exit via the exitreq label.  */
2686         tcg_debug_assert(idx == TB_EXIT_REQUESTED);
2687     }
2688 
2689     plugin_gen_disable_mem_helpers();
2690     tcg_gen_op1i(INDEX_op_exit_tb, val);
2691 }
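
/*
 * Illustrative sketch (not part of the generator, and not the main
 * loop's actual code): the exit value packs the TB pointer with the
 * exit index in its low bits, so a caller can recover both pieces
 * roughly like this, using TB_EXIT_MASK from exec/exec-all.h.
 */
static inline void example_unpack_exit_tb(uintptr_t ret,
                                          const TranslationBlock **tb,
                                          unsigned *idx)
{
    *tb = (const TranslationBlock *)(ret & ~(uintptr_t)TB_EXIT_MASK);
    *idx = ret & TB_EXIT_MASK;
}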
2692 
2693 void tcg_gen_goto_tb(unsigned idx)
2694 {
2695     /* We only support two chained exits.  */
2696     tcg_debug_assert(idx <= TB_EXIT_IDXMAX);
2697 #ifdef CONFIG_DEBUG_TCG
2698     /* Verify that we haven't seen this numbered exit before.  */
2699     tcg_debug_assert((tcg_ctx->goto_tb_issue_mask & (1 << idx)) == 0);
2700     tcg_ctx->goto_tb_issue_mask |= 1 << idx;
2701 #endif
2702     plugin_gen_disable_mem_helpers();
2703     /* When not chaining, we simply fall through to the "fallback" exit.  */
2704     if (!qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
2705         tcg_gen_op1i(INDEX_op_goto_tb, idx);
2706     }
2707 }
2708 
2709 void tcg_gen_lookup_and_goto_ptr(void)
2710 {
2711     if (TCG_TARGET_HAS_goto_ptr && !qemu_loglevel_mask(CPU_LOG_TB_NOCHAIN)) {
2712         TCGv_ptr ptr;
2713 
2714         plugin_gen_disable_mem_helpers();
2715         ptr = tcg_temp_new_ptr();
2716         gen_helper_lookup_tb_ptr(ptr, cpu_env);
2717         tcg_gen_op1i(INDEX_op_goto_ptr, tcgv_ptr_arg(ptr));
2718         tcg_temp_free_ptr(ptr);
2719     } else {
2720         tcg_gen_exit_tb(NULL, 0);
2721     }
2722 }
2723 
2724 static inline MemOp tcg_canonicalize_memop(MemOp op, bool is64, bool st)
2725 {
2726     /* Trigger the asserts within as early as possible.  */
2727     (void)get_alignment_bits(op);
2728 
2729     switch (op & MO_SIZE) {
2730     case MO_8:
2731         op &= ~MO_BSWAP;
2732         break;
2733     case MO_16:
2734         break;
2735     case MO_32:
2736         if (!is64) {
2737             op &= ~MO_SIGN;
2738         }
2739         break;
2740     case MO_64:
2741         if (!is64) {
2742             tcg_abort();
2743         }
2744         break;
2745     }
2746     if (st) {
2747         op &= ~MO_SIGN;
2748     }
2749     return op;
2750 }
2751 
2752 static void gen_ldst_i32(TCGOpcode opc, TCGv_i32 val, TCGv addr,
2753                          MemOp memop, TCGArg idx)
2754 {
2755     TCGMemOpIdx oi = make_memop_idx(memop, idx);
2756 #if TARGET_LONG_BITS == 32
2757     tcg_gen_op3i_i32(opc, val, addr, oi);
2758 #else
2759     if (TCG_TARGET_REG_BITS == 32) {
2760         tcg_gen_op4i_i32(opc, val, TCGV_LOW(addr), TCGV_HIGH(addr), oi);
2761     } else {
2762         tcg_gen_op3(opc, tcgv_i32_arg(val), tcgv_i64_arg(addr), oi);
2763     }
2764 #endif
2765 }
2766 
2767 static void gen_ldst_i64(TCGOpcode opc, TCGv_i64 val, TCGv addr,
2768                          MemOp memop, TCGArg idx)
2769 {
2770     TCGMemOpIdx oi = make_memop_idx(memop, idx);
2771 #if TARGET_LONG_BITS == 32
2772     if (TCG_TARGET_REG_BITS == 32) {
2773         tcg_gen_op4i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val), addr, oi);
2774     } else {
2775         tcg_gen_op3(opc, tcgv_i64_arg(val), tcgv_i32_arg(addr), oi);
2776     }
2777 #else
2778     if (TCG_TARGET_REG_BITS == 32) {
2779         tcg_gen_op5i_i32(opc, TCGV_LOW(val), TCGV_HIGH(val),
2780                          TCGV_LOW(addr), TCGV_HIGH(addr), oi);
2781     } else {
2782         tcg_gen_op3i_i64(opc, val, addr, oi);
2783     }
2784 #endif
2785 }
2786 
2787 static void tcg_gen_req_mo(TCGBar type)
2788 {
2789 #ifdef TCG_GUEST_DEFAULT_MO
2790     type &= TCG_GUEST_DEFAULT_MO;
2791 #endif
2792     type &= ~TCG_TARGET_DEFAULT_MO;
2793     if (type) {
2794         tcg_gen_mb(type | TCG_BAR_SC);
2795     }
2796 }
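
/*
 * Worked example (illustrative): for an x86 guest, TCG_GUEST_DEFAULT_MO
 * is TCG_MO_ALL & ~TCG_MO_ST_LD, so a load's request of
 * TCG_MO_LD_LD | TCG_MO_ST_LD is first reduced to TCG_MO_LD_LD.  On an
 * x86 host that bit is also in TCG_TARGET_DEFAULT_MO and is cleared,
 * so no barrier is emitted; on a weakly-ordered host such as aarch64
 * (TCG_TARGET_DEFAULT_MO == 0) it survives and
 * tcg_gen_mb(TCG_MO_LD_LD | TCG_BAR_SC) is generated.
 */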
2797 
2798 static inline TCGv plugin_prep_mem_callbacks(TCGv vaddr)
2799 {
2800 #ifdef CONFIG_PLUGIN
2801     if (tcg_ctx->plugin_insn != NULL) {
2802         /* Save a copy of the vaddr for use after a load.  */
2803         TCGv temp = tcg_temp_new();
2804         tcg_gen_mov_tl(temp, vaddr);
2805         return temp;
2806     }
2807 #endif
2808     return vaddr;
2809 }
2810 
2811 static inline void plugin_gen_mem_callbacks(TCGv vaddr, uint16_t info)
2812 {
2813 #ifdef CONFIG_PLUGIN
2814     if (tcg_ctx->plugin_insn != NULL) {
2815         plugin_gen_empty_mem_callback(vaddr, info);
2816         tcg_temp_free(vaddr);
2817     }
2818 #endif
2819 }
2820 
2821 void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, MemOp memop)
2822 {
2823     MemOp orig_memop;
2824     uint16_t info = trace_mem_get_info(memop, idx, 0);
2825 
2826     tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
2827     memop = tcg_canonicalize_memop(memop, 0, 0);
2828     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
2829 
2830     orig_memop = memop;
2831     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
2832         memop &= ~MO_BSWAP;
2833         /* The bswap primitive requires zero-extended input.  */
2834         if ((memop & MO_SSIZE) == MO_SW) {
2835             memop &= ~MO_SIGN;
2836         }
2837     }
2838 
2839     addr = plugin_prep_mem_callbacks(addr);
2840     gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
2841     plugin_gen_mem_callbacks(addr, info);
2842 
2843     if ((orig_memop ^ memop) & MO_BSWAP) {
2844         switch (orig_memop & MO_SIZE) {
2845         case MO_16:
2846             tcg_gen_bswap16_i32(val, val);
2847             if (orig_memop & MO_SIGN) {
2848                 tcg_gen_ext16s_i32(val, val);
2849             }
2850             break;
2851         case MO_32:
2852             tcg_gen_bswap32_i32(val, val);
2853             break;
2854         default:
2855             g_assert_not_reached();
2856         }
2857     }
2858 }
2859 
2860 void tcg_gen_qemu_st_i32(TCGv_i32 val, TCGv addr, TCGArg idx, MemOp memop)
2861 {
2862     TCGv_i32 swap = NULL;
2863     uint16_t info = trace_mem_get_info(memop, idx, 1);
2864 
2865     tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
2866     memop = tcg_canonicalize_memop(memop, 0, 1);
2867     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
2868 
2869     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
2870         swap = tcg_temp_new_i32();
2871         switch (memop & MO_SIZE) {
2872         case MO_16:
2873             tcg_gen_ext16u_i32(swap, val);
2874             tcg_gen_bswap16_i32(swap, swap);
2875             break;
2876         case MO_32:
2877             tcg_gen_bswap32_i32(swap, val);
2878             break;
2879         default:
2880             g_assert_not_reached();
2881         }
2882         val = swap;
2883         memop &= ~MO_BSWAP;
2884     }
2885 
2886     addr = plugin_prep_mem_callbacks(addr);
2887     if (TCG_TARGET_HAS_qemu_st8_i32 && (memop & MO_SIZE) == MO_8) {
2888         gen_ldst_i32(INDEX_op_qemu_st8_i32, val, addr, memop, idx);
2889     } else {
2890         gen_ldst_i32(INDEX_op_qemu_st_i32, val, addr, memop, idx);
2891     }
2892     plugin_gen_mem_callbacks(addr, info);
2893 
2894     if (swap) {
2895         tcg_temp_free_i32(swap);
2896     }
2897 }
2898 
2899 void tcg_gen_qemu_ld_i64(TCGv_i64 val, TCGv addr, TCGArg idx, MemOp memop)
2900 {
2901     MemOp orig_memop;
2902     uint16_t info;
2903 
2904     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
2905         tcg_gen_qemu_ld_i32(TCGV_LOW(val), addr, idx, memop);
2906         if (memop & MO_SIGN) {
2907             tcg_gen_sari_i32(TCGV_HIGH(val), TCGV_LOW(val), 31);
2908         } else {
2909             tcg_gen_movi_i32(TCGV_HIGH(val), 0);
2910         }
2911         return;
2912     }
2913 
2914     tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
2915     memop = tcg_canonicalize_memop(memop, 1, 0);
2916     info = trace_mem_get_info(memop, idx, 0);
2917     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
2918 
2919     orig_memop = memop;
2920     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
2921         memop &= ~MO_BSWAP;
2922         /* The bswap primitive requires zero-extended input.  */
2923         if ((memop & MO_SIGN) && (memop & MO_SIZE) < MO_64) {
2924             memop &= ~MO_SIGN;
2925         }
2926     }
2927 
2928     addr = plugin_prep_mem_callbacks(addr);
2929     gen_ldst_i64(INDEX_op_qemu_ld_i64, val, addr, memop, idx);
2930     plugin_gen_mem_callbacks(addr, info);
2931 
2932     if ((orig_memop ^ memop) & MO_BSWAP) {
2933         switch (orig_memop & MO_SIZE) {
2934         case MO_16:
2935             tcg_gen_bswap16_i64(val, val);
2936             if (orig_memop & MO_SIGN) {
2937                 tcg_gen_ext16s_i64(val, val);
2938             }
2939             break;
2940         case MO_32:
2941             tcg_gen_bswap32_i64(val, val);
2942             if (orig_memop & MO_SIGN) {
2943                 tcg_gen_ext32s_i64(val, val);
2944             }
2945             break;
2946         case MO_64:
2947             tcg_gen_bswap64_i64(val, val);
2948             break;
2949         default:
2950             g_assert_not_reached();
2951         }
2952     }
2953 }
2954 
2955 void tcg_gen_qemu_st_i64(TCGv_i64 val, TCGv addr, TCGArg idx, MemOp memop)
2956 {
2957     TCGv_i64 swap = NULL;
2958     uint16_t info;
2959 
2960     if (TCG_TARGET_REG_BITS == 32 && (memop & MO_SIZE) < MO_64) {
2961         tcg_gen_qemu_st_i32(TCGV_LOW(val), addr, idx, memop);
2962         return;
2963     }
2964 
2965     tcg_gen_req_mo(TCG_MO_LD_ST | TCG_MO_ST_ST);
2966     memop = tcg_canonicalize_memop(memop, 1, 1);
2967     info = trace_mem_get_info(memop, idx, 1);
2968     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env, addr, info);
2969 
2970     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
2971         swap = tcg_temp_new_i64();
2972         switch (memop & MO_SIZE) {
2973         case MO_16:
2974             tcg_gen_ext16u_i64(swap, val);
2975             tcg_gen_bswap16_i64(swap, swap);
2976             break;
2977         case MO_32:
2978             tcg_gen_ext32u_i64(swap, val);
2979             tcg_gen_bswap32_i64(swap, swap);
2980             break;
2981         case MO_64:
2982             tcg_gen_bswap64_i64(swap, val);
2983             break;
2984         default:
2985             g_assert_not_reached();
2986         }
2987         val = swap;
2988         memop &= ~MO_BSWAP;
2989     }
2990 
2991     addr = plugin_prep_mem_callbacks(addr);
2992     gen_ldst_i64(INDEX_op_qemu_st_i64, val, addr, memop, idx);
2993     plugin_gen_mem_callbacks(addr, info);
2994 
2995     if (swap) {
2996         tcg_temp_free_i64(swap);
2997     }
2998 }
2999 
3000 static void tcg_gen_ext_i32(TCGv_i32 ret, TCGv_i32 val, MemOp opc)
3001 {
3002     switch (opc & MO_SSIZE) {
3003     case MO_SB:
3004         tcg_gen_ext8s_i32(ret, val);
3005         break;
3006     case MO_UB:
3007         tcg_gen_ext8u_i32(ret, val);
3008         break;
3009     case MO_SW:
3010         tcg_gen_ext16s_i32(ret, val);
3011         break;
3012     case MO_UW:
3013         tcg_gen_ext16u_i32(ret, val);
3014         break;
3015     default:
3016         tcg_gen_mov_i32(ret, val);
3017         break;
3018     }
3019 }
3020 
3021 static void tcg_gen_ext_i64(TCGv_i64 ret, TCGv_i64 val, MemOp opc)
3022 {
3023     switch (opc & MO_SSIZE) {
3024     case MO_SB:
3025         tcg_gen_ext8s_i64(ret, val);
3026         break;
3027     case MO_UB:
3028         tcg_gen_ext8u_i64(ret, val);
3029         break;
3030     case MO_SW:
3031         tcg_gen_ext16s_i64(ret, val);
3032         break;
3033     case MO_UW:
3034         tcg_gen_ext16u_i64(ret, val);
3035         break;
3036     case MO_SL:
3037         tcg_gen_ext32s_i64(ret, val);
3038         break;
3039     case MO_UL:
3040         tcg_gen_ext32u_i64(ret, val);
3041         break;
3042     default:
3043         tcg_gen_mov_i64(ret, val);
3044         break;
3045     }
3046 }
3047 
3048 #ifdef CONFIG_SOFTMMU
3049 typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv,
3050                                   TCGv_i32, TCGv_i32, TCGv_i32);
3051 typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv,
3052                                   TCGv_i64, TCGv_i64, TCGv_i32);
3053 typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv,
3054                                   TCGv_i32, TCGv_i32);
3055 typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv,
3056                                   TCGv_i64, TCGv_i32);
3057 #else
3058 typedef void (*gen_atomic_cx_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32, TCGv_i32);
3059 typedef void (*gen_atomic_cx_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64, TCGv_i64);
3060 typedef void (*gen_atomic_op_i32)(TCGv_i32, TCGv_env, TCGv, TCGv_i32);
3061 typedef void (*gen_atomic_op_i64)(TCGv_i64, TCGv_env, TCGv, TCGv_i64);
3062 #endif
3063 
3064 #ifdef CONFIG_ATOMIC64
3065 # define WITH_ATOMIC64(X) X,
3066 #else
3067 # define WITH_ATOMIC64(X)
3068 #endif
3069 
3070 static void * const table_cmpxchg[16] = {
3071     [MO_8] = gen_helper_atomic_cmpxchgb,
3072     [MO_16 | MO_LE] = gen_helper_atomic_cmpxchgw_le,
3073     [MO_16 | MO_BE] = gen_helper_atomic_cmpxchgw_be,
3074     [MO_32 | MO_LE] = gen_helper_atomic_cmpxchgl_le,
3075     [MO_32 | MO_BE] = gen_helper_atomic_cmpxchgl_be,
3076     WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_cmpxchgq_le)
3077     WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_cmpxchgq_be)
3078 };
3079 
3080 void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
3081                                 TCGv_i32 newv, TCGArg idx, MemOp memop)
3082 {
3083     memop = tcg_canonicalize_memop(memop, 0, 0);
3084 
3085     if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
3086         TCGv_i32 t1 = tcg_temp_new_i32();
3087         TCGv_i32 t2 = tcg_temp_new_i32();
3088 
3089         tcg_gen_ext_i32(t2, cmpv, memop & MO_SIZE);
3090 
3091         tcg_gen_qemu_ld_i32(t1, addr, idx, memop & ~MO_SIGN);
3092         tcg_gen_movcond_i32(TCG_COND_EQ, t2, t1, t2, newv, t1);
3093         tcg_gen_qemu_st_i32(t2, addr, idx, memop);
3094         tcg_temp_free_i32(t2);
3095 
3096         if (memop & MO_SIGN) {
3097             tcg_gen_ext_i32(retv, t1, memop);
3098         } else {
3099             tcg_gen_mov_i32(retv, t1);
3100         }
3101         tcg_temp_free_i32(t1);
3102     } else {
3103         gen_atomic_cx_i32 gen;
3104 
3105         gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
3106         tcg_debug_assert(gen != NULL);
3107 
3108 #ifdef CONFIG_SOFTMMU
3109         {
3110             TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
3111             gen(retv, cpu_env, addr, cmpv, newv, tcg_constant_i32(oi));
3112         }
3113 #else
3114         gen(retv, cpu_env, addr, cmpv, newv);
3115 #endif
3116 
3117         if (memop & MO_SIGN) {
3118             tcg_gen_ext_i32(retv, retv, memop);
3119         }
3120     }
3121 }
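
/*
 * Illustrative sketch (not part of the generator): the serial
 * (!CF_PARALLEL) expansion above has plain compare-and-swap
 * semantics, returning the old memory value.  Helper name is
 * hypothetical.
 */
static inline uint32_t example_cmpxchg32(uint32_t *addr, uint32_t cmp,
                                         uint32_t newv)
{
    uint32_t old = *addr;
    if (old == cmp) {
        *addr = newv;
    }
    return old;
}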
3122 
3123 void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
3124                                 TCGv_i64 newv, TCGArg idx, MemOp memop)
3125 {
3126     memop = tcg_canonicalize_memop(memop, 1, 0);
3127 
3128     if (!(tcg_ctx->tb_cflags & CF_PARALLEL)) {
3129         TCGv_i64 t1 = tcg_temp_new_i64();
3130         TCGv_i64 t2 = tcg_temp_new_i64();
3131 
3132         tcg_gen_ext_i64(t2, cmpv, memop & MO_SIZE);
3133 
3134         tcg_gen_qemu_ld_i64(t1, addr, idx, memop & ~MO_SIGN);
3135         tcg_gen_movcond_i64(TCG_COND_EQ, t2, t1, t2, newv, t1);
3136         tcg_gen_qemu_st_i64(t2, addr, idx, memop);
3137         tcg_temp_free_i64(t2);
3138 
3139         if (memop & MO_SIGN) {
3140             tcg_gen_ext_i64(retv, t1, memop);
3141         } else {
3142             tcg_gen_mov_i64(retv, t1);
3143         }
3144         tcg_temp_free_i64(t1);
3145     } else if ((memop & MO_SIZE) == MO_64) {
3146 #ifdef CONFIG_ATOMIC64
3147         gen_atomic_cx_i64 gen;
3148 
3149         gen = table_cmpxchg[memop & (MO_SIZE | MO_BSWAP)];
3150         tcg_debug_assert(gen != NULL);
3151 
3152 #ifdef CONFIG_SOFTMMU
3153         {
3154             TCGMemOpIdx oi = make_memop_idx(memop, idx);
3155             gen(retv, cpu_env, addr, cmpv, newv, tcg_constant_i32(oi));
3156         }
3157 #else
3158         gen(retv, cpu_env, addr, cmpv, newv);
3159 #endif
3160 #else
3161         gen_helper_exit_atomic(cpu_env);
3162         /* Produce a result, so that we have a well-formed opcode stream
3163            with respect to uses of the result in the (dead) code following.  */
3164         tcg_gen_movi_i64(retv, 0);
3165 #endif /* CONFIG_ATOMIC64 */
3166     } else {
3167         TCGv_i32 c32 = tcg_temp_new_i32();
3168         TCGv_i32 n32 = tcg_temp_new_i32();
3169         TCGv_i32 r32 = tcg_temp_new_i32();
3170 
3171         tcg_gen_extrl_i64_i32(c32, cmpv);
3172         tcg_gen_extrl_i64_i32(n32, newv);
3173         tcg_gen_atomic_cmpxchg_i32(r32, addr, c32, n32, idx, memop & ~MO_SIGN);
3174         tcg_temp_free_i32(c32);
3175         tcg_temp_free_i32(n32);
3176 
3177         tcg_gen_extu_i32_i64(retv, r32);
3178         tcg_temp_free_i32(r32);
3179 
3180         if (memop & MO_SIGN) {
3181             tcg_gen_ext_i64(retv, retv, memop);
3182         }
3183     }
3184 }
3185 
static void do_nonatomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                                TCGArg idx, MemOp memop, bool new_val,
                                void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();

    memop = tcg_canonicalize_memop(memop, 0, 0);

    tcg_gen_qemu_ld_i32(t1, addr, idx, memop);
    tcg_gen_ext_i32(t2, val, memop);
    gen(t2, t1, t2);
    tcg_gen_qemu_st_i32(t2, addr, idx, memop);

    tcg_gen_ext_i32(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

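/*
 * Dispatch to an out-of-line atomic helper, selected by access size
 * and endianness.  For example, a little-endian 16-bit access
 * canonicalizes so that
 *
 *     gen = table[MO_16 | MO_LE];   // e.g. gen_helper_atomic_fetch_addw_le
 *
 * The helpers return the memory value zero-extended, so any requested
 * sign extension is applied afterwards.
 */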
static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
                             TCGArg idx, MemOp memop, void * const table[])
{
    gen_atomic_op_i32 gen;

    memop = tcg_canonicalize_memop(memop, 0, 0);

    gen = table[memop & (MO_SIZE | MO_BSWAP)];
    tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
    {
        TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
        gen(ret, cpu_env, addr, val, tcg_constant_i32(oi));
    }
#else
    gen(ret, cpu_env, addr, val);
#endif

    if (memop & MO_SIGN) {
        tcg_gen_ext_i32(ret, ret, memop);
    }
}

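/* 64-bit counterpart of do_nonatomic_op_i32 above: the same
   load/extend/operate/store shape, with i64 temporaries and the
   memop canonicalized for a 64-bit access.  */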
static void do_nonatomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                                TCGArg idx, MemOp memop, bool new_val,
                                void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();

    memop = tcg_canonicalize_memop(memop, 1, 0);

    tcg_gen_qemu_ld_i64(t1, addr, idx, memop);
    tcg_gen_ext_i64(t2, val, memop);
    gen(t2, t1, t2);
    tcg_gen_qemu_st_i64(t2, addr, idx, memop);

    tcg_gen_ext_i64(ret, (new_val ? t2 : t1), memop);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}

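/*
 * 64-bit atomic dispatch, in three cases:
 *   - MO_64 with host 64-bit atomics (CONFIG_ATOMIC64): call the
 *     i64 helper from the table;
 *   - MO_64 without CONFIG_ATOMIC64: gen_helper_exit_atomic restarts
 *     the instruction under the exclusive (stop-the-world) section;
 *   - narrower accesses: truncate to 32 bits, reuse the i32 path,
 *     then widen the result back to 64 bits.
 */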
static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
                             TCGArg idx, MemOp memop, void * const table[])
{
    memop = tcg_canonicalize_memop(memop, 1, 0);

    if ((memop & MO_SIZE) == MO_64) {
#ifdef CONFIG_ATOMIC64
        gen_atomic_op_i64 gen;

        gen = table[memop & (MO_SIZE | MO_BSWAP)];
        tcg_debug_assert(gen != NULL);

#ifdef CONFIG_SOFTMMU
        {
            TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
            gen(ret, cpu_env, addr, val, tcg_constant_i32(oi));
        }
#else
        gen(ret, cpu_env, addr, val);
#endif
#else
        gen_helper_exit_atomic(cpu_env);
        /* Produce a result, so that we have a well-formed opcode stream
           with respect to uses of the result in the (dead) code following.  */
        tcg_gen_movi_i64(ret, 0);
#endif /* CONFIG_ATOMIC64 */
    } else {
        TCGv_i32 v32 = tcg_temp_new_i32();
        TCGv_i32 r32 = tcg_temp_new_i32();

        tcg_gen_extrl_i64_i32(v32, val);
        do_atomic_op_i32(r32, addr, v32, idx, memop & ~MO_SIGN, table);
        tcg_temp_free_i32(v32);

        tcg_gen_extu_i32_i64(ret, r32);
        tcg_temp_free_i32(r32);

        if (memop & MO_SIGN) {
            tcg_gen_ext_i64(ret, ret, memop);
        }
    }
}

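/*
 * Emit the helper dispatch table plus the i32/i64 front ends for one
 * read-modify-write operation.  NEW selects whether the result is the
 * value after the operation (1) or before it (0).  As a sketch,
 * GEN_ATOMIC_HELPER(fetch_add, add, 0) expands to table_fetch_add
 * plus, among other things:
 *
 *     void tcg_gen_atomic_fetch_add_i32(TCGv_i32 ret, TCGv addr,
 *                                       TCGv_i32 val, TCGArg idx,
 *                                       MemOp memop)
 *     {
 *         if (tcg_ctx->tb_cflags & CF_PARALLEL) {
 *             do_atomic_op_i32(ret, addr, val, idx, memop,
 *                              table_fetch_add);
 *         } else {
 *             do_nonatomic_op_i32(ret, addr, val, idx, memop, 0,
 *                                 tcg_gen_add_i32);
 *         }
 *     }
 */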
#define GEN_ATOMIC_HELPER(NAME, OP, NEW)                                \
static void * const table_##NAME[16] = {                                \
    [MO_8] = gen_helper_atomic_##NAME##b,                               \
    [MO_16 | MO_LE] = gen_helper_atomic_##NAME##w_le,                   \
    [MO_16 | MO_BE] = gen_helper_atomic_##NAME##w_be,                   \
    [MO_32 | MO_LE] = gen_helper_atomic_##NAME##l_le,                   \
    [MO_32 | MO_BE] = gen_helper_atomic_##NAME##l_be,                   \
    WITH_ATOMIC64([MO_64 | MO_LE] = gen_helper_atomic_##NAME##q_le)     \
    WITH_ATOMIC64([MO_64 | MO_BE] = gen_helper_atomic_##NAME##q_be)     \
};                                                                      \
void tcg_gen_atomic_##NAME##_i32                                        \
    (TCGv_i32 ret, TCGv addr, TCGv_i32 val, TCGArg idx, MemOp memop)    \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i32(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i32(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i32);                        \
    }                                                                   \
}                                                                       \
void tcg_gen_atomic_##NAME##_i64                                        \
    (TCGv_i64 ret, TCGv addr, TCGv_i64 val, TCGArg idx, MemOp memop)    \
{                                                                       \
    if (tcg_ctx->tb_cflags & CF_PARALLEL) {                             \
        do_atomic_op_i64(ret, addr, val, idx, memop, table_##NAME);     \
    } else {                                                            \
        do_nonatomic_op_i64(ret, addr, val, idx, memop, NEW,            \
                            tcg_gen_##OP##_i64);                        \
    }                                                                   \
}

GEN_ATOMIC_HELPER(fetch_add, add, 0)
GEN_ATOMIC_HELPER(fetch_and, and, 0)
GEN_ATOMIC_HELPER(fetch_or, or, 0)
GEN_ATOMIC_HELPER(fetch_xor, xor, 0)
GEN_ATOMIC_HELPER(fetch_smin, smin, 0)
GEN_ATOMIC_HELPER(fetch_umin, umin, 0)
GEN_ATOMIC_HELPER(fetch_smax, smax, 0)
GEN_ATOMIC_HELPER(fetch_umax, umax, 0)

GEN_ATOMIC_HELPER(add_fetch, add, 1)
GEN_ATOMIC_HELPER(and_fetch, and, 1)
GEN_ATOMIC_HELPER(or_fetch, or, 1)
GEN_ATOMIC_HELPER(xor_fetch, xor, 1)
GEN_ATOMIC_HELPER(smin_fetch, smin, 1)
GEN_ATOMIC_HELPER(umin_fetch, umin, 1)
GEN_ATOMIC_HELPER(smax_fetch, smax, 1)
GEN_ATOMIC_HELPER(umax_fetch, umax, 1)

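/*
 * The fetch_OP variants return the memory value from before the
 * operation, the OP_fetch variants the value after it.  A hypothetical
 * front end for an atomic-increment instruction might emit ("result",
 * "addr" and "ctx->mem_idx" are assumed translator state):
 *
 *     tcg_gen_atomic_add_fetch_i32(result, addr, tcg_constant_i32(1),
 *                                  ctx->mem_idx, MO_TEUL);
 *     // result = old memory value + 1
 */
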
static void tcg_gen_mov2_i32(TCGv_i32 r, TCGv_i32 a, TCGv_i32 b)
{
    tcg_gen_mov_i32(r, b);
}

static void tcg_gen_mov2_i64(TCGv_i64 r, TCGv_i64 a, TCGv_i64 b)
{
    tcg_gen_mov_i64(r, b);
}

GEN_ATOMIC_HELPER(xchg, mov2, 0)

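/*
 * For xchg, the non-atomic path reuses the generic machinery with
 * mov2 as the "operation": mov2 ignores the loaded value (a) and
 * forwards the new value (b) to the store, and NEW = 0 selects the
 * previously loaded value as the result - a plain swap.
 */
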
#undef GEN_ATOMIC_HELPER